// vite dist chunk: dist/node/chunks/dep-Cyk9bIUq.js
import * as fs$j from 'node:fs';
import fs__default, { promises as promises$1 } from 'node:fs';
import fsp, { lstat as lstat$3, readdir as readdir$5, readlink, realpath as realpath$2 } from 'node:fs/promises';
import path$n, { win32 as win32$1, posix as posix$1, isAbsolute as isAbsolute$1, join as join$2, extname as extname$1, dirname as dirname$2, relative as relative$2, basename as basename$2 } from 'node:path';
import { fileURLToPath, URL as URL$3, parse as parse$h, pathToFileURL } from 'node:url';
import { promisify as promisify$4, format as format$2, inspect } from 'node:util';
import { performance as performance$1 } from 'node:perf_hooks';
import { createRequire as createRequire$1, builtinModules } from 'node:module';
import require$$0$3 from 'tty';
import require$$0$4, { win32, posix, isAbsolute, resolve as resolve$3, relative as relative$1, basename as basename$1, extname, dirname as dirname$1, join as join$1, sep as sep$1, normalize as normalize$1 } from 'path';
import esbuild, { transform as transform$1, formatMessages, build as build$3 } from 'esbuild';
import { CLIENT_ENTRY, OPTIMIZABLE_ENTRY_RE, wildcardHosts, loopbackHosts, FS_PREFIX, CLIENT_PUBLIC_PATH, ENV_PUBLIC_PATH, DEFAULT_ASSETS_INLINE_LIMIT, CSS_LANGS_RE, ESBUILD_MODULES_TARGET, SPECIAL_QUERY_RE, ENV_ENTRY, DEP_VERSION_RE, DEFAULT_MAIN_FIELDS, DEFAULT_EXTENSIONS, KNOWN_ASSET_TYPES, JS_TYPES_RE, METADATA_FILENAME, VITE_PACKAGE_DIR, DEFAULT_DEV_PORT, CLIENT_DIR, VERSION, DEFAULT_PREVIEW_PORT, DEFAULT_ASSETS_RE, DEFAULT_CONFIG_FILES } from '../constants.js';
import * as require$$0$2 from 'fs';
import require$$0__default, { lstatSync, readdir as readdir$4, readdirSync, readlinkSync, realpathSync as realpathSync$1, existsSync, readFileSync, statSync as statSync$1 } from 'fs';
import { EventEmitter as EventEmitter$4 } from 'node:events';
import Stream$1 from 'node:stream';
import { StringDecoder } from 'node:string_decoder';
import { exec, execSync } from 'node:child_process';
import { createServer as createServer$3, STATUS_CODES, get as get$2 } from 'node:http';
import { createServer as createServer$2, get as get$1 } from 'node:https';
import require$$0$5 from 'util';
import require$$4$1 from 'net';
import require$$0$7 from 'events';
import require$$0$9 from 'url';
import require$$1 from 'http';
import require$$0$6 from 'stream';
import require$$2 from 'os';
import require$$2$1 from 'child_process';
import os$5 from 'node:os';
import { createHash as createHash$2 } from 'node:crypto';
import { promises } from 'node:dns';
import require$$3$1 from 'crypto';
import require$$0$8, { createRequire as createRequire$2 } from 'module';
import assert$1 from 'node:assert';
import v8 from 'node:v8';
import { Worker as Worker$1 } from 'node:worker_threads';
import { Buffer as Buffer$1 } from 'node:buffer';
import { parseAstAsync, parseAst } from 'rollup/parseAst';
import * as qs from 'querystring';
import readline from 'node:readline';
import zlib$1 from 'zlib';
import require$$0$a from 'buffer';
import require$$1$1 from 'https';
import require$$4$2 from 'tls';
import require$$4$3 from 'assert';
import { gzip } from 'node:zlib';
import { fileURLToPath as __cjs_fileURLToPath } from 'node:url';
import { dirname as __cjs_dirname } from 'node:path';
import { createRequire as __cjs_createRequire } from 'node:module';
const __filename = __cjs_fileURLToPath(import.meta.url);
const __dirname = __cjs_dirname(__filename);
const require = __cjs_createRequire(import.meta.url);
const __require = require;
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
function getDefaultExportFromCjs (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
function getAugmentedNamespace(n) {
if (n.__esModule) return n;
var f = n.default;
if (typeof f == "function") {
var a = function a () {
if (this instanceof a) {
return Reflect.construct(f, arguments, this.constructor);
}
return f.apply(this, arguments);
};
a.prototype = f.prototype;
} else a = {};
Object.defineProperty(a, '__esModule', {value: true});
Object.keys(n).forEach(function (k) {
var d = Object.getOwnPropertyDescriptor(n, k);
Object.defineProperty(a, k, d.get ? d : {
enumerable: true,
get: function () {
return n[k];
}
});
});
return a;
}
function commonjsRequire(path) {
throw new Error('Could not dynamically require "' + path + '". Please configure the dynamicRequireTargets and/or ignoreDynamicRequires options of @rollup/plugin-commonjs appropriately for this require call to work.');
}
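// The three helpers above are CommonJS interop shims emitted by the bundler:
// getDefaultExportFromCjs unwraps a transpiled `export default`,
// getAugmentedNamespace re-wraps an ES namespace so CJS consumers can call or
// construct its default export, and commonjsRequire throws for dynamic
// require() calls the bundler could not resolve statically.
// Illustrative sketch (hypothetical module object, not part of this chunk):
//   const ns = { __esModule: true, default: 42 };
//   getDefaultExportFromCjs(ns); //=> 42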
var picocolors = {exports: {}};
let argv = process.argv || [],
env$1 = process.env;
let isColorSupported =
!("NO_COLOR" in env$1 || argv.includes("--no-color")) &&
("FORCE_COLOR" in env$1 ||
argv.includes("--color") ||
process.platform === "win32" ||
(commonjsRequire != null && require$$0$3.isatty(1) && env$1.TERM !== "dumb") ||
"CI" in env$1);
let formatter =
(open, close, replace = open) =>
input => {
let string = "" + input;
let index = string.indexOf(close, open.length);
return ~index
? open + replaceClose(string, close, replace, index) + close
: open + string + close
};
let replaceClose = (string, close, replace, index) => {
let result = "";
let cursor = 0;
do {
result += string.substring(cursor, index) + replace;
cursor = index + close.length;
index = string.indexOf(close, cursor);
} while (~index)
return result + string.substring(cursor)
};
let createColors = (enabled = isColorSupported) => {
let init = enabled ? formatter : () => String;
return {
isColorSupported: enabled,
reset: init("\x1b[0m", "\x1b[0m"),
bold: init("\x1b[1m", "\x1b[22m", "\x1b[22m\x1b[1m"),
dim: init("\x1b[2m", "\x1b[22m", "\x1b[22m\x1b[2m"),
italic: init("\x1b[3m", "\x1b[23m"),
underline: init("\x1b[4m", "\x1b[24m"),
inverse: init("\x1b[7m", "\x1b[27m"),
hidden: init("\x1b[8m", "\x1b[28m"),
strikethrough: init("\x1b[9m", "\x1b[29m"),
black: init("\x1b[30m", "\x1b[39m"),
red: init("\x1b[31m", "\x1b[39m"),
green: init("\x1b[32m", "\x1b[39m"),
yellow: init("\x1b[33m", "\x1b[39m"),
blue: init("\x1b[34m", "\x1b[39m"),
magenta: init("\x1b[35m", "\x1b[39m"),
cyan: init("\x1b[36m", "\x1b[39m"),
white: init("\x1b[37m", "\x1b[39m"),
gray: init("\x1b[90m", "\x1b[39m"),
bgBlack: init("\x1b[40m", "\x1b[49m"),
bgRed: init("\x1b[41m", "\x1b[49m"),
bgGreen: init("\x1b[42m", "\x1b[49m"),
bgYellow: init("\x1b[43m", "\x1b[49m"),
bgBlue: init("\x1b[44m", "\x1b[49m"),
bgMagenta: init("\x1b[45m", "\x1b[49m"),
bgCyan: init("\x1b[46m", "\x1b[49m"),
bgWhite: init("\x1b[47m", "\x1b[49m"),
}
};
picocolors.exports = createColors();
picocolors.exports.createColors = createColors;
var picocolorsExports = picocolors.exports;
var colors$1 = /*@__PURE__*/getDefaultExportFromCjs(picocolorsExports);
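// Minimal usage sketch for the bundled picocolors build above (assumes a
// color-capable TTY; `colors$1` is the default-export wrapper):
//   colors$1.red('error');            //=> '\x1b[31merror\x1b[39m' when colors are enabled
//   createColors(false).red('error'); //=> 'error' (formatting disabled, identity via String)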
function matches$1(pattern, importee) {
if (pattern instanceof RegExp) {
return pattern.test(importee);
}
if (importee.length < pattern.length) {
return false;
}
if (importee === pattern) {
return true;
}
// eslint-disable-next-line prefer-template
return importee.startsWith(pattern + '/');
}
function getEntries({ entries, customResolver }) {
if (!entries) {
return [];
}
const resolverFunctionFromOptions = resolveCustomResolver(customResolver);
if (Array.isArray(entries)) {
return entries.map((entry) => {
return {
find: entry.find,
replacement: entry.replacement,
resolverFunction: resolveCustomResolver(entry.customResolver) || resolverFunctionFromOptions
};
});
}
return Object.entries(entries).map(([key, value]) => {
return { find: key, replacement: value, resolverFunction: resolverFunctionFromOptions };
});
}
function getHookFunction(hook) {
if (typeof hook === 'function') {
return hook;
}
if (hook && 'handler' in hook && typeof hook.handler === 'function') {
return hook.handler;
}
return null;
}
function resolveCustomResolver(customResolver) {
if (typeof customResolver === 'function') {
return customResolver;
}
if (customResolver) {
return getHookFunction(customResolver.resolveId);
}
return null;
}
function alias$1(options = {}) {
const entries = getEntries(options);
if (entries.length === 0) {
return {
name: 'alias',
resolveId: () => null
};
}
return {
name: 'alias',
async buildStart(inputOptions) {
await Promise.all([...(Array.isArray(options.entries) ? options.entries : []), options].map(({ customResolver }) => { var _a; return customResolver && ((_a = getHookFunction(customResolver.buildStart)) === null || _a === void 0 ? void 0 : _a.call(this, inputOptions)); }));
},
resolveId(importee, importer, resolveOptions) {
// First match is supposed to be the correct one
const matchedEntry = entries.find((entry) => matches$1(entry.find, importee));
if (!matchedEntry) {
return null;
}
const updatedId = importee.replace(matchedEntry.find, matchedEntry.replacement);
if (matchedEntry.resolverFunction) {
return matchedEntry.resolverFunction.call(this, updatedId, importer, resolveOptions);
}
return this.resolve(updatedId, importer, Object.assign({ skipSelf: true }, resolveOptions)).then((resolved) => {
if (resolved)
return resolved;
if (!require$$0$4.isAbsolute(updatedId)) {
this.warn(`rewrote ${importee} to ${updatedId} but it was not an absolute path and was not handled by other plugins. ` +
`This will lead to duplicated modules for the same path. ` +
`To avoid duplicating modules, you should resolve to an absolute path.`);
}
return { id: updatedId };
});
}
};
}
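// Hedged usage sketch for the alias plugin factory above (paths are made-up
// examples): `entries` may be an array of { find, replacement } pairs or a
// plain object, e.g.
//   alias$1({ entries: { '@': '/abs/path/to/src' } })
// rewrites an import of '@/main.ts' to '/abs/path/to/src/main.ts' before
// delegating resolution to this.resolve() or a custom resolver.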
const VALID_ID_PREFIX = `/@id/`;
const NULL_BYTE_PLACEHOLDER = `__x00__`;
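// SOURCEMAPPING_URL is assembled from two halves below, presumably so that
// tools scanning this bundle do not mistake the constant for a real
// `sourceMappingURL` comment.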
let SOURCEMAPPING_URL = "sourceMa";
SOURCEMAPPING_URL += "ppingURL";
const VITE_RUNTIME_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-runtime";
const isWindows$3 = typeof process !== "undefined" && process.platform === "win32";
function wrapId$1(id) {
return id.startsWith(VALID_ID_PREFIX) ? id : VALID_ID_PREFIX + id.replace("\0", NULL_BYTE_PLACEHOLDER);
}
function unwrapId$1(id) {
return id.startsWith(VALID_ID_PREFIX) ? id.slice(VALID_ID_PREFIX.length).replace(NULL_BYTE_PLACEHOLDER, "\0") : id;
}
const windowsSlashRE = /\\/g;
function slash$1(p) {
return p.replace(windowsSlashRE, "/");
}
const postfixRE = /[?#].*$/;
function cleanUrl(url) {
return url.replace(postfixRE, "");
}
function withTrailingSlash(path) {
if (path[path.length - 1] !== "/") {
return `${path}/`;
}
return path;
}
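// Quick examples for the id/path helpers above (inputs are illustrative):
//   wrapId$1('\0virtual:mod');             //=> '/@id/__x00__virtual:mod'
//   unwrapId$1('/@id/__x00__virtual:mod'); //=> '\0virtual:mod'
//   slash$1('a\\b\\c');                    //=> 'a/b/c'
//   cleanUrl('/foo.js?import#hash');       //=> '/foo.js'
//   withTrailingSlash('/src');             //=> '/src/'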
const AsyncFunction = async function() {
}.constructor;
const asyncFunctionDeclarationPaddingLineCount = /* @__PURE__ */ (() => {
const body = "/*code*/";
const source = new AsyncFunction("a", "b", body).toString();
return source.slice(0, source.indexOf(body)).split("\n").length - 1;
})();
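// asyncFunctionDeclarationPaddingLineCount measures how many source lines the
// `new AsyncFunction(...)` wrapper prepends before the function body;
// presumably this offset is applied to line numbers (e.g. in source maps or
// stack traces) when module code is evaluated through the constructor.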
// @ts-check
/** @typedef { import('estree').BaseNode} BaseNode */
/** @typedef {{
skip: () => void;
remove: () => void;
replace: (node: BaseNode) => void;
}} WalkerContext */
let WalkerBase$1 = class WalkerBase {
constructor() {
/** @type {boolean} */
this.should_skip = false;
/** @type {boolean} */
this.should_remove = false;
/** @type {BaseNode | null} */
this.replacement = null;
/** @type {WalkerContext} */
this.context = {
skip: () => (this.should_skip = true),
remove: () => (this.should_remove = true),
replace: (node) => (this.replacement = node)
};
}
/**
*
* @param {any} parent
* @param {string} prop
* @param {number} index
* @param {BaseNode} node
*/
replace(parent, prop, index, node) {
if (parent) {
if (index !== null) {
parent[prop][index] = node;
} else {
parent[prop] = node;
}
}
}
/**
*
* @param {any} parent
* @param {string} prop
* @param {number} index
*/
remove(parent, prop, index) {
if (parent) {
if (index !== null) {
parent[prop].splice(index, 1);
} else {
delete parent[prop];
}
}
}
};
// @ts-check
/** @typedef { import('estree').BaseNode} BaseNode */
/** @typedef { import('./walker.js').WalkerContext} WalkerContext */
/** @typedef {(
* this: WalkerContext,
* node: BaseNode,
* parent: BaseNode,
* key: string,
* index: number
* ) => void} SyncHandler */
let SyncWalker$1 = class SyncWalker extends WalkerBase$1 {
/**
*
* @param {SyncHandler} enter
* @param {SyncHandler} leave
*/
constructor(enter, leave) {
super();
/** @type {SyncHandler} */
this.enter = enter;
/** @type {SyncHandler} */
this.leave = leave;
}
/**
*
* @param {BaseNode} node
* @param {BaseNode} parent
* @param {string} [prop]
* @param {number} [index]
* @returns {BaseNode}
*/
visit(node, parent, prop, index) {
if (node) {
if (this.enter) {
const _should_skip = this.should_skip;
const _should_remove = this.should_remove;
const _replacement = this.replacement;
this.should_skip = false;
this.should_remove = false;
this.replacement = null;
this.enter.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const skipped = this.should_skip;
const removed = this.should_remove;
this.should_skip = _should_skip;
this.should_remove = _should_remove;
this.replacement = _replacement;
if (skipped) return node;
if (removed) return null;
}
for (const key in node) {
const value = node[key];
if (typeof value !== "object") {
continue;
} else if (Array.isArray(value)) {
for (let i = 0; i < value.length; i += 1) {
if (value[i] !== null && typeof value[i].type === 'string') {
if (!this.visit(value[i], node, key, i)) {
// removed
i--;
}
}
}
} else if (value !== null && typeof value.type === "string") {
this.visit(value, node, key, null);
}
}
if (this.leave) {
const _replacement = this.replacement;
const _should_remove = this.should_remove;
this.replacement = null;
this.should_remove = false;
this.leave.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const removed = this.should_remove;
this.replacement = _replacement;
this.should_remove = _should_remove;
if (removed) return null;
}
}
return node;
}
};
// @ts-check
/** @typedef { import('estree').BaseNode} BaseNode */
/** @typedef { import('./sync.js').SyncHandler} SyncHandler */
/** @typedef { import('./async.js').AsyncHandler} AsyncHandler */
/**
*
* @param {BaseNode} ast
* @param {{
* enter?: SyncHandler
* leave?: SyncHandler
* }} walker
* @returns {BaseNode}
*/
function walk$3(ast, { enter, leave }) {
const instance = new SyncWalker$1(enter, leave);
return instance.visit(ast, null);
}
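// Usage sketch for the synchronous estree walker above (the AST literal is a
// hypothetical minimal node):
//   walk$3({ type: 'Identifier', name: 'x' }, {
//     enter(node, parent) { if (node.type === 'Identifier') this.skip(); },
//     leave(node, parent) { /* runs after children, unless skip() was called */ }
//   });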
var utils$k = {};
const path$m = require$$0$4;
const WIN_SLASH = '\\\\/';
const WIN_NO_SLASH = `[^${WIN_SLASH}]`;
/**
* Posix glob regex
*/
const DOT_LITERAL = '\\.';
const PLUS_LITERAL = '\\+';
const QMARK_LITERAL = '\\?';
const SLASH_LITERAL = '\\/';
const ONE_CHAR = '(?=.)';
const QMARK = '[^/]';
const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
const NO_DOT = `(?!${DOT_LITERAL})`;
const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
const STAR$1 = `${QMARK}*?`;
const POSIX_CHARS = {
DOT_LITERAL,
PLUS_LITERAL,
QMARK_LITERAL,
SLASH_LITERAL,
ONE_CHAR,
QMARK,
END_ANCHOR,
DOTS_SLASH,
NO_DOT,
NO_DOTS,
NO_DOT_SLASH,
NO_DOTS_SLASH,
QMARK_NO_DOT,
STAR: STAR$1,
START_ANCHOR
};
/**
* Windows glob regex
*/
const WINDOWS_CHARS = {
...POSIX_CHARS,
SLASH_LITERAL: `[${WIN_SLASH}]`,
QMARK: WIN_NO_SLASH,
STAR: `${WIN_NO_SLASH}*?`,
DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
NO_DOT: `(?!${DOT_LITERAL})`,
NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
END_ANCHOR: `(?:[${WIN_SLASH}]|$)`
};
/**
* POSIX Bracket Regex
*/
const POSIX_REGEX_SOURCE$1 = {
alnum: 'a-zA-Z0-9',
alpha: 'a-zA-Z',
ascii: '\\x00-\\x7F',
blank: ' \\t',
cntrl: '\\x00-\\x1F\\x7F',
digit: '0-9',
graph: '\\x21-\\x7E',
lower: 'a-z',
print: '\\x20-\\x7E ',
punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
space: ' \\t\\r\\n\\v\\f',
upper: 'A-Z',
word: 'A-Za-z0-9_',
xdigit: 'A-Fa-f0-9'
};
var constants$6 = {
MAX_LENGTH: 1024 * 64,
POSIX_REGEX_SOURCE: POSIX_REGEX_SOURCE$1,
// regular expressions
REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,
// Replace globs with equivalent patterns to reduce parsing time.
REPLACEMENTS: {
'***': '*',
'**/**': '**',
'**/**/**': '**'
},
// Digits
CHAR_0: 48, /* 0 */
CHAR_9: 57, /* 9 */
// Alphabet chars.
CHAR_UPPERCASE_A: 65, /* A */
CHAR_LOWERCASE_A: 97, /* a */
CHAR_UPPERCASE_Z: 90, /* Z */
CHAR_LOWERCASE_Z: 122, /* z */
CHAR_LEFT_PARENTHESES: 40, /* ( */
CHAR_RIGHT_PARENTHESES: 41, /* ) */
CHAR_ASTERISK: 42, /* * */
// Non-alphabetic chars.
CHAR_AMPERSAND: 38, /* & */
CHAR_AT: 64, /* @ */
CHAR_BACKWARD_SLASH: 92, /* \ */
CHAR_CARRIAGE_RETURN: 13, /* \r */
CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
CHAR_COLON: 58, /* : */
CHAR_COMMA: 44, /* , */
CHAR_DOT: 46, /* . */
CHAR_DOUBLE_QUOTE: 34, /* " */
CHAR_EQUAL: 61, /* = */
CHAR_EXCLAMATION_MARK: 33, /* ! */
CHAR_FORM_FEED: 12, /* \f */
CHAR_FORWARD_SLASH: 47, /* / */
CHAR_GRAVE_ACCENT: 96, /* ` */
CHAR_HASH: 35, /* # */
CHAR_HYPHEN_MINUS: 45, /* - */
CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
CHAR_LEFT_CURLY_BRACE: 123, /* { */
CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
CHAR_LINE_FEED: 10, /* \n */
CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
CHAR_PERCENT: 37, /* % */
CHAR_PLUS: 43, /* + */
CHAR_QUESTION_MARK: 63, /* ? */
CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
CHAR_RIGHT_CURLY_BRACE: 125, /* } */
CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
CHAR_SEMICOLON: 59, /* ; */
CHAR_SINGLE_QUOTE: 39, /* ' */
CHAR_SPACE: 32, /* */
CHAR_TAB: 9, /* \t */
CHAR_UNDERSCORE: 95, /* _ */
CHAR_VERTICAL_LINE: 124, /* | */
CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */
SEP: path$m.sep,
/**
* Create EXTGLOB_CHARS
*/
extglobChars(chars) {
return {
'!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
'?': { type: 'qmark', open: '(?:', close: ')?' },
'+': { type: 'plus', open: '(?:', close: ')+' },
'*': { type: 'star', open: '(?:', close: ')*' },
'@': { type: 'at', open: '(?:', close: ')' }
};
},
/**
* Create GLOB_CHARS
*/
globChars(win32) {
return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
}
};
(function (exports) {
const path = require$$0$4;
const win32 = process.platform === 'win32';
const {
REGEX_BACKSLASH,
REGEX_REMOVE_BACKSLASH,
REGEX_SPECIAL_CHARS,
REGEX_SPECIAL_CHARS_GLOBAL
} = constants$6;
exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');
exports.removeBackslashes = str => {
return str.replace(REGEX_REMOVE_BACKSLASH, match => {
return match === '\\' ? '' : match;
});
};
exports.supportsLookbehinds = () => {
const segs = process.version.slice(1).split('.').map(Number);
if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {
return true;
}
return false;
};
exports.isWindows = options => {
if (options && typeof options.windows === 'boolean') {
return options.windows;
}
return win32 === true || path.sep === '\\';
};
exports.escapeLast = (input, char, lastIdx) => {
const idx = input.lastIndexOf(char, lastIdx);
if (idx === -1) return input;
if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1);
return `${input.slice(0, idx)}\\${input.slice(idx)}`;
};
exports.removePrefix = (input, state = {}) => {
let output = input;
if (output.startsWith('./')) {
output = output.slice(2);
state.prefix = './';
}
return output;
};
exports.wrapOutput = (input, state = {}, options = {}) => {
const prepend = options.contains ? '' : '^';
const append = options.contains ? '' : '$';
let output = `${prepend}(?:${input})${append}`;
if (state.negated === true) {
output = `(?:^(?!${output}).*$)`;
}
return output;
};
} (utils$k));
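// Illustrative calls against the picomatch utils module above (inputs are
// made-up):
//   utils$k.escapeRegex('a.b*c');        //=> 'a\.b\*c'
//   utils$k.toPosixSlashes('a\\b\\c');   //=> 'a/b/c'
//   utils$k.removePrefix('./src/x', {}); //=> 'src/x'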
const utils$j = utils$k;
const {
CHAR_ASTERISK, /* * */
CHAR_AT, /* @ */
CHAR_BACKWARD_SLASH, /* \ */
CHAR_COMMA: CHAR_COMMA$1, /* , */
CHAR_DOT: CHAR_DOT$1, /* . */
CHAR_EXCLAMATION_MARK, /* ! */
CHAR_FORWARD_SLASH, /* / */
CHAR_LEFT_CURLY_BRACE: CHAR_LEFT_CURLY_BRACE$1, /* { */
CHAR_LEFT_PARENTHESES: CHAR_LEFT_PARENTHESES$1, /* ( */
CHAR_LEFT_SQUARE_BRACKET: CHAR_LEFT_SQUARE_BRACKET$1, /* [ */
CHAR_PLUS, /* + */
CHAR_QUESTION_MARK, /* ? */
CHAR_RIGHT_CURLY_BRACE: CHAR_RIGHT_CURLY_BRACE$1, /* } */
CHAR_RIGHT_PARENTHESES: CHAR_RIGHT_PARENTHESES$1, /* ) */
CHAR_RIGHT_SQUARE_BRACKET: CHAR_RIGHT_SQUARE_BRACKET$1 /* ] */
} = constants$6;
const isPathSeparator = code => {
return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
};
const depth = token => {
if (token.isPrefix !== true) {
token.depth = token.isGlobstar ? Infinity : 1;
}
};
/**
* Quickly scans a glob pattern and returns an object with a handful of
* useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),
* `glob` (the actual pattern), `negated` (true if the path starts with `!` but not
* with `!(`) and `negatedExtglob` (true if the path starts with `!(`).
*
* ```js
* const pm = require('picomatch');
* console.log(pm.scan('foo/bar/*.js'));
* { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }
* ```
* @param {String} `str`
* @param {Object} `options`
* @return {Object} Returns an object with tokens and details about the scanned pattern.
* @api public
*/
const scan$2 = (input, options) => {
const opts = options || {};
const length = input.length - 1;
const scanToEnd = opts.parts === true || opts.scanToEnd === true;
const slashes = [];
const tokens = [];
const parts = [];
let str = input;
let index = -1;
let start = 0;
let lastIndex = 0;
let isBrace = false;
let isBracket = false;
let isGlob = false;
let isExtglob = false;
let isGlobstar = false;
let braceEscaped = false;
let backslashes = false;
let negated = false;
let negatedExtglob = false;
let finished = false;
let braces = 0;
let prev;
let code;
let token = { value: '', depth: 0, isGlob: false };
const eos = () => index >= length;
const peek = () => str.charCodeAt(index + 1);
const advance = () => {
prev = code;
return str.charCodeAt(++index);
};
while (index < length) {
code = advance();
let next;
if (code === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
code = advance();
if (code === CHAR_LEFT_CURLY_BRACE$1) {
braceEscaped = true;
}
continue;
}
if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE$1) {
braces++;
while (eos() !== true && (code = advance())) {
if (code === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
advance();
continue;
}
if (code === CHAR_LEFT_CURLY_BRACE$1) {
braces++;
continue;
}
if (braceEscaped !== true && code === CHAR_DOT$1 && (code = advance()) === CHAR_DOT$1) {
isBrace = token.isBrace = true;
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (braceEscaped !== true && code === CHAR_COMMA$1) {
isBrace = token.isBrace = true;
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_RIGHT_CURLY_BRACE$1) {
braces--;
if (braces === 0) {
braceEscaped = false;
isBrace = token.isBrace = true;
finished = true;
break;
}
}
}
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_FORWARD_SLASH) {
slashes.push(index);
tokens.push(token);
token = { value: '', depth: 0, isGlob: false };
if (finished === true) continue;
if (prev === CHAR_DOT$1 && index === (start + 1)) {
start += 2;
continue;
}
lastIndex = index + 1;
continue;
}
if (opts.noext !== true) {
const isExtglobChar = code === CHAR_PLUS
|| code === CHAR_AT
|| code === CHAR_ASTERISK
|| code === CHAR_QUESTION_MARK
|| code === CHAR_EXCLAMATION_MARK;
if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES$1) {
isGlob = token.isGlob = true;
isExtglob = token.isExtglob = true;
finished = true;
if (code === CHAR_EXCLAMATION_MARK && index === start) {
negatedExtglob = true;
}
if (scanToEnd === true) {
while (eos() !== true && (code = advance())) {
if (code === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
code = advance();
continue;
}
if (code === CHAR_RIGHT_PARENTHESES$1) {
isGlob = token.isGlob = true;
finished = true;
break;
}
}
continue;
}
break;
}
}
if (code === CHAR_ASTERISK) {
if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_QUESTION_MARK) {
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_LEFT_SQUARE_BRACKET$1) {
while (eos() !== true && (next = advance())) {
if (next === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
advance();
continue;
}
if (next === CHAR_RIGHT_SQUARE_BRACKET$1) {
isBracket = token.isBracket = true;
isGlob = token.isGlob = true;
finished = true;
break;
}
}
if (scanToEnd === true) {
continue;
}
break;
}
if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
negated = token.negated = true;
start++;
continue;
}
if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES$1) {
isGlob = token.isGlob = true;
if (scanToEnd === true) {
while (eos() !== true && (code = advance())) {
if (code === CHAR_LEFT_PARENTHESES$1) {
backslashes = token.backslashes = true;
code = advance();
continue;
}
if (code === CHAR_RIGHT_PARENTHESES$1) {
finished = true;
break;
}
}
continue;
}
break;
}
if (isGlob === true) {
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
}
if (opts.noext === true) {
isExtglob = false;
isGlob = false;
}
let base = str;
let prefix = '';
let glob = '';
if (start > 0) {
prefix = str.slice(0, start);
str = str.slice(start);
lastIndex -= start;
}
if (base && isGlob === true && lastIndex > 0) {
base = str.slice(0, lastIndex);
glob = str.slice(lastIndex);
} else if (isGlob === true) {
base = '';
glob = str;
} else {
base = str;
}
if (base && base !== '' && base !== '/' && base !== str) {
if (isPathSeparator(base.charCodeAt(base.length - 1))) {
base = base.slice(0, -1);
}
}
if (opts.unescape === true) {
if (glob) glob = utils$j.removeBackslashes(glob);
if (base && backslashes === true) {
base = utils$j.removeBackslashes(base);
}
}
const state = {
prefix,
input,
start,
base,
glob,
isBrace,
isBracket,
isGlob,
isExtglob,
isGlobstar,
negated,
negatedExtglob
};
if (opts.tokens === true) {
state.maxDepth = 0;
if (!isPathSeparator(code)) {
tokens.push(token);
}
state.tokens = tokens;
}
if (opts.parts === true || opts.tokens === true) {
let prevIndex;
for (let idx = 0; idx < slashes.length; idx++) {
const n = prevIndex ? prevIndex + 1 : start;
const i = slashes[idx];
const value = input.slice(n, i);
if (opts.tokens) {
if (idx === 0 && start !== 0) {
tokens[idx].isPrefix = true;
tokens[idx].value = prefix;
} else {
tokens[idx].value = value;
}
depth(tokens[idx]);
state.maxDepth += tokens[idx].depth;
}
if (idx !== 0 || value !== '') {
parts.push(value);
}
prevIndex = i;
}
if (prevIndex && prevIndex + 1 < input.length) {
const value = input.slice(prevIndex + 1);
parts.push(value);
if (opts.tokens) {
tokens[tokens.length - 1].value = value;
depth(tokens[tokens.length - 1]);
state.maxDepth += tokens[tokens.length - 1].depth;
}
}
state.slashes = slashes;
state.parts = parts;
}
return state;
};
var scan_1 = scan$2;
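// Example of the scanner above with `parts` enabled (pattern is illustrative):
//   scan$2('foo/bar/*.js', { parts: true }).parts; //=> ['foo', 'bar', '*.js']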
const constants$5 = constants$6;
const utils$i = utils$k;
/**
* Constants
*/
const {
MAX_LENGTH: MAX_LENGTH$1,
POSIX_REGEX_SOURCE,
REGEX_NON_SPECIAL_CHARS,
REGEX_SPECIAL_CHARS_BACKREF,
REPLACEMENTS
} = constants$5;
/**
* Helpers
*/
const expandRange = (args, options) => {
if (typeof options.expandRange === 'function') {
return options.expandRange(...args, options);
}
args.sort();
const value = `[${args.join('-')}]`;
return value;
};
/**
* Create the message for a syntax error
*/
const syntaxError = (type, char) => {
return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`;
};
/**
* Parse the given input string.
* @param {String} input
* @param {Object} options
* @return {Object}
*/
const parse$g = (input, options) => {
if (typeof input !== 'string') {
throw new TypeError('Expected a string');
}
input = REPLACEMENTS[input] || input;
const opts = { ...options };
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH$1, opts.maxLength) : MAX_LENGTH$1;
let len = input.length;
if (len > max) {
throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
}
const bos = { type: 'bos', value: '', output: opts.prepend || '' };
const tokens = [bos];
const capture = opts.capture ? '' : '?:';
const win32 = utils$i.isWindows(options);
// create constants based on platform, for windows or posix
const PLATFORM_CHARS = constants$5.globChars(win32);
const EXTGLOB_CHARS = constants$5.extglobChars(PLATFORM_CHARS);
const {
DOT_LITERAL,
PLUS_LITERAL,
SLASH_LITERAL,
ONE_CHAR,
DOTS_SLASH,
NO_DOT,
NO_DOT_SLASH,
NO_DOTS_SLASH,
QMARK,
QMARK_NO_DOT,
STAR,
START_ANCHOR
} = PLATFORM_CHARS;
const globstar = opts => {
return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
};
const nodot = opts.dot ? '' : NO_DOT;
const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;
let star = opts.bash === true ? globstar(opts) : STAR;
if (opts.capture) {
star = `(${star})`;
}
// minimatch options support
if (typeof opts.noext === 'boolean') {
opts.noextglob = opts.noext;
}
const state = {
input,
index: -1,
start: 0,
dot: opts.dot === true,
consumed: '',
output: '',
prefix: '',
backtrack: false,
negated: false,
brackets: 0,
braces: 0,
parens: 0,
quotes: 0,
globstar: false,
tokens
};
input = utils$i.removePrefix(input, state);
len = input.length;
const extglobs = [];
const braces = [];
const stack = [];
let prev = bos;
let value;
/**
* Tokenizing helpers
*/
const eos = () => state.index === len - 1;
const peek = state.peek = (n = 1) => input[state.index + n];
const advance = state.advance = () => input[++state.index] || '';
const remaining = () => input.slice(state.index + 1);
const consume = (value = '', num = 0) => {
state.consumed += value;
state.index += num;
};
const append = token => {
state.output += token.output != null ? token.output : token.value;
consume(token.value);
};
const negate = () => {
let count = 1;
while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {
advance();
state.start++;
count++;
}
if (count % 2 === 0) {
return false;
}
state.negated = true;
state.start++;
return true;
};
const increment = type => {
state[type]++;
stack.push(type);
};
const decrement = type => {
state[type]--;
stack.pop();
};
/**
* Push tokens onto the tokens array. This helper speeds up
* tokenizing by 1) helping us avoid backtracking as much as possible,
* and 2) helping us avoid creating extra tokens when consecutive
* characters are plain text. This improves performance and simplifies
* lookbehinds.
*/
const push = tok => {
if (prev.type === 'globstar') {
const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');
const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));
if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {
state.output = state.output.slice(0, -prev.output.length);
prev.type = 'star';
prev.value = '*';
prev.output = star;
state.output += prev.output;
}
}
if (extglobs.length && tok.type !== 'paren') {
extglobs[extglobs.length - 1].inner += tok.value;
}
if (tok.value || tok.output) append(tok);
if (prev && prev.type === 'text' && tok.type === 'text') {
prev.value += tok.value;
prev.output = (prev.output || '') + tok.value;
return;
}
tok.prev = prev;
tokens.push(tok);
prev = tok;
};
const extglobOpen = (type, value) => {
const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };
token.prev = prev;
token.parens = state.parens;
token.output = state.output;
const output = (opts.capture ? '(' : '') + token.open;
increment('parens');
push({ type, value, output: state.output ? '' : ONE_CHAR });
push({ type: 'paren', extglob: true, value: advance(), output });
extglobs.push(token);
};
const extglobClose = token => {
let output = token.close + (opts.capture ? ')' : '');
let rest;
if (token.type === 'negate') {
let extglobStar = star;
if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {
extglobStar = globstar(opts);
}
if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) {
output = token.close = `)$))${extglobStar}`;
}
if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) {
// Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis.
// In this case, we need to parse the string and use it in the output of the original pattern.
// Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`.
//
// Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`.
const expression = parse$g(rest, { ...options, fastpaths: false }).output;
output = token.close = `)${expression})${extglobStar})`;
}
if (token.prev.type === 'bos') {
state.negatedExtglob = true;
}
}
push({ type: 'paren', extglob: true, value, output });
decrement('parens');
};
/**
* Fast paths
*/
if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) {
let backslashes = false;
let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {
if (first === '\\') {
backslashes = true;
return m;
}
if (first === '?') {
if (esc) {
return esc + first + (rest ? QMARK.repeat(rest.length) : '');
}
if (index === 0) {
return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');
}
return QMARK.repeat(chars.length);
}
if (first === '.') {
return DOT_LITERAL.repeat(chars.length);
}
if (first === '*') {
if (esc) {
return esc + first + (rest ? star : '');
}
return star;
}
return esc ? m : `\\${m}`;
});
if (backslashes === true) {
if (opts.unescape === true) {
output = output.replace(/\\/g, '');
} else {
output = output.replace(/\\+/g, m => {
return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : '');
});
}
}
if (output === input && opts.contains === true) {
state.output = input;
return state;
}
state.output = utils$i.wrapOutput(output, state, options);
return state;
}
/**
* Tokenize input until we reach end-of-string
*/
while (!eos()) {
value = advance();
if (value === '\u0000') {
continue;
}
/**
* Escaped characters
*/
if (value === '\\') {
const next = peek();
if (next === '/' && opts.bash !== true) {
continue;
}
if (next === '.' || next === ';') {
continue;
}
if (!next) {
value += '\\';
push({ type: 'text', value });
continue;
}
// collapse slashes to reduce potential for exploits
const match = /^\\+/.exec(remaining());
let slashes = 0;
if (match && match[0].length > 2) {
slashes = match[0].length;
state.index += slashes;
if (slashes % 2 !== 0) {
value += '\\';
}
}
if (opts.unescape === true) {
value = advance();
} else {
value += advance();
}
if (state.brackets === 0) {
push({ type: 'text', value });
continue;
}
}
/**
* If we're inside a regex character class, continue
* until we reach the closing bracket.
*/
if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {
if (opts.posix !== false && value === ':') {
const inner = prev.value.slice(1);
if (inner.includes('[')) {
prev.posix = true;
if (inner.includes(':')) {
const idx = prev.value.lastIndexOf('[');
const pre = prev.value.slice(0, idx);
const rest = prev.value.slice(idx + 2);
const posix = POSIX_REGEX_SOURCE[rest];
if (posix) {
prev.value = pre + posix;
state.backtrack = true;
advance();
if (!bos.output && tokens.indexOf(prev) === 1) {
bos.output = ONE_CHAR;
}
continue;
}
}
}
}
if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {
value = `\\${value}`;
}
if (value === ']' && (prev.value === '[' || prev.value === '[^')) {
value = `\\${value}`;
}
if (opts.posix === true && value === '!' && prev.value === '[') {
value = '^';
}
prev.value += value;
append({ value });
continue;
}
/**
* If we're inside a quoted string, continue
* until we reach the closing double quote.
*/
if (state.quotes === 1 && value !== '"') {
value = utils$i.escapeRegex(value);
prev.value += value;
append({ value });
continue;
}
/**
* Double quotes
*/
if (value === '"') {
state.quotes = state.quotes === 1 ? 0 : 1;
if (opts.keepQuotes === true) {
push({ type: 'text', value });
}
continue;
}
/**
* Parentheses
*/
if (value === '(') {
increment('parens');
push({ type: 'paren', value });
continue;
}
if (value === ')') {
if (state.parens === 0 && opts.strictBrackets === true) {
throw new SyntaxError(syntaxError('opening', '('));
}
const extglob = extglobs[extglobs.length - 1];
if (extglob && state.parens === extglob.parens + 1) {
extglobClose(extglobs.pop());
continue;
}
push({ type: 'paren', value, output: state.parens ? ')' : '\\)' });
decrement('parens');
continue;
}
/**
* Square brackets
*/
if (value === '[') {
if (opts.nobracket === true || !remaining().includes(']')) {
if (opts.nobracket !== true && opts.strictBrackets === true) {
throw new SyntaxError(syntaxError('closing', ']'));
}
value = `\\${value}`;
} else {
increment('brackets');
}
push({ type: 'bracket', value });
continue;
}
if (value === ']') {
if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {
push({ type: 'text', value, output: `\\${value}` });
continue;
}
if (state.brackets === 0) {
if (opts.strictBrackets === true) {
throw new SyntaxError(syntaxError('opening', '['));
}
push({ type: 'text', value, output: `\\${value}` });
continue;
}
decrement('brackets');
const prevValue = prev.value.slice(1);
if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {
value = `/${value}`;
}
prev.value += value;
append({ value });
// when literal brackets are explicitly disabled
// assume we should match with a regex character class
if (opts.literalBrackets === false || utils$i.hasRegexChars(prevValue)) {
continue;
}
const escaped = utils$i.escapeRegex(prev.value);
state.output = state.output.slice(0, -prev.value.length);
// when literal brackets are explicitly enabled
// assume we should escape the brackets to match literal characters
if (opts.literalBrackets === true) {
state.output += escaped;
prev.value = escaped;
continue;
}
// when the user specifies nothing, try to match both
prev.value = `(${capture}${escaped}|${prev.value})`;
state.output += prev.value;
continue;
}
/**
* Braces
*/
if (value === '{' && opts.nobrace !== true) {
increment('braces');
const open = {
type: 'brace',
value,
output: '(',
outputIndex: state.output.length,
tokensIndex: state.tokens.length
};
braces.push(open);
push(open);
continue;
}
if (value === '}') {
const brace = braces[braces.length - 1];
if (opts.nobrace === true || !brace) {
push({ type: 'text', value, output: value });
continue;
}
let output = ')';
if (brace.dots === true) {
const arr = tokens.slice();
const range = [];
for (let i = arr.length - 1; i >= 0; i--) {
tokens.pop();
if (arr[i].type === 'brace') {
break;
}
if (arr[i].type !== 'dots') {
range.unshift(arr[i].value);
}
}
output = expandRange(range, opts);
state.backtrack = true;
}
if (brace.comma !== true && brace.dots !== true) {
const out = state.output.slice(0, brace.outputIndex);
const toks = state.tokens.slice(brace.tokensIndex);
brace.value = brace.output = '\\{';
value = output = '\\}';
state.output = out;
for (const t of toks) {
state.output += (t.output || t.value);
}
}
push({ type: 'brace', value, output });
decrement('braces');
braces.pop();
continue;
}
/**
* Pipes
*/
if (value === '|') {
if (extglobs.length > 0) {
extglobs[extglobs.length - 1].conditions++;
}
push({ type: 'text', value });
continue;
}
/**
* Commas
*/
if (value === ',') {
let output = value;
const brace = braces[braces.length - 1];
if (brace && stack[stack.length - 1] === 'braces') {
brace.comma = true;
output = '|';
}
push({ type: 'comma', value, output });
continue;
}
/**
* Slashes
*/
if (value === '/') {
// if the beginning of the glob is "./", advance the start
// to the current index, and don't add the "./" characters
// to the state. This greatly simplifies lookbehinds when
// checking for BOS characters like "!" and "." (not "./")
if (prev.type === 'dot' && state.index === state.start + 1) {
state.start = state.index + 1;
state.consumed = '';
state.output = '';
tokens.pop();
prev = bos; // reset "prev" to the first token
continue;
}
push({ type: 'slash', value, output: SLASH_LITERAL });
continue;
}
/**
* Dots
*/
if (value === '.') {
if (state.braces > 0 && prev.type === 'dot') {
if (prev.value === '.') prev.output = DOT_LITERAL;
const brace = braces[braces.length - 1];
prev.type = 'dots';
prev.output += value;
prev.value += value;
brace.dots = true;
continue;
}
if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {
push({ type: 'text', value, output: DOT_LITERAL });
continue;
}
push({ type: 'dot', value, output: DOT_LITERAL });
continue;
}
/**
* Question marks
*/
if (value === '?') {
const isGroup = prev && prev.value === '(';
if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
extglobOpen('qmark', value);
continue;
}
if (prev && prev.type === 'paren') {
const next = peek();
let output = value;
if (next === '<' && !utils$i.supportsLookbehinds()) {
throw new Error('Node.js v10 or higher is required for regex lookbehinds');
}
if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) {
output = `\\${value}`;
}
push({ type: 'text', value, output });
continue;
}
if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {
push({ type: 'qmark', value, output: QMARK_NO_DOT });
continue;
}
push({ type: 'qmark', value, output: QMARK });
continue;
}
/**
* Exclamation
*/
if (value === '!') {
if (opts.noextglob !== true && peek() === '(') {
if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) {
extglobOpen('negate', value);
continue;
}
}
if (opts.nonegate !== true && state.index === 0) {
negate();
continue;
}
}
/**
* Plus
*/
if (value === '+') {
if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
extglobOpen('plus', value);
continue;
}
if ((prev && prev.value === '(') || opts.regex === false) {
push({ type: 'plus', value, output: PLUS_LITERAL });
continue;
}
if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {
push({ type: 'plus', value });
continue;
}
push({ type: 'plus', value: PLUS_LITERAL });
continue;
}
/**
* Plain text
*/
if (value === '@') {
if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
push({ type: 'at', extglob: true, value, output: '' });
continue;
}
push({ type: 'text', value });
continue;
}
/**
* Plain text
*/
if (value !== '*') {
if (value === '$' || value === '^') {
value = `\\${value}`;
}
const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());
if (match) {
value += match[0];
state.index += match[0].length;
}
push({ type: 'text', value });
continue;
}
/**
* Stars
*/
if (prev && (prev.type === 'globstar' || prev.star === true)) {
prev.type = 'star';
prev.star = true;
prev.value += value;
prev.output = star;
state.backtrack = true;
state.globstar = true;
consume(value);
continue;
}
let rest = remaining();
if (opts.noextglob !== true && /^\([^?]/.test(rest)) {
extglobOpen('star', value);
continue;
}
if (prev.type === 'star') {
if (opts.noglobstar === true) {
consume(value);
continue;
}
const prior = prev.prev;
const before = prior.prev;
const isStart = prior.type === 'slash' || prior.type === 'bos';
const afterStar = before && (before.type === 'star' || before.type === 'globstar');
if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {
push({ type: 'star', value, output: '' });
continue;
}
const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');
const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');
if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {
push({ type: 'star', value, output: '' });
continue;
}
// strip consecutive `/**/`
while (rest.slice(0, 3) === '/**') {
const after = input[state.index + 4];
if (after && after !== '/') {
break;
}
rest = rest.slice(3);
consume('/**', 3);
}
if (prior.type === 'bos' && eos()) {
prev.type = 'globstar';
prev.value += value;
prev.output = globstar(opts);
state.output = prev.output;
state.globstar = true;
consume(value);
continue;
}
if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {
state.output = state.output.slice(0, -(prior.output + prev.output).length);
prior.output = `(?:${prior.output}`;
prev.type = 'globstar';
prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');
prev.value += value;
state.globstar = true;
state.output += prior.output + prev.output;
consume(value);
continue;
}
if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {
const end = rest[1] !== void 0 ? '|$' : '';
state.output = state.output.slice(0, -(prior.output + prev.output).length);
prior.output = `(?:${prior.output}`;
prev.type = 'globstar';
prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;
prev.value += value;
state.output += prior.output + prev.output;
state.globstar = true;
consume(value + advance());
push({ type: 'slash', value: '/', output: '' });
continue;
}
if (prior.type === 'bos' && rest[0] === '/') {
prev.type = 'globstar';
prev.value += value;
prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;
state.output = prev.output;
state.globstar = true;
consume(value + advance());
push({ type: 'slash', value: '/', output: '' });
continue;
}
// remove single star from output
state.output = state.output.slice(0, -prev.output.length);
// reset previous token to globstar
prev.type = 'globstar';
prev.output = globstar(opts);
prev.value += value;
// reset output with globstar
state.output += prev.output;
state.globstar = true;
consume(value);
continue;
}
const token = { type: 'star', value, output: star };
if (opts.bash === true) {
token.output = '.*?';
if (prev.type === 'bos' || prev.type === 'slash') {
token.output = nodot + token.output;
}
push(token);
continue;
}
if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {
token.output = value;
push(token);
continue;
}
if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {
if (prev.type === 'dot') {
state.output += NO_DOT_SLASH;
prev.output += NO_DOT_SLASH;
} else if (opts.dot === true) {
state.output += NO_DOTS_SLASH;
prev.output += NO_DOTS_SLASH;
} else {
state.output += nodot;
prev.output += nodot;
}
if (peek() !== '*') {
state.output += ONE_CHAR;
prev.output += ONE_CHAR;
}
}
push(token);
}
while (state.brackets > 0) {
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));
state.output = utils$i.escapeLast(state.output, '[');
decrement('brackets');
}
while (state.parens > 0) {
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));
state.output = utils$i.escapeLast(state.output, '(');
decrement('parens');
}
while (state.braces > 0) {
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));
state.output = utils$i.escapeLast(state.output, '{');
decrement('braces');
}
if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {
push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });
}
// rebuild the output if we had to backtrack at any point
if (state.backtrack === true) {
state.output = '';
for (const token of state.tokens) {
state.output += token.output != null ? token.output : token.value;
if (token.suffix) {
state.output += token.suffix;
}
}
}
return state;
};
/**
* Fast paths for creating regular expressions for common glob patterns.
* This can significantly speed up processing and has very little downside
* impact when none of the fast paths match.
*/
parse$g.fastpaths = (input, options) => {
const opts = { ...options };
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH$1, opts.maxLength) : MAX_LENGTH$1;
const len = input.length;
if (len > max) {
throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
}
input = REPLACEMENTS[input] || input;
const win32 = utils$i.isWindows(options);
// create constants based on platform, for windows or posix
const {
DOT_LITERAL,
SLASH_LITERAL,
ONE_CHAR,
DOTS_SLASH,
NO_DOT,
NO_DOTS,
NO_DOTS_SLASH,
STAR,
START_ANCHOR
} = constants$5.globChars(win32);
const nodot = opts.dot ? NO_DOTS : NO_DOT;
const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;
const capture = opts.capture ? '' : '?:';
const state = { negated: false, prefix: '' };
let star = opts.bash === true ? '.*?' : STAR;
if (opts.capture) {
star = `(${star})`;
}
const globstar = opts => {
if (opts.noglobstar === true) return star;
return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
};
const create = str => {
switch (str) {
case '*':
return `${nodot}${ONE_CHAR}${star}`;
case '.*':
return `${DOT_LITERAL}${ONE_CHAR}${star}`;
case '*.*':
return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
case '*/*':
return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;
case '**':
return nodot + globstar(opts);
case '**/*':
return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;
case '**/*.*':
return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
case '**/.*':
return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;
default: {
const match = /^(.*?)\.(\w+)$/.exec(str);
if (!match) return;
const source = create(match[1]);
if (!source) return;
return source + DOT_LITERAL + match[2];
}
}
};
const output = utils$i.removePrefix(input, state);
let source = create(output);
if (source && opts.strictSlashes !== true) {
source += `${SLASH_LITERAL}?`;
}
return source;
};
var parse_1$3 = parse$g;
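// The parser above turns a single glob into a token list plus a regex source
// string in `state.output`; `parse$g.fastpaths` short-circuits common shapes
// such as '*', '**' and '**/*' without running the full tokenizer.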
const path$l = require$$0$4;
const scan$1 = scan_1;
const parse$f = parse_1$3;
const utils$h = utils$k;
const constants$4 = constants$6;
const isObject$3 = val => val && typeof val === 'object' && !Array.isArray(val);
/**
* Creates a matcher function from one or more glob patterns. The
* returned function takes a string to match as its first argument,
* and returns true if the string is a match. The returned matcher
* function also takes a boolean as the second argument that, when true,
* returns an object with additional information.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch(glob[, options]);
*
* const isMatch = picomatch('*.!(*a)');
* console.log(isMatch('a.a')); //=> false
* console.log(isMatch('a.b')); //=> true
* ```
* @name picomatch
* @param {String|Array} `globs` One or more glob patterns.
* @param {Object=} `options`
* @return {Function=} Returns a matcher function.
* @api public
*/
const picomatch$5 = (glob, options, returnState = false) => {
if (Array.isArray(glob)) {
const fns = glob.map(input => picomatch$5(input, options, returnState));
const arrayMatcher = str => {
for (const isMatch of fns) {
const state = isMatch(str);
if (state) return state;
}
return false;
};
return arrayMatcher;
}
const isState = isObject$3(glob) && glob.tokens && glob.input;
if (glob === '' || (typeof glob !== 'string' && !isState)) {
throw new TypeError('Expected pattern to be a non-empty string');
}
const opts = options || {};
const posix = utils$h.isWindows(options);
const regex = isState
? picomatch$5.compileRe(glob, options)
: picomatch$5.makeRe(glob, options, false, true);
const state = regex.state;
delete regex.state;
let isIgnored = () => false;
if (opts.ignore) {
const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
isIgnored = picomatch$5(opts.ignore, ignoreOpts, returnState);
}
const matcher = (input, returnObject = false) => {
const { isMatch, match, output } = picomatch$5.test(input, regex, options, { glob, posix });
const result = { glob, state, regex, posix, input, output, match, isMatch };
if (typeof opts.onResult === 'function') {
opts.onResult(result);
}
if (isMatch === false) {
result.isMatch = false;
return returnObject ? result : false;
}
if (isIgnored(input)) {
if (typeof opts.onIgnore === 'function') {
opts.onIgnore(result);
}
result.isMatch = false;
return returnObject ? result : false;
}
if (typeof opts.onMatch === 'function') {
opts.onMatch(result);
}
return returnObject ? result : true;
};
if (returnState) {
matcher.state = state;
}
return matcher;
};
/**
* Test `input` with the given `regex`. This is used by the main
* `picomatch()` function to test the input string.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.test(input, regex[, options]);
*
* console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
* // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }
* ```
* @param {String} `input` String to test.
* @param {RegExp} `regex`
* @return {Object} Returns an object with matching info.
* @api public
*/
picomatch$5.test = (input, regex, options, { glob, posix } = {}) => {
if (typeof input !== 'string') {
throw new TypeError('Expected input to be a string');
}
if (input === '') {
return { isMatch: false, output: '' };
}
const opts = options || {};
const format = opts.format || (posix ? utils$h.toPosixSlashes : null);
let match = input === glob;
let output = (match && format) ? format(input) : input;
if (match === false) {
output = format ? format(input) : input;
match = output === glob;
}
if (match === false || opts.capture === true) {
if (opts.matchBase === true || opts.basename === true) {
match = picomatch$5.matchBase(input, regex, options, posix);
} else {
match = regex.exec(output);
}
}
return { isMatch: Boolean(match), match, output };
};
/**
* Match the basename of a filepath.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.matchBase(input, glob[, options]);
* console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true
* ```
* @param {String} `input` String to test.
* @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).
* @return {Boolean}
* @api public
*/
picomatch$5.matchBase = (input, glob, options, posix = utils$h.isWindows(options)) => {
const regex = glob instanceof RegExp ? glob : picomatch$5.makeRe(glob, options);
return regex.test(path$l.basename(input));
};
/**
* Returns true if **any** of the given glob `patterns` match the specified `string`.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.isMatch(string, patterns[, options]);
*
* console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
* console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
* ```
* @param {String|Array} str The string to test.
* @param {String|Array} patterns One or more glob patterns to use for matching.
* @param {Object} [options] See available [options](#options).
* @return {Boolean} Returns true if any patterns match `str`
* @api public
*/
picomatch$5.isMatch = (str, patterns, options) => picomatch$5(patterns, options)(str);
/**
* Parse a glob pattern to create the source string for a regular
* expression.
*
* ```js
* const picomatch = require('picomatch');
* const result = picomatch.parse(pattern[, options]);
* ```
* @param {String} `pattern`
* @param {Object} `options`
* @return {Object} Returns an object with useful properties and output to be used as a regex source string.
* @api public
*/
picomatch$5.parse = (pattern, options) => {
if (Array.isArray(pattern)) return pattern.map(p => picomatch$5.parse(p, options));
return parse$f(pattern, { ...options, fastpaths: false });
};
/**
* Scan a glob pattern to separate the pattern into segments.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.scan(input[, options]);
*
* const result = picomatch.scan('!./foo/*.js');
* console.log(result);
* { prefix: '!./',
* input: '!./foo/*.js',
* start: 3,
* base: 'foo',
* glob: '*.js',
* isBrace: false,
* isBracket: false,
* isGlob: true,
* isExtglob: false,
* isGlobstar: false,
* negated: true }
* ```
* @param {String} `input` Glob pattern to scan.
* @param {Object} `options`
* @return {Object} Returns an object with details about the scanned pattern.
* @api public
*/
picomatch$5.scan = (input, options) => scan$1(input, options);
/**
* Compile a regular expression from the `state` object returned by the
* [parse()](#parse) method.
*
* @param {Object} `state`
* @param {Object} `options`
* @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser.
* @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging.
* @return {RegExp}
* @api public
*/
picomatch$5.compileRe = (state, options, returnOutput = false, returnState = false) => {
if (returnOutput === true) {
return state.output;
}
const opts = options || {};
const prepend = opts.contains ? '' : '^';
const append = opts.contains ? '' : '$';
let source = `${prepend}(?:${state.output})${append}`;
if (state && state.negated === true) {
source = `^(?!${source}).*$`;
}
const regex = picomatch$5.toRegex(source, options);
if (returnState === true) {
regex.state = state;
}
return regex;
};
/**
 * Create a regular expression from a glob pattern.
 *
 * ```js
 * const picomatch = require('picomatch');
 * // picomatch.makeRe(input[, options]);
 *
 * console.log(picomatch.makeRe('*.js'));
 * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
 * ```
 * @param {String} `input` A glob pattern to convert to a regular expression.
 * @param {Object} `options`
 * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result.
 * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression.
 * @return {RegExp} Returns a regex created from the given pattern.
 * @api public
 */
picomatch$5.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
if (!input || typeof input !== 'string') {
throw new TypeError('Expected a non-empty string');
}
let parsed = { negated: false, fastpaths: true };
if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {
parsed.output = parse$f.fastpaths(input, options);
}
if (!parsed.output) {
parsed = parse$f(input, options);
}
return picomatch$5.compileRe(parsed, options, returnOutput, returnState);
};
/**
* Create a regular expression from the given regex source string.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.toRegex(source[, options]);
*
* const { output } = picomatch.parse('*.js');
* console.log(picomatch.toRegex(output));
* //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
* ```
* @param {String} `source` Regular expression source string.
* @param {Object} `options`
* @return {RegExp}
* @api public
*/
picomatch$5.toRegex = (source, options) => {
try {
const opts = options || {};
return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));
} catch (err) {
if (options && options.debug === true) throw err;
return /$^/;
}
};
/**
* Picomatch constants.
* @return {Object}
*/
picomatch$5.constants = constants$4;
/**
* Expose "picomatch"
*/
var picomatch_1 = picomatch$5;
var picomatch$3 = picomatch_1;
var picomatch$4 = /*@__PURE__*/getDefaultExportFromCjs(picomatch$3);
const extractors = {
ArrayPattern(names, param) {
for (const element of param.elements) {
if (element)
extractors[element.type](names, element);
}
},
AssignmentPattern(names, param) {
extractors[param.left.type](names, param.left);
},
Identifier(names, param) {
names.push(param.name);
},
MemberExpression() { },
ObjectPattern(names, param) {
for (const prop of param.properties) {
// @ts-ignore Typescript reports that this is not a valid type
if (prop.type === 'RestElement') {
extractors.RestElement(names, prop);
}
else {
extractors[prop.value.type](names, prop.value);
}
}
},
RestElement(names, param) {
extractors[param.argument.type](names, param.argument);
}
};
const extractAssignedNames = function extractAssignedNames(param) {
const names = [];
extractors[param.type](names, param);
return names;
};
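// Usage sketch (illustrative; assumes an ESTree-shaped binding node, e.g. the
// ObjectPattern produced for `{ a, b: [c], ...rest }`; `objectPatternNode` is a
// hypothetical variable holding that node):
//
//   extractAssignedNames(objectPatternNode); //=> ['a', 'c', 'rest']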
const blockDeclarations = {
const: true,
let: true
};
class Scope {
constructor(options = {}) {
this.parent = options.parent;
this.isBlockScope = !!options.block;
this.declarations = Object.create(null);
if (options.params) {
options.params.forEach((param) => {
extractAssignedNames(param).forEach((name) => {
this.declarations[name] = true;
});
});
}
}
addDeclaration(node, isBlockDeclaration, isVar) {
if (!isBlockDeclaration && this.isBlockScope) {
// it's a `var` or function node, and this
// is a block scope, so we need to go up
this.parent.addDeclaration(node, isBlockDeclaration, isVar);
}
else if (node.id) {
extractAssignedNames(node.id).forEach((name) => {
this.declarations[name] = true;
});
}
}
contains(name) {
return this.declarations[name] || (this.parent ? this.parent.contains(name) : false);
}
}
const attachScopes = function attachScopes(ast, propertyName = 'scope') {
let scope = new Scope();
walk$3(ast, {
enter(n, parent) {
const node = n;
// function foo () {...}
// class Foo {...}
if (/(Function|Class)Declaration/.test(node.type)) {
scope.addDeclaration(node, false, false);
}
// var foo = 1
if (node.type === 'VariableDeclaration') {
const { kind } = node;
const isBlockDeclaration = blockDeclarations[kind];
node.declarations.forEach((declaration) => {
scope.addDeclaration(declaration, isBlockDeclaration, true);
});
}
let newScope;
// create new function scope
if (/Function/.test(node.type)) {
const func = node;
newScope = new Scope({
parent: scope,
block: false,
params: func.params
});
// named function expressions - the name is considered
// part of the function's scope
if (func.type === 'FunctionExpression' && func.id) {
newScope.addDeclaration(func, false, false);
}
}
// create new for scope
if (/For(In|Of)?Statement/.test(node.type)) {
newScope = new Scope({
parent: scope,
block: true
});
}
// create new block scope
if (node.type === 'BlockStatement' && !/Function/.test(parent.type)) {
newScope = new Scope({
parent: scope,
block: true
});
}
// catch clause has its own block scope
if (node.type === 'CatchClause') {
newScope = new Scope({
parent: scope,
params: node.param ? [node.param] : [],
block: true
});
}
if (newScope) {
Object.defineProperty(node, propertyName, {
value: newScope,
configurable: true
});
scope = newScope;
}
},
leave(n) {
const node = n;
if (node[propertyName])
scope = scope.parent;
}
});
return scope;
};
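// Usage sketch (illustrative; `ast` stands for any ESTree Program, e.g. one
// parsed from `const x = 1; function f(y) { let z; }`). Scope-creating nodes get
// a `scope` property attached, and the returned root scope can be queried:
//
//   const rootScope = attachScopes(ast, 'scope');
//   rootScope.contains('x'); //=> true  (top-level declaration)
//   rootScope.contains('z'); //=> false (declared inside f's own scope)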
// Helper since Typescript can't detect readonly arrays with Array.isArray
function isArray(arg) {
return Array.isArray(arg);
}
function ensureArray(thing) {
if (isArray(thing))
return thing;
if (thing == null)
return [];
return [thing];
}
const normalizePath$5 = function normalizePath(filename) {
return filename.split(win32.sep).join(posix.sep);
};
function getMatcherString(id, resolutionBase) {
if (resolutionBase === false || isAbsolute(id) || id.startsWith('**')) {
return normalizePath$5(id);
}
// resolve('') is valid and will default to process.cwd()
const basePath = normalizePath$5(resolve$3(resolutionBase || ''))
// escape all possible (posix + win) path characters that might interfere with regex
.replace(/[-^$*+?.()|[\]{}]/g, '\\$&');
// Note that we use posix.join because:
// 1. the basePath has been normalized to use /
// 2. the incoming glob (id) matcher, also uses /
// otherwise Node will force backslash (\) on windows
return posix.join(basePath, normalizePath$5(id));
}
const createFilter$1 = function createFilter(include, exclude, options) {
const resolutionBase = options && options.resolve;
const getMatcher = (id) => id instanceof RegExp
? id
: {
test: (what) => {
// this refactor is a tad overly verbose but makes for easy debugging
const pattern = getMatcherString(id, resolutionBase);
const fn = picomatch$4(pattern, { dot: true });
const result = fn(what);
return result;
}
};
const includeMatchers = ensureArray(include).map(getMatcher);
const excludeMatchers = ensureArray(exclude).map(getMatcher);
return function result(id) {
if (typeof id !== 'string')
return false;
if (/\0/.test(id))
return false;
const pathId = normalizePath$5(id);
for (let i = 0; i < excludeMatchers.length; ++i) {
const matcher = excludeMatchers[i];
if (matcher.test(pathId))
return false;
}
for (let i = 0; i < includeMatchers.length; ++i) {
const matcher = includeMatchers[i];
if (matcher.test(pathId))
return true;
}
return !includeMatchers.length;
};
};
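// Usage sketch (illustrative; relative ids shown for brevity, assuming the
// standard @rollup/pluginutils include/exclude semantics implemented above):
//
//   const filter = createFilter$1(['**/*.js'], ['**/node_modules/**']);
//   filter('src/main.js');          //=> true  (matches an include pattern)
//   filter('node_modules/x/a.js');  //=> false (matches an exclude pattern)
//   filter('styles/app.css');       //=> false (matches no include pattern)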
const reservedWords = 'break case class catch const continue debugger default delete do else export extends finally for function if import in instanceof let new return super switch this throw try typeof var void while with yield enum await implements package protected static interface private public';
const builtins = 'arguments Infinity NaN undefined null true false eval uneval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Symbol Error EvalError InternalError RangeError ReferenceError SyntaxError TypeError URIError Number Math Date String RegExp Array Int8Array Uint8Array Uint8ClampedArray Int16Array Uint16Array Int32Array Uint32Array Float32Array Float64Array Map Set WeakMap WeakSet SIMD ArrayBuffer DataView JSON Promise Generator GeneratorFunction Reflect Proxy Intl';
const forbiddenIdentifiers = new Set(`${reservedWords} ${builtins}`.split(' '));
forbiddenIdentifiers.add('');
const makeLegalIdentifier = function makeLegalIdentifier(str) {
let identifier = str
.replace(/-(\w)/g, (_, letter) => letter.toUpperCase())
.replace(/[^$_a-zA-Z0-9]/g, '_');
if (/\d/.test(identifier[0]) || forbiddenIdentifiers.has(identifier)) {
identifier = `_${identifier}`;
}
return identifier || '_';
};
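// Usage sketch (illustrative results):
//
//   makeLegalIdentifier('foo-bar');  //=> 'fooBar'   (dash-case is camelised)
//   makeLegalIdentifier('some.key'); //=> 'some_key' (illegal chars become _)
//   makeLegalIdentifier('default');  //=> '_default' (reserved words get a leading _)
//   makeLegalIdentifier('123abc');   //=> '_123abc'  (identifiers may not start with a digit)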
function stringify$8(obj) {
return (JSON.stringify(obj) || 'undefined').replace(/[\u2028\u2029]/g, (char) => `\\u${`000${char.charCodeAt(0).toString(16)}`.slice(-4)}`);
}
function serializeArray(arr, indent, baseIndent) {
let output = '[';
const separator = indent ? `\n${baseIndent}${indent}` : '';
for (let i = 0; i < arr.length; i++) {
const key = arr[i];
output += `${i > 0 ? ',' : ''}${separator}${serialize(key, indent, baseIndent + indent)}`;
}
return `${output}${indent ? `\n${baseIndent}` : ''}]`;
}
function serializeObject(obj, indent, baseIndent) {
let output = '{';
const separator = indent ? `\n${baseIndent}${indent}` : '';
const entries = Object.entries(obj);
for (let i = 0; i < entries.length; i++) {
const [key, value] = entries[i];
const stringKey = makeLegalIdentifier(key) === key ? key : stringify$8(key);
output += `${i > 0 ? ',' : ''}${separator}${stringKey}:${indent ? ' ' : ''}${serialize(value, indent, baseIndent + indent)}`;
}
return `${output}${indent ? `\n${baseIndent}` : ''}}`;
}
function serialize(obj, indent, baseIndent) {
if (typeof obj === 'object' && obj !== null) {
if (Array.isArray(obj))
return serializeArray(obj, indent, baseIndent);
if (obj instanceof Date)
return `new Date(${obj.getTime()})`;
if (obj instanceof RegExp)
return obj.toString();
return serializeObject(obj, indent, baseIndent);
}
if (typeof obj === 'number') {
if (obj === Infinity)
return 'Infinity';
if (obj === -Infinity)
return '-Infinity';
if (obj === 0)
return 1 / obj === Infinity ? '0' : '-0';
if (obj !== obj)
return 'NaN'; // eslint-disable-line no-self-compare
}
if (typeof obj === 'symbol') {
const key = Symbol.keyFor(obj);
// eslint-disable-next-line no-undefined
if (key !== undefined)
return `Symbol.for(${stringify$8(key)})`;
}
if (typeof obj === 'bigint')
return `${obj}n`;
return stringify$8(obj);
}
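// Usage sketch (illustrative; shows the special cases JSON.stringify would lose):
//
//   serialize({ n: -0, big: 10n, when: new Date(0) }, null, '');
//   //=> '{n:-0,big:10n,when:new Date(0)}'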
// isWellFormed exists from Node.js 20
const hasStringIsWellFormed = 'isWellFormed' in String.prototype;
function isWellFormedString(input) {
// @ts-expect-error String::isWellFormed exists from ES2024. tsconfig lib is set to ES6
if (hasStringIsWellFormed)
return input.isWellFormed();
// https://github.com/tc39/proposal-is-usv-string/blob/main/README.md#algorithm
return !/\p{Surrogate}/u.test(input);
}
const dataToEsm = function dataToEsm(data, options = {}) {
var _a, _b;
const t = options.compact ? '' : 'indent' in options ? options.indent : '\t';
const _ = options.compact ? '' : ' ';
const n = options.compact ? '' : '\n';
const declarationType = options.preferConst ? 'const' : 'var';
if (options.namedExports === false ||
typeof data !== 'object' ||
Array.isArray(data) ||
data instanceof Date ||
data instanceof RegExp ||
data === null) {
const code = serialize(data, options.compact ? null : t, '');
const magic = _ || (/^[{[\-\/]/.test(code) ? '' : ' '); // eslint-disable-line no-useless-escape
return `export default${magic}${code};`;
}
let maxUnderbarPrefixLength = 0;
for (const key of Object.keys(data)) {
const underbarPrefixLength = (_b = (_a = key.match(/^(_+)/)) === null || _a === void 0 ? void 0 : _a[0].length) !== null && _b !== void 0 ? _b : 0;
if (underbarPrefixLength > maxUnderbarPrefixLength) {
maxUnderbarPrefixLength = underbarPrefixLength;
}
}
const arbitraryNamePrefix = `${'_'.repeat(maxUnderbarPrefixLength + 1)}arbitrary`;
let namedExportCode = '';
const defaultExportRows = [];
const arbitraryNameExportRows = [];
for (const [key, value] of Object.entries(data)) {
if (key === makeLegalIdentifier(key)) {
if (options.objectShorthand)
defaultExportRows.push(key);
else
defaultExportRows.push(`${key}:${_}${key}`);
namedExportCode += `export ${declarationType} ${key}${_}=${_}${serialize(value, options.compact ? null : t, '')};${n}`;
}
else {
defaultExportRows.push(`${stringify$8(key)}:${_}${serialize(value, options.compact ? null : t, '')}`);
if (options.includeArbitraryNames && isWellFormedString(key)) {
const variableName = `${arbitraryNamePrefix}${arbitraryNameExportRows.length}`;
namedExportCode += `${declarationType} ${variableName}${_}=${_}${serialize(value, options.compact ? null : t, '')};${n}`;
arbitraryNameExportRows.push(`${variableName} as ${JSON.stringify(key)}`);
}
}
}
const arbitraryExportCode = arbitraryNameExportRows.length > 0
? `export${_}{${n}${t}${arbitraryNameExportRows.join(`,${n}${t}`)}${n}};${n}`
: '';
const defaultExportCode = `export default${_}{${n}${t}${defaultExportRows.join(`,${n}${t}`)}${n}};${n}`;
return `${namedExportCode}${arbitraryExportCode}${defaultExportCode}`;
};
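// Usage sketch (illustrative output, assuming compact mode):
//
//   dataToEsm({ name: 'vite', 'my key': 1 }, { compact: true, preferConst: true });
//   //=> 'export const name="vite";export default{name:name,"my key":1};'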
var path$k = require$$0$4;
var commondir = function (basedir, relfiles) {
if (relfiles) {
var files = relfiles.map(function (r) {
return path$k.resolve(basedir, r);
});
}
else {
var files = basedir;
}
var res = files.slice(1).reduce(function (ps, file) {
if (!file.match(/^([A-Za-z]:)?\/|\\/)) {
throw new Error('relative path without a basedir');
}
var xs = file.split(/\/+|\\+/);
for (
var i = 0;
ps[i] === xs[i] && i < Math.min(ps.length, xs.length);
i++
);
return ps.slice(0, i);
}, files[0].split(/\/+|\\+/));
// Windows correctly handles paths with forward-slashes
return res.length > 1 ? res.join('/') : '/'
};
var getCommonDir = /*@__PURE__*/getDefaultExportFromCjs(commondir);
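// Usage sketch (illustrative, posix paths assumed):
//
//   commondir(['/x/y/z', '/x/y/w/q']);       //=> '/x/y'
//   commondir('/base', ['one/a', 'two/b']);  //=> '/base' (relative files resolved against the basedir)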
var balancedMatch = balanced$1;
function balanced$1(a, b, str) {
if (a instanceof RegExp) a = maybeMatch(a, str);
if (b instanceof RegExp) b = maybeMatch(b, str);
var r = range$1(a, b, str);
return r && {
start: r[0],
end: r[1],
pre: str.slice(0, r[0]),
body: str.slice(r[0] + a.length, r[1]),
post: str.slice(r[1] + b.length)
};
}
function maybeMatch(reg, str) {
var m = str.match(reg);
return m ? m[0] : null;
}
balanced$1.range = range$1;
function range$1(a, b, str) {
var begs, beg, left, right, result;
var ai = str.indexOf(a);
var bi = str.indexOf(b, ai + 1);
var i = ai;
if (ai >= 0 && bi > 0) {
if(a===b) {
return [ai, bi];
}
begs = [];
left = str.length;
while (i >= 0 && !result) {
if (i == ai) {
begs.push(i);
ai = str.indexOf(a, i + 1);
} else if (begs.length == 1) {
result = [ begs.pop(), bi ];
} else {
beg = begs.pop();
if (beg < left) {
left = beg;
right = bi;
}
bi = str.indexOf(b, i + 1);
}
i = ai < bi && ai >= 0 ? ai : bi;
}
if (begs.length) {
result = [ left, right ];
}
}
return result;
}
var balanced = balancedMatch;
var braceExpansion = expandTop;
var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';
function numeric(str) {
return parseInt(str, 10) == str
? parseInt(str, 10)
: str.charCodeAt(0);
}
function escapeBraces(str) {
return str.split('\\\\').join(escSlash)
.split('\\{').join(escOpen)
.split('\\}').join(escClose)
.split('\\,').join(escComma)
.split('\\.').join(escPeriod);
}
function unescapeBraces(str) {
return str.split(escSlash).join('\\')
.split(escOpen).join('{')
.split(escClose).join('}')
.split(escComma).join(',')
.split(escPeriod).join('.');
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts(str) {
if (!str)
return [''];
var parts = [];
var m = balanced('{', '}', str);
if (!m)
return str.split(',');
var pre = m.pre;
var body = m.body;
var post = m.post;
var p = pre.split(',');
p[p.length-1] += '{' + body + '}';
var postParts = parseCommaParts(post);
if (post.length) {
p[p.length-1] += postParts.shift();
p.push.apply(p, postParts);
}
parts.push.apply(parts, p);
return parts;
}
function expandTop(str) {
if (!str)
return [];
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if (str.substr(0, 2) === '{}') {
str = '\\{\\}' + str.substr(2);
}
return expand$3(escapeBraces(str), true).map(unescapeBraces);
}
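// Usage sketch (illustrative):
//
//   expandTop('a{b,c}d');     //=> ['abd', 'acd']
//   expandTop('file{1..3}');  //=> ['file1', 'file2', 'file3']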
function embrace(str) {
return '{' + str + '}';
}
function isPadded(el) {
return /^-?0\d/.test(el);
}
function lte(i, y) {
return i <= y;
}
function gte(i, y) {
return i >= y;
}
function expand$3(str, isTop) {
var expansions = [];
var m = balanced('{', '}', str);
if (!m) return [str];
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m.pre;
var post = m.post.length
? expand$3(m.post, false)
: [''];
if (/\$$/.test(m.pre)) {
for (var k = 0; k < post.length; k++) {
var expansion = pre+ '{' + m.body + '}' + post[k];
expansions.push(expansion);
}
} else {
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
var isSequence = isNumericSequence || isAlphaSequence;
var isOptions = m.body.indexOf(',') >= 0;
if (!isSequence && !isOptions) {
// {a},b}
if (m.post.match(/,.*\}/)) {
str = m.pre + '{' + m.body + escClose + m.post;
return expand$3(str);
}
return [str];
}
var n;
if (isSequence) {
n = m.body.split(/\.\./);
} else {
n = parseCommaParts(m.body);
if (n.length === 1) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand$3(n[0], false).map(embrace);
if (n.length === 1) {
return post.map(function(p) {
return m.pre + n[0] + p;
});
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
var N;
if (isSequence) {
var x = numeric(n[0]);
var y = numeric(n[1]);
var width = Math.max(n[0].length, n[1].length);
var incr = n.length == 3
? Math.abs(numeric(n[2]))
: 1;
var test = lte;
var reverse = y < x;
if (reverse) {
incr *= -1;
test = gte;
}
var pad = n.some(isPadded);
N = [];
for (var i = x; test(i, y); i += incr) {
var c;
if (isAlphaSequence) {
c = String.fromCharCode(i);
if (c === '\\')
c = '';
} else {
c = String(i);
if (pad) {
var need = width - c.length;
if (need > 0) {
var z = new Array(need + 1).join('0');
if (i < 0)
c = '-' + z + c.slice(1);
else
c = z + c;
}
}
}
N.push(c);
}
} else {
N = [];
for (var j = 0; j < n.length; j++) {
N.push.apply(N, expand$3(n[j], false));
}
}
for (var j = 0; j < N.length; j++) {
for (var k = 0; k < post.length; k++) {
var expansion = pre + N[j] + post[k];
if (!isTop || isSequence || expansion)
expansions.push(expansion);
}
}
}
return expansions;
}
var expand$4 = /*@__PURE__*/getDefaultExportFromCjs(braceExpansion);
const MAX_PATTERN_LENGTH = 1024 * 64;
const assertValidPattern = (pattern) => {
if (typeof pattern !== 'string') {
throw new TypeError('invalid pattern');
}
if (pattern.length > MAX_PATTERN_LENGTH) {
throw new TypeError('pattern is too long');
}
};
// translate the various posix character classes into unicode properties
// this works across all unicode locales
// { <posix class>: [<translation>, /u flag required, negated] }
const posixClasses = {
'[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
'[:alpha:]': ['\\p{L}\\p{Nl}', true],
'[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
'[:blank:]': ['\\p{Zs}\\t', true],
'[:cntrl:]': ['\\p{Cc}', true],
'[:digit:]': ['\\p{Nd}', true],
'[:graph:]': ['\\p{Z}\\p{C}', true, true],
'[:lower:]': ['\\p{Ll}', true],
'[:print:]': ['\\p{C}', true],
'[:punct:]': ['\\p{P}', true],
'[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
'[:upper:]': ['\\p{Lu}', true],
'[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
'[:xdigit:]': ['A-Fa-f0-9', false],
};
// only need to escape a few things inside of brace expressions
// escapes: [ \ ] -
const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
// escape all regexp magic characters
const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
// everything has already been escaped, we just have to join
const rangesToString = (ranges) => ranges.join('');
// takes a glob string at a posix brace expression, and returns
// an equivalent regular expression source, a boolean indicating
// whether the /u flag needs to be applied, and the number of chars
// consumed to parse the character class.
// This also removes out-of-order ranges, and returns ($.) if the
// entire class is no good.
const parseClass = (glob, position) => {
const pos = position;
/* c8 ignore start */
if (glob.charAt(pos) !== '[') {
throw new Error('not in a brace expression');
}
/* c8 ignore stop */
const ranges = [];
const negs = [];
let i = pos + 1;
let sawStart = false;
let uflag = false;
let escaping = false;
let negate = false;
let endPos = pos;
let rangeStart = '';
WHILE: while (i < glob.length) {
const c = glob.charAt(i);
if ((c === '!' || c === '^') && i === pos + 1) {
negate = true;
i++;
continue;
}
if (c === ']' && sawStart && !escaping) {
endPos = i + 1;
break;
}
sawStart = true;
if (c === '\\') {
if (!escaping) {
escaping = true;
i++;
continue;
}
// escaped \ char, fall through and treat like normal char
}
if (c === '[' && !escaping) {
// either a posix class, a collation equivalent, or just a [
for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
if (glob.startsWith(cls, i)) {
// invalid, [a-[] is fine, but not [a-[:alpha]]
if (rangeStart) {
return ['$.', false, glob.length - pos, true];
}
i += cls.length;
if (neg)
negs.push(unip);
else
ranges.push(unip);
uflag = uflag || u;
continue WHILE;
}
}
}
// now it's just a normal character, effectively
escaping = false;
if (rangeStart) {
// throw this range away if it's not valid, but others
// can still match.
if (c > rangeStart) {
ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
}
else if (c === rangeStart) {
ranges.push(braceEscape(c));
}
rangeStart = '';
i++;
continue;
}
// now might be the start of a range.
// can be either c-d or c-] or just c followed by more characters at this point
if (glob.startsWith('-]', i + 1)) {
ranges.push(braceEscape(c + '-'));
i += 2;
continue;
}
if (glob.startsWith('-', i + 1)) {
rangeStart = c;
i += 2;
continue;
}
// not the start of a range, just a single character
ranges.push(braceEscape(c));
i++;
}
if (endPos < i) {
// didn't see the end of the class, not a valid class,
// but might still be valid as a literal match.
return ['', false, 0, false];
}
// if we got no ranges and no negates, then we have a range that
// cannot possibly match anything, and that poisons the whole glob
if (!ranges.length && !negs.length) {
return ['$.', false, glob.length - pos, true];
}
// if we got one positive range, and it's a single character, then that's
// not actually a magic pattern, it's just that one literal character.
// we should not treat that as "magic", we should just return the literal
// character. [_] is a perfectly valid way to escape glob magic chars.
if (negs.length === 0 &&
ranges.length === 1 &&
/^\\?.$/.test(ranges[0]) &&
!negate) {
const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
return [regexpEscape(r), false, endPos - pos, false];
}
const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
const comb = ranges.length && negs.length
? '(' + sranges + '|' + snegs + ')'
: ranges.length
? sranges
: snegs;
return [comb, uflag, endPos - pos, true];
};
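// Usage sketch (illustrative; the returned tuple is
// [regex source, /u flag needed, chars consumed, was magic]):
//
//   parseClass('[a-c]', 0);       //=> ['[a-c]', false, 5, true]
//   parseClass('[[:digit:]]', 0); //=> ['[\\p{Nd}]', true, 11, true]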
/**
* Un-escape a string that has been escaped with {@link escape}.
*
* If the {@link windowsPathsNoEscape} option is used, then square-brace
* escapes are removed, but not backslash escapes. For example, it will turn
* the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
 * because `\` is a path separator in `windowsPathsNoEscape` mode.
*
* When `windowsPathsNoEscape` is not set, then both brace escapes and
* backslash escapes are removed.
*
* Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
* or unescaped.
*/
const unescape$1 = (s, { windowsPathsNoEscape = false, } = {}) => {
return windowsPathsNoEscape
? s.replace(/\[([^\/\\])\]/g, '$1')
: s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
};
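// Usage sketch (illustrative):
//
//   unescape$1('\\*.js');                                 //=> '*.js'
//   unescape$1('[*].js', { windowsPathsNoEscape: true }); //=> '*.js' (only square-brace escapes removed)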
// parse a single path portion
const types$1 = new Set(['!', '?', '+', '*', '@']);
const isExtglobType = (c) => types$1.has(c);
// Patterns that get prepended to bind to the start of either the
// entire string, or just a single path portion, to prevent dots
// and/or traversal patterns, when needed.
// Exts don't need the ^ or / bit, because the root binds that already.
const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
const startNoDot = '(?!\\.)';
// characters that indicate a start of pattern needs the "no dots" bit,
// because a dot *might* be matched. ( is not in the list, because in
// the case of a child extglob, it will handle the prevention itself.
const addPatternStart = new Set(['[', '.']);
// cases where traversal is A-OK, no dot prevention needed
const justDots = new Set(['..', '.']);
const reSpecials = new Set('().*{}+?[]^$\\!');
const regExpEscape$1 = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
// any single thing other than /
const qmark$1 = '[^/]';
// * => any number of characters
const star$1 = qmark$1 + '*?';
// use + when we need to ensure that *something* matches, because the * is
// the only thing in the path portion.
const starNoEmpty = qmark$1 + '+?';
// remove the \ chars that we added if we end up doing a nonmagic compare
// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
class AST {
type;
#root;
#hasMagic;
#uflag = false;
#parts = [];
#parent;
#parentIndex;
#negs;
#filledNegs = false;
#options;
#toString;
// set to true if it's an extglob with no children
// (which really means one child of '')
#emptyExt = false;
constructor(type, parent, options = {}) {
this.type = type;
// extglobs are inherently magical
if (type)
this.#hasMagic = true;
this.#parent = parent;
this.#root = this.#parent ? this.#parent.#root : this;
this.#options = this.#root === this ? options : this.#root.#options;
this.#negs = this.#root === this ? [] : this.#root.#negs;
if (type === '!' && !this.#root.#filledNegs)
this.#negs.push(this);
this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
}
get hasMagic() {
/* c8 ignore start */
if (this.#hasMagic !== undefined)
return this.#hasMagic;
/* c8 ignore stop */
for (const p of this.#parts) {
if (typeof p === 'string')
continue;
if (p.type || p.hasMagic)
return (this.#hasMagic = true);
}
// note: will be undefined until we generate the regexp src and find out
return this.#hasMagic;
}
// reconstructs the pattern
toString() {
if (this.#toString !== undefined)
return this.#toString;
if (!this.type) {
return (this.#toString = this.#parts.map(p => String(p)).join(''));
}
else {
return (this.#toString =
this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
}
}
#fillNegs() {
/* c8 ignore start */
if (this !== this.#root)
throw new Error('should only call on root');
if (this.#filledNegs)
return this;
/* c8 ignore stop */
// call toString() once to fill this out
this.toString();
this.#filledNegs = true;
let n;
while ((n = this.#negs.pop())) {
if (n.type !== '!')
continue;
// walk up the tree, appending everything that comes AFTER parentIndex
let p = n;
let pp = p.#parent;
while (pp) {
for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
for (const part of n.#parts) {
/* c8 ignore start */
if (typeof part === 'string') {
throw new Error('string part in extglob AST??');
}
/* c8 ignore stop */
part.copyIn(pp.#parts[i]);
}
}
p = pp;
pp = p.#parent;
}
}
return this;
}
push(...parts) {
for (const p of parts) {
if (p === '')
continue;
/* c8 ignore start */
if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
throw new Error('invalid part: ' + p);
}
/* c8 ignore stop */
this.#parts.push(p);
}
}
toJSON() {
const ret = this.type === null
? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
: [this.type, ...this.#parts.map(p => p.toJSON())];
if (this.isStart() && !this.type)
ret.unshift([]);
if (this.isEnd() &&
(this === this.#root ||
(this.#root.#filledNegs && this.#parent?.type === '!'))) {
ret.push({});
}
return ret;
}
isStart() {
if (this.#root === this)
return true;
// if (this.type) return !!this.#parent?.isStart()
if (!this.#parent?.isStart())
return false;
if (this.#parentIndex === 0)
return true;
// if everything AHEAD of this is a negation, then it's still the "start"
const p = this.#parent;
for (let i = 0; i < this.#parentIndex; i++) {
const pp = p.#parts[i];
if (!(pp instanceof AST && pp.type === '!')) {
return false;
}
}
return true;
}
isEnd() {
if (this.#root === this)
return true;
if (this.#parent?.type === '!')
return true;
if (!this.#parent?.isEnd())
return false;
if (!this.type)
return this.#parent?.isEnd();
// if not root, it'll always have a parent
/* c8 ignore start */
const pl = this.#parent ? this.#parent.#parts.length : 0;
/* c8 ignore stop */
return this.#parentIndex === pl - 1;
}
copyIn(part) {
if (typeof part === 'string')
this.push(part);
else
this.push(part.clone(this));
}
clone(parent) {
const c = new AST(this.type, parent);
for (const p of this.#parts) {
c.copyIn(p);
}
return c;
}
static #parseAST(str, ast, pos, opt) {
let escaping = false;
let inBrace = false;
let braceStart = -1;
let braceNeg = false;
if (ast.type === null) {
// outside of an extglob, append until we find a start
let i = pos;
let acc = '';
while (i < str.length) {
const c = str.charAt(i++);
// still accumulate escapes at this point, but we do ignore
// starts that are escaped
if (escaping || c === '\\') {
escaping = !escaping;
acc += c;
continue;
}
if (inBrace) {
if (i === braceStart + 1) {
if (c === '^' || c === '!') {
braceNeg = true;
}
}
else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
inBrace = false;
}
acc += c;
continue;
}
else if (c === '[') {
inBrace = true;
braceStart = i;
braceNeg = false;
acc += c;
continue;
}
if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
ast.push(acc);
acc = '';
const ext = new AST(c, ast);
i = AST.#parseAST(str, ext, i, opt);
ast.push(ext);
continue;
}
acc += c;
}
ast.push(acc);
return i;
}
// some kind of extglob, pos is at the (
// find the next | or )
let i = pos + 1;
let part = new AST(null, ast);
const parts = [];
let acc = '';
while (i < str.length) {
const c = str.charAt(i++);
// still accumulate escapes at this point, but we do ignore
// starts that are escaped
if (escaping || c === '\\') {
escaping = !escaping;
acc += c;
continue;
}
if (inBrace) {
if (i === braceStart + 1) {
if (c === '^' || c === '!') {
braceNeg = true;
}
}
else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
inBrace = false;
}
acc += c;
continue;
}
else if (c === '[') {
inBrace = true;
braceStart = i;
braceNeg = false;
acc += c;
continue;
}
if (isExtglobType(c) && str.charAt(i) === '(') {
part.push(acc);
acc = '';
const ext = new AST(c, part);
part.push(ext);
i = AST.#parseAST(str, ext, i, opt);
continue;
}
if (c === '|') {
part.push(acc);
acc = '';
parts.push(part);
part = new AST(null, ast);
continue;
}
if (c === ')') {
if (acc === '' && ast.#parts.length === 0) {
ast.#emptyExt = true;
}
part.push(acc);
acc = '';
ast.push(...parts, part);
return i;
}
acc += c;
}
// unfinished extglob
// if we got here, it was a malformed extglob! not an extglob, but
// maybe something else in there.
ast.type = null;
ast.#hasMagic = undefined;
ast.#parts = [str.substring(pos - 1)];
return i;
}
static fromGlob(pattern, options = {}) {
const ast = new AST(null, undefined, options);
AST.#parseAST(pattern, ast, 0, options);
return ast;
}
// returns the regular expression if there's magic, or the unescaped
// string if not.
toMMPattern() {
// should only be called on root
/* c8 ignore start */
if (this !== this.#root)
return this.#root.toMMPattern();
/* c8 ignore stop */
const glob = this.toString();
const [re, body, hasMagic, uflag] = this.toRegExpSource();
// if we're in nocase mode, and not nocaseMagicOnly, then we do
// still need a regular expression if we have to case-insensitively
// match capital/lowercase characters.
const anyMagic = hasMagic ||
this.#hasMagic ||
(this.#options.nocase &&
!this.#options.nocaseMagicOnly &&
glob.toUpperCase() !== glob.toLowerCase());
if (!anyMagic) {
return body;
}
const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
return Object.assign(new RegExp(`^${re}$`, flags), {
_src: re,
_glob: glob,
});
}
get options() {
return this.#options;
}
// returns the string match, the regexp source, whether there's magic
// in the regexp (so a regular expression is required) and whether or
// not the uflag is needed for the regular expression (for posix classes)
// TODO: instead of injecting the start/end at this point, just return
// the BODY of the regexp, along with the start/end portions suitable
// for binding the start/end in either a joined full-path makeRe context
// (where we bind to (^|/), or a standalone matchPart context (where
// we bind to ^, and not /). Otherwise slashes get duped!
//
// In part-matching mode, the start is:
// - if not isStart: nothing
// - if traversal possible, but not allowed: ^(?!\.\.?$)
// - if dots allowed or not possible: ^
// - if dots possible and not allowed: ^(?!\.)
// end is:
// - if not isEnd(): nothing
// - else: $
//
// In full-path matching mode, we put the slash at the START of the
// pattern, so start is:
// - if first pattern: same as part-matching mode
// - if not isStart(): nothing
// - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
// - if dots allowed or not possible: /
// - if dots possible and not allowed: /(?!\.)
// end is:
// - if last pattern, same as part-matching mode
// - else nothing
//
// Always put the (?:$|/) on negated tails, though, because that has to be
// there to bind the end of the negated pattern portion, and it's easier to
// just stick it in now rather than try to inject it later in the middle of
// the pattern.
//
// We can just always return the same end, and leave it up to the caller
// to know whether it's going to be used joined or in parts.
// And, if the start is adjusted slightly, can do the same there:
// - if not isStart: nothing
// - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
// - if dots allowed or not possible: (?:/|^)
// - if dots possible and not allowed: (?:/|^)(?!\.)
//
// But it's better to have a simpler binding without a conditional, for
// performance, so probably better to return both start options.
//
// Then the caller just ignores the end if it's not the first pattern,
// and the start always gets applied.
//
// But that's always going to be $ if it's the ending pattern, or nothing,
// so the caller can just attach $ at the end of the pattern when building.
//
// So the todo is:
// - better detect what kind of start is needed
// - return both flavors of starting pattern
// - attach $ at the end of the pattern when creating the actual RegExp
//
// Ah, but wait, no, that all only applies to the root when the first pattern
// is not an extglob. If the first pattern IS an extglob, then we need all
// that dot prevention biz to live in the extglob portions, because eg
// +(*|.x*) can match .xy but not .yx.
//
// So, return the two flavors if it's #root and the first child is not an
// AST, otherwise leave it to the child AST to handle it, and there,
// use the (?:^|/) style of start binding.
//
// Even simplified further:
// - Since the start for a join is eg /(?!\.) and the start for a part
// is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
// or start or whatever) and prepend ^ or / at the Regexp construction.
toRegExpSource(allowDot) {
const dot = allowDot ?? !!this.#options.dot;
if (this.#root === this)
this.#fillNegs();
if (!this.type) {
const noEmpty = this.isStart() && this.isEnd();
const src = this.#parts
.map(p => {
const [re, _, hasMagic, uflag] = typeof p === 'string'
? AST.#parseGlob(p, this.#hasMagic, noEmpty)
: p.toRegExpSource(allowDot);
this.#hasMagic = this.#hasMagic || hasMagic;
this.#uflag = this.#uflag || uflag;
return re;
})
.join('');
let start = '';
if (this.isStart()) {
if (typeof this.#parts[0] === 'string') {
// this is the string that will match the start of the pattern,
// so we need to protect against dots and such.
// '.' and '..' cannot match unless the pattern is that exactly,
// even if it starts with . or dot:true is set.
const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
if (!dotTravAllowed) {
const aps = addPatternStart;
// check if we have a possibility of matching . or ..,
// and prevent that.
const needNoTrav =
// dots are allowed, and the pattern starts with [ or .
(dot && aps.has(src.charAt(0))) ||
// the pattern starts with \., and then [ or .
(src.startsWith('\\.') && aps.has(src.charAt(2))) ||
// the pattern starts with \.\., and then [ or .
(src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
// no need to prevent dots if it can't match a dot, or if a
// sub-pattern will be preventing it anyway.
const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
}
}
}
// append the "end of path portion" pattern to negation tails
let end = '';
if (this.isEnd() &&
this.#root.#filledNegs &&
this.#parent?.type === '!') {
end = '(?:$|\\/)';
}
const final = start + src + end;
return [
final,
unescape$1(src),
(this.#hasMagic = !!this.#hasMagic),
this.#uflag,
];
}
// We need to calculate the body *twice* if it's a repeat pattern
// at the start, once in nodot mode, then again in dot mode, so a
// pattern like *(?) can match 'x.y'
const repeated = this.type === '*' || this.type === '+';
// some kind of extglob
const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
let body = this.#partsToRegExp(dot);
if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
// invalid extglob, has to at least be *something* present, if it's
// the entire path portion.
const s = this.toString();
this.#parts = [s];
this.type = null;
this.#hasMagic = undefined;
return [s, unescape$1(this.toString()), false, false];
}
// XXX abstract out this map method
let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
? ''
: this.#partsToRegExp(true);
if (bodyDotAllowed === body) {
bodyDotAllowed = '';
}
if (bodyDotAllowed) {
body = `(?:${body})(?:${bodyDotAllowed})*?`;
}
// an empty !() is exactly equivalent to a starNoEmpty
let final = '';
if (this.type === '!' && this.#emptyExt) {
final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
}
else {
const close = this.type === '!'
? // !() must match something, but !(x) can match ''
'))' +
(this.isStart() && !dot && !allowDot ? startNoDot : '') +
star$1 +
')'
: this.type === '@'
? ')'
: this.type === '?'
? ')?'
: this.type === '+' && bodyDotAllowed
? ')'
: this.type === '*' && bodyDotAllowed
? `)?`
: `)${this.type}`;
final = start + body + close;
}
return [
final,
unescape$1(body),
(this.#hasMagic = !!this.#hasMagic),
this.#uflag,
];
}
#partsToRegExp(dot) {
return this.#parts
.map(p => {
// extglob ASTs should only contain parent ASTs
/* c8 ignore start */
if (typeof p === 'string') {
throw new Error('string type in extglob ast??');
}
/* c8 ignore stop */
// can ignore hasMagic, because extglobs are already always magic
const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
this.#uflag = this.#uflag || uflag;
return re;
})
.filter(p => !(this.isStart() && this.isEnd()) || !!p)
.join('|');
}
static #parseGlob(glob, hasMagic, noEmpty = false) {
let escaping = false;
let re = '';
let uflag = false;
for (let i = 0; i < glob.length; i++) {
const c = glob.charAt(i);
if (escaping) {
escaping = false;
re += (reSpecials.has(c) ? '\\' : '') + c;
continue;
}
if (c === '\\') {
if (i === glob.length - 1) {
re += '\\\\';
}
else {
escaping = true;
}
continue;
}
if (c === '[') {
const [src, needUflag, consumed, magic] = parseClass(glob, i);
if (consumed) {
re += src;
uflag = uflag || needUflag;
i += consumed - 1;
hasMagic = hasMagic || magic;
continue;
}
}
if (c === '*') {
if (noEmpty && glob === '*')
re += starNoEmpty;
else
re += star$1;
hasMagic = true;
continue;
}
if (c === '?') {
re += qmark$1;
hasMagic = true;
continue;
}
re += regExpEscape$1(c);
}
return [re, unescape$1(glob), !!hasMagic, uflag];
}
}
/**
* Escape all magic characters in a glob pattern.
*
* If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
* option is used, then characters are escaped by wrapping in `[]`, because
* a magic character wrapped in a character class can only be satisfied by
* that exact character. In this mode, `\` is _not_ escaped, because it is
* not interpreted as a magic character, but instead as a path separator.
*/
const escape$2 = (s, { windowsPathsNoEscape = false, } = {}) => {
// don't need to escape +@! because we escape the parens
// that make those magic, and escaping ! as [!] isn't valid,
// because [!]] is a valid glob class meaning not ']'.
return windowsPathsNoEscape
? s.replace(/[?*()[\]]/g, '[$&]')
: s.replace(/[?*()[\]\\]/g, '\\$&');
};
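// Usage sketch (illustrative):
//
//   escape$2('a(b)*.js');                                 //=> 'a\\(b\\)\\*.js'
//   escape$2('a(b)*.js', { windowsPathsNoEscape: true }); //=> 'a[(]b[)][*].js'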
const minimatch = (p, pattern, options = {}) => {
assertValidPattern(pattern);
// shortcut: comments match nothing.
if (!options.nocomment && pattern.charAt(0) === '#') {
return false;
}
return new Minimatch(pattern, options).match(p);
};
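// Usage sketch (illustrative):
//
//   minimatch('src/index.ts', 'src/*.ts');  //=> true
//   minimatch('src/a/b.ts', 'src/*.ts');    //=> false (a single * does not cross /)
//   minimatch('src/a/b.ts', 'src/**/*.ts'); //=> true
//   minimatch('foo', '# not a pattern');    //=> false (patterns starting with # are comments)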
// Optimized checking for the most common glob patterns.
const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
const starDotExtTestNocase = (ext) => {
ext = ext.toLowerCase();
return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
};
const starDotExtTestNocaseDot = (ext) => {
ext = ext.toLowerCase();
return (f) => f.toLowerCase().endsWith(ext);
};
const starDotStarRE = /^\*+\.\*+$/;
const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
const dotStarRE = /^\.\*+$/;
const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
const starRE = /^\*+$/;
const starTest = (f) => f.length !== 0 && !f.startsWith('.');
const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
const qmarksTestNocase = ([$0, ext = '']) => {
const noext = qmarksTestNoExt([$0]);
if (!ext)
return noext;
ext = ext.toLowerCase();
return (f) => noext(f) && f.toLowerCase().endsWith(ext);
};
const qmarksTestNocaseDot = ([$0, ext = '']) => {
const noext = qmarksTestNoExtDot([$0]);
if (!ext)
return noext;
ext = ext.toLowerCase();
return (f) => noext(f) && f.toLowerCase().endsWith(ext);
};
const qmarksTestDot = ([$0, ext = '']) => {
const noext = qmarksTestNoExtDot([$0]);
return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
};
const qmarksTest = ([$0, ext = '']) => {
const noext = qmarksTestNoExt([$0]);
return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
};
const qmarksTestNoExt = ([$0]) => {
const len = $0.length;
return (f) => f.length === len && !f.startsWith('.');
};
const qmarksTestNoExtDot = ([$0]) => {
const len = $0.length;
return (f) => f.length === len && f !== '.' && f !== '..';
};
/* c8 ignore start */
const defaultPlatform$2 = (typeof process === 'object' && process
? (typeof process.env === 'object' &&
process.env &&
process.env.__MINIMATCH_TESTING_PLATFORM__) ||
process.platform
: 'posix');
const path$j = {
win32: { sep: '\\' },
posix: { sep: '/' },
};
/* c8 ignore stop */
const sep = defaultPlatform$2 === 'win32' ? path$j.win32.sep : path$j.posix.sep;
minimatch.sep = sep;
const GLOBSTAR$2 = Symbol('globstar **');
minimatch.GLOBSTAR = GLOBSTAR$2;
// any single thing other than /
// don't need to escape / when using new RegExp()
const qmark = '[^/]';
// * => any number of characters
const star = qmark + '*?';
// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
// not a ^ or / followed by a dot,
// followed by anything, any number of times.
const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
const filter$1 = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
minimatch.filter = filter$1;
const ext = (a, b = {}) => Object.assign({}, a, b);
const defaults = (def) => {
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
return minimatch;
}
const orig = minimatch;
const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
return Object.assign(m, {
Minimatch: class Minimatch extends orig.Minimatch {
constructor(pattern, options = {}) {
super(pattern, ext(def, options));
}
static defaults(options) {
return orig.defaults(ext(def, options)).Minimatch;
}
},
AST: class AST extends orig.AST {
/* c8 ignore start */
constructor(type, parent, options = {}) {
super(type, parent, ext(def, options));
}
/* c8 ignore stop */
static fromGlob(pattern, options = {}) {
return orig.AST.fromGlob(pattern, ext(def, options));
}
},
unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
escape: (s, options = {}) => orig.escape(s, ext(def, options)),
filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
defaults: (options) => orig.defaults(ext(def, options)),
makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
sep: orig.sep,
GLOBSTAR: GLOBSTAR$2,
});
};
minimatch.defaults = defaults;
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
const braceExpand = (pattern, options = {}) => {
assertValidPattern(pattern);
// Thanks to Yeting Li for
// improving this regexp to avoid a ReDOS vulnerability.
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
// shortcut. no need to expand.
return [pattern];
}
return expand$4(pattern);
};
minimatch.braceExpand = braceExpand;
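// Usage sketch (illustrative):
//
//   braceExpand('a{b,c}d');                    //=> ['abd', 'acd']
//   braceExpand('a{b,c}d', { nobrace: true }); //=> ['a{b,c}d'] (shortcut, no expansion)
//   braceExpand('plain');                      //=> ['plain']   (no braces, returned as-is)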
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
const makeRe$1 = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
minimatch.makeRe = makeRe$1;
const match = (list, pattern, options = {}) => {
const mm = new Minimatch(pattern, options);
list = list.filter(f => mm.match(f));
if (mm.options.nonull && !list.length) {
list.push(pattern);
}
return list;
};
minimatch.match = match;
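// Usage sketch (illustrative):
//
//   match(['a.js', 'b.ts', 'c.js'], '*.js');    //=> ['a.js', 'c.js']
//   match(['a.md'], '*.js', { nonull: true });  //=> ['*.js'] (the pattern itself when nothing matched)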
// characters that indicate glob "magic": ?, *, extglob groups, or brackets
const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
class Minimatch {
options;
set;
pattern;
windowsPathsNoEscape;
nonegate;
negate;
comment;
empty;
preserveMultipleSlashes;
partial;
globSet;
globParts;
nocase;
isWindows;
platform;
windowsNoMagicRoot;
regexp;
constructor(pattern, options = {}) {
assertValidPattern(pattern);
options = options || {};
this.options = options;
this.pattern = pattern;
this.platform = options.platform || defaultPlatform$2;
this.isWindows = this.platform === 'win32';
this.windowsPathsNoEscape =
!!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
if (this.windowsPathsNoEscape) {
this.pattern = this.pattern.replace(/\\/g, '/');
}
this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
this.regexp = null;
this.negate = false;
this.nonegate = !!options.nonegate;
this.comment = false;
this.empty = false;
this.partial = !!options.partial;
this.nocase = !!this.options.nocase;
this.windowsNoMagicRoot =
options.windowsNoMagicRoot !== undefined
? options.windowsNoMagicRoot
: !!(this.isWindows && this.nocase);
this.globSet = [];
this.globParts = [];
this.set = [];
// make the set of regexps etc.
this.make();
}
hasMagic() {
if (this.options.magicalBraces && this.set.length > 1) {
return true;
}
for (const pattern of this.set) {
for (const part of pattern) {
if (typeof part !== 'string')
return true;
}
}
return false;
}
debug(..._) { }
make() {
const pattern = this.pattern;
const options = this.options;
// empty patterns and comments match nothing.
if (!options.nocomment && pattern.charAt(0) === '#') {
this.comment = true;
return;
}
if (!pattern) {
this.empty = true;
return;
}
// step 1: figure out negation, etc.
this.parseNegate();
// step 2: expand braces
this.globSet = [...new Set(this.braceExpand())];
if (options.debug) {
this.debug = (...args) => console.error(...args);
}
this.debug(this.pattern, this.globSet);
// step 3: now we have a set, so turn each one into a series of
// path-portion matching patterns.
// These will be regexps, except in the case of "**", which is
// set to the GLOBSTAR object for globstar behavior,
// and will not contain any / characters
//
// First, we preprocess to make the glob pattern sets a bit simpler
// and deduped. There are some perf-killing patterns that can cause
// problems with a glob walk, but we can simplify them down a bit.
const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
this.globParts = this.preprocess(rawGlobParts);
this.debug(this.pattern, this.globParts);
// glob --> regexps
let set = this.globParts.map((s, _, __) => {
if (this.isWindows && this.windowsNoMagicRoot) {
// check if it's a drive or unc path.
const isUNC = s[0] === '' &&
s[1] === '' &&
(s[2] === '?' || !globMagic.test(s[2])) &&
!globMagic.test(s[3]);
const isDrive = /^[a-z]:/i.test(s[0]);
if (isUNC) {
return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
}
else if (isDrive) {
return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
}
}
return s.map(ss => this.parse(ss));
});
this.debug(this.pattern, set);
// filter out everything that didn't compile properly.
this.set = set.filter(s => s.indexOf(false) === -1);
// do not treat the ? in UNC paths as magic
if (this.isWindows) {
for (let i = 0; i < this.set.length; i++) {
const p = this.set[i];
if (p[0] === '' &&
p[1] === '' &&
this.globParts[i][2] === '?' &&
typeof p[3] === 'string' &&
/^[a-z]:$/i.test(p[3])) {
p[2] = '?';
}
}
}
this.debug(this.pattern, this.set);
}
// various transforms to equivalent pattern sets that are
// faster to process in a filesystem walk. The goal is to
// eliminate what we can, and push all ** patterns as far
// to the right as possible, even if it increases the number
// of patterns that we have to process.
preprocess(globParts) {
// if we're not in globstar mode, then turn all ** into *
if (this.options.noglobstar) {
for (let i = 0; i < globParts.length; i++) {
for (let j = 0; j < globParts[i].length; j++) {
if (globParts[i][j] === '**') {
globParts[i][j] = '*';
}
}
}
}
const { optimizationLevel = 1 } = this.options;
if (optimizationLevel >= 2) {
// aggressive optimization for the purpose of fs walking
globParts = this.firstPhasePreProcess(globParts);
globParts = this.secondPhasePreProcess(globParts);
}
else if (optimizationLevel >= 1) {
// just basic optimizations to remove some .. parts
globParts = this.levelOneOptimize(globParts);
}
else {
// just collapse multiple ** portions into one
globParts = this.adjascentGlobstarOptimize(globParts);
}
return globParts;
}
// just get rid of adjacent ** portions
adjascentGlobstarOptimize(globParts) {
return globParts.map(parts => {
let gs = -1;
while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
let i = gs;
while (parts[i + 1] === '**') {
i++;
}
if (i !== gs) {
parts.splice(gs, i - gs);
}
}
return parts;
});
}
// get rid of adjacent ** and resolve .. portions
levelOneOptimize(globParts) {
return globParts.map(parts => {
parts = parts.reduce((set, part) => {
const prev = set[set.length - 1];
if (part === '**' && prev === '**') {
return set;
}
if (part === '..') {
if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
set.pop();
return set;
}
}
set.push(part);
return set;
}, []);
return parts.length === 0 ? [''] : parts;
});
}
levelTwoFileOptimize(parts) {
if (!Array.isArray(parts)) {
parts = this.slashSplit(parts);
}
let didSomething = false;
do {
didSomething = false;
// <pre>/<e>/<rest> -> <pre>/<rest>
if (!this.preserveMultipleSlashes) {
for (let i = 1; i < parts.length - 1; i++) {
const p = parts[i];
// don't squeeze out UNC patterns
if (i === 1 && p === '' && parts[0] === '')
continue;
if (p === '.' || p === '') {
didSomething = true;
parts.splice(i, 1);
i--;
}
}
if (parts[0] === '.' &&
parts.length === 2 &&
(parts[1] === '.' || parts[1] === '')) {
didSomething = true;
parts.pop();
}
}
// <pre>/<p>/../<rest> -> <pre>/<rest>
let dd = 0;
while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
const p = parts[dd - 1];
if (p && p !== '.' && p !== '..' && p !== '**') {
didSomething = true;
parts.splice(dd - 1, 2);
dd -= 2;
}
}
} while (didSomething);
return parts.length === 0 ? [''] : parts;
}
// First phase: single-pattern processing
// <pre> is 1 or more portions
// <rest> is 1 or more portions
// <p> is any portion other than ., .., '', or **
// <e> is . or ''
//
// **/.. is *brutal* for filesystem walking performance, because
// it effectively resets the recursive walk each time it occurs,
// and ** cannot be reduced out by a .. pattern part like a regexp
// or most strings (other than .., ., and '') can be.
//
// <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
// <pre>/<e>/<rest> -> <pre>/<rest>
// <pre>/<p>/../<rest> -> <pre>/<rest>
// **/**/<rest> -> **/<rest>
//
// **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
// this WOULD be allowed if ** did follow symlinks, or * didn't
firstPhasePreProcess(globParts) {
let didSomething = false;
do {
didSomething = false;
// <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
for (let parts of globParts) {
let gs = -1;
while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
let gss = gs;
while (parts[gss + 1] === '**') {
// <pre>/**/**/<rest> -> <pre>/**/<rest>
gss++;
}
// eg, if gs is 2 and gss is 4, that means we have 3 **
// parts, and can remove 2 of them.
if (gss > gs) {
parts.splice(gs + 1, gss - gs);
}
let next = parts[gs + 1];
const p = parts[gs + 2];
const p2 = parts[gs + 3];
if (next !== '..')
continue;
if (!p ||
p === '.' ||
p === '..' ||
!p2 ||
p2 === '.' ||
p2 === '..') {
continue;
}
didSomething = true;
// edit parts in place, and push the new one
parts.splice(gs, 1);
const other = parts.slice(0);
other[gs] = '**';
globParts.push(other);
gs--;
}
// <pre>/<e>/<rest> -> <pre>/<rest>
if (!this.preserveMultipleSlashes) {
for (let i = 1; i < parts.length - 1; i++) {
const p = parts[i];
// don't squeeze out UNC patterns
if (i === 1 && p === '' && parts[0] === '')
continue;
if (p === '.' || p === '') {
didSomething = true;
parts.splice(i, 1);
i--;
}
}
if (parts[0] === '.' &&
parts.length === 2 &&
(parts[1] === '.' || parts[1] === '')) {
didSomething = true;
parts.pop();
}
}
// <pre>/<p>/../<rest> -> <pre>/<rest>
let dd = 0;
while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
const p = parts[dd - 1];
if (p && p !== '.' && p !== '..' && p !== '**') {
didSomething = true;
const needDot = dd === 1 && parts[dd + 1] === '**';
const splin = needDot ? ['.'] : [];
parts.splice(dd - 1, 2, ...splin);
if (parts.length === 0)
parts.push('');
dd -= 2;
}
}
}
} while (didSomething);
return globParts;
}
// second phase: multi-pattern dedupes
// {/*/,//} -> /*/
// {/,/} -> /
// {/**/,/} -> /**/
//
// {/**/,/**//} -> /**/
// ^-- not valid because ** doesn't follow symlinks
secondPhasePreProcess(globParts) {
for (let i = 0; i < globParts.length - 1; i++) {
for (let j = i + 1; j < globParts.length; j++) {
const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
if (matched) {
globParts[i] = [];
globParts[j] = matched;
break;
}
}
}
return globParts.filter(gs => gs.length);
}
partsMatch(a, b, emptyGSMatch = false) {
let ai = 0;
let bi = 0;
let result = [];
let which = '';
while (ai < a.length && bi < b.length) {
if (a[ai] === b[bi]) {
result.push(which === 'b' ? b[bi] : a[ai]);
ai++;
bi++;
}
else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
result.push(a[ai]);
ai++;
}
else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
result.push(b[bi]);
bi++;
}
else if (a[ai] === '*' &&
b[bi] &&
(this.options.dot || !b[bi].startsWith('.')) &&
b[bi] !== '**') {
if (which === 'b')
return false;
which = 'a';
result.push(a[ai]);
ai++;
bi++;
}
else if (b[bi] === '*' &&
a[ai] &&
(this.options.dot || !a[ai].startsWith('.')) &&
a[ai] !== '**') {
if (which === 'a')
return false;
which = 'b';
result.push(b[bi]);
ai++;
bi++;
}
else {
return false;
}
}
// if we fall out of the loop, it means the two are identical
// as long as their lengths match
return a.length === b.length && result;
}
parseNegate() {
if (this.nonegate)
return;
const pattern = this.pattern;
let negate = false;
let negateOffset = 0;
for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
negate = !negate;
negateOffset++;
}
if (negateOffset)
this.pattern = pattern.slice(negateOffset);
this.negate = negate;
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
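// Illustrative sketch (assumes the public minimatch `partial` option, which
// maps onto this method): a directory prefix can match the start of a longer
// pattern, so a filesystem walk can keep descending:
//
//   new Minimatch('/*/b/*/d', { partial: true }).match('/a/b')  // true
//   new Minimatch('/*/b/*/d').match('/a/b')                     // false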
matchOne(file, pattern, partial = false) {
const options = this.options;
// UNC paths like //?/X:/... can match X:/... and vice versa
// Drive letters in absolute drive or unc paths are always compared
// case-insensitively.
if (this.isWindows) {
const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
const fileUNC = !fileDrive &&
file[0] === '' &&
file[1] === '' &&
file[2] === '?' &&
/^[a-z]:$/i.test(file[3]);
const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
const patternUNC = !patternDrive &&
pattern[0] === '' &&
pattern[1] === '' &&
pattern[2] === '?' &&
typeof pattern[3] === 'string' &&
/^[a-z]:$/i.test(pattern[3]);
const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
if (typeof fdi === 'number' && typeof pdi === 'number') {
const [fd, pd] = [file[fdi], pattern[pdi]];
if (fd.toLowerCase() === pd.toLowerCase()) {
pattern[pdi] = fd;
if (pdi > fdi) {
pattern = pattern.slice(pdi);
}
else if (fdi > pdi) {
file = file.slice(fdi);
}
}
}
}
// resolve and reduce . and .. portions in the file as well.
// don't need to do the second phase, because it's only one string[]
const { optimizationLevel = 1 } = this.options;
if (optimizationLevel >= 2) {
file = this.levelTwoFileOptimize(file);
}
this.debug('matchOne', this, { file, pattern });
this.debug('matchOne', file.length, pattern.length);
for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
this.debug('matchOne loop');
var p = pattern[pi];
var f = file[fi];
this.debug(pattern, p, f);
// should be impossible.
// some invalid regexp stuff in the set.
/* c8 ignore start */
if (p === false) {
return false;
}
/* c8 ignore stop */
if (p === GLOBSTAR$2) {
this.debug('GLOBSTAR', [pattern, p, f]);
// "**"
// a/**/b/**/c would match the following:
// a/b/x/y/z/c
// a/x/y/z/b/c
// a/b/x/b/x/c
// a/b/c
// To do this, take the rest of the pattern after
// the **, and see if it would match the file remainder.
// If so, return success.
// If not, the ** "swallows" a segment, and try again.
// This is recursively awful.
//
// a/**/b/**/c matching a/b/x/y/z/c
// - a matches a
// - doublestar
// - matchOne(b/x/y/z/c, b/**/c)
// - b matches b
// - doublestar
// - matchOne(x/y/z/c, c) -> no
// - matchOne(y/z/c, c) -> no
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi;
var pr = pi + 1;
if (pr === pl) {
this.debug('** at the end');
// a ** at the end will just swallow the rest.
// We have found a match.
// however, it will not swallow /.x, unless
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
for (; fi < fl; fi++) {
if (file[fi] === '.' ||
file[fi] === '..' ||
(!options.dot && file[fi].charAt(0) === '.'))
return false;
}
return true;
}
// ok, let's see if we can swallow whatever we can.
while (fr < fl) {
var swallowee = file[fr];
this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
// XXX remove this slice. Just pass the start index.
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
this.debug('globstar found match!', fr, fl, swallowee);
// found a match.
return true;
}
else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
if (swallowee === '.' ||
swallowee === '..' ||
(!options.dot && swallowee.charAt(0) === '.')) {
this.debug('dot detected!', file, fr, pattern, pr);
break;
}
// ** swallows a segment, and continue.
this.debug('globstar swallow a segment, and continue');
fr++;
}
}
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
/* c8 ignore start */
if (partial) {
// ran out of file
this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
if (fr === fl) {
return true;
}
}
/* c8 ignore stop */
return false;
}
// something other than **
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
let hit;
if (typeof p === 'string') {
hit = f === p;
this.debug('string match', p, f, hit);
}
else {
hit = p.test(f);
this.debug('pattern match', p, f, hit);
}
if (!hit)
return false;
}
// Note: ending in / means that we'll get a final ""
// at the end of the pattern. This can only match a
// corresponding "" at the end of the file.
// If the file ends in /, then it can only match
// a pattern that ends in /, unless the pattern just
// doesn't have any more for it. But, a/b/ should *not*
// match "a/b/*", even though "" matches against the
// [^/]*? pattern, except in partial mode, where it might
// simply not be reached yet.
// However, a/b/ should still satisfy a/*
// now either we fell off the end of the pattern, or we're done.
if (fi === fl && pi === pl) {
// ran out of pattern and filename at the same time.
// an exact hit!
return true;
}
else if (fi === fl) {
// ran out of file, but still had pattern left.
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial;
}
else if (pi === pl) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
return fi === fl - 1 && file[fi] === '';
/* c8 ignore start */
}
else {
// should be unreachable.
throw new Error('wtf?');
}
/* c8 ignore stop */
}
braceExpand() {
return braceExpand(this.pattern, this.options);
}
parse(pattern) {
assertValidPattern(pattern);
const options = this.options;
// shortcuts
if (pattern === '**')
return GLOBSTAR$2;
if (pattern === '')
return '';
// far and away, the most common glob pattern parts are
// *, *.*, and *. Add a fast check method for those.
let m;
let fastTest = null;
if ((m = pattern.match(starRE))) {
fastTest = options.dot ? starTestDot : starTest;
}
else if ((m = pattern.match(starDotExtRE))) {
fastTest = (options.nocase
? options.dot
? starDotExtTestNocaseDot
: starDotExtTestNocase
: options.dot
? starDotExtTestDot
: starDotExtTest)(m[1]);
}
else if ((m = pattern.match(qmarksRE))) {
fastTest = (options.nocase
? options.dot
? qmarksTestNocaseDot
: qmarksTestNocase
: options.dot
? qmarksTestDot
: qmarksTest)(m);
}
else if ((m = pattern.match(starDotStarRE))) {
fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
}
else if ((m = pattern.match(dotStarRE))) {
fastTest = dotStarTest;
}
const re = AST.fromGlob(pattern, this.options).toMMPattern();
if (fastTest && typeof re === 'object') {
// Avoids overriding in frozen environments
Reflect.defineProperty(re, 'test', { value: fastTest });
}
return re;
}
makeRe() {
if (this.regexp || this.regexp === false)
return this.regexp;
// at this point, this.set is a 2d array of partial
// pattern strings, or "**".
//
// It's better to use .match(). This function shouldn't
// be used, really, but it's pretty convenient sometimes,
// when you just want to work with a regex.
const set = this.set;
if (!set.length) {
this.regexp = false;
return this.regexp;
}
const options = this.options;
const twoStar = options.noglobstar
? star
: options.dot
? twoStarDot
: twoStarNoDot;
const flags = new Set(options.nocase ? ['i'] : []);
// regexpify non-globstar patterns
// if ** is only item, then we just do one twoStar
// if ** is first, and there are more, prepend (\/|twoStar\/)? to next
// if ** is last, append (\/twoStar|) to previous
// if ** is in the middle, append (\/|\/twoStar\/) to previous
// then filter out GLOBSTAR symbols
let re = set
.map(pattern => {
const pp = pattern.map(p => {
if (p instanceof RegExp) {
for (const f of p.flags.split(''))
flags.add(f);
}
return typeof p === 'string'
? regExpEscape(p)
: p === GLOBSTAR$2
? GLOBSTAR$2
: p._src;
});
pp.forEach((p, i) => {
const next = pp[i + 1];
const prev = pp[i - 1];
if (p !== GLOBSTAR$2 || prev === GLOBSTAR$2) {
return;
}
if (prev === undefined) {
if (next !== undefined && next !== GLOBSTAR$2) {
pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
}
else {
pp[i] = twoStar;
}
}
else if (next === undefined) {
pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
}
else if (next !== GLOBSTAR$2) {
pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
pp[i + 1] = GLOBSTAR$2;
}
});
return pp.filter(p => p !== GLOBSTAR$2).join('/');
})
.join('|');
// need to wrap in parens if we had more than one thing with |,
// otherwise only the first will be anchored to ^ and the last to $
const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
// must match entire pattern
// ending in a * or ** will make it less strict.
re = '^' + open + re + close + '$';
// can match anything, as long as it's not this.
if (this.negate)
re = '^(?!' + re + ').+$';
try {
this.regexp = new RegExp(re, [...flags].join(''));
/* c8 ignore start */
}
catch (ex) {
// should be impossible
this.regexp = false;
}
/* c8 ignore stop */
return this.regexp;
}
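// Illustrative usage sketch (standard minimatch API; the exact regexp source
// is an implementation detail):
//
//   const re = new Minimatch('src/**/*.js').makeRe();
//   re && re.test('src/lib/util.js');   // expected to be true
//   re && re.test('src/lib/util.ts');   // expected to be false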
slashSplit(p) {
// if p starts with // on windows, we preserve that
// so that UNC paths aren't broken. Otherwise, any number of
// / characters are coalesced into one, unless
// preserveMultipleSlashes is set to true.
if (this.preserveMultipleSlashes) {
return p.split('/');
}
else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
// add an extra '' for the one we lose
return ['', ...p.split(/\/+/)];
}
else {
return p.split(/\/+/);
}
}
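// Illustrative sketch: on Windows, slashSplit('//host/share//file') keeps the
// UNC root by yielding ['', '', 'host', 'share', 'file'], whereas a plain
// 'a//b' collapses to ['a', 'b'] unless preserveMultipleSlashes is set.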
match(f, partial = this.partial) {
this.debug('match', f, this.pattern);
// short-circuit in the case of busted things.
// comments, etc.
if (this.comment) {
return false;
}
if (this.empty) {
return f === '';
}
if (f === '/' && partial) {
return true;
}
const options = this.options;
// windows: need to use /, not \
if (this.isWindows) {
f = f.split('\\').join('/');
}
// treat the test path as a set of pathparts.
const ff = this.slashSplit(f);
this.debug(this.pattern, 'split', ff);
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
// match means that we have failed.
// Either way, return on the first hit.
const set = this.set;
this.debug(this.pattern, 'set', set);
// Find the basename of the path by looking for the last non-empty segment
let filename = ff[ff.length - 1];
if (!filename) {
for (let i = ff.length - 2; !filename && i >= 0; i--) {
filename = ff[i];
}
}
for (let i = 0; i < set.length; i++) {
const pattern = set[i];
let file = ff;
if (options.matchBase && pattern.length === 1) {
file = [filename];
}
const hit = this.matchOne(file, pattern, partial);
if (hit) {
if (options.flipNegate) {
return true;
}
return !this.negate;
}
}
// didn't get any hits. this is success if it's a negative
// pattern, failure otherwise.
if (options.flipNegate) {
return false;
}
return this.negate;
}
static defaults(def) {
return minimatch.defaults(def).Minimatch;
}
}
/* c8 ignore stop */
minimatch.AST = AST;
minimatch.Minimatch = Minimatch;
minimatch.escape = escape$2;
minimatch.unescape = unescape$1;
/**
* @module LRUCache
*/
const perf = typeof performance === 'object' &&
performance &&
typeof performance.now === 'function'
? performance
: Date;
const warned$1 = new Set();
/* c8 ignore start */
const PROCESS = (typeof process === 'object' && !!process ? process : {});
/* c8 ignore start */
const emitWarning = (msg, type, code, fn) => {
typeof PROCESS.emitWarning === 'function'
? PROCESS.emitWarning(msg, type, code, fn)
: console.error(`[${code}] ${type}: ${msg}`);
};
let AC = globalThis.AbortController;
let AS = globalThis.AbortSignal;
/* c8 ignore start */
if (typeof AC === 'undefined') {
//@ts-ignore
AS = class AbortSignal {
onabort;
_onabort = [];
reason;
aborted = false;
addEventListener(_, fn) {
this._onabort.push(fn);
}
};
//@ts-ignore
AC = class AbortController {
constructor() {
warnACPolyfill();
}
signal = new AS();
abort(reason) {
if (this.signal.aborted)
return;
//@ts-ignore
this.signal.reason = reason;
//@ts-ignore
this.signal.aborted = true;
//@ts-ignore
for (const fn of this.signal._onabort) {
fn(reason);
}
this.signal.onabort?.(reason);
}
};
let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
const warnACPolyfill = () => {
if (!printACPolyfillWarning)
return;
printACPolyfillWarning = false;
emitWarning('AbortController is not defined. If using lru-cache in ' +
'node 14, load an AbortController polyfill from the ' +
'`node-abort-controller` package. A minimal polyfill is ' +
'provided for use by LRUCache.fetch(), but it should not be ' +
'relied upon in other contexts (eg, passing it to other APIs that ' +
'use AbortController/AbortSignal might have undesirable effects). ' +
'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
};
}
/* c8 ignore stop */
const shouldWarn = (code) => !warned$1.has(code);
const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
/* c8 ignore start */
// This is a little bit ridiculous, tbh.
// The maximum array length is 2^32-1 or thereabouts on most JS impls.
// And well before that point, you're caching the entire world, I mean,
// that's ~32GB of just integers for the next/prev links, plus whatever
// else to hold that many keys and values. Just filling the memory with
// zeroes at init time is brutal when you get that big.
// But why not be complete?
// Maybe in the future, these limits will have expanded.
const getUintArray = (max) => !isPosInt(max)
? null
: max <= Math.pow(2, 8)
? Uint8Array
: max <= Math.pow(2, 16)
? Uint16Array
: max <= Math.pow(2, 32)
? Uint32Array
: max <= Number.MAX_SAFE_INTEGER
? ZeroArray
: null;
/* c8 ignore stop */
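// Illustrative sketch: getUintArray(200) picks Uint8Array, getUintArray(70000)
// picks Uint32Array, and a non-integer such as getUintArray(1.5) yields null.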
class ZeroArray extends Array {
constructor(size) {
super(size);
this.fill(0);
}
}
class Stack {
heap;
length;
// private constructor
static #constructing = false;
static create(max) {
const HeapCls = getUintArray(max);
if (!HeapCls)
return [];
Stack.#constructing = true;
const s = new Stack(max, HeapCls);
Stack.#constructing = false;
return s;
}
constructor(max, HeapCls) {
/* c8 ignore start */
if (!Stack.#constructing) {
throw new TypeError('instantiate Stack using Stack.create(n)');
}
/* c8 ignore stop */
this.heap = new HeapCls(max);
this.length = 0;
}
push(n) {
this.heap[this.length++] = n;
}
pop() {
return this.heap[--this.length];
}
}
/**
* Default export, the thing you're using this module to get.
*
* The `K` and `V` types define the key and value types, respectively. The
* optional `FC` type defines the type of the `context` object passed to
* `cache.fetch()` and `cache.memo()`.
*
* Keys and values **must not** be `null` or `undefined`.
*
* All properties from the options object (with the exception of `max`,
* `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
* added as normal public members. (The listed options are read-only getters.)
*
* Changing any of these will alter the defaults for subsequent method calls.
*/
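// A minimal usage sketch (standard lru-cache API; not part of this bundle):
//
//   const cache = new LRUCache({ max: 500, ttl: 60_000 });
//   cache.set('a', 1);
//   cache.get('a');        // 1, and marks 'a' as most recently used
//   cache.has('missing');  // false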
class LRUCache {
// options that cannot be changed without disaster
#max;
#maxSize;
#dispose;
#disposeAfter;
#fetchMethod;
#memoMethod;
/**
* {@link LRUCache.OptionsBase.ttl}
*/
ttl;
/**
* {@link LRUCache.OptionsBase.ttlResolution}
*/
ttlResolution;
/**
* {@link LRUCache.OptionsBase.ttlAutopurge}
*/
ttlAutopurge;
/**
* {@link LRUCache.OptionsBase.updateAgeOnGet}
*/
updateAgeOnGet;
/**
* {@link LRUCache.OptionsBase.updateAgeOnHas}
*/
updateAgeOnHas;
/**
* {@link LRUCache.OptionsBase.allowStale}
*/
allowStale;
/**
* {@link LRUCache.OptionsBase.noDisposeOnSet}
*/
noDisposeOnSet;
/**
* {@link LRUCache.OptionsBase.noUpdateTTL}
*/
noUpdateTTL;
/**
* {@link LRUCache.OptionsBase.maxEntrySize}
*/
maxEntrySize;
/**
* {@link LRUCache.OptionsBase.sizeCalculation}
*/
sizeCalculation;
/**
* {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
*/
noDeleteOnFetchRejection;
/**
* {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
*/
noDeleteOnStaleGet;
/**
* {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
*/
allowStaleOnFetchAbort;
/**
* {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
*/
allowStaleOnFetchRejection;
/**
* {@link LRUCache.OptionsBase.ignoreFetchAbort}
*/
ignoreFetchAbort;
// computed properties
#size;
#calculatedSize;
#keyMap;
#keyList;
#valList;
#next;
#prev;
#head;
#tail;
#free;
#disposed;
#sizes;
#starts;
#ttls;
#hasDispose;
#hasFetchMethod;
#hasDisposeAfter;
/**
* Do not call this method unless you need to inspect the
* inner workings of the cache. If anything returned by this
* object is modified in any way, strange breakage may occur.
*
* These fields are private for a reason!
*
* @internal
*/
static unsafeExposeInternals(c) {
return {
// properties
starts: c.#starts,
ttls: c.#ttls,
sizes: c.#sizes,
keyMap: c.#keyMap,
keyList: c.#keyList,
valList: c.#valList,
next: c.#next,
prev: c.#prev,
get head() {
return c.#head;
},
get tail() {
return c.#tail;
},
free: c.#free,
// methods
isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
moveToTail: (index) => c.#moveToTail(index),
indexes: (options) => c.#indexes(options),
rindexes: (options) => c.#rindexes(options),
isStale: (index) => c.#isStale(index),
};
}
// Protected read-only members
/**
* {@link LRUCache.OptionsBase.max} (read-only)
*/
get max() {
return this.#max;
}
/**
* {@link LRUCache.OptionsBase.maxSize} (read-only)
*/
get maxSize() {
return this.#maxSize;
}
/**
* The total computed size of items in the cache (read-only)
*/
get calculatedSize() {
return this.#calculatedSize;
}
/**
* The number of items stored in the cache (read-only)
*/
get size() {
return this.#size;
}
/**
* {@link LRUCache.OptionsBase.fetchMethod} (read-only)
*/
get fetchMethod() {
return this.#fetchMethod;
}
get memoMethod() {
return this.#memoMethod;
}
/**
* {@link LRUCache.OptionsBase.dispose} (read-only)
*/
get dispose() {
return this.#dispose;
}
/**
* {@link LRUCache.OptionsBase.disposeAfter} (read-only)
*/
get disposeAfter() {
return this.#disposeAfter;
}
constructor(options) {
const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
if (max !== 0 && !isPosInt(max)) {
throw new TypeError('max option must be a nonnegative integer');
}
const UintArray = max ? getUintArray(max) : Array;
if (!UintArray) {
throw new Error('invalid max value: ' + max);
}
this.#max = max;
this.#maxSize = maxSize;
this.maxEntrySize = maxEntrySize || this.#maxSize;
this.sizeCalculation = sizeCalculation;
if (this.sizeCalculation) {
if (!this.#maxSize && !this.maxEntrySize) {
throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
}
if (typeof this.sizeCalculation !== 'function') {
throw new TypeError('sizeCalculation set to non-function');
}
}
if (memoMethod !== undefined &&
typeof memoMethod !== 'function') {
throw new TypeError('memoMethod must be a function if defined');
}
this.#memoMethod = memoMethod;
if (fetchMethod !== undefined &&
typeof fetchMethod !== 'function') {
throw new TypeError('fetchMethod must be a function if specified');
}
this.#fetchMethod = fetchMethod;
this.#hasFetchMethod = !!fetchMethod;
this.#keyMap = new Map();
this.#keyList = new Array(max).fill(undefined);
this.#valList = new Array(max).fill(undefined);
this.#next = new UintArray(max);
this.#prev = new UintArray(max);
this.#head = 0;
this.#tail = 0;
this.#free = Stack.create(max);
this.#size = 0;
this.#calculatedSize = 0;
if (typeof dispose === 'function') {
this.#dispose = dispose;
}
if (typeof disposeAfter === 'function') {
this.#disposeAfter = disposeAfter;
this.#disposed = [];
}
else {
this.#disposeAfter = undefined;
this.#disposed = undefined;
}
this.#hasDispose = !!this.#dispose;
this.#hasDisposeAfter = !!this.#disposeAfter;
this.noDisposeOnSet = !!noDisposeOnSet;
this.noUpdateTTL = !!noUpdateTTL;
this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
this.ignoreFetchAbort = !!ignoreFetchAbort;
// NB: maxEntrySize is set to maxSize if it's set
if (this.maxEntrySize !== 0) {
if (this.#maxSize !== 0) {
if (!isPosInt(this.#maxSize)) {
throw new TypeError('maxSize must be a positive integer if specified');
}
}
if (!isPosInt(this.maxEntrySize)) {
throw new TypeError('maxEntrySize must be a positive integer if specified');
}
this.#initializeSizeTracking();
}
this.allowStale = !!allowStale;
this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
this.updateAgeOnGet = !!updateAgeOnGet;
this.updateAgeOnHas = !!updateAgeOnHas;
this.ttlResolution =
isPosInt(ttlResolution) || ttlResolution === 0
? ttlResolution
: 1;
this.ttlAutopurge = !!ttlAutopurge;
this.ttl = ttl || 0;
if (this.ttl) {
if (!isPosInt(this.ttl)) {
throw new TypeError('ttl must be a positive integer if specified');
}
this.#initializeTTLTracking();
}
// do not allow completely unbounded caches
if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
throw new TypeError('At least one of max, maxSize, or ttl is required');
}
if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
const code = 'LRU_CACHE_UNBOUNDED';
if (shouldWarn(code)) {
warned$1.add(code);
const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
'result in unbounded memory consumption.';
emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
}
}
}
/**
* Return the number of ms left in the item's TTL. If item is not in cache,
* returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
*/
getRemainingTTL(key) {
return this.#keyMap.has(key) ? Infinity : 0;
}
#initializeTTLTracking() {
const ttls = new ZeroArray(this.#max);
const starts = new ZeroArray(this.#max);
this.#ttls = ttls;
this.#starts = starts;
this.#setItemTTL = (index, ttl, start = perf.now()) => {
starts[index] = ttl !== 0 ? start : 0;
ttls[index] = ttl;
if (ttl !== 0 && this.ttlAutopurge) {
const t = setTimeout(() => {
if (this.#isStale(index)) {
this.#delete(this.#keyList[index], 'expire');
}
}, ttl + 1);
// unref() not supported on all platforms
/* c8 ignore start */
if (t.unref) {
t.unref();
}
/* c8 ignore stop */
}
};
this.#updateItemAge = index => {
starts[index] = ttls[index] !== 0 ? perf.now() : 0;
};
this.#statusTTL = (status, index) => {
if (ttls[index]) {
const ttl = ttls[index];
const start = starts[index];
/* c8 ignore next */
if (!ttl || !start)
return;
status.ttl = ttl;
status.start = start;
status.now = cachedNow || getNow();
const age = status.now - start;
status.remainingTTL = ttl - age;
}
};
// debounce calls to perf.now() to 1s so we're not hitting
// that costly call repeatedly.
let cachedNow = 0;
const getNow = () => {
const n = perf.now();
if (this.ttlResolution > 0) {
cachedNow = n;
const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
// not available on all platforms
/* c8 ignore start */
if (t.unref) {
t.unref();
}
/* c8 ignore stop */
}
return n;
};
this.getRemainingTTL = key => {
const index = this.#keyMap.get(key);
if (index === undefined) {
return 0;
}
const ttl = ttls[index];
const start = starts[index];
if (!ttl || !start) {
return Infinity;
}
const age = (cachedNow || getNow()) - start;
return ttl - age;
};
this.#isStale = index => {
const s = starts[index];
const t = ttls[index];
return !!t && !!s && (cachedNow || getNow()) - s > t;
};
}
// conditionally set private methods related to TTL
#updateItemAge = () => { };
#statusTTL = () => { };
#setItemTTL = () => { };
/* c8 ignore stop */
#isStale = () => false;
#initializeSizeTracking() {
const sizes = new ZeroArray(this.#max);
this.#calculatedSize = 0;
this.#sizes = sizes;
this.#removeItemSize = index => {
this.#calculatedSize -= sizes[index];
sizes[index] = 0;
};
this.#requireSize = (k, v, size, sizeCalculation) => {
// provisionally accept background fetches.
// actual value size will be checked when they return.
if (this.#isBackgroundFetch(v)) {
return 0;
}
if (!isPosInt(size)) {
if (sizeCalculation) {
if (typeof sizeCalculation !== 'function') {
throw new TypeError('sizeCalculation must be a function');
}
size = sizeCalculation(v, k);
if (!isPosInt(size)) {
throw new TypeError('sizeCalculation return invalid (expect positive integer)');
}
}
else {
throw new TypeError('invalid size value (must be positive integer). ' +
'When maxSize or maxEntrySize is used, sizeCalculation ' +
'or size must be set.');
}
}
return size;
};
this.#addItemSize = (index, size, status) => {
sizes[index] = size;
if (this.#maxSize) {
const maxSize = this.#maxSize - sizes[index];
while (this.#calculatedSize > maxSize) {
this.#evict(true);
}
}
this.#calculatedSize += sizes[index];
if (status) {
status.entrySize = size;
status.totalCalculatedSize = this.#calculatedSize;
}
};
}
#removeItemSize = _i => { };
#addItemSize = (_i, _s, _st) => { };
#requireSize = (_k, _v, size, sizeCalculation) => {
if (size || sizeCalculation) {
throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
}
return 0;
};
*#indexes({ allowStale = this.allowStale } = {}) {
if (this.#size) {
for (let i = this.#tail; true;) {
if (!this.#isValidIndex(i)) {
break;
}
if (allowStale || !this.#isStale(i)) {
yield i;
}
if (i === this.#head) {
break;
}
else {
i = this.#prev[i];
}
}
}
}
*#rindexes({ allowStale = this.allowStale } = {}) {
if (this.#size) {
for (let i = this.#head; true;) {
if (!this.#isValidIndex(i)) {
break;
}
if (allowStale || !this.#isStale(i)) {
yield i;
}
if (i === this.#tail) {
break;
}
else {
i = this.#next[i];
}
}
}
}
#isValidIndex(index) {
return (index !== undefined &&
this.#keyMap.get(this.#keyList[index]) === index);
}
/**
* Return a generator yielding `[key, value]` pairs,
* in order from most recently used to least recently used.
*/
*entries() {
for (const i of this.#indexes()) {
if (this.#valList[i] !== undefined &&
this.#keyList[i] !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield [this.#keyList[i], this.#valList[i]];
}
}
}
/**
* Inverse order version of {@link LRUCache.entries}
*
* Return a generator yielding `[key, value]` pairs,
* in order from least recently used to most recently used.
*/
*rentries() {
for (const i of this.#rindexes()) {
if (this.#valList[i] !== undefined &&
this.#keyList[i] !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield [this.#keyList[i], this.#valList[i]];
}
}
}
/**
* Return a generator yielding the keys in the cache,
* in order from most recently used to least recently used.
*/
*keys() {
for (const i of this.#indexes()) {
const k = this.#keyList[i];
if (k !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield k;
}
}
}
/**
* Inverse order version of {@link LRUCache.keys}
*
* Return a generator yielding the keys in the cache,
* in order from least recently used to most recently used.
*/
*rkeys() {
for (const i of this.#rindexes()) {
const k = this.#keyList[i];
if (k !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield k;
}
}
}
/**
* Return a generator yielding the values in the cache,
* in order from most recently used to least recently used.
*/
*values() {
for (const i of this.#indexes()) {
const v = this.#valList[i];
if (v !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield this.#valList[i];
}
}
}
/**
* Inverse order version of {@link LRUCache.values}
*
* Return a generator yielding the values in the cache,
* in order from least recently used to most recently used.
*/
*rvalues() {
for (const i of this.#rindexes()) {
const v = this.#valList[i];
if (v !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield this.#valList[i];
}
}
}
/**
* Iterating over the cache itself yields the same results as
* {@link LRUCache.entries}
*/
[Symbol.iterator]() {
return this.entries();
}
/**
* A String value that is used in the creation of the default string
* description of an object. Called by the built-in method
* `Object.prototype.toString`.
*/
[Symbol.toStringTag] = 'LRUCache';
/**
* Find a value for which the supplied fn method returns a truthy value,
* similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
*/
find(fn, getOptions = {}) {
for (const i of this.#indexes()) {
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined)
continue;
if (fn(value, this.#keyList[i], this)) {
return this.get(this.#keyList[i], getOptions);
}
}
}
/**
* Call the supplied function on each item in the cache, in order from most
* recently used to least recently used.
*
* `fn` is called as `fn(value, key, cache)`.
*
* If `thisp` is provided, function will be called in the `this`-context of
* the provided object, or the cache if no `thisp` object is provided.
*
* Does not update age or recency of use, or iterate over stale values.
*/
forEach(fn, thisp = this) {
for (const i of this.#indexes()) {
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined)
continue;
fn.call(thisp, value, this.#keyList[i], this);
}
}
/**
* The same as {@link LRUCache.forEach} but items are iterated over in
* reverse order. (ie, less recently used items are iterated over first.)
*/
rforEach(fn, thisp = this) {
for (const i of this.#rindexes()) {
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined)
continue;
fn.call(thisp, value, this.#keyList[i], this);
}
}
/**
* Delete any stale entries. Returns true if anything was removed,
* false otherwise.
*/
purgeStale() {
let deleted = false;
for (const i of this.#rindexes({ allowStale: true })) {
if (this.#isStale(i)) {
this.#delete(this.#keyList[i], 'expire');
deleted = true;
}
}
return deleted;
}
/**
* Get the extended info about a given entry, to get its value, size, and
* TTL info simultaneously. Returns `undefined` if the key is not present.
*
* Unlike {@link LRUCache#dump}, which is designed to be portable and survive
* serialization, the `start` value is always the current timestamp, and the
* `ttl` is a calculated remaining time to live (negative if expired).
*
* Always returns stale values, if their info is found in the cache, so be
* sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
* if relevant.
*/
info(key) {
const i = this.#keyMap.get(key);
if (i === undefined)
return undefined;
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined)
return undefined;
const entry = { value };
if (this.#ttls && this.#starts) {
const ttl = this.#ttls[i];
const start = this.#starts[i];
if (ttl && start) {
const remain = ttl - (perf.now() - start);
entry.ttl = remain;
entry.start = Date.now();
}
}
if (this.#sizes) {
entry.size = this.#sizes[i];
}
return entry;
}
/**
* Return an array of [key, {@link LRUCache.Entry}] tuples which can be
* passed to {@link LRUCache#load}.
*
* The `start` fields are calculated relative to a portable `Date.now()`
* timestamp, even if `performance.now()` is available.
*
* Stale entries are always included in the `dump`, even if
* {@link LRUCache.OptionsBase.allowStale} is false.
*
* Note: this returns an actual array, not a generator, so it can be more
* easily passed around.
*/
dump() {
const arr = [];
for (const i of this.#indexes({ allowStale: true })) {
const key = this.#keyList[i];
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined || key === undefined)
continue;
const entry = { value };
if (this.#ttls && this.#starts) {
entry.ttl = this.#ttls[i];
// always dump the start relative to a portable timestamp
// it's ok for this to be a bit slow, it's a rare operation.
const age = perf.now() - this.#starts[i];
entry.start = Math.floor(Date.now() - age);
}
if (this.#sizes) {
entry.size = this.#sizes[i];
}
arr.unshift([key, entry]);
}
return arr;
}
/**
* Reset the cache and load in the items in entries in the order listed.
*
* The shape of the resulting cache may be different if the same options are
* not used in both caches.
*
* The `start` fields are assumed to be calculated relative to a portable
* `Date.now()` timestamp, even if `performance.now()` is available.
*/
load(arr) {
this.clear();
for (const [key, entry] of arr) {
if (entry.start) {
// entry.start is a portable timestamp, but we may be using
// node's performance.now(), so calculate the offset, so that
// we get the intended remaining TTL, no matter how long it's
// been on ice.
//
// it's ok for this to be a bit slow, it's a rare operation.
const age = Date.now() - entry.start;
entry.start = perf.now() - age;
}
this.set(key, entry.value, entry);
}
}
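// Illustrative round-trip sketch (assumes both caches are built with
// compatible options):
//
//   const snapshot = cache.dump();        // [[key, { value, ttl?, start?, size? }], ...]
//   const restored = new LRUCache({ max: 500 });
//   restored.load(snapshot);              // entries re-inserted, TTLs re-based to now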
/**
* Add a value to the cache.
*
* Note: if `undefined` is specified as a value, this is an alias for
* {@link LRUCache#delete}
*
* Fields on the {@link LRUCache.SetOptions} options param will override
* their corresponding values in the constructor options for the scope
* of this single `set()` operation.
*
* If `start` is provided, then that will set the effective start
* time for the TTL calculation. Note that this must be a previous
* value of `performance.now()` if supported, or a previous value of
* `Date.now()` if not.
*
* Options object may also include `size`, which will prevent
* calling the `sizeCalculation` function and just use the specified
* number if it is a positive integer, and `noDisposeOnSet` which
* will prevent calling a `dispose` function in the case of
* overwrites.
*
* If the `size` (or return value of `sizeCalculation`) for a given
* entry is greater than `maxEntrySize`, then the item will not be
* added to the cache.
*
* Will update the recency of the entry.
*
* If the value is `undefined`, then this is an alias for
* `cache.delete(key)`. `undefined` is never stored in the cache.
*/
set(k, v, setOptions = {}) {
if (v === undefined) {
this.delete(k);
return this;
}
const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
let { noUpdateTTL = this.noUpdateTTL } = setOptions;
const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
// if the item doesn't fit, don't do anything
// NB: maxEntrySize set to maxSize by default
if (this.maxEntrySize && size > this.maxEntrySize) {
if (status) {
status.set = 'miss';
status.maxEntrySizeExceeded = true;
}
// have to delete, in case something is there already.
this.#delete(k, 'set');
return this;
}
let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
if (index === undefined) {
// addition
index = (this.#size === 0
? this.#tail
: this.#free.length !== 0
? this.#free.pop()
: this.#size === this.#max
? this.#evict(false)
: this.#size);
this.#keyList[index] = k;
this.#valList[index] = v;
this.#keyMap.set(k, index);
this.#next[this.#tail] = index;
this.#prev[index] = this.#tail;
this.#tail = index;
this.#size++;
this.#addItemSize(index, size, status);
if (status)
status.set = 'add';
noUpdateTTL = false;
}
else {
// update
this.#moveToTail(index);
const oldVal = this.#valList[index];
if (v !== oldVal) {
if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
oldVal.__abortController.abort(new Error('replaced'));
const { __staleWhileFetching: s } = oldVal;
if (s !== undefined && !noDisposeOnSet) {
if (this.#hasDispose) {
this.#dispose?.(s, k, 'set');
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([s, k, 'set']);
}
}
}
else if (!noDisposeOnSet) {
if (this.#hasDispose) {
this.#dispose?.(oldVal, k, 'set');
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([oldVal, k, 'set']);
}
}
this.#removeItemSize(index);
this.#addItemSize(index, size, status);
this.#valList[index] = v;
if (status) {
status.set = 'replace';
const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
? oldVal.__staleWhileFetching
: oldVal;
if (oldValue !== undefined)
status.oldValue = oldValue;
}
}
else if (status) {
status.set = 'update';
}
}
if (ttl !== 0 && !this.#ttls) {
this.#initializeTTLTracking();
}
if (this.#ttls) {
if (!noUpdateTTL) {
this.#setItemTTL(index, ttl, start);
}
if (status)
this.#statusTTL(status, index);
}
if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
const dt = this.#disposed;
let task;
while ((task = dt?.shift())) {
this.#disposeAfter?.(...task);
}
}
return this;
}
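// Illustrative sketch of the per-call overrides described above:
//
//   cache.set('k', value, { ttl: 5_000, size: 10 });  // overrides apply to this entry only
//   cache.set('k', undefined);                        // same as cache.delete('k')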
/**
* Evict the least recently used item, returning its value or
* `undefined` if cache is empty.
*/
pop() {
try {
while (this.#size) {
const val = this.#valList[this.#head];
this.#evict(true);
if (this.#isBackgroundFetch(val)) {
if (val.__staleWhileFetching) {
return val.__staleWhileFetching;
}
}
else if (val !== undefined) {
return val;
}
}
}
finally {
if (this.#hasDisposeAfter && this.#disposed) {
const dt = this.#disposed;
let task;
while ((task = dt?.shift())) {
this.#disposeAfter?.(...task);
}
}
}
}
#evict(free) {
const head = this.#head;
const k = this.#keyList[head];
const v = this.#valList[head];
if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
v.__abortController.abort(new Error('evicted'));
}
else if (this.#hasDispose || this.#hasDisposeAfter) {
if (this.#hasDispose) {
this.#dispose?.(v, k, 'evict');
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([v, k, 'evict']);
}
}
this.#removeItemSize(head);
// if we aren't about to use the index, then null these out
if (free) {
this.#keyList[head] = undefined;
this.#valList[head] = undefined;
this.#free.push(head);
}
if (this.#size === 1) {
this.#head = this.#tail = 0;
this.#free.length = 0;
}
else {
this.#head = this.#next[head];
}
this.#keyMap.delete(k);
this.#size--;
return head;
}
/**
* Check if a key is in the cache, without updating the recency of use.
* Age is updated only if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
* to `true` in either the options or the constructor.
*
* Will return `false` if the item is stale, even though it is technically in
* the cache. The difference can be determined (if it matters) by using a
* `status` argument, and inspecting the `has` field.
*/
has(k, hasOptions = {}) {
const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
const index = this.#keyMap.get(k);
if (index !== undefined) {
const v = this.#valList[index];
if (this.#isBackgroundFetch(v) &&
v.__staleWhileFetching === undefined) {
return false;
}
if (!this.#isStale(index)) {
if (updateAgeOnHas) {
this.#updateItemAge(index);
}
if (status) {
status.has = 'hit';
this.#statusTTL(status, index);
}
return true;
}
else if (status) {
status.has = 'stale';
this.#statusTTL(status, index);
}
}
else if (status) {
status.has = 'miss';
}
return false;
}
/**
* Like {@link LRUCache#get} but doesn't update recency or delete stale
* items.
*
* Returns `undefined` if the item is stale, unless
* {@link LRUCache.OptionsBase.allowStale} is set.
*/
peek(k, peekOptions = {}) {
const { allowStale = this.allowStale } = peekOptions;
const index = this.#keyMap.get(k);
if (index === undefined ||
(!allowStale && this.#isStale(index))) {
return;
}
const v = this.#valList[index];
// either stale and allowed, or forcing a refresh of non-stale value
return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
}
#backgroundFetch(k, index, options, context) {
const v = index === undefined ? undefined : this.#valList[index];
if (this.#isBackgroundFetch(v)) {
return v;
}
const ac = new AC();
const { signal } = options;
// when/if our AC signals, then stop listening to theirs.
signal?.addEventListener('abort', () => ac.abort(signal.reason), {
signal: ac.signal,
});
const fetchOpts = {
signal: ac.signal,
options,
context,
};
const cb = (v, updateCache = false) => {
const { aborted } = ac.signal;
const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
if (options.status) {
if (aborted && !updateCache) {
options.status.fetchAborted = true;
options.status.fetchError = ac.signal.reason;
if (ignoreAbort)
options.status.fetchAbortIgnored = true;
}
else {
options.status.fetchResolved = true;
}
}
if (aborted && !ignoreAbort && !updateCache) {
return fetchFail(ac.signal.reason);
}
// either we didn't abort, and are still here, or we did, and ignored
const bf = p;
if (this.#valList[index] === p) {
if (v === undefined) {
if (bf.__staleWhileFetching) {
this.#valList[index] = bf.__staleWhileFetching;
}
else {
this.#delete(k, 'fetch');
}
}
else {
if (options.status)
options.status.fetchUpdated = true;
this.set(k, v, fetchOpts.options);
}
}
return v;
};
const eb = (er) => {
if (options.status) {
options.status.fetchRejected = true;
options.status.fetchError = er;
}
return fetchFail(er);
};
const fetchFail = (er) => {
const { aborted } = ac.signal;
const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
const noDelete = allowStale || options.noDeleteOnFetchRejection;
const bf = p;
if (this.#valList[index] === p) {
// if we allow stale on fetch rejections, then we need to ensure that
// the stale value is not removed from the cache when the fetch fails.
const del = !noDelete || bf.__staleWhileFetching === undefined;
if (del) {
this.#delete(k, 'fetch');
}
else if (!allowStaleAborted) {
// still replace the *promise* with the stale value,
// since we are done with the promise at this point.
// leave it untouched if we're still waiting for an
// aborted background fetch that hasn't yet returned.
this.#valList[index] = bf.__staleWhileFetching;
}
}
if (allowStale) {
if (options.status && bf.__staleWhileFetching !== undefined) {
options.status.returnedStale = true;
}
return bf.__staleWhileFetching;
}
else if (bf.__returned === bf) {
throw er;
}
};
const pcall = (res, rej) => {
const fmp = this.#fetchMethod?.(k, v, fetchOpts);
if (fmp && fmp instanceof Promise) {
fmp.then(v => res(v === undefined ? undefined : v), rej);
}
// ignored, we go until we finish, regardless.
// defer check until we are actually aborting,
// so fetchMethod can override.
ac.signal.addEventListener('abort', () => {
if (!options.ignoreFetchAbort ||
options.allowStaleOnFetchAbort) {
res(undefined);
// when it eventually resolves, update the cache.
if (options.allowStaleOnFetchAbort) {
res = v => cb(v, true);
}
}
});
};
if (options.status)
options.status.fetchDispatched = true;
const p = new Promise(pcall).then(cb, eb);
const bf = Object.assign(p, {
__abortController: ac,
__staleWhileFetching: v,
__returned: undefined,
});
if (index === undefined) {
// internal, don't expose status.
this.set(k, bf, { ...fetchOpts.options, status: undefined });
index = this.#keyMap.get(k);
}
else {
this.#valList[index] = bf;
}
return bf;
}
#isBackgroundFetch(p) {
if (!this.#hasFetchMethod)
return false;
const b = p;
return (!!b &&
b instanceof Promise &&
b.hasOwnProperty('__staleWhileFetching') &&
b.__abortController instanceof AC);
}
async fetch(k, fetchOptions = {}) {
const {
// get options
allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
// set options
ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
// fetch exclusive options
noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
if (!this.#hasFetchMethod) {
if (status)
status.fetch = 'get';
return this.get(k, {
allowStale,
updateAgeOnGet,
noDeleteOnStaleGet,
status,
});
}
const options = {
allowStale,
updateAgeOnGet,
noDeleteOnStaleGet,
ttl,
noDisposeOnSet,
size,
sizeCalculation,
noUpdateTTL,
noDeleteOnFetchRejection,
allowStaleOnFetchRejection,
allowStaleOnFetchAbort,
ignoreFetchAbort,
status,
signal,
};
let index = this.#keyMap.get(k);
if (index === undefined) {
if (status)
status.fetch = 'miss';
const p = this.#backgroundFetch(k, index, options, context);
return (p.__returned = p);
}
else {
// in cache, maybe already fetching
const v = this.#valList[index];
if (this.#isBackgroundFetch(v)) {
const stale = allowStale && v.__staleWhileFetching !== undefined;
if (status) {
status.fetch = 'inflight';
if (stale)
status.returnedStale = true;
}
return stale ? v.__staleWhileFetching : (v.__returned = v);
}
// if we force a refresh, that means do NOT serve the cached value,
// unless we are already in the process of refreshing the cache.
const isStale = this.#isStale(index);
if (!forceRefresh && !isStale) {
if (status)
status.fetch = 'hit';
this.#moveToTail(index);
if (updateAgeOnGet) {
this.#updateItemAge(index);
}
if (status)
this.#statusTTL(status, index);
return v;
}
// ok, it is stale or a forced refresh, and not already fetching.
// refresh the cache.
const p = this.#backgroundFetch(k, index, options, context);
const hasStale = p.__staleWhileFetching !== undefined;
const staleVal = hasStale && allowStale;
if (status) {
status.fetch = isStale ? 'stale' : 'refresh';
if (staleVal && isStale)
status.returnedStale = true;
}
return staleVal ? p.__staleWhileFetching : (p.__returned = p);
}
}
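// Illustrative sketch (assumes a fetchMethod was provided; fetchRemote is a
// hypothetical loader, not part of this bundle):
//
//   const cache = new LRUCache({
//     max: 100,
//     fetchMethod: async (key, staleValue, { signal }) => fetchRemote(key, { signal }),
//   });
//   const user = await cache.fetch('user:1');  // cached value, or the fetchMethod result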
async forceFetch(k, fetchOptions = {}) {
const v = await this.fetch(k, fetchOptions);
if (v === undefined)
throw new Error('fetch() returned undefined');
return v;
}
memo(k, memoOptions = {}) {
const memoMethod = this.#memoMethod;
if (!memoMethod) {
throw new Error('no memoMethod provided to constructor');
}
const { context, forceRefresh, ...options } = memoOptions;
const v = this.get(k, options);
if (!forceRefresh && v !== undefined)
return v;
const vv = memoMethod(k, v, {
options,
context,
});
this.set(k, vv, options);
return vv;
}
/**
* Return a value from the cache. Will update the recency of the cache
* entry found.
*
* If the key is not found, get() will return `undefined`.
*/
get(k, getOptions = {}) {
const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
const index = this.#keyMap.get(k);
if (index !== undefined) {
const value = this.#valList[index];
const fetching = this.#isBackgroundFetch(value);
if (status)
this.#statusTTL(status, index);
if (this.#isStale(index)) {
if (status)
status.get = 'stale';
// delete only if not an in-flight background fetch
if (!fetching) {
if (!noDeleteOnStaleGet) {
this.#delete(k, 'expire');
}
if (status && allowStale)
status.returnedStale = true;
return allowStale ? value : undefined;
}
else {
if (status &&
allowStale &&
value.__staleWhileFetching !== undefined) {
status.returnedStale = true;
}
return allowStale ? value.__staleWhileFetching : undefined;
}
}
else {
if (status)
status.get = 'hit';
// if we're currently fetching it, we don't actually have it yet
// it's not stale, which means this isn't a staleWhileRefetching.
// If it's not stale, and fetching, AND has a __staleWhileFetching
// value, then that means the user fetched with {forceRefresh:true},
// so it's safe to return that value.
if (fetching) {
return value.__staleWhileFetching;
}
this.#moveToTail(index);
if (updateAgeOnGet) {
this.#updateItemAge(index);
}
return value;
}
}
else if (status) {
status.get = 'miss';
}
}
#connect(p, n) {
this.#prev[n] = p;
this.#next[p] = n;
}
#moveToTail(index) {
// if tail already, nothing to do
// if head, move head to next[index]
// else
// move next[prev[index]] to next[index] (head has no prev)
// move prev[next[index]] to prev[index]
// prev[index] = tail
// next[tail] = index
// tail = index
if (index !== this.#tail) {
if (index === this.#head) {
this.#head = this.#next[index];
}
else {
this.#connect(this.#prev[index], this.#next[index]);
}
this.#connect(this.#tail, index);
this.#tail = index;
}
}
/**
* Deletes a key out of the cache.
*
* Returns true if the key was deleted, false otherwise.
*/
delete(k) {
return this.#delete(k, 'delete');
}
#delete(k, reason) {
let deleted = false;
if (this.#size !== 0) {
const index = this.#keyMap.get(k);
if (index !== undefined) {
deleted = true;
if (this.#size === 1) {
this.#clear(reason);
}
else {
this.#removeItemSize(index);
const v = this.#valList[index];
if (this.#isBackgroundFetch(v)) {
v.__abortController.abort(new Error('deleted'));
}
else if (this.#hasDispose || this.#hasDisposeAfter) {
if (this.#hasDispose) {
this.#dispose?.(v, k, reason);
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([v, k, reason]);
}
}
this.#keyMap.delete(k);
this.#keyList[index] = undefined;
this.#valList[index] = undefined;
if (index === this.#tail) {
this.#tail = this.#prev[index];
}
else if (index === this.#head) {
this.#head = this.#next[index];
}
else {
const pi = this.#prev[index];
this.#next[pi] = this.#next[index];
const ni = this.#next[index];
this.#prev[ni] = this.#prev[index];
}
this.#size--;
this.#free.push(index);
}
}
}
if (this.#hasDisposeAfter && this.#disposed?.length) {
const dt = this.#disposed;
let task;
while ((task = dt?.shift())) {
this.#disposeAfter?.(...task);
}
}
return deleted;
}
/**
* Clear the cache entirely, throwing away all values.
*/
clear() {
return this.#clear('delete');
}
#clear(reason) {
for (const index of this.#rindexes({ allowStale: true })) {
const v = this.#valList[index];
if (this.#isBackgroundFetch(v)) {
v.__abortController.abort(new Error('deleted'));
}
else {
const k = this.#keyList[index];
if (this.#hasDispose) {
this.#dispose?.(v, k, reason);
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([v, k, reason]);
}
}
}
this.#keyMap.clear();
this.#valList.fill(undefined);
this.#keyList.fill(undefined);
if (this.#ttls && this.#starts) {
this.#ttls.fill(0);
this.#starts.fill(0);
}
if (this.#sizes) {
this.#sizes.fill(0);
}
this.#head = 0;
this.#tail = 0;
this.#free.length = 0;
this.#calculatedSize = 0;
this.#size = 0;
if (this.#hasDisposeAfter && this.#disposed) {
const dt = this.#disposed;
let task;
while ((task = dt?.shift())) {
this.#disposeAfter?.(...task);
}
}
}
}
const proc = typeof process === 'object' && process
? process
: {
stdout: null,
stderr: null,
};
/**
* Return true if the argument is a Minipass stream, Node stream, or something
* else that Minipass can interact with.
*/
const isStream = (s) => !!s &&
typeof s === 'object' &&
(s instanceof Minipass ||
s instanceof Stream$1 ||
isReadable(s) ||
isWritable(s));
/**
* Return true if the argument is a valid {@link Minipass.Readable}
*/
const isReadable = (s) => !!s &&
typeof s === 'object' &&
s instanceof EventEmitter$4 &&
typeof s.pipe === 'function' &&
// node core Writable streams have a pipe() method, but it throws
s.pipe !== Stream$1.Writable.prototype.pipe;
/**
* Return true if the argument is a valid {@link Minipass.Writable}
*/
const isWritable = (s) => !!s &&
typeof s === 'object' &&
s instanceof EventEmitter$4 &&
typeof s.write === 'function' &&
typeof s.end === 'function';
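// Illustrative sketch: isStream(new Minipass()) and
// isStream(fs.createReadStream('some-file')) should both be true, while
// isStream({}) is false.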
const EOF = Symbol('EOF');
const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
const EMITTED_END = Symbol('emittedEnd');
const EMITTING_END = Symbol('emittingEnd');
const EMITTED_ERROR = Symbol('emittedError');
const CLOSED$1 = Symbol('closed');
const READ = Symbol('read');
const FLUSH = Symbol('flush');
const FLUSHCHUNK = Symbol('flushChunk');
const ENCODING$1 = Symbol('encoding');
const DECODER = Symbol('decoder');
const FLOWING = Symbol('flowing');
const PAUSED = Symbol('paused');
const RESUME = Symbol('resume');
const BUFFER = Symbol('buffer');
const PIPES = Symbol('pipes');
const BUFFERLENGTH = Symbol('bufferLength');
const BUFFERPUSH = Symbol('bufferPush');
const BUFFERSHIFT = Symbol('bufferShift');
const OBJECTMODE = Symbol('objectMode');
// internal event when stream is destroyed
const DESTROYED = Symbol('destroyed');
// internal event when stream has an error
const ERROR = Symbol('error');
const EMITDATA = Symbol('emitData');
const EMITEND = Symbol('emitEnd');
const EMITEND2 = Symbol('emitEnd2');
const ASYNC = Symbol('async');
const ABORT = Symbol('abort');
const ABORTED = Symbol('aborted');
const SIGNAL = Symbol('signal');
const DATALISTENERS = Symbol('dataListeners');
const DISCARDED = Symbol('discarded');
const defer$3 = (fn) => Promise.resolve().then(fn);
const nodefer = (fn) => fn();
const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
(!!b &&
typeof b === 'object' &&
b.constructor &&
b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0);
const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
/**
* Internal class representing a pipe to a destination stream.
*
* @internal
*/
class Pipe {
src;
dest;
opts;
ondrain;
constructor(src, dest, opts) {
this.src = src;
this.dest = dest;
this.opts = opts;
this.ondrain = () => src[RESUME]();
this.dest.on('drain', this.ondrain);
}
unpipe() {
this.dest.removeListener('drain', this.ondrain);
}
// only here for the prototype
/* c8 ignore start */
proxyErrors(_er) { }
/* c8 ignore stop */
end() {
this.unpipe();
if (this.opts.end)
this.dest.end();
}
}
/**
* Internal class representing a pipe to a destination stream where
* errors are proxied.
*
* @internal
*/
class PipeProxyErrors extends Pipe {
unpipe() {
this.src.removeListener('error', this.proxyErrors);
super.unpipe();
}
constructor(src, dest, opts) {
super(src, dest, opts);
this.proxyErrors = er => dest.emit('error', er);
src.on('error', this.proxyErrors);
}
}
const isObjectModeOptions = (o) => !!o.objectMode;
const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
/**
* Main export, the Minipass class
*
* `RType` is the type of data emitted, defaults to Buffer
*
* `WType` is the type of data to be written, if RType is buffer or string,
* then any {@link Minipass.ContiguousData} is allowed.
*
* `Events` is the set of event handler signatures that this object
* will emit, see {@link Minipass.Events}
*/
class Minipass extends EventEmitter$4 {
[FLOWING] = false;
[PAUSED] = false;
[PIPES] = [];
[BUFFER] = [];
[OBJECTMODE];
[ENCODING$1];
[ASYNC];
[DECODER];
[EOF] = false;
[EMITTED_END] = false;
[EMITTING_END] = false;
[CLOSED$1] = false;
[EMITTED_ERROR] = null;
[BUFFERLENGTH] = 0;
[DESTROYED] = false;
[SIGNAL];
[ABORTED] = false;
[DATALISTENERS] = 0;
[DISCARDED] = false;
/**
* true if the stream can be written
*/
writable = true;
/**
* true if the stream can be read
*/
readable = true;
/**
* If `RType` is Buffer, then options do not need to be provided.
* Otherwise, an options object must be provided to specify either
* {@link Minipass.SharedOptions.objectMode} or
* {@link Minipass.SharedOptions.encoding}, as appropriate.
*/
constructor(...args) {
const options = (args[0] ||
{});
super();
if (options.objectMode && typeof options.encoding === 'string') {
throw new TypeError('Encoding and objectMode may not be used together');
}
if (isObjectModeOptions(options)) {
this[OBJECTMODE] = true;
this[ENCODING$1] = null;
}
else if (isEncodingOptions(options)) {
this[ENCODING$1] = options.encoding;
this[OBJECTMODE] = false;
}
else {
this[OBJECTMODE] = false;
this[ENCODING$1] = null;
}
this[ASYNC] = !!options.async;
this[DECODER] = this[ENCODING$1]
? new StringDecoder(this[ENCODING$1])
: null;
//@ts-ignore - private option for debugging and testing
if (options && options.debugExposeBuffer === true) {
Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
}
//@ts-ignore - private option for debugging and testing
if (options && options.debugExposePipes === true) {
Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
}
const { signal } = options;
if (signal) {
this[SIGNAL] = signal;
if (signal.aborted) {
this[ABORT]();
}
else {
signal.addEventListener('abort', () => this[ABORT]());
}
}
}
/**
* The amount of data stored in the buffer waiting to be read.
*
 * For Buffer streams, this will be the total byte length.
* For string encoding streams, this will be the string character length,
* according to JavaScript's `string.length` logic.
* For objectMode streams, this is a count of the items waiting to be
* emitted.
*/
get bufferLength() {
return this[BUFFERLENGTH];
}
/**
* The `BufferEncoding` currently in use, or `null`
*/
get encoding() {
return this[ENCODING$1];
}
/**
 * @deprecated - This is a read-only property
*/
set encoding(_enc) {
throw new Error('Encoding must be set at instantiation time');
}
/**
* @deprecated - Encoding may only be set at instantiation time
*/
setEncoding(_enc) {
throw new Error('Encoding must be set at instantiation time');
}
/**
* True if this is an objectMode stream
*/
get objectMode() {
return this[OBJECTMODE];
}
/**
* @deprecated - This is a read-only property
*/
set objectMode(_om) {
throw new Error('objectMode must be set at instantiation time');
}
/**
* true if this is an async stream
*/
get ['async']() {
return this[ASYNC];
}
/**
* Set to true to make this stream async.
*
* Once set, it cannot be unset, as this would potentially cause incorrect
* behavior. Ie, a sync stream can be made async, but an async stream
* cannot be safely made sync.
*/
set ['async'](a) {
this[ASYNC] = this[ASYNC] || !!a;
}
// drop everything and get out of the flow completely
[ABORT]() {
this[ABORTED] = true;
this.emit('abort', this[SIGNAL]?.reason);
this.destroy(this[SIGNAL]?.reason);
}
/**
* True if the stream has been aborted.
*/
get aborted() {
return this[ABORTED];
}
/**
* No-op setter. Stream aborted status is set via the AbortSignal provided
* in the constructor options.
*/
set aborted(_) { }
write(chunk, encoding, cb) {
if (this[ABORTED])
return false;
if (this[EOF])
throw new Error('write after end');
if (this[DESTROYED]) {
this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
return true;
}
if (typeof encoding === 'function') {
cb = encoding;
encoding = 'utf8';
}
if (!encoding)
encoding = 'utf8';
const fn = this[ASYNC] ? defer$3 : nodefer;
// convert array buffers and typed array views into buffers
// at some point in the future, we may want to do the opposite!
// leave strings and buffers as-is
    // anything else is only allowed in object mode, so throw
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk)) {
//@ts-ignore - sinful unsafe type changing
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
}
else if (isArrayBufferLike(chunk)) {
//@ts-ignore - sinful unsafe type changing
chunk = Buffer.from(chunk);
}
else if (typeof chunk !== 'string') {
throw new Error('Non-contiguous data written to non-objectMode stream');
}
}
// handle object mode up front, since it's simpler
// this yields better performance, fewer checks later.
if (this[OBJECTMODE]) {
// maybe impossible?
/* c8 ignore start */
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
this[FLUSH](true);
/* c8 ignore stop */
if (this[FLOWING])
this.emit('data', chunk);
else
this[BUFFERPUSH](chunk);
if (this[BUFFERLENGTH] !== 0)
this.emit('readable');
if (cb)
fn(cb);
return this[FLOWING];
}
// at this point the chunk is a buffer or string
    // an empty chunk: don't buffer it up or send it to the decoder
if (!chunk.length) {
if (this[BUFFERLENGTH] !== 0)
this.emit('readable');
if (cb)
fn(cb);
return this[FLOWING];
}
// fast-path writing strings of same encoding to a stream with
// an empty buffer, skipping the buffer/decoder dance
if (typeof chunk === 'string' &&
// unless it is a string already ready for us to use
!(encoding === this[ENCODING$1] && !this[DECODER]?.lastNeed)) {
//@ts-ignore - sinful unsafe type change
chunk = Buffer.from(chunk, encoding);
}
if (Buffer.isBuffer(chunk) && this[ENCODING$1]) {
//@ts-ignore - sinful unsafe type change
chunk = this[DECODER].write(chunk);
}
// Note: flushing CAN potentially switch us into not-flowing mode
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
this[FLUSH](true);
if (this[FLOWING])
this.emit('data', chunk);
else
this[BUFFERPUSH](chunk);
if (this[BUFFERLENGTH] !== 0)
this.emit('readable');
if (cb)
fn(cb);
return this[FLOWING];
}
/**
* Low-level explicit read method.
*
* In objectMode, the argument is ignored, and one item is returned if
* available.
*
* `n` is the number of bytes (or in the case of encoding streams,
* characters) to consume. If `n` is not provided, then the entire buffer
* is returned, or `null` is returned if no data is available.
*
 * If `n` is greater than the amount of data in the internal buffer,
* then `null` is returned.
*/
read(n) {
if (this[DESTROYED])
return null;
this[DISCARDED] = false;
if (this[BUFFERLENGTH] === 0 ||
n === 0 ||
(n && n > this[BUFFERLENGTH])) {
this[MAYBE_EMIT_END]();
return null;
}
if (this[OBJECTMODE])
n = null;
if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
// not object mode, so if we have an encoding, then RType is string
// otherwise, must be Buffer
this[BUFFER] = [
(this[ENCODING$1]
? this[BUFFER].join('')
: Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
];
}
const ret = this[READ](n || null, this[BUFFER][0]);
this[MAYBE_EMIT_END]();
return ret;
}
[READ](n, chunk) {
if (this[OBJECTMODE])
this[BUFFERSHIFT]();
else {
const c = chunk;
if (n === c.length || n === null)
this[BUFFERSHIFT]();
else if (typeof c === 'string') {
this[BUFFER][0] = c.slice(n);
chunk = c.slice(0, n);
this[BUFFERLENGTH] -= n;
}
else {
this[BUFFER][0] = c.subarray(n);
chunk = c.subarray(0, n);
this[BUFFERLENGTH] -= n;
}
}
this.emit('data', chunk);
if (!this[BUFFER].length && !this[EOF])
this.emit('drain');
return chunk;
}
end(chunk, encoding, cb) {
if (typeof chunk === 'function') {
cb = chunk;
chunk = undefined;
}
if (typeof encoding === 'function') {
cb = encoding;
encoding = 'utf8';
}
if (chunk !== undefined)
this.write(chunk, encoding);
if (cb)
this.once('end', cb);
this[EOF] = true;
this.writable = false;
// if we haven't written anything, then go ahead and emit,
// even if we're not reading.
// we'll re-emit if a new 'end' listener is added anyway.
// This makes MP more suitable to write-only use cases.
if (this[FLOWING] || !this[PAUSED])
this[MAYBE_EMIT_END]();
return this;
}
// don't let the internal resume be overwritten
[RESUME]() {
if (this[DESTROYED])
return;
if (!this[DATALISTENERS] && !this[PIPES].length) {
this[DISCARDED] = true;
}
this[PAUSED] = false;
this[FLOWING] = true;
this.emit('resume');
if (this[BUFFER].length)
this[FLUSH]();
else if (this[EOF])
this[MAYBE_EMIT_END]();
else
this.emit('drain');
}
/**
* Resume the stream if it is currently in a paused state
*
* If called when there are no pipe destinations or `data` event listeners,
* this will place the stream in a "discarded" state, where all data will
* be thrown away. The discarded state is removed if a pipe destination or
* data handler is added, if pause() is called, or if any synchronous or
* asynchronous iteration is started.
*/
resume() {
return this[RESUME]();
}
/**
* Pause the stream
*/
pause() {
this[FLOWING] = false;
this[PAUSED] = true;
this[DISCARDED] = false;
}
/**
* true if the stream has been forcibly destroyed
*/
get destroyed() {
return this[DESTROYED];
}
/**
* true if the stream is currently in a flowing state, meaning that
* any writes will be immediately emitted.
*/
get flowing() {
return this[FLOWING];
}
/**
* true if the stream is currently in a paused state
*/
get paused() {
return this[PAUSED];
}
[BUFFERPUSH](chunk) {
if (this[OBJECTMODE])
this[BUFFERLENGTH] += 1;
else
this[BUFFERLENGTH] += chunk.length;
this[BUFFER].push(chunk);
}
[BUFFERSHIFT]() {
if (this[OBJECTMODE])
this[BUFFERLENGTH] -= 1;
else
this[BUFFERLENGTH] -= this[BUFFER][0].length;
return this[BUFFER].shift();
}
[FLUSH](noDrain = false) {
do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
this[BUFFER].length);
if (!noDrain && !this[BUFFER].length && !this[EOF])
this.emit('drain');
}
[FLUSHCHUNK](chunk) {
this.emit('data', chunk);
return this[FLOWING];
}
/**
* Pipe all data emitted by this stream into the destination provided.
*
* Triggers the flow of data.
*/
pipe(dest, opts) {
if (this[DESTROYED])
return dest;
this[DISCARDED] = false;
const ended = this[EMITTED_END];
opts = opts || {};
if (dest === proc.stdout || dest === proc.stderr)
opts.end = false;
else
opts.end = opts.end !== false;
opts.proxyErrors = !!opts.proxyErrors;
// piping an ended stream ends immediately
if (ended) {
if (opts.end)
dest.end();
}
else {
// "as" here just ignores the WType, which pipes don't care about,
// since they're only consuming from us, and writing to the dest
this[PIPES].push(!opts.proxyErrors
? new Pipe(this, dest, opts)
: new PipeProxyErrors(this, dest, opts));
if (this[ASYNC])
defer$3(() => this[RESUME]());
else
this[RESUME]();
}
return dest;
}
/**
* Fully unhook a piped destination stream.
*
* If the destination stream was the only consumer of this stream (ie,
* there are no other piped destinations or `'data'` event listeners)
* then the flow of data will stop until there is another consumer or
* {@link Minipass#resume} is explicitly called.
*/
unpipe(dest) {
const p = this[PIPES].find(p => p.dest === dest);
if (p) {
if (this[PIPES].length === 1) {
if (this[FLOWING] && this[DATALISTENERS] === 0) {
this[FLOWING] = false;
}
this[PIPES] = [];
}
else
this[PIPES].splice(this[PIPES].indexOf(p), 1);
p.unpipe();
}
}
/**
* Alias for {@link Minipass#on}
*/
addListener(ev, handler) {
return this.on(ev, handler);
}
/**
* Mostly identical to `EventEmitter.on`, with the following
* behavior differences to prevent data loss and unnecessary hangs:
*
* - Adding a 'data' event handler will trigger the flow of data
*
* - Adding a 'readable' event handler when there is data waiting to be read
* will cause 'readable' to be emitted immediately.
*
* - Adding an 'endish' event handler ('end', 'finish', etc.) which has
* already passed will cause the event to be emitted immediately and all
* handlers removed.
*
* - Adding an 'error' event handler after an error has been emitted will
* cause the event to be re-emitted immediately with the error previously
* raised.
*/
on(ev, handler) {
const ret = super.on(ev, handler);
if (ev === 'data') {
this[DISCARDED] = false;
this[DATALISTENERS]++;
if (!this[PIPES].length && !this[FLOWING]) {
this[RESUME]();
}
}
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
super.emit('readable');
}
else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev);
this.removeAllListeners(ev);
}
else if (ev === 'error' && this[EMITTED_ERROR]) {
const h = handler;
if (this[ASYNC])
defer$3(() => h.call(this, this[EMITTED_ERROR]));
else
h.call(this, this[EMITTED_ERROR]);
}
return ret;
}
/**
* Alias for {@link Minipass#off}
*/
removeListener(ev, handler) {
return this.off(ev, handler);
}
/**
* Mostly identical to `EventEmitter.off`
*
* If a 'data' event handler is removed, and it was the last consumer
* (ie, there are no pipe destinations or other 'data' event listeners),
* then the flow of data will stop until there is another consumer or
* {@link Minipass#resume} is explicitly called.
*/
off(ev, handler) {
const ret = super.off(ev, handler);
// if we previously had listeners, and now we don't, and we don't
// have any pipes, then stop the flow, unless it's been explicitly
// put in a discarded flowing state via stream.resume().
if (ev === 'data') {
this[DATALISTENERS] = this.listeners('data').length;
if (this[DATALISTENERS] === 0 &&
!this[DISCARDED] &&
!this[PIPES].length) {
this[FLOWING] = false;
}
}
return ret;
}
/**
* Mostly identical to `EventEmitter.removeAllListeners`
*
* If all 'data' event handlers are removed, and they were the last consumer
* (ie, there are no pipe destinations), then the flow of data will stop
* until there is another consumer or {@link Minipass#resume} is explicitly
* called.
*/
removeAllListeners(ev) {
const ret = super.removeAllListeners(ev);
if (ev === 'data' || ev === undefined) {
this[DATALISTENERS] = 0;
if (!this[DISCARDED] && !this[PIPES].length) {
this[FLOWING] = false;
}
}
return ret;
}
/**
* true if the 'end' event has been emitted
*/
get emittedEnd() {
return this[EMITTED_END];
}
[MAYBE_EMIT_END]() {
if (!this[EMITTING_END] &&
!this[EMITTED_END] &&
!this[DESTROYED] &&
this[BUFFER].length === 0 &&
this[EOF]) {
this[EMITTING_END] = true;
this.emit('end');
this.emit('prefinish');
this.emit('finish');
if (this[CLOSED$1])
this.emit('close');
this[EMITTING_END] = false;
}
}
/**
* Mostly identical to `EventEmitter.emit`, with the following
* behavior differences to prevent data loss and unnecessary hangs:
*
* If the stream has been destroyed, and the event is something other
* than 'close' or 'error', then `false` is returned and no handlers
* are called.
*
* If the event is 'end', and has already been emitted, then the event
* is ignored. If the stream is in a paused or non-flowing state, then
* the event will be deferred until data flow resumes. If the stream is
* async, then handlers will be called on the next tick rather than
* immediately.
*
* If the event is 'close', and 'end' has not yet been emitted, then
* the event will be deferred until after 'end' is emitted.
*
* If the event is 'error', and an AbortSignal was provided for the stream,
* and there are no listeners, then the event is ignored, matching the
 * behavior of node core streams in the presence of an AbortSignal.
*
* If the event is 'finish' or 'prefinish', then all listeners will be
* removed after emitting the event, to prevent double-firing.
*/
emit(ev, ...args) {
const data = args[0];
// error and close are only events allowed after calling destroy()
if (ev !== 'error' &&
ev !== 'close' &&
ev !== DESTROYED &&
this[DESTROYED]) {
return false;
}
else if (ev === 'data') {
return !this[OBJECTMODE] && !data
? false
: this[ASYNC]
? (defer$3(() => this[EMITDATA](data)), true)
: this[EMITDATA](data);
}
else if (ev === 'end') {
return this[EMITEND]();
}
else if (ev === 'close') {
this[CLOSED$1] = true;
// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED])
return false;
const ret = super.emit('close');
this.removeAllListeners('close');
return ret;
}
else if (ev === 'error') {
this[EMITTED_ERROR] = data;
super.emit(ERROR, data);
const ret = !this[SIGNAL] || this.listeners('error').length
? super.emit('error', data)
: false;
this[MAYBE_EMIT_END]();
return ret;
}
else if (ev === 'resume') {
const ret = super.emit('resume');
this[MAYBE_EMIT_END]();
return ret;
}
else if (ev === 'finish' || ev === 'prefinish') {
const ret = super.emit(ev);
this.removeAllListeners(ev);
return ret;
}
// Some other unknown event
const ret = super.emit(ev, ...args);
this[MAYBE_EMIT_END]();
return ret;
}
[EMITDATA](data) {
for (const p of this[PIPES]) {
if (p.dest.write(data) === false)
this.pause();
}
const ret = this[DISCARDED] ? false : super.emit('data', data);
this[MAYBE_EMIT_END]();
return ret;
}
[EMITEND]() {
if (this[EMITTED_END])
return false;
this[EMITTED_END] = true;
this.readable = false;
return this[ASYNC]
? (defer$3(() => this[EMITEND2]()), true)
: this[EMITEND2]();
}
[EMITEND2]() {
if (this[DECODER]) {
const data = this[DECODER].end();
if (data) {
for (const p of this[PIPES]) {
p.dest.write(data);
}
if (!this[DISCARDED])
super.emit('data', data);
}
}
for (const p of this[PIPES]) {
p.end();
}
const ret = super.emit('end');
this.removeAllListeners('end');
return ret;
}
/**
* Return a Promise that resolves to an array of all emitted data once
* the stream ends.
*/
async collect() {
const buf = Object.assign([], {
dataLength: 0,
});
if (!this[OBJECTMODE])
buf.dataLength = 0;
// set the promise first, in case an error is raised
// by triggering the flow here.
const p = this.promise();
this.on('data', c => {
buf.push(c);
if (!this[OBJECTMODE])
buf.dataLength += c.length;
});
await p;
return buf;
}
/**
* Return a Promise that resolves to the concatenation of all emitted data
* once the stream ends.
*
* Not allowed on objectMode streams.
*/
async concat() {
if (this[OBJECTMODE]) {
throw new Error('cannot concat in objectMode');
}
const buf = await this.collect();
return (this[ENCODING$1]
? buf.join('')
: Buffer.concat(buf, buf.dataLength));
}
/**
* Return a void Promise that resolves once the stream ends.
*/
async promise() {
return new Promise((resolve, reject) => {
this.on(DESTROYED, () => reject(new Error('stream destroyed')));
this.on('error', er => reject(er));
this.on('end', () => resolve());
});
}
/**
* Asynchronous `for await of` iteration.
*
* This will continue emitting all chunks until the stream terminates.
*/
[Symbol.asyncIterator]() {
// set this up front, in case the consumer doesn't call next()
// right away.
this[DISCARDED] = false;
let stopped = false;
const stop = async () => {
this.pause();
stopped = true;
return { value: undefined, done: true };
};
const next = () => {
if (stopped)
return stop();
const res = this.read();
if (res !== null)
return Promise.resolve({ done: false, value: res });
if (this[EOF])
return stop();
let resolve;
let reject;
const onerr = (er) => {
this.off('data', ondata);
this.off('end', onend);
this.off(DESTROYED, ondestroy);
stop();
reject(er);
};
const ondata = (value) => {
this.off('error', onerr);
this.off('end', onend);
this.off(DESTROYED, ondestroy);
this.pause();
resolve({ value, done: !!this[EOF] });
};
const onend = () => {
this.off('error', onerr);
this.off('data', ondata);
this.off(DESTROYED, ondestroy);
stop();
resolve({ done: true, value: undefined });
};
const ondestroy = () => onerr(new Error('stream destroyed'));
return new Promise((res, rej) => {
reject = rej;
resolve = res;
this.once(DESTROYED, ondestroy);
this.once('error', onerr);
this.once('end', onend);
this.once('data', ondata);
});
};
return {
next,
throw: stop,
return: stop,
[Symbol.asyncIterator]() {
return this;
},
};
}
/**
* Synchronous `for of` iteration.
*
* The iteration will terminate when the internal buffer runs out, even
* if the stream has not yet terminated.
*/
[Symbol.iterator]() {
// set this up front, in case the consumer doesn't call next()
// right away.
this[DISCARDED] = false;
let stopped = false;
const stop = () => {
this.pause();
this.off(ERROR, stop);
this.off(DESTROYED, stop);
this.off('end', stop);
stopped = true;
return { done: true, value: undefined };
};
const next = () => {
if (stopped)
return stop();
const value = this.read();
return value === null ? stop() : { done: false, value };
};
this.once('end', stop);
this.once(ERROR, stop);
this.once(DESTROYED, stop);
return {
next,
throw: stop,
return: stop,
[Symbol.iterator]() {
return this;
},
};
}
/**
* Destroy a stream, preventing it from being used for any further purpose.
*
* If the stream has a `close()` method, then it will be called on
* destruction.
*
* After destruction, any attempt to write data, read data, or emit most
* events will be ignored.
*
* If an error argument is provided, then it will be emitted in an
* 'error' event.
*/
destroy(er) {
if (this[DESTROYED]) {
if (er)
this.emit('error', er);
else
this.emit(DESTROYED);
return this;
}
this[DESTROYED] = true;
this[DISCARDED] = true;
// throw away all buffered data, it's never coming out
this[BUFFER].length = 0;
this[BUFFERLENGTH] = 0;
const wc = this;
if (typeof wc.close === 'function' && !this[CLOSED$1])
wc.close();
if (er)
this.emit('error', er);
// if no error to emit, still reject pending promises
else
this.emit(DESTROYED);
return this;
}
/**
* Alias for {@link isStream}
*
* Former export location, maintained for backwards compatibility.
*
* @deprecated
*/
static get isStream() {
return isStream;
}
}
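// Illustrative usage sketch (not part of the original bundle), exercising the
// Minipass API documented above: an encoding stream drained with concat(), and
// an objectMode stream consumed with `for await`. Nothing beyond the methods
// declared on the class is assumed.
async function exampleMinipassUsage() {
    // string-encoding stream: concat() resolves to the joined string once it ends
    const src = new Minipass({ encoding: 'utf8' });
    const done = src.concat();
    src.write('hello, ');
    src.end('world');
    console.log(await done); // 'hello, world'
    // objectMode stream consumed via the async iterator defined above
    const objects = new Minipass({ objectMode: true });
    objects.write({ a: 1 });
    objects.end({ b: 2 });
    for await (const item of objects) {
        console.log(item); // { a: 1 }, then { b: 2 }
    }
}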
const realpathSync = realpathSync$1.native;
const defaultFS = {
lstatSync,
readdir: readdir$4,
readdirSync,
readlinkSync,
realpathSync,
promises: {
lstat: lstat$3,
readdir: readdir$5,
readlink,
realpath: realpath$2,
},
};
// if they just gave us require('fs') then use our default
const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === fs$j ?
defaultFS
: {
...defaultFS,
...fsOption,
promises: {
...defaultFS.promises,
...(fsOption.promises || {}),
},
};
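// Illustrative sketch (not part of the original bundle): fsFromOption() merges a
// partial override on top of defaultFS, so a caller only supplies the methods it
// wants to replace. makeTracingFS is a hypothetical wrapper used for illustration.
const makeTracingFS = () => fsFromOption({
    lstatSync: (p) => {
        console.log('lstatSync', p);
        return lstatSync(p); // delegate to the real fs import
    },
});
// makeTracingFS().readdirSync and .promises.lstat still come from defaultFS.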
// turn something like //?/c:/ into c:\
const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
// windows paths are separated by either / or \
const eitherSep = /[\\\/]/;
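// Illustrative sketch (not part of the original bundle): what uncToDrive() does
// with the \\?\ long-path form of a drive root versus a server UNC root.
const exampleUncToDrive = () => {
    console.log(uncToDrive('//?/c:/')); // 'c:\'
    console.log(uncToDrive('\\\\?\\C:\\')); // 'C:\'
    console.log(uncToDrive('\\\\server\\share\\')); // unchanged: not a drive-letter root
};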
const UNKNOWN = 0; // may not even exist, for all we know
const IFIFO = 0b0001;
const IFCHR = 0b0010;
const IFDIR = 0b0100;
const IFBLK = 0b0110;
const IFREG = 0b1000;
const IFLNK = 0b1010;
const IFSOCK = 0b1100;
const IFMT = 0b1111;
// mask to unset low 4 bits
const IFMT_UNKNOWN = ~IFMT;
// set after successfully calling readdir() and getting entries.
const READDIR_CALLED = 0b0000_0001_0000;
// set after a successful lstat()
const LSTAT_CALLED = 0b0000_0010_0000;
// set if an entry (or one of its parents) is definitely not a dir
const ENOTDIR = 0b0000_0100_0000;
// set if an entry (or one of its parents) does not exist
// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
const ENOENT = 0b0000_1000_0000;
// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
// set if we fail to readlink
const ENOREADLINK = 0b0001_0000_0000;
// set if we know realpath() will fail
const ENOREALPATH = 0b0010_0000_0000;
const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
const TYPEMASK = 0b0011_1111_1111;
const entToType = (s) => s.isFile() ? IFREG
: s.isDirectory() ? IFDIR
: s.isSymbolicLink() ? IFLNK
: s.isCharacterDevice() ? IFCHR
: s.isBlockDevice() ? IFBLK
: s.isSocket() ? IFSOCK
: s.isFIFO() ? IFIFO
: UNKNOWN;
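// Illustrative sketch (not part of the original bundle): how the bit flags above
// compose. The low 4 bits (IFMT) carry the dirent type from entToType(); the
// higher bits record which fs calls have been made or are known to fail.
const exampleTypeFlags = () => {
    let type = IFDIR | LSTAT_CALLED; // lstat() reported a directory
    console.log((type & IFMT) === IFDIR); // true
    type = (type | ENOENT) & IFMT_UNKNOWN; // later marked nonexistent, as in #markENOENT
    console.log((type & IFMT) === UNKNOWN); // true: the format bits are cleared
    console.log(!!(type & ENOCHILD)); // true: ENOENT implies it cannot have children
};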
// normalize unicode path names
const normalizeCache = new Map();
const normalize = (s) => {
const c = normalizeCache.get(s);
if (c)
return c;
const n = s.normalize('NFKD');
normalizeCache.set(s, n);
return n;
};
const normalizeNocaseCache = new Map();
const normalizeNocase = (s) => {
const c = normalizeNocaseCache.get(s);
if (c)
return c;
const n = normalize(s.toLowerCase());
normalizeNocaseCache.set(s, n);
return n;
};
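// Illustrative sketch (not part of the original bundle): why name matching goes
// through the caches above instead of comparing raw strings. Both spellings name
// the same file, but only the normalized forms compare equal.
const exampleNormalize = () => {
    const composed = 'caf\u00e9'; // 'café' with a precomposed é
    const decomposed = 'cafe\u0301'; // 'e' followed by a combining acute accent
    console.log(composed === decomposed); // false
    console.log(normalize(composed) === normalize(decomposed)); // true (NFKD)
    console.log(normalizeNocase('CAF\u00c9') === normalizeNocase(decomposed)); // true
};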
/**
* An LRUCache for storing resolved path strings or Path objects.
* @internal
*/
class ResolveCache extends LRUCache {
constructor() {
super({ max: 256 });
}
}
// In order to prevent blowing out the js heap by allocating hundreds of
// thousands of Path entries when walking extremely large trees, the "children"
// in this tree are represented by storing an array of Path entries in an
// LRUCache, indexed by the parent. At any time, Path.children() may return an
// empty array, indicating that it doesn't know about any of its children, and
// thus has to rebuild that cache. This is fine, it just means that we don't
// benefit as much from having the cached entries, but huge directory walks
// don't blow out the stack, and smaller ones are still as fast as possible.
//
// It does impose some complexity when building up the readdir data, because we
// need to pass a reference to the children array that we started with.
/**
 * An LRUCache for storing child entries.
* @internal
*/
class ChildrenCache extends LRUCache {
constructor(maxSize = 16 * 1024) {
super({
maxSize,
// parent + children
sizeCalculation: a => a.length + 1,
});
}
}
const setAsCwd = Symbol('PathScurry setAsCwd');
/**
* Path objects are sort of like a super-powered
* {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
*
* Each one represents a single filesystem entry on disk, which may or may not
* exist. It includes methods for reading various types of information via
* lstat, readlink, and readdir, and caches all information to the greatest
* degree possible.
*
* Note that fs operations that would normally throw will instead return an
* "empty" value. This is in order to prevent excessive overhead from error
* stack traces.
*/
class PathBase {
/**
* the basename of this path
*
* **Important**: *always* test the path name against any test string
 * using the {@link isNamed} method, and not by directly comparing this
* string. Otherwise, unicode path strings that the system sees as identical
* will not be properly treated as the same path, leading to incorrect
* behavior and possible security issues.
*/
name;
/**
* the Path entry corresponding to the path root.
*
* @internal
*/
root;
/**
* All roots found within the current PathScurry family
*
* @internal
*/
roots;
/**
* a reference to the parent path, or undefined in the case of root entries
*
* @internal
*/
parent;
/**
* boolean indicating whether paths are compared case-insensitively
* @internal
*/
nocase;
/**
* boolean indicating that this path is the current working directory
* of the PathScurry collection that contains it.
*/
isCWD = false;
// potential default fs override
#fs;
// Stats fields
#dev;
get dev() {
return this.#dev;
}
#mode;
get mode() {
return this.#mode;
}
#nlink;
get nlink() {
return this.#nlink;
}
#uid;
get uid() {
return this.#uid;
}
#gid;
get gid() {
return this.#gid;
}
#rdev;
get rdev() {
return this.#rdev;
}
#blksize;
get blksize() {
return this.#blksize;
}
#ino;
get ino() {
return this.#ino;
}
#size;
get size() {
return this.#size;
}
#blocks;
get blocks() {
return this.#blocks;
}
#atimeMs;
get atimeMs() {
return this.#atimeMs;
}
#mtimeMs;
get mtimeMs() {
return this.#mtimeMs;
}
#ctimeMs;
get ctimeMs() {
return this.#ctimeMs;
}
#birthtimeMs;
get birthtimeMs() {
return this.#birthtimeMs;
}
#atime;
get atime() {
return this.#atime;
}
#mtime;
get mtime() {
return this.#mtime;
}
#ctime;
get ctime() {
return this.#ctime;
}
#birthtime;
get birthtime() {
return this.#birthtime;
}
#matchName;
#depth;
#fullpath;
#fullpathPosix;
#relative;
#relativePosix;
#type;
#children;
#linkTarget;
#realpath;
/**
* This property is for compatibility with the Dirent class as of
* Node v20, where Dirent['parentPath'] refers to the path of the
* directory that was passed to readdir. For root entries, it's the path
* to the entry itself.
*/
get parentPath() {
return (this.parent || this).fullpath();
}
/**
 * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
* this property refers to the *parent* path, not the path object itself.
*/
get path() {
return this.parentPath;
}
/**
* Do not create new Path objects directly. They should always be accessed
* via the PathScurry class or other methods on the Path class.
*
* @internal
*/
constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
this.name = name;
this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
this.#type = type & TYPEMASK;
this.nocase = nocase;
this.roots = roots;
this.root = root || this;
this.#children = children;
this.#fullpath = opts.fullpath;
this.#relative = opts.relative;
this.#relativePosix = opts.relativePosix;
this.parent = opts.parent;
if (this.parent) {
this.#fs = this.parent.#fs;
}
else {
this.#fs = fsFromOption(opts.fs);
}
}
/**
* Returns the depth of the Path object from its root.
*
* For example, a path at `/foo/bar` would have a depth of 2.
*/
depth() {
if (this.#depth !== undefined)
return this.#depth;
if (!this.parent)
return (this.#depth = 0);
return (this.#depth = this.parent.depth() + 1);
}
/**
* @internal
*/
childrenCache() {
return this.#children;
}
/**
* Get the Path object referenced by the string path, resolved from this Path
*/
resolve(path) {
if (!path) {
return this;
}
const rootPath = this.getRootString(path);
const dir = path.substring(rootPath.length);
const dirParts = dir.split(this.splitSep);
const result = rootPath ?
this.getRoot(rootPath).#resolveParts(dirParts)
: this.#resolveParts(dirParts);
return result;
}
#resolveParts(dirParts) {
let p = this;
for (const part of dirParts) {
p = p.child(part);
}
return p;
}
/**
* Returns the cached children Path objects, if still available. If they
* have fallen out of the cache, then returns an empty array, and resets the
* READDIR_CALLED bit, so that future calls to readdir() will require an fs
* lookup.
*
* @internal
*/
children() {
const cached = this.#children.get(this);
if (cached) {
return cached;
}
const children = Object.assign([], { provisional: 0 });
this.#children.set(this, children);
this.#type &= ~READDIR_CALLED;
return children;
}
/**
* Resolves a path portion and returns or creates the child Path.
*
* Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
* `'..'`.
*
* This should not be called directly. If `pathPart` contains any path
* separators, it will lead to unsafe undefined behavior.
*
* Use `Path.resolve()` instead.
*
* @internal
*/
child(pathPart, opts) {
if (pathPart === '' || pathPart === '.') {
return this;
}
if (pathPart === '..') {
return this.parent || this;
}
// find the child
const children = this.children();
const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
for (const p of children) {
if (p.#matchName === name) {
return p;
}
}
// didn't find it, create provisional child, since it might not
// actually exist. If we know the parent isn't a dir, then
// in fact it CAN'T exist.
const s = this.parent ? this.sep : '';
const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
const pchild = this.newChild(pathPart, UNKNOWN, {
...opts,
parent: this,
fullpath,
});
if (!this.canReaddir()) {
pchild.#type |= ENOENT;
}
// don't have to update provisional, because if we have real children,
// then provisional is set to children.length, otherwise a lower number
children.push(pchild);
return pchild;
}
/**
* The relative path from the cwd. If it does not share an ancestor with
* the cwd, then this ends up being equivalent to the fullpath()
*/
relative() {
if (this.isCWD)
return '';
if (this.#relative !== undefined) {
return this.#relative;
}
const name = this.name;
const p = this.parent;
if (!p) {
return (this.#relative = this.name);
}
const pv = p.relative();
return pv + (!pv || !p.parent ? '' : this.sep) + name;
}
/**
* The relative path from the cwd, using / as the path separator.
* If it does not share an ancestor with
* the cwd, then this ends up being equivalent to the fullpathPosix()
* On posix systems, this is identical to relative().
*/
relativePosix() {
if (this.sep === '/')
return this.relative();
if (this.isCWD)
return '';
if (this.#relativePosix !== undefined)
return this.#relativePosix;
const name = this.name;
const p = this.parent;
if (!p) {
return (this.#relativePosix = this.fullpathPosix());
}
const pv = p.relativePosix();
return pv + (!pv || !p.parent ? '' : '/') + name;
}
/**
* The fully resolved path string for this Path entry
*/
fullpath() {
if (this.#fullpath !== undefined) {
return this.#fullpath;
}
const name = this.name;
const p = this.parent;
if (!p) {
return (this.#fullpath = this.name);
}
const pv = p.fullpath();
const fp = pv + (!p.parent ? '' : this.sep) + name;
return (this.#fullpath = fp);
}
/**
* On platforms other than windows, this is identical to fullpath.
*
* On windows, this is overridden to return the forward-slash form of the
* full UNC path.
*/
fullpathPosix() {
if (this.#fullpathPosix !== undefined)
return this.#fullpathPosix;
if (this.sep === '/')
return (this.#fullpathPosix = this.fullpath());
if (!this.parent) {
const p = this.fullpath().replace(/\\/g, '/');
if (/^[a-z]:\//i.test(p)) {
return (this.#fullpathPosix = `//?/${p}`);
}
else {
return (this.#fullpathPosix = p);
}
}
const p = this.parent;
const pfpp = p.fullpathPosix();
const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
return (this.#fullpathPosix = fpp);
}
/**
* Is the Path of an unknown type?
*
* Note that we might know *something* about it if there has been a previous
* filesystem operation, for example that it does not exist, or is not a
* link, or whether it has child entries.
*/
isUnknown() {
return (this.#type & IFMT) === UNKNOWN;
}
isType(type) {
return this[`is${type}`]();
}
getType() {
return (this.isUnknown() ? 'Unknown'
: this.isDirectory() ? 'Directory'
: this.isFile() ? 'File'
: this.isSymbolicLink() ? 'SymbolicLink'
: this.isFIFO() ? 'FIFO'
: this.isCharacterDevice() ? 'CharacterDevice'
: this.isBlockDevice() ? 'BlockDevice'
: /* c8 ignore start */ this.isSocket() ? 'Socket'
: 'Unknown');
/* c8 ignore stop */
}
/**
* Is the Path a regular file?
*/
isFile() {
return (this.#type & IFMT) === IFREG;
}
/**
* Is the Path a directory?
*/
isDirectory() {
return (this.#type & IFMT) === IFDIR;
}
/**
* Is the path a character device?
*/
isCharacterDevice() {
return (this.#type & IFMT) === IFCHR;
}
/**
* Is the path a block device?
*/
isBlockDevice() {
return (this.#type & IFMT) === IFBLK;
}
/**
* Is the path a FIFO pipe?
*/
isFIFO() {
return (this.#type & IFMT) === IFIFO;
}
/**
* Is the path a socket?
*/
isSocket() {
return (this.#type & IFMT) === IFSOCK;
}
/**
* Is the path a symbolic link?
*/
isSymbolicLink() {
return (this.#type & IFLNK) === IFLNK;
}
/**
* Return the entry if it has been subject of a successful lstat, or
* undefined otherwise.
*
* Does not read the filesystem, so an undefined result *could* simply
* mean that we haven't called lstat on it.
*/
lstatCached() {
return this.#type & LSTAT_CALLED ? this : undefined;
}
/**
* Return the cached link target if the entry has been the subject of a
* successful readlink, or undefined otherwise.
*
* Does not read the filesystem, so an undefined result *could* just mean we
* don't have any cached data. Only use it if you are very sure that a
* readlink() has been called at some point.
*/
readlinkCached() {
return this.#linkTarget;
}
/**
* Returns the cached realpath target if the entry has been the subject
* of a successful realpath, or undefined otherwise.
*
* Does not read the filesystem, so an undefined result *could* just mean we
* don't have any cached data. Only use it if you are very sure that a
* realpath() has been called at some point.
*/
realpathCached() {
return this.#realpath;
}
/**
* Returns the cached child Path entries array if the entry has been the
* subject of a successful readdir(), or [] otherwise.
*
* Does not read the filesystem, so an empty array *could* just mean we
* don't have any cached data. Only use it if you are very sure that a
* readdir() has been called recently enough to still be valid.
*/
readdirCached() {
const children = this.children();
return children.slice(0, children.provisional);
}
/**
* Return true if it's worth trying to readlink. Ie, we don't (yet) have
* any indication that readlink will definitely fail.
*
* Returns false if the path is known to not be a symlink, if a previous
* readlink failed, or if the entry does not exist.
*/
canReadlink() {
if (this.#linkTarget)
return true;
if (!this.parent)
return false;
// cases where it cannot possibly succeed
const ifmt = this.#type & IFMT;
return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
this.#type & ENOREADLINK ||
this.#type & ENOENT);
}
/**
* Return true if readdir has previously been successfully called on this
* path, indicating that cachedReaddir() is likely valid.
*/
calledReaddir() {
return !!(this.#type & READDIR_CALLED);
}
/**
* Returns true if the path is known to not exist. That is, a previous lstat
* or readdir failed to verify its existence when that would have been
* expected, or a parent entry was marked either enoent or enotdir.
*/
isENOENT() {
return !!(this.#type & ENOENT);
}
/**
* Return true if the path is a match for the given path name. This handles
* case sensitivity and unicode normalization.
*
* Note: even on case-sensitive systems, it is **not** safe to test the
* equality of the `.name` property to determine whether a given pathname
* matches, due to unicode normalization mismatches.
*
* Always use this method instead of testing the `path.name` property
* directly.
*/
isNamed(n) {
return !this.nocase ?
this.#matchName === normalize(n)
: this.#matchName === normalizeNocase(n);
}
/**
* Return the Path object corresponding to the target of a symbolic link.
*
* If the Path is not a symbolic link, or if the readlink call fails for any
* reason, `undefined` is returned.
*
* Result is cached, and thus may be outdated if the filesystem is mutated.
*/
async readlink() {
const target = this.#linkTarget;
if (target) {
return target;
}
if (!this.canReadlink()) {
return undefined;
}
/* c8 ignore start */
// already covered by the canReadlink test, here for ts grumples
if (!this.parent) {
return undefined;
}
/* c8 ignore stop */
try {
const read = await this.#fs.promises.readlink(this.fullpath());
const linkTarget = (await this.parent.realpath())?.resolve(read);
if (linkTarget) {
return (this.#linkTarget = linkTarget);
}
}
catch (er) {
this.#readlinkFail(er.code);
return undefined;
}
}
/**
* Synchronous {@link PathBase.readlink}
*/
readlinkSync() {
const target = this.#linkTarget;
if (target) {
return target;
}
if (!this.canReadlink()) {
return undefined;
}
/* c8 ignore start */
// already covered by the canReadlink test, here for ts grumples
if (!this.parent) {
return undefined;
}
/* c8 ignore stop */
try {
const read = this.#fs.readlinkSync(this.fullpath());
const linkTarget = this.parent.realpathSync()?.resolve(read);
if (linkTarget) {
return (this.#linkTarget = linkTarget);
}
}
catch (er) {
this.#readlinkFail(er.code);
return undefined;
}
}
#readdirSuccess(children) {
// succeeded, mark readdir called bit
this.#type |= READDIR_CALLED;
// mark all remaining provisional children as ENOENT
for (let p = children.provisional; p < children.length; p++) {
const c = children[p];
if (c)
c.#markENOENT();
}
}
#markENOENT() {
// mark as UNKNOWN and ENOENT
if (this.#type & ENOENT)
return;
this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
this.#markChildrenENOENT();
}
#markChildrenENOENT() {
// all children are provisional and do not exist
const children = this.children();
children.provisional = 0;
for (const p of children) {
p.#markENOENT();
}
}
#markENOREALPATH() {
this.#type |= ENOREALPATH;
this.#markENOTDIR();
}
// save the information when we know the entry is not a dir
#markENOTDIR() {
// entry is not a directory, so any children can't exist.
// this *should* be impossible, since any children created
// after it's been marked ENOTDIR should be marked ENOENT,
// so it won't even get to this point.
/* c8 ignore start */
if (this.#type & ENOTDIR)
return;
/* c8 ignore stop */
let t = this.#type;
// this could happen if we stat a dir, then delete it,
// then try to read it or one of its children.
if ((t & IFMT) === IFDIR)
t &= IFMT_UNKNOWN;
this.#type = t | ENOTDIR;
this.#markChildrenENOENT();
}
#readdirFail(code = '') {
// markENOTDIR and markENOENT also set provisional=0
if (code === 'ENOTDIR' || code === 'EPERM') {
this.#markENOTDIR();
}
else if (code === 'ENOENT') {
this.#markENOENT();
}
else {
this.children().provisional = 0;
}
}
#lstatFail(code = '') {
// Windows just raises ENOENT in this case, disable for win CI
/* c8 ignore start */
if (code === 'ENOTDIR') {
// already know it has a parent by this point
const p = this.parent;
p.#markENOTDIR();
}
else if (code === 'ENOENT') {
/* c8 ignore stop */
this.#markENOENT();
}
}
#readlinkFail(code = '') {
let ter = this.#type;
ter |= ENOREADLINK;
if (code === 'ENOENT')
ter |= ENOENT;
// windows gets a weird error when you try to readlink a file
if (code === 'EINVAL' || code === 'UNKNOWN') {
// exists, but not a symlink, we don't know WHAT it is, so remove
// all IFMT bits.
ter &= IFMT_UNKNOWN;
}
this.#type = ter;
// windows just gets ENOENT in this case. We do cover the case,
// just disabled because it's impossible on Windows CI
/* c8 ignore start */
if (code === 'ENOTDIR' && this.parent) {
this.parent.#markENOTDIR();
}
/* c8 ignore stop */
}
#readdirAddChild(e, c) {
return (this.#readdirMaybePromoteChild(e, c) ||
this.#readdirAddNewChild(e, c));
}
#readdirAddNewChild(e, c) {
// alloc new entry at head, so it's never provisional
const type = entToType(e);
const child = this.newChild(e.name, type, { parent: this });
const ifmt = child.#type & IFMT;
if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
child.#type |= ENOTDIR;
}
c.unshift(child);
c.provisional++;
return child;
}
#readdirMaybePromoteChild(e, c) {
for (let p = c.provisional; p < c.length; p++) {
const pchild = c[p];
const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
if (name !== pchild.#matchName) {
continue;
}
return this.#readdirPromoteChild(e, pchild, p, c);
}
}
#readdirPromoteChild(e, p, index, c) {
const v = p.name;
// retain any other flags, but set ifmt from dirent
p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
// case sensitivity fixing when we learn the true name.
if (v !== e.name)
p.name = e.name;
// just advance provisional index (potentially off the list),
// otherwise we have to splice/pop it out and re-insert at head
if (index !== c.provisional) {
if (index === c.length - 1)
c.pop();
else
c.splice(index, 1);
c.unshift(p);
}
c.provisional++;
return p;
}
/**
* Call lstat() on this Path, and update all known information that can be
* determined.
*
* Note that unlike `fs.lstat()`, the returned value does not contain some
* information, such as `mode`, `dev`, `nlink`, and `ino`. If that
* information is required, you will need to call `fs.lstat` yourself.
*
* If the Path refers to a nonexistent file, or if the lstat call fails for
* any reason, `undefined` is returned. Otherwise the updated Path object is
* returned.
*
* Results are cached, and thus may be out of date if the filesystem is
* mutated.
*/
async lstat() {
if ((this.#type & ENOENT) === 0) {
try {
this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
return this;
}
catch (er) {
this.#lstatFail(er.code);
}
}
}
/**
* synchronous {@link PathBase.lstat}
*/
lstatSync() {
if ((this.#type & ENOENT) === 0) {
try {
this.#applyStat(this.#fs.lstatSync(this.fullpath()));
return this;
}
catch (er) {
this.#lstatFail(er.code);
}
}
}
#applyStat(st) {
const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
this.#atime = atime;
this.#atimeMs = atimeMs;
this.#birthtime = birthtime;
this.#birthtimeMs = birthtimeMs;
this.#blksize = blksize;
this.#blocks = blocks;
this.#ctime = ctime;
this.#ctimeMs = ctimeMs;
this.#dev = dev;
this.#gid = gid;
this.#ino = ino;
this.#mode = mode;
this.#mtime = mtime;
this.#mtimeMs = mtimeMs;
this.#nlink = nlink;
this.#rdev = rdev;
this.#size = size;
this.#uid = uid;
const ifmt = entToType(st);
// retain any other flags, but set the ifmt
this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
this.#type |= ENOTDIR;
}
}
#onReaddirCB = [];
#readdirCBInFlight = false;
#callOnReaddirCB(children) {
this.#readdirCBInFlight = false;
const cbs = this.#onReaddirCB.slice();
this.#onReaddirCB.length = 0;
cbs.forEach(cb => cb(null, children));
}
/**
* Standard node-style callback interface to get list of directory entries.
*
* If the Path cannot or does not contain any children, then an empty array
* is returned.
*
* Results are cached, and thus may be out of date if the filesystem is
* mutated.
*
* @param cb The callback called with (er, entries). Note that the `er`
* param is somewhat extraneous, as all readdir() errors are handled and
* simply result in an empty set of entries being returned.
* @param allowZalgo Boolean indicating that immediately known results should
* *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
* zalgo at your peril, the dark pony lord is devious and unforgiving.
*/
readdirCB(cb, allowZalgo = false) {
if (!this.canReaddir()) {
if (allowZalgo)
cb(null, []);
else
queueMicrotask(() => cb(null, []));
return;
}
const children = this.children();
if (this.calledReaddir()) {
const c = children.slice(0, children.provisional);
if (allowZalgo)
cb(null, c);
else
queueMicrotask(() => cb(null, c));
return;
}
// don't have to worry about zalgo at this point.
this.#onReaddirCB.push(cb);
if (this.#readdirCBInFlight) {
return;
}
this.#readdirCBInFlight = true;
// else read the directory, fill up children
// de-provisionalize any provisional children.
const fullpath = this.fullpath();
this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
if (er) {
this.#readdirFail(er.code);
children.provisional = 0;
}
else {
// if we didn't get an error, we always get entries.
//@ts-ignore
for (const e of entries) {
this.#readdirAddChild(e, children);
}
this.#readdirSuccess(children);
}
this.#callOnReaddirCB(children.slice(0, children.provisional));
return;
});
}
#asyncReaddirInFlight;
/**
* Return an array of known child entries.
*
* If the Path cannot or does not contain any children, then an empty array
* is returned.
*
* Results are cached, and thus may be out of date if the filesystem is
* mutated.
*/
async readdir() {
if (!this.canReaddir()) {
return [];
}
const children = this.children();
if (this.calledReaddir()) {
return children.slice(0, children.provisional);
}
// else read the directory, fill up children
// de-provisionalize any provisional children.
const fullpath = this.fullpath();
if (this.#asyncReaddirInFlight) {
await this.#asyncReaddirInFlight;
}
else {
/* c8 ignore start */
let resolve = () => { };
/* c8 ignore stop */
this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
try {
for (const e of await this.#fs.promises.readdir(fullpath, {
withFileTypes: true,
})) {
this.#readdirAddChild(e, children);
}
this.#readdirSuccess(children);
}
catch (er) {
this.#readdirFail(er.code);
children.provisional = 0;
}
this.#asyncReaddirInFlight = undefined;
resolve();
}
return children.slice(0, children.provisional);
}
/**
* synchronous {@link PathBase.readdir}
*/
readdirSync() {
if (!this.canReaddir()) {
return [];
}
const children = this.children();
if (this.calledReaddir()) {
return children.slice(0, children.provisional);
}
// else read the directory, fill up children
// de-provisionalize any provisional children.
const fullpath = this.fullpath();
try {
for (const e of this.#fs.readdirSync(fullpath, {
withFileTypes: true,
})) {
this.#readdirAddChild(e, children);
}
this.#readdirSuccess(children);
}
catch (er) {
this.#readdirFail(er.code);
children.provisional = 0;
}
return children.slice(0, children.provisional);
}
canReaddir() {
if (this.#type & ENOCHILD)
return false;
const ifmt = IFMT & this.#type;
// we always set ENOTDIR when setting IFMT, so should be impossible
/* c8 ignore start */
if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
return false;
}
/* c8 ignore stop */
return true;
}
shouldWalk(dirs, walkFilter) {
return ((this.#type & IFDIR) === IFDIR &&
!(this.#type & ENOCHILD) &&
!dirs.has(this) &&
(!walkFilter || walkFilter(this)));
}
/**
* Return the Path object corresponding to path as resolved
* by realpath(3).
*
* If the realpath call fails for any reason, `undefined` is returned.
*
* Result is cached, and thus may be outdated if the filesystem is mutated.
* On success, returns a Path object.
*/
async realpath() {
if (this.#realpath)
return this.#realpath;
if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
return undefined;
try {
const rp = await this.#fs.promises.realpath(this.fullpath());
return (this.#realpath = this.resolve(rp));
}
catch (_) {
this.#markENOREALPATH();
}
}
/**
* Synchronous {@link realpath}
*/
realpathSync() {
if (this.#realpath)
return this.#realpath;
if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
return undefined;
try {
const rp = this.#fs.realpathSync(this.fullpath());
return (this.#realpath = this.resolve(rp));
}
catch (_) {
this.#markENOREALPATH();
}
}
/**
* Internal method to mark this Path object as the scurry cwd,
* called by {@link PathScurry#chdir}
*
* @internal
*/
[setAsCwd](oldCwd) {
if (oldCwd === this)
return;
oldCwd.isCWD = false;
this.isCWD = true;
const changed = new Set([]);
let rp = [];
let p = this;
while (p && p.parent) {
changed.add(p);
p.#relative = rp.join(this.sep);
p.#relativePosix = rp.join('/');
p = p.parent;
rp.push('..');
}
// now un-memoize parents of old cwd
p = oldCwd;
while (p && p.parent && !changed.has(p)) {
p.#relative = undefined;
p.#relativePosix = undefined;
p = p.parent;
}
}
}
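// Illustrative usage sketch (not part of the original bundle): consuming PathBase
// entries through the PathScurry facade defined later in this file. Only methods
// documented on PathBase above (resolve, lstat, readdir, isDirectory, isNamed,
// fullpath, relative) are used; `PathScurry` itself is assumed from the later code.
async function examplePathBaseUsage() {
    const scurry = new PathScurry(process.cwd());
    const entry = scurry.cwd.resolve('package.json');
    const stat = await entry.lstat(); // undefined if the file does not exist
    if (stat) {
        console.log(entry.fullpath(), stat.isFile(), entry.isNamed('package.json'));
    }
    for (const child of await scurry.cwd.readdir()) {
        if (child.isDirectory())
            console.log('dir:', child.relative());
    }
}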
/**
* Path class used on win32 systems
*
* Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
* as the path separator for parsing paths.
*/
class PathWin32 extends PathBase {
/**
* Separator for generating path strings.
*/
sep = '\\';
/**
* Separator for parsing path strings.
*/
splitSep = eitherSep;
/**
* Do not create new Path objects directly. They should always be accessed
* via the PathScurry class or other methods on the Path class.
*
* @internal
*/
constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
super(name, type, root, roots, nocase, children, opts);
}
/**
* @internal
*/
newChild(name, type = UNKNOWN, opts = {}) {
return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
}
/**
* @internal
*/
getRootString(path) {
return win32$1.parse(path).root;
}
/**
* @internal
*/
getRoot(rootPath) {
rootPath = uncToDrive(rootPath.toUpperCase());
if (rootPath === this.root.name) {
return this.root;
}
// ok, not that one, check if it matches another we know about
for (const [compare, root] of Object.entries(this.roots)) {
if (this.sameRoot(rootPath, compare)) {
return (this.roots[rootPath] = root);
}
}
// otherwise, have to create a new one.
return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
}
/**
* @internal
*/
sameRoot(rootPath, compare = this.root.name) {
        // windows can (rarely) have a case-sensitive filesystem, but
// UNC and drive letters are always case-insensitive, and canonically
// represented uppercase.
rootPath = rootPath
.toUpperCase()
.replace(/\//g, '\\')
.replace(uncDriveRegexp, '$1\\');
return rootPath === compare;
}
}
/**
* Path class used on all posix systems.
*
* Uses `'/'` as the path separator.
*/
class PathPosix extends PathBase {
/**
* separator for parsing path strings
*/
splitSep = '/';
/**
* separator for generating path strings
*/
sep = '/';
/**
* Do not create new Path objects directly. They should always be accessed
* via the PathScurry class or other methods on the Path class.
*
* @internal
*/
constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
super(name, type, root, roots, nocase, children, opts);
}
/**
* @internal
*/
getRootString(path) {
return path.startsWith('/') ? '/' : '';
}
/**
* @internal
*/
getRoot(_rootPath) {
return this.root;
}
/**
* @internal
*/
newChild(name, type = UNKNOWN, opts = {}) {
return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
}
}
/**
* The base class for all PathScurry classes, providing the interface for path
* resolution and filesystem operations.
*
* Typically, you should *not* instantiate this class directly, but rather one
* of the platform-specific classes, or the exported {@link PathScurry} which
* defaults to the current platform.
*/
class PathScurryBase {
/**
* The root Path entry for the current working directory of this Scurry
*/
root;
/**
* The string path for the root of this Scurry's current working directory
*/
rootPath;
/**
* A collection of all roots encountered, referenced by rootPath
*/
roots;
/**
* The Path entry corresponding to this PathScurry's current working directory.
*/
cwd;
#resolveCache;
#resolvePosixCache;
#children;
/**
* Perform path comparisons case-insensitively.
*
* Defaults true on Darwin and Windows systems, false elsewhere.
*/
nocase;
#fs;
/**
* This class should not be instantiated directly.
*
* Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
*
* @internal
*/
constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
this.#fs = fsFromOption(fs);
if (cwd instanceof URL || cwd.startsWith('file://')) {
cwd = fileURLToPath(cwd);
}
// resolve and split root, and then add to the store.
// this is the only time we call path.resolve()
const cwdPath = pathImpl.resolve(cwd);
this.roots = Object.create(null);
this.rootPath = this.parseRootPath(cwdPath);
this.#resolveCache = new ResolveCache();
this.#resolvePosixCache = new ResolveCache();
this.#children = new ChildrenCache(childrenCacheSize);
const split = cwdPath.substring(this.rootPath.length).split(sep);
// resolve('/') leaves '', splits to [''], we don't want that.
if (split.length === 1 && !split[0]) {
split.pop();
}
/* c8 ignore start */
if (nocase === undefined) {
throw new TypeError('must provide nocase setting to PathScurryBase ctor');
}
/* c8 ignore stop */
this.nocase = nocase;
this.root = this.newRoot(this.#fs);
this.roots[this.rootPath] = this.root;
let prev = this.root;
let len = split.length - 1;
const joinSep = pathImpl.sep;
let abs = this.rootPath;
let sawFirst = false;
for (const part of split) {
const l = len--;
prev = prev.child(part, {
relative: new Array(l).fill('..').join(joinSep),
relativePosix: new Array(l).fill('..').join('/'),
fullpath: (abs += (sawFirst ? '' : joinSep) + part),
});
sawFirst = true;
}
this.cwd = prev;
}
/**
* Get the depth of a provided path, string, or the cwd
*/
depth(path = this.cwd) {
if (typeof path === 'string') {
path = this.cwd.resolve(path);
}
return path.depth();
}
/**
* Return the cache of child entries. Exposed so subclasses can create
* child Path objects in a platform-specific way.
*
* @internal
*/
childrenCache() {
return this.#children;
}
/**
* Resolve one or more path strings to a resolved string
*
* Same interface as require('path').resolve.
*
* Much faster than path.resolve() when called multiple times for the same
* path, because the resolved Path objects are cached. Much slower
* otherwise.
*/
resolve(...paths) {
// first figure out the minimum number of paths we have to test
// we always start at cwd, but any absolutes will bump the start
let r = '';
for (let i = paths.length - 1; i >= 0; i--) {
const p = paths[i];
if (!p || p === '.')
continue;
r = r ? `${p}/${r}` : p;
if (this.isAbsolute(p)) {
break;
}
}
const cached = this.#resolveCache.get(r);
if (cached !== undefined) {
return cached;
}
const result = this.cwd.resolve(r).fullpath();
this.#resolveCache.set(r, result);
return result;
}
/**
* Resolve one or more path strings to a resolved string, returning
* the posix path. Identical to .resolve() on posix systems, but on
* windows will return a forward-slash separated UNC path.
*
* Same interface as require('path').resolve.
*
* Much faster than path.resolve() when called multiple times for the same
* path, because the resolved Path objects are cached. Much slower
* otherwise.
*/
resolvePosix(...paths) {
// first figure out the minimum number of paths we have to test
// we always start at cwd, but any absolutes will bump the start
let r = '';
for (let i = paths.length - 1; i >= 0; i--) {
const p = paths[i];
if (!p || p === '.')
continue;
r = r ? `${p}/${r}` : p;
if (this.isAbsolute(p)) {
break;
}
}
const cached = this.#resolvePosixCache.get(r);
if (cached !== undefined) {
return cached;
}
const result = this.cwd.resolve(r).fullpathPosix();
this.#resolvePosixCache.set(r, result);
return result;
}
/**
* find the relative path from the cwd to the supplied path string or entry
*/
relative(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.relative();
}
/**
* find the relative path from the cwd to the supplied path string or
* entry, using / as the path delimiter, even on Windows.
*/
relativePosix(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.relativePosix();
}
/**
* Return the basename for the provided string or Path object
*/
basename(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.name;
}
/**
* Return the dirname for the provided string or Path object
*/
dirname(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return (entry.parent || entry).fullpath();
}
async readdir(entry = this.cwd, opts = {
withFileTypes: true,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes } = opts;
if (!entry.canReaddir()) {
return [];
}
else {
const p = await entry.readdir();
return withFileTypes ? p : p.map(e => e.name);
}
}
readdirSync(entry = this.cwd, opts = {
withFileTypes: true,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true } = opts;
if (!entry.canReaddir()) {
return [];
}
else if (withFileTypes) {
return entry.readdirSync();
}
else {
return entry.readdirSync().map(e => e.name);
}
}
/**
* Call lstat() on the string or Path object, and update all known
* information that can be determined.
*
* Note that unlike `fs.lstat()`, the returned value does not contain some
* information, such as `mode`, `dev`, `nlink`, and `ino`. If that
* information is required, you will need to call `fs.lstat` yourself.
*
* If the Path refers to a nonexistent file, or if the lstat call fails for
* any reason, `undefined` is returned. Otherwise the updated Path object is
* returned.
*
* Results are cached, and thus may be out of date if the filesystem is
* mutated.
*/
async lstat(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.lstat();
}
/**
* synchronous {@link PathScurryBase.lstat}
*/
lstatSync(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.lstatSync();
}
async readlink(entry = this.cwd, { withFileTypes } = {
withFileTypes: false,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
withFileTypes = entry.withFileTypes;
entry = this.cwd;
}
const e = await entry.readlink();
return withFileTypes ? e : e?.fullpath();
}
readlinkSync(entry = this.cwd, { withFileTypes } = {
withFileTypes: false,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
withFileTypes = entry.withFileTypes;
entry = this.cwd;
}
const e = entry.readlinkSync();
return withFileTypes ? e : e?.fullpath();
}
async realpath(entry = this.cwd, { withFileTypes } = {
withFileTypes: false,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
withFileTypes = entry.withFileTypes;
entry = this.cwd;
}
const e = await entry.realpath();
return withFileTypes ? e : e?.fullpath();
}
realpathSync(entry = this.cwd, { withFileTypes } = {
withFileTypes: false,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
withFileTypes = entry.withFileTypes;
entry = this.cwd;
}
const e = entry.realpathSync();
return withFileTypes ? e : e?.fullpath();
}
async walk(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
const results = [];
if (!filter || filter(entry)) {
results.push(withFileTypes ? entry : entry.fullpath());
}
const dirs = new Set();
const walk = (dir, cb) => {
dirs.add(dir);
dir.readdirCB((er, entries) => {
/* c8 ignore start */
if (er) {
return cb(er);
}
/* c8 ignore stop */
let len = entries.length;
if (!len)
return cb();
const next = () => {
if (--len === 0) {
cb();
}
};
for (const e of entries) {
if (!filter || filter(e)) {
results.push(withFileTypes ? e : e.fullpath());
}
if (follow && e.isSymbolicLink()) {
e.realpath()
.then(r => (r?.isUnknown() ? r.lstat() : r))
.then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
}
else {
if (e.shouldWalk(dirs, walkFilter)) {
walk(e, next);
}
else {
next();
}
}
}
}, true); // zalgooooooo
};
const start = entry;
return new Promise((res, rej) => {
walk(start, er => {
/* c8 ignore start */
if (er)
return rej(er);
/* c8 ignore stop */
res(results);
});
});
}
walkSync(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
const results = [];
if (!filter || filter(entry)) {
results.push(withFileTypes ? entry : entry.fullpath());
}
const dirs = new Set([entry]);
for (const dir of dirs) {
const entries = dir.readdirSync();
for (const e of entries) {
if (!filter || filter(e)) {
results.push(withFileTypes ? e : e.fullpath());
}
let r = e;
if (e.isSymbolicLink()) {
if (!(follow && (r = e.realpathSync())))
continue;
if (r.isUnknown())
r.lstatSync();
}
if (r.shouldWalk(dirs, walkFilter)) {
dirs.add(r);
}
}
}
return results;
}
/**
* Support for `for await`
*
* Alias for {@link PathScurryBase.iterate}
*
* Note: As of Node 19, this is very slow compared to other methods of
* walking. Consider using {@link PathScurryBase.stream} if memory overhead
* and backpressure are concerns, or {@link PathScurryBase.walk} if not.
*/
[Symbol.asyncIterator]() {
return this.iterate();
}
iterate(entry = this.cwd, options = {}) {
// iterating async over the stream is significantly more performant,
// especially in the warm-cache scenario, because it buffers up directory
// entries in the background instead of waiting for a yield for each one.
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
options = entry;
entry = this.cwd;
}
return this.stream(entry, options)[Symbol.asyncIterator]();
}
/**
* Iterating over a PathScurry performs a synchronous walk.
*
* Alias for {@link PathScurryBase.iterateSync}
*/
[Symbol.iterator]() {
return this.iterateSync();
}
*iterateSync(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
if (!filter || filter(entry)) {
yield withFileTypes ? entry : entry.fullpath();
}
const dirs = new Set([entry]);
for (const dir of dirs) {
const entries = dir.readdirSync();
for (const e of entries) {
if (!filter || filter(e)) {
yield withFileTypes ? e : e.fullpath();
}
let r = e;
if (e.isSymbolicLink()) {
if (!(follow && (r = e.realpathSync())))
continue;
if (r.isUnknown())
r.lstatSync();
}
if (r.shouldWalk(dirs, walkFilter)) {
dirs.add(r);
}
}
}
}
stream(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
const results = new Minipass({ objectMode: true });
if (!filter || filter(entry)) {
results.write(withFileTypes ? entry : entry.fullpath());
}
const dirs = new Set();
const queue = [entry];
let processing = 0;
const process = () => {
let paused = false;
while (!paused) {
const dir = queue.shift();
if (!dir) {
if (processing === 0)
results.end();
return;
}
processing++;
dirs.add(dir);
const onReaddir = (er, entries, didRealpaths = false) => {
/* c8 ignore start */
if (er)
return results.emit('error', er);
/* c8 ignore stop */
if (follow && !didRealpaths) {
const promises = [];
for (const e of entries) {
if (e.isSymbolicLink()) {
promises.push(e
.realpath()
.then((r) => r?.isUnknown() ? r.lstat() : r));
}
}
if (promises.length) {
Promise.all(promises).then(() => onReaddir(null, entries, true));
return;
}
}
for (const e of entries) {
if (e && (!filter || filter(e))) {
if (!results.write(withFileTypes ? e : e.fullpath())) {
paused = true;
}
}
}
processing--;
for (const e of entries) {
const r = e.realpathCached() || e;
if (r.shouldWalk(dirs, walkFilter)) {
queue.push(r);
}
}
if (paused && !results.flowing) {
results.once('drain', process);
}
else if (!sync) {
process();
}
};
// zalgo containment
let sync = true;
dir.readdirCB(onReaddir, true);
sync = false;
}
};
process();
return results;
}
streamSync(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
const results = new Minipass({ objectMode: true });
const dirs = new Set();
if (!filter || filter(entry)) {
results.write(withFileTypes ? entry : entry.fullpath());
}
const queue = [entry];
let processing = 0;
const process = () => {
let paused = false;
while (!paused) {
const dir = queue.shift();
if (!dir) {
if (processing === 0)
results.end();
return;
}
processing++;
dirs.add(dir);
const entries = dir.readdirSync();
for (const e of entries) {
if (!filter || filter(e)) {
if (!results.write(withFileTypes ? e : e.fullpath())) {
paused = true;
}
}
}
processing--;
for (const e of entries) {
let r = e;
if (e.isSymbolicLink()) {
if (!(follow && (r = e.realpathSync())))
continue;
if (r.isUnknown())
r.lstatSync();
}
if (r.shouldWalk(dirs, walkFilter)) {
queue.push(r);
}
}
}
if (paused && !results.flowing)
results.once('drain', process);
};
process();
return results;
}
chdir(path = this.cwd) {
const oldCwd = this.cwd;
this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
this.cwd[setAsCwd](oldCwd);
}
}
/**
* Windows implementation of {@link PathScurryBase}
*
* Defaults to case-insensitive, uses `'\\'` to generate path strings. Uses
* {@link PathWin32} for Path objects.
*/
class PathScurryWin32 extends PathScurryBase {
/**
* separator for generating path strings
*/
sep = '\\';
constructor(cwd = process.cwd(), opts = {}) {
const { nocase = true } = opts;
super(cwd, win32$1, '\\', { ...opts, nocase });
this.nocase = nocase;
for (let p = this.cwd; p; p = p.parent) {
p.nocase = this.nocase;
}
}
/**
* @internal
*/
parseRootPath(dir) {
// if the path starts with a single separator, it's not a UNC, and we'll
// just get separator as the root, and driveFromUNC will return \
// In that case, mount \ on the root from the cwd.
return win32$1.parse(dir).root.toUpperCase();
}
/**
* @internal
*/
newRoot(fs) {
return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
}
/**
* Return true if the provided path string is an absolute path
*/
isAbsolute(p) {
return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
}
}
/**
* {@link PathScurryBase} implementation for all posix systems other than Darwin.
*
* Defaults to case-sensitive matching, uses `'/'` to generate path strings.
*
* Uses {@link PathPosix} for Path objects.
*/
class PathScurryPosix extends PathScurryBase {
/**
* separator for generating path strings
*/
sep = '/';
constructor(cwd = process.cwd(), opts = {}) {
const { nocase = false } = opts;
super(cwd, posix$1, '/', { ...opts, nocase });
this.nocase = nocase;
}
/**
* @internal
*/
parseRootPath(_dir) {
return '/';
}
/**
* @internal
*/
newRoot(fs) {
return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
}
/**
* Return true if the provided path string is an absolute path
*/
isAbsolute(p) {
return p.startsWith('/');
}
}
/**
* {@link PathScurryBase} implementation for Darwin (macOS) systems.
*
* Defaults to case-insensitive matching, uses `'/'` for generating path
* strings.
*
* Uses {@link PathPosix} for Path objects.
*/
class PathScurryDarwin extends PathScurryPosix {
constructor(cwd = process.cwd(), opts = {}) {
const { nocase = true } = opts;
super(cwd, { ...opts, nocase });
}
}
/**
* Default {@link PathBase} implementation for the current platform.
*
* {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
*/
process.platform === 'win32' ? PathWin32 : PathPosix;
/**
* Default {@link PathScurryBase} implementation for the current platform.
*
* {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
* Darwin (macOS) systems, {@link PathScurryPosix} on all others.
*/
const PathScurry = process.platform === 'win32' ? PathScurryWin32
: process.platform === 'darwin' ? PathScurryDarwin
: PathScurryPosix;
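// Illustrative sketch (not invoked): a minimal example of how a PathScurry
// instance is typically used. The classes above cache resolved Path objects and
// directory reads, so repeated resolve()/walk() calls over the same tree avoid
// redundant fs work. The project directory below is hypothetical.
async function examplePathScurryUsage() {
    const scurry = new PathScurry('/some/project');
    // resolve() behaves like path.resolve(), but caches the resolved Path objects
    const abs = scurry.resolve('src', '../src/index.ts');
    // relative() gives the path from the cwd to the entry
    const rel = scurry.relative(abs);
    // walk() returns Path entries by default; withFileTypes:false yields strings
    const entries = await scurry.walk('src', { withFileTypes: false });
    return { abs, rel, entries };
}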
// this is just a very light wrapper around 2 arrays with an offset index
const isPatternList = (pl) => pl.length >= 1;
const isGlobList = (gl) => gl.length >= 1;
/**
* An immutable-ish view on an array of glob parts and their parsed
* results
*/
class Pattern {
#patternList;
#globList;
#index;
length;
#platform;
#rest;
#globString;
#isDrive;
#isUNC;
#isAbsolute;
#followGlobstar = true;
constructor(patternList, globList, index, platform) {
if (!isPatternList(patternList)) {
throw new TypeError('empty pattern list');
}
if (!isGlobList(globList)) {
throw new TypeError('empty glob list');
}
if (globList.length !== patternList.length) {
throw new TypeError('mismatched pattern list and glob list lengths');
}
this.length = patternList.length;
if (index < 0 || index >= this.length) {
throw new TypeError('index out of range');
}
this.#patternList = patternList;
this.#globList = globList;
this.#index = index;
this.#platform = platform;
// normalize root entries of absolute patterns on initial creation.
if (this.#index === 0) {
// c: => ['c:/']
// C:/ => ['C:/']
// C:/x => ['C:/', 'x']
// //host/share => ['//host/share/']
// //host/share/ => ['//host/share/']
// //host/share/x => ['//host/share/', 'x']
// /etc => ['/', 'etc']
// / => ['/']
if (this.isUNC()) {
// '' / '' / 'host' / 'share'
const [p0, p1, p2, p3, ...prest] = this.#patternList;
const [g0, g1, g2, g3, ...grest] = this.#globList;
if (prest[0] === '') {
// ends in /
prest.shift();
grest.shift();
}
const p = [p0, p1, p2, p3, ''].join('/');
const g = [g0, g1, g2, g3, ''].join('/');
this.#patternList = [p, ...prest];
this.#globList = [g, ...grest];
this.length = this.#patternList.length;
}
else if (this.isDrive() || this.isAbsolute()) {
const [p1, ...prest] = this.#patternList;
const [g1, ...grest] = this.#globList;
if (prest[0] === '') {
// ends in /
prest.shift();
grest.shift();
}
const p = p1 + '/';
const g = g1 + '/';
this.#patternList = [p, ...prest];
this.#globList = [g, ...grest];
this.length = this.#patternList.length;
}
}
}
/**
* The first entry in the parsed list of patterns
*/
pattern() {
return this.#patternList[this.#index];
}
/**
* true if pattern() returns a string
*/
isString() {
return typeof this.#patternList[this.#index] === 'string';
}
/**
* true if pattern() returns GLOBSTAR
*/
isGlobstar() {
return this.#patternList[this.#index] === GLOBSTAR$2;
}
/**
* true if pattern() returns a regexp
*/
isRegExp() {
return this.#patternList[this.#index] instanceof RegExp;
}
/**
* The /-joined set of glob parts that make up this pattern
*/
globString() {
return (this.#globString =
this.#globString ||
(this.#index === 0 ?
this.isAbsolute() ?
this.#globList[0] + this.#globList.slice(1).join('/')
: this.#globList.join('/')
: this.#globList.slice(this.#index).join('/')));
}
/**
* true if there are more pattern parts after this one
*/
hasMore() {
return this.length > this.#index + 1;
}
/**
* The rest of the pattern after this part, or null if this is the end
*/
rest() {
if (this.#rest !== undefined)
return this.#rest;
if (!this.hasMore())
return (this.#rest = null);
this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
this.#rest.#isAbsolute = this.#isAbsolute;
this.#rest.#isUNC = this.#isUNC;
this.#rest.#isDrive = this.#isDrive;
return this.#rest;
}
/**
* true if the pattern represents a //unc/path/ on windows
*/
isUNC() {
const pl = this.#patternList;
return this.#isUNC !== undefined ?
this.#isUNC
: (this.#isUNC =
this.#platform === 'win32' &&
this.#index === 0 &&
pl[0] === '' &&
pl[1] === '' &&
typeof pl[2] === 'string' &&
!!pl[2] &&
typeof pl[3] === 'string' &&
!!pl[3]);
}
// pattern like C:/...
// split = ['C:', ...]
// XXX: would be nice to handle patterns like `c:*` to test the cwd
// in c: for *, but I don't know of a way to even figure out what that
// cwd is without actually chdir'ing into it?
/**
* True if the pattern starts with a drive letter on Windows
*/
isDrive() {
const pl = this.#patternList;
return this.#isDrive !== undefined ?
this.#isDrive
: (this.#isDrive =
this.#platform === 'win32' &&
this.#index === 0 &&
this.length > 1 &&
typeof pl[0] === 'string' &&
/^[a-z]:$/i.test(pl[0]));
}
// pattern = '/' or '/...' or '/x/...'
// split = ['', ''] or ['', ...] or ['', 'x', ...]
// Drive and UNC both considered absolute on windows
/**
* True if the pattern is rooted on an absolute path
*/
isAbsolute() {
const pl = this.#patternList;
return this.#isAbsolute !== undefined ?
this.#isAbsolute
: (this.#isAbsolute =
(pl[0] === '' && pl.length > 1) ||
this.isDrive() ||
this.isUNC());
}
/**
* consume the root of the pattern, and return it
*/
root() {
const p = this.#patternList[0];
return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
p
: '';
}
/**
* Check to see if the current globstar pattern is allowed to follow
* a symbolic link.
*/
checkFollowGlobstar() {
return !(this.#index === 0 ||
!this.isGlobstar() ||
!this.#followGlobstar);
}
/**
* Mark that the current globstar pattern is following a symbolic link
*/
markFollowGlobstar() {
if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
return false;
this.#followGlobstar = false;
return true;
}
}
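// Illustrative sketch (not invoked): Pattern instances are built from a
// Minimatch parse, pairing one row of .set with the matching row of .globParts,
// the same way Ignore.add() and the Glob constructor below do. The glob string
// here is only an example, and the exact part shapes depend on minimatch.
function examplePatternFromMinimatch() {
    const mm = new Minimatch('src/**/*.js', { optimizationLevel: 2, platform: 'linux' });
    const first = new Pattern(mm.set[0], mm.globParts[0], 0, 'linux');
    // walk the parsed parts: typically 'src', then the GLOBSTAR token,
    // then a RegExp for '*.js'
    const parts = [];
    for (let p = first; p; p = p.rest()) {
        parts.push(p.isGlobstar() ? '**' : p.pattern());
    }
    return { globString: first.globString(), parts };
}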
// give it a pattern, and it'll be able to tell you if
// a given path should be ignored.
// Ignoring a path ignores its children if the pattern ends in /**
// Ignores are always parsed in dot:true mode
const defaultPlatform$1 = (typeof process === 'object' &&
process &&
typeof process.platform === 'string') ?
process.platform
: 'linux';
/**
* Class used to process ignored patterns
*/
class Ignore {
relative;
relativeChildren;
absolute;
absoluteChildren;
platform;
mmopts;
constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform$1, }) {
this.relative = [];
this.absolute = [];
this.relativeChildren = [];
this.absoluteChildren = [];
this.platform = platform;
this.mmopts = {
dot: true,
nobrace,
nocase,
noext,
noglobstar,
optimizationLevel: 2,
platform,
nocomment: true,
nonegate: true,
};
for (const ign of ignored)
this.add(ign);
}
add(ign) {
// this is a little weird, but it gives us a clean set of optimized
// minimatch matchers, without getting tripped up if one of them
// ends in /** inside a brace section, and it's only inefficient at
// the start of the walk, not along it.
// It'd be nice if the Pattern class just had a .test() method, but
// handling globstars is a bit of a pita, and that code already lives
// in minimatch anyway.
// Another way would be if maybe Minimatch could take its set/globParts
// as an option, and then we could at least just use Pattern to test
// for absolute-ness.
// Yet another way, Minimatch could take an array of glob strings, and
// a cwd option, and do the right thing.
const mm = new Minimatch(ign, this.mmopts);
for (let i = 0; i < mm.set.length; i++) {
const parsed = mm.set[i];
const globParts = mm.globParts[i];
/* c8 ignore start */
if (!parsed || !globParts) {
throw new Error('invalid pattern object');
}
// strip off leading ./ portions
// https://github.com/isaacs/node-glob/issues/570
while (parsed[0] === '.' && globParts[0] === '.') {
parsed.shift();
globParts.shift();
}
/* c8 ignore stop */
const p = new Pattern(parsed, globParts, 0, this.platform);
const m = new Minimatch(p.globString(), this.mmopts);
const children = globParts[globParts.length - 1] === '**';
const absolute = p.isAbsolute();
if (absolute)
this.absolute.push(m);
else
this.relative.push(m);
if (children) {
if (absolute)
this.absoluteChildren.push(m);
else
this.relativeChildren.push(m);
}
}
}
ignored(p) {
const fullpath = p.fullpath();
const fullpaths = `${fullpath}/`;
const relative = p.relative() || '.';
const relatives = `${relative}/`;
for (const m of this.relative) {
if (m.match(relative) || m.match(relatives))
return true;
}
for (const m of this.absolute) {
if (m.match(fullpath) || m.match(fullpaths))
return true;
}
return false;
}
childrenIgnored(p) {
const fullpath = p.fullpath() + '/';
const relative = (p.relative() || '.') + '/';
for (const m of this.relativeChildren) {
if (m.match(relative))
return true;
}
for (const m of this.absoluteChildren) {
if (m.match(fullpath))
return true;
}
return false;
}
}
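// Illustrative sketch (not invoked): Ignore compiles each ignore glob into
// Minimatch matchers (always in dot:true mode) and checks both the relative and
// absolute forms of a Path. The project directory and pattern are hypothetical.
function exampleIgnoreUsage() {
    const scurry = new PathScurryPosix('/some/project');
    const ign = new Ignore(['node_modules/**'], { platform: 'linux' });
    const dep = scurry.cwd.resolve('node_modules/foo/index.js');
    // a pattern ending in /** also marks the directory's children as ignored
    return {
        ignored: ign.ignored(dep),
        childrenIgnored: ign.childrenIgnored(scurry.cwd.resolve('node_modules/foo')),
    };
}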
// synchronous utility for filtering entries and calculating subwalks
/**
* A cache of which patterns have been processed for a given Path
*/
class HasWalkedCache {
store;
constructor(store = new Map()) {
this.store = store;
}
copy() {
return new HasWalkedCache(new Map(this.store));
}
hasWalked(target, pattern) {
return this.store.get(target.fullpath())?.has(pattern.globString());
}
storeWalked(target, pattern) {
const fullpath = target.fullpath();
const cached = this.store.get(fullpath);
if (cached)
cached.add(pattern.globString());
else
this.store.set(fullpath, new Set([pattern.globString()]));
}
}
/**
* A record of which paths have been matched in a given walk step,
* and whether they are only considered a match if they are a directory,
* and whether their absolute or relative path should be returned.
*/
class MatchRecord {
store = new Map();
add(target, absolute, ifDir) {
const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
const current = this.store.get(target);
this.store.set(target, current === undefined ? n : n & current);
}
// match, absolute, ifdir
entries() {
return [...this.store.entries()].map(([path, n]) => [
path,
!!(n & 2),
!!(n & 1),
]);
}
}
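// Illustrative sketch (not invoked): MatchRecord packs the (absolute, ifDir)
// flags for each matched target into a 2-bit number, AND-ing repeated matches
// together so a flag only survives if every match for that target agreed on it.
// Keys are normally Path objects; a plain object stands in here.
function exampleMatchRecord() {
    const rec = new MatchRecord();
    const target = { name: 'example' };
    rec.add(target, true, true);  // absolute:1, ifDir:1 -> stored as 0b11
    rec.add(target, true, false); // absolute:1, ifDir:0 -> ANDed down to 0b10
    // entries() yields [target, absolute, ifDir] tuples: here [target, true, false]
    return rec.entries();
}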
/**
* A collection of patterns that must be processed in a subsequent step
* for a given path.
*/
class SubWalks {
store = new Map();
add(target, pattern) {
if (!target.canReaddir()) {
return;
}
const subs = this.store.get(target);
if (subs) {
if (!subs.find(p => p.globString() === pattern.globString())) {
subs.push(pattern);
}
}
else
this.store.set(target, [pattern]);
}
get(target) {
const subs = this.store.get(target);
/* c8 ignore start */
if (!subs) {
throw new Error('attempting to walk unknown path');
}
/* c8 ignore stop */
return subs;
}
entries() {
return this.keys().map(k => [k, this.store.get(k)]);
}
keys() {
return [...this.store.keys()].filter(t => t.canReaddir());
}
}
/**
* The class that processes patterns for a given path.
*
* Handles child entry filtering, and determining whether a path's
* directory contents must be read.
*/
class Processor {
hasWalkedCache;
matches = new MatchRecord();
subwalks = new SubWalks();
patterns;
follow;
dot;
opts;
constructor(opts, hasWalkedCache) {
this.opts = opts;
this.follow = !!opts.follow;
this.dot = !!opts.dot;
this.hasWalkedCache =
hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
}
processPatterns(target, patterns) {
this.patterns = patterns;
const processingSet = patterns.map(p => [target, p]);
// map of paths to the magic-starting subwalks they need to walk
// first item in patterns is the filter
for (let [t, pattern] of processingSet) {
this.hasWalkedCache.storeWalked(t, pattern);
const root = pattern.root();
const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
// start absolute patterns at root
if (root) {
t = t.resolve(root === '/' && this.opts.root !== undefined ?
this.opts.root
: root);
const rest = pattern.rest();
if (!rest) {
this.matches.add(t, true, false);
continue;
}
else {
pattern = rest;
}
}
if (t.isENOENT())
continue;
let p;
let rest;
let changed = false;
while (typeof (p = pattern.pattern()) === 'string' &&
(rest = pattern.rest())) {
const c = t.resolve(p);
t = c;
pattern = rest;
changed = true;
}
p = pattern.pattern();
rest = pattern.rest();
if (changed) {
if (this.hasWalkedCache.hasWalked(t, pattern))
continue;
this.hasWalkedCache.storeWalked(t, pattern);
}
// now we have either a final string for a known entry,
// more strings for an unknown entry,
// or a pattern starting with magic, mounted on t.
if (typeof p === 'string') {
// must not be final entry, otherwise we would have
// concatenated it earlier.
const ifDir = p === '..' || p === '' || p === '.';
this.matches.add(t.resolve(p), absolute, ifDir);
continue;
}
else if (p === GLOBSTAR$2) {
// if no rest, match and subwalk pattern
// if rest, process rest and subwalk pattern
// if it's a symlink, but we didn't get here by way of a
// globstar match (meaning it's the first time THIS globstar
// has traversed a symlink), then we follow it. Otherwise, stop.
if (!t.isSymbolicLink() ||
this.follow ||
pattern.checkFollowGlobstar()) {
this.subwalks.add(t, pattern);
}
const rp = rest?.pattern();
const rrest = rest?.rest();
if (!rest || ((rp === '' || rp === '.') && !rrest)) {
// only HAS to be a dir if it ends in **/ or **/.
// but ending in ** will match files as well.
this.matches.add(t, absolute, rp === '' || rp === '.');
}
else {
if (rp === '..') {
// this would mean you're matching **/.. at the fs root,
// and no thanks, I'm not gonna test that specific case.
/* c8 ignore start */
const tp = t.parent || t;
/* c8 ignore stop */
if (!rrest)
this.matches.add(tp, absolute, true);
else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
this.subwalks.add(tp, rrest);
}
}
}
}
else if (p instanceof RegExp) {
this.subwalks.add(t, pattern);
}
}
return this;
}
subwalkTargets() {
return this.subwalks.keys();
}
child() {
return new Processor(this.opts, this.hasWalkedCache);
}
// return a new Processor containing the subwalks for each
// child entry, and a set of matches, and
// a hasWalkedCache that's a copy of this one
// then we're going to call
filterEntries(parent, entries) {
const patterns = this.subwalks.get(parent);
// put matches and entry walks into the results processor
const results = this.child();
for (const e of entries) {
for (const pattern of patterns) {
const absolute = pattern.isAbsolute();
const p = pattern.pattern();
const rest = pattern.rest();
if (p === GLOBSTAR$2) {
results.testGlobstar(e, pattern, rest, absolute);
}
else if (p instanceof RegExp) {
results.testRegExp(e, p, rest, absolute);
}
else {
results.testString(e, p, rest, absolute);
}
}
}
return results;
}
testGlobstar(e, pattern, rest, absolute) {
if (this.dot || !e.name.startsWith('.')) {
if (!pattern.hasMore()) {
this.matches.add(e, absolute, false);
}
if (e.canReaddir()) {
// if we're in follow mode or it's not a symlink, just keep
// testing the same pattern. If there's more after the globstar,
// then this symlink consumes the globstar. If not, then we can
// follow at most ONE symlink along the way, so we mark it, which
// also checks to ensure that it wasn't already marked.
if (this.follow || !e.isSymbolicLink()) {
this.subwalks.add(e, pattern);
}
else if (e.isSymbolicLink()) {
if (rest && pattern.checkFollowGlobstar()) {
this.subwalks.add(e, rest);
}
else if (pattern.markFollowGlobstar()) {
this.subwalks.add(e, pattern);
}
}
}
}
// if the NEXT thing matches this entry, then also add
// the rest.
if (rest) {
const rp = rest.pattern();
if (typeof rp === 'string' &&
// dots and empty were handled already
rp !== '..' &&
rp !== '' &&
rp !== '.') {
this.testString(e, rp, rest.rest(), absolute);
}
else if (rp === '..') {
/* c8 ignore start */
const ep = e.parent || e;
/* c8 ignore stop */
this.subwalks.add(ep, rest);
}
else if (rp instanceof RegExp) {
this.testRegExp(e, rp, rest.rest(), absolute);
}
}
}
testRegExp(e, p, rest, absolute) {
if (!p.test(e.name))
return;
if (!rest) {
this.matches.add(e, absolute, false);
}
else {
this.subwalks.add(e, rest);
}
}
testString(e, p, rest, absolute) {
// should never happen?
if (!e.isNamed(p))
return;
if (!rest) {
this.matches.add(e, absolute, false);
}
else {
this.subwalks.add(e, rest);
}
}
}
/**
* Single-use utility classes to provide functionality to the {@link Glob}
* methods.
*
* @module
*/
const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts)
: Array.isArray(ignore) ? new Ignore(ignore, opts)
: ignore;
/**
* basic walking utilities that all the glob walker types use
*/
class GlobUtil {
path;
patterns;
opts;
seen = new Set();
paused = false;
aborted = false;
#onResume = [];
#ignore;
#sep;
signal;
maxDepth;
includeChildMatches;
constructor(patterns, path, opts) {
this.patterns = patterns;
this.path = path;
this.opts = opts;
this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
this.includeChildMatches = opts.includeChildMatches !== false;
if (opts.ignore || !this.includeChildMatches) {
this.#ignore = makeIgnore(opts.ignore ?? [], opts);
if (!this.includeChildMatches &&
typeof this.#ignore.add !== 'function') {
const m = 'cannot ignore child matches, ignore lacks add() method.';
throw new Error(m);
}
}
// ignore, always set with maxDepth, but it's optional on the
// GlobOptions type
/* c8 ignore start */
this.maxDepth = opts.maxDepth || Infinity;
/* c8 ignore stop */
if (opts.signal) {
this.signal = opts.signal;
this.signal.addEventListener('abort', () => {
this.#onResume.length = 0;
});
}
}
#ignored(path) {
return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
}
#childrenIgnored(path) {
return !!this.#ignore?.childrenIgnored?.(path);
}
// backpressure mechanism
pause() {
this.paused = true;
}
resume() {
/* c8 ignore start */
if (this.signal?.aborted)
return;
/* c8 ignore stop */
this.paused = false;
let fn = undefined;
while (!this.paused && (fn = this.#onResume.shift())) {
fn();
}
}
onResume(fn) {
if (this.signal?.aborted)
return;
/* c8 ignore start */
if (!this.paused) {
fn();
}
else {
/* c8 ignore stop */
this.#onResume.push(fn);
}
}
// do the requisite realpath/stat checking, and return the path
// to add or undefined to filter it out.
async matchCheck(e, ifDir) {
if (ifDir && this.opts.nodir)
return undefined;
let rpc;
if (this.opts.realpath) {
rpc = e.realpathCached() || (await e.realpath());
if (!rpc)
return undefined;
e = rpc;
}
const needStat = e.isUnknown() || this.opts.stat;
const s = needStat ? await e.lstat() : e;
if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
const target = await s.realpath();
/* c8 ignore start */
if (target && (target.isUnknown() || this.opts.stat)) {
await target.lstat();
}
/* c8 ignore stop */
}
return this.matchCheckTest(s, ifDir);
}
matchCheckTest(e, ifDir) {
return (e &&
(this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
(!ifDir || e.canReaddir()) &&
(!this.opts.nodir || !e.isDirectory()) &&
(!this.opts.nodir ||
!this.opts.follow ||
!e.isSymbolicLink() ||
!e.realpathCached()?.isDirectory()) &&
!this.#ignored(e)) ?
e
: undefined;
}
matchCheckSync(e, ifDir) {
if (ifDir && this.opts.nodir)
return undefined;
let rpc;
if (this.opts.realpath) {
rpc = e.realpathCached() || e.realpathSync();
if (!rpc)
return undefined;
e = rpc;
}
const needStat = e.isUnknown() || this.opts.stat;
const s = needStat ? e.lstatSync() : e;
if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
const target = s.realpathSync();
if (target && (target?.isUnknown() || this.opts.stat)) {
target.lstatSync();
}
}
return this.matchCheckTest(s, ifDir);
}
matchFinish(e, absolute) {
if (this.#ignored(e))
return;
// we know we have an ignore if this is false, but TS doesn't
if (!this.includeChildMatches && this.#ignore?.add) {
const ign = `${e.relativePosix()}/**`;
this.#ignore.add(ign);
}
const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
this.seen.add(e);
const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
// ok, we have what we need!
if (this.opts.withFileTypes) {
this.matchEmit(e);
}
else if (abs) {
const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
this.matchEmit(abs + mark);
}
else {
const rel = this.opts.posix ? e.relativePosix() : e.relative();
const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
'.' + this.#sep
: '';
this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
}
}
async match(e, absolute, ifDir) {
const p = await this.matchCheck(e, ifDir);
if (p)
this.matchFinish(p, absolute);
}
matchSync(e, absolute, ifDir) {
const p = this.matchCheckSync(e, ifDir);
if (p)
this.matchFinish(p, absolute);
}
walkCB(target, patterns, cb) {
/* c8 ignore start */
if (this.signal?.aborted)
cb();
/* c8 ignore stop */
this.walkCB2(target, patterns, new Processor(this.opts), cb);
}
walkCB2(target, patterns, processor, cb) {
if (this.#childrenIgnored(target))
return cb();
if (this.signal?.aborted)
cb();
if (this.paused) {
this.onResume(() => this.walkCB2(target, patterns, processor, cb));
return;
}
processor.processPatterns(target, patterns);
// done processing. all of the above is sync, can be abstracted out.
// subwalks is a map of paths to the entry filters they need
// matches is a map of paths to [absolute, ifDir] tuples.
let tasks = 1;
const next = () => {
if (--tasks === 0)
cb();
};
for (const [m, absolute, ifDir] of processor.matches.entries()) {
if (this.#ignored(m))
continue;
tasks++;
this.match(m, absolute, ifDir).then(() => next());
}
for (const t of processor.subwalkTargets()) {
if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
continue;
}
tasks++;
const childrenCached = t.readdirCached();
if (t.calledReaddir())
this.walkCB3(t, childrenCached, processor, next);
else {
t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
}
}
next();
}
walkCB3(target, entries, processor, cb) {
processor = processor.filterEntries(target, entries);
let tasks = 1;
const next = () => {
if (--tasks === 0)
cb();
};
for (const [m, absolute, ifDir] of processor.matches.entries()) {
if (this.#ignored(m))
continue;
tasks++;
this.match(m, absolute, ifDir).then(() => next());
}
for (const [target, patterns] of processor.subwalks.entries()) {
tasks++;
this.walkCB2(target, patterns, processor.child(), next);
}
next();
}
walkCBSync(target, patterns, cb) {
/* c8 ignore start */
if (this.signal?.aborted)
cb();
/* c8 ignore stop */
this.walkCB2Sync(target, patterns, new Processor(this.opts), cb);
}
walkCB2Sync(target, patterns, processor, cb) {
if (this.#childrenIgnored(target))
return cb();
if (this.signal?.aborted)
cb();
if (this.paused) {
this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
return;
}
processor.processPatterns(target, patterns);
// done processing. all of the above is sync, can be abstracted out.
// subwalks is a map of paths to the entry filters they need
// matches is a map of paths to [absolute, ifDir] tuples.
let tasks = 1;
const next = () => {
if (--tasks === 0)
cb();
};
for (const [m, absolute, ifDir] of processor.matches.entries()) {
if (this.#ignored(m))
continue;
this.matchSync(m, absolute, ifDir);
}
for (const t of processor.subwalkTargets()) {
if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
continue;
}
tasks++;
const children = t.readdirSync();
this.walkCB3Sync(t, children, processor, next);
}
next();
}
walkCB3Sync(target, entries, processor, cb) {
processor = processor.filterEntries(target, entries);
let tasks = 1;
const next = () => {
if (--tasks === 0)
cb();
};
for (const [m, absolute, ifDir] of processor.matches.entries()) {
if (this.#ignored(m))
continue;
this.matchSync(m, absolute, ifDir);
}
for (const [target, patterns] of processor.subwalks.entries()) {
tasks++;
this.walkCB2Sync(target, patterns, processor.child(), next);
}
next();
}
}
class GlobWalker extends GlobUtil {
matches = new Set();
constructor(patterns, path, opts) {
super(patterns, path, opts);
}
matchEmit(e) {
this.matches.add(e);
}
async walk() {
if (this.signal?.aborted)
throw this.signal.reason;
if (this.path.isUnknown()) {
await this.path.lstat();
}
await new Promise((res, rej) => {
this.walkCB(this.path, this.patterns, () => {
if (this.signal?.aborted) {
rej(this.signal.reason);
}
else {
res(this.matches);
}
});
});
return this.matches;
}
walkSync() {
if (this.signal?.aborted)
throw this.signal.reason;
if (this.path.isUnknown()) {
this.path.lstatSync();
}
// nothing for the callback to do, because this never pauses
this.walkCBSync(this.path, this.patterns, () => {
if (this.signal?.aborted)
throw this.signal.reason;
});
return this.matches;
}
}
class GlobStream extends GlobUtil {
results;
constructor(patterns, path, opts) {
super(patterns, path, opts);
this.results = new Minipass({
signal: this.signal,
objectMode: true,
});
this.results.on('drain', () => this.resume());
this.results.on('resume', () => this.resume());
}
matchEmit(e) {
this.results.write(e);
if (!this.results.flowing)
this.pause();
}
stream() {
const target = this.path;
if (target.isUnknown()) {
target.lstat().then(() => {
this.walkCB(target, this.patterns, () => this.results.end());
});
}
else {
this.walkCB(target, this.patterns, () => this.results.end());
}
return this.results;
}
streamSync() {
if (this.path.isUnknown()) {
this.path.lstatSync();
}
this.walkCBSync(this.path, this.patterns, () => this.results.end());
return this.results;
}
}
// if no process global, just call it linux.
// so we default to case-sensitive, / separators
const defaultPlatform = (typeof process === 'object' &&
process &&
typeof process.platform === 'string') ?
process.platform
: 'linux';
/**
* An object that can perform glob pattern traversals.
*/
class Glob {
absolute;
cwd;
root;
dot;
dotRelative;
follow;
ignore;
magicalBraces;
mark;
matchBase;
maxDepth;
nobrace;
nocase;
nodir;
noext;
noglobstar;
pattern;
platform;
realpath;
scurry;
stat;
signal;
windowsPathsNoEscape;
withFileTypes;
includeChildMatches;
/**
* The options provided to the constructor.
*/
opts;
/**
* An array of parsed immutable {@link Pattern} objects.
*/
patterns;
/**
* All options are stored as properties on the `Glob` object.
*
* See {@link GlobOptions} for full options descriptions.
*
* Note that a previous `Glob` object can be passed as the
* `GlobOptions` to another `Glob` instantiation to re-use settings
* and caches with a new pattern.
*
* Traversal functions can be called multiple times to run the walk
* again.
*/
constructor(pattern, opts) {
/* c8 ignore start */
if (!opts)
throw new TypeError('glob options required');
/* c8 ignore stop */
this.withFileTypes = !!opts.withFileTypes;
this.signal = opts.signal;
this.follow = !!opts.follow;
this.dot = !!opts.dot;
this.dotRelative = !!opts.dotRelative;
this.nodir = !!opts.nodir;
this.mark = !!opts.mark;
if (!opts.cwd) {
this.cwd = '';
}
else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
opts.cwd = fileURLToPath(opts.cwd);
}
this.cwd = opts.cwd || '';
this.root = opts.root;
this.magicalBraces = !!opts.magicalBraces;
this.nobrace = !!opts.nobrace;
this.noext = !!opts.noext;
this.realpath = !!opts.realpath;
this.absolute = opts.absolute;
this.includeChildMatches = opts.includeChildMatches !== false;
this.noglobstar = !!opts.noglobstar;
this.matchBase = !!opts.matchBase;
this.maxDepth =
typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
this.stat = !!opts.stat;
this.ignore = opts.ignore;
if (this.withFileTypes && this.absolute !== undefined) {
throw new Error('cannot set absolute and withFileTypes:true');
}
if (typeof pattern === 'string') {
pattern = [pattern];
}
this.windowsPathsNoEscape =
!!opts.windowsPathsNoEscape ||
opts.allowWindowsEscape ===
false;
if (this.windowsPathsNoEscape) {
pattern = pattern.map(p => p.replace(/\\/g, '/'));
}
if (this.matchBase) {
if (opts.noglobstar) {
throw new TypeError('base matching requires globstar');
}
pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
}
this.pattern = pattern;
this.platform = opts.platform || defaultPlatform;
this.opts = { ...opts, platform: this.platform };
if (opts.scurry) {
this.scurry = opts.scurry;
if (opts.nocase !== undefined &&
opts.nocase !== opts.scurry.nocase) {
throw new Error('nocase option contradicts provided scurry option');
}
}
else {
const Scurry = opts.platform === 'win32' ? PathScurryWin32
: opts.platform === 'darwin' ? PathScurryDarwin
: opts.platform ? PathScurryPosix
: PathScurry;
this.scurry = new Scurry(this.cwd, {
nocase: opts.nocase,
fs: opts.fs,
});
}
this.nocase = this.scurry.nocase;
// If you do nocase:true on a case-sensitive file system, then
// we need to use regexps instead of strings for non-magic
// path portions, because statting `aBc` won't return results
// for the file `AbC` for example.
const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
const mmo = {
// default nocase based on platform
...opts,
dot: this.dot,
matchBase: this.matchBase,
nobrace: this.nobrace,
nocase: this.nocase,
nocaseMagicOnly,
nocomment: true,
noext: this.noext,
nonegate: true,
optimizationLevel: 2,
platform: this.platform,
windowsPathsNoEscape: this.windowsPathsNoEscape,
debug: !!this.opts.debug,
};
const mms = this.pattern.map(p => new Minimatch(p, mmo));
const [matchSet, globParts] = mms.reduce((set, m) => {
set[0].push(...m.set);
set[1].push(...m.globParts);
return set;
}, [[], []]);
this.patterns = matchSet.map((set, i) => {
const g = globParts[i];
/* c8 ignore start */
if (!g)
throw new Error('invalid pattern object');
/* c8 ignore stop */
return new Pattern(set, g, 0, this.platform);
});
}
async walk() {
// Walkers always return array of Path objects, so we just have to
// coerce them into the right shape. It will have already called
// realpath() if the option was set to do so, so we know that's cached.
// start out knowing the cwd, at least
return [
...(await new GlobWalker(this.patterns, this.scurry.cwd, {
...this.opts,
maxDepth: this.maxDepth !== Infinity ?
this.maxDepth + this.scurry.cwd.depth()
: Infinity,
platform: this.platform,
nocase: this.nocase,
includeChildMatches: this.includeChildMatches,
}).walk()),
];
}
walkSync() {
return [
...new GlobWalker(this.patterns, this.scurry.cwd, {
...this.opts,
maxDepth: this.maxDepth !== Infinity ?
this.maxDepth + this.scurry.cwd.depth()
: Infinity,
platform: this.platform,
nocase: this.nocase,
includeChildMatches: this.includeChildMatches,
}).walkSync(),
];
}
stream() {
return new GlobStream(this.patterns, this.scurry.cwd, {
...this.opts,
maxDepth: this.maxDepth !== Infinity ?
this.maxDepth + this.scurry.cwd.depth()
: Infinity,
platform: this.platform,
nocase: this.nocase,
includeChildMatches: this.includeChildMatches,
}).stream();
}
streamSync() {
return new GlobStream(this.patterns, this.scurry.cwd, {
...this.opts,
maxDepth: this.maxDepth !== Infinity ?
this.maxDepth + this.scurry.cwd.depth()
: Infinity,
platform: this.platform,
nocase: this.nocase,
includeChildMatches: this.includeChildMatches,
}).streamSync();
}
/**
* Default sync iteration function. Returns a Generator that
* iterates over the results.
*/
iterateSync() {
return this.streamSync()[Symbol.iterator]();
}
[Symbol.iterator]() {
return this.iterateSync();
}
/**
* Default async iteration function. Returns an AsyncGenerator that
* iterates over the results.
*/
iterate() {
return this.stream()[Symbol.asyncIterator]();
}
[Symbol.asyncIterator]() {
return this.iterate();
}
}
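// Illustrative sketch (not invoked): a Glob instance parses its patterns once
// and can then be walked, streamed, or iterated any number of times; passing a
// previous Glob as the options re-uses its settings and PathScurry cache, as
// noted in the constructor docs. The cwd and patterns below are hypothetical.
async function exampleGlobUsage() {
    const g = new Glob('src/**/*.ts', {
        cwd: '/some/project',
        nodir: true,
        ignore: ['**/*.d.ts'],
    });
    const asArray = await g.walk();         // array of relative path strings
    const again = new Glob('**/*.json', g); // re-uses settings and caches
    const results = [];
    for await (const match of again) {      // async iteration over the stream
        results.push(match);
    }
    return { asArray, results };
}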
/**
* Return true if the patterns provided contain any magic glob characters,
* given the options provided.
*
* Brace expansion is not considered "magic" unless the `magicalBraces` option
* is set, as brace expansion just turns one string into an array of strings.
* So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
* `'xby'` both do not contain any magic glob characters, and it's treated the
* same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
* is in the options, brace expansion _is_ treated as a pattern having magic.
*/
const hasMagic = (pattern, options = {}) => {
if (!Array.isArray(pattern)) {
pattern = [pattern];
}
for (const p of pattern) {
if (new Minimatch(p, options).hasMagic())
return true;
}
return false;
};
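// Illustrative sketch (not invoked): braces alone are not "magic" unless
// magicalBraces is set, as described above.
function exampleHasMagic() {
    const plain = hasMagic('x{a,b}y');                           // false
    const braces = hasMagic('x{a,b}y', { magicalBraces: true }); // true
    const star = hasMagic('src/*.js');                           // true
    return { plain, braces, star };
}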
function globStreamSync(pattern, options = {}) {
return new Glob(pattern, options).streamSync();
}
function globStream(pattern, options = {}) {
return new Glob(pattern, options).stream();
}
function globSync(pattern, options = {}) {
return new Glob(pattern, options).walkSync();
}
async function glob_(pattern, options = {}) {
return new Glob(pattern, options).walk();
}
function globIterateSync(pattern, options = {}) {
return new Glob(pattern, options).iterateSync();
}
function globIterate(pattern, options = {}) {
return new Glob(pattern, options).iterate();
}
// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
const streamSync = globStreamSync;
const stream$5 = Object.assign(globStream, { sync: globStreamSync });
const iterateSync = globIterateSync;
const iterate = Object.assign(globIterate, {
sync: globIterateSync,
});
const sync$9 = Object.assign(globSync, {
stream: globStreamSync,
iterate: globIterateSync,
});
const glob$1 = Object.assign(glob_, {
glob: glob_,
globSync,
sync: sync$9,
globStream,
stream: stream$5,
globStreamSync,
streamSync,
globIterate,
iterate,
globIterateSync,
iterateSync,
Glob,
hasMagic,
escape: escape$2,
unescape: unescape$1,
});
glob$1.glob = glob$1;
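// Illustrative sketch (not invoked): the aggregate object above exposes the
// same walk in async, sync, stream, and iterator flavors. The cwd and pattern
// here are hypothetical.
async function exampleGlobAliases() {
    const opts = { cwd: '/some/project', nodir: true };
    const asyncMatches = await glob$1('**/*.css', opts);
    const syncMatches = glob$1.sync('**/*.css', opts);
    const streamed = [];
    for await (const file of glob$1.stream('**/*.css', opts)) {
        streamed.push(file);
    }
    return { asyncMatches, syncMatches, streamed };
}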
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars$1 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars$1.length; i++) {
const c = chars$1.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
function decodeInteger(reader, relative) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
return relative + value;
}
function encodeInteger(builder, num, relative) {
let delta = num - relative;
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
do {
let clamped = delta & 0b011111;
delta >>>= 5;
if (delta > 0)
clamped |= 0b100000;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
}
function hasMoreVlq(reader, max) {
if (reader.pos >= max)
return false;
return reader.peek() !== comma;
}
const bufLength = 1024 * 16;
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
class StringWriter {
constructor() {
this.pos = 0;
this.out = '';
this.buffer = new Uint8Array(bufLength);
}
write(v) {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
}
flush() {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
}
class StringReader {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
}
function decode(mappings) {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(';');
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol)
sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
}
else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
}
else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
if (!sorted)
sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line) {
line.sort(sortComparator$1);
}
function sortComparator$1(a, b) {
return a[0] - b[0];
}
function encode$1(decoded) {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0)
writer.write(semicolon);
if (line.length === 0)
continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0)
writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1)
continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4)
continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
}
return writer.flush();
}
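// Illustrative sketch (not invoked): round-trips a tiny sourcemap "mappings"
// string through decode()/encode$1(). Each VLQ digit stores 5 value bits plus a
// continuation bit; segments within a generated line are comma-separated and
// lines are separated by semicolons.
function exampleVlqRoundTrip() {
    const mappings = 'AAAA,CAAC;AACA';
    // line 0: [[0,0,0,0], [1,0,0,1]]   line 1: [[0,0,1,1]]
    const decoded = decode(mappings);
    const reencoded = encode$1(decoded); // 'AAAA,CAAC;AACA'
    return { decoded, reencoded };
}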
class BitSet {
constructor(arg) {
this.bits = arg instanceof BitSet ? arg.bits.slice() : [];
}
add(n) {
this.bits[n >> 5] |= 1 << (n & 31);
}
has(n) {
return !!(this.bits[n >> 5] & (1 << (n & 31)));
}
}
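// Illustrative sketch (not invoked): BitSet packs flags into 32-bit words,
// using index >> 5 to pick the word and index & 31 to pick the bit within it.
function exampleBitSet() {
    const set = new BitSet();
    set.add(33);                  // word 1, bit 1 -> bits[1] === 2
    const copy = new BitSet(set); // copies the underlying words
    return [set.has(33), copy.has(33), set.has(1)]; // [true, true, false]
}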
class Chunk {
constructor(start, end, content) {
this.start = start;
this.end = end;
this.original = content;
this.intro = '';
this.outro = '';
this.content = content;
this.storeName = false;
this.edited = false;
{
this.previous = null;
this.next = null;
}
}
appendLeft(content) {
this.outro += content;
}
appendRight(content) {
this.intro = this.intro + content;
}
clone() {
const chunk = new Chunk(this.start, this.end, this.original);
chunk.intro = this.intro;
chunk.outro = this.outro;
chunk.content = this.content;
chunk.storeName = this.storeName;
chunk.edited = this.edited;
return chunk;
}
contains(index) {
return this.start < index && index < this.end;
}
eachNext(fn) {
let chunk = this;
while (chunk) {
fn(chunk);
chunk = chunk.next;
}
}
eachPrevious(fn) {
let chunk = this;
while (chunk) {
fn(chunk);
chunk = chunk.previous;
}
}
edit(content, storeName, contentOnly) {
this.content = content;
if (!contentOnly) {
this.intro = '';
this.outro = '';
}
this.storeName = storeName;
this.edited = true;
return this;
}
prependLeft(content) {
this.outro = content + this.outro;
}
prependRight(content) {
this.intro = content + this.intro;
}
reset() {
this.intro = '';
this.outro = '';
if (this.edited) {
this.content = this.original;
this.storeName = false;
this.edited = false;
}
}
split(index) {
const sliceIndex = index - this.start;
const originalBefore = this.original.slice(0, sliceIndex);
const originalAfter = this.original.slice(sliceIndex);
this.original = originalBefore;
const newChunk = new Chunk(index, this.end, originalAfter);
newChunk.outro = this.outro;
this.outro = '';
this.end = index;
if (this.edited) {
// after a split, the edited content record should be saved on the correct chunk
// so that the resulting sourcemap stays correct
// For example:
// ' test'.trim()
// split -> ' ' + 'test'
// ✔️ edit -> '' + 'test'
// ✖️ edit -> 'test' + ''
// TODO is this block necessary?...
newChunk.edit('', false);
this.content = '';
} else {
this.content = originalBefore;
}
newChunk.next = this.next;
if (newChunk.next) newChunk.next.previous = newChunk;
newChunk.previous = this;
this.next = newChunk;
return newChunk;
}
toString() {
return this.intro + this.content + this.outro;
}
trimEnd(rx) {
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
const trimmed = this.content.replace(rx, '');
if (trimmed.length) {
if (trimmed !== this.content) {
this.split(this.start + trimmed.length).edit('', undefined, true);
if (this.edited) {
// save the change, if it has been edited
this.edit(trimmed, this.storeName, true);
}
}
return true;
} else {
this.edit('', undefined, true);
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
}
}
trimStart(rx) {
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
const trimmed = this.content.replace(rx, '');
if (trimmed.length) {
if (trimmed !== this.content) {
const newChunk = this.split(this.end - trimmed.length);
if (this.edited) {
// save the change, if it has been edited
newChunk.edit(trimmed, this.storeName, true);
}
this.edit('', undefined, true);
}
return true;
} else {
this.edit('', undefined, true);
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
}
}
}
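// Illustrative sketch (not invoked): chunks form a doubly linked list over the
// original string; split() carves a chunk in two at an absolute index, and
// intro/outro hold text prepended or appended without touching the original.
function exampleChunkSplit() {
    const chunk = new Chunk(0, 5, 'hello');
    const tail = chunk.split(3); // chunk now covers [0,3) 'hel', tail covers [3,5) 'lo'
    chunk.appendLeft('-');       // goes into chunk.outro
    tail.prependRight('*');      // goes into tail.intro
    return chunk.toString() + tail.toString(); // 'hel-*lo'
}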
function getBtoa() {
if (typeof globalThis !== 'undefined' && typeof globalThis.btoa === 'function') {
return (str) => globalThis.btoa(unescape(encodeURIComponent(str)));
} else if (typeof Buffer === 'function') {
return (str) => Buffer.from(str, 'utf-8').toString('base64');
} else {
return () => {
throw new Error('Unsupported environment: `window.btoa` or `Buffer` should be supported.');
};
}
}
const btoa$1 = /*#__PURE__*/ getBtoa();
let SourceMap$1 = class SourceMap {
constructor(properties) {
this.version = 3;
this.file = properties.file;
this.sources = properties.sources;
this.sourcesContent = properties.sourcesContent;
this.names = properties.names;
this.mappings = encode$1(properties.mappings);
if (typeof properties.x_google_ignoreList !== 'undefined') {
this.x_google_ignoreList = properties.x_google_ignoreList;
}
}
toString() {
return JSON.stringify(this);
}
toUrl() {
return 'data:application/json;charset=utf-8;base64,' + btoa$1(this.toString());
}
};
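// Illustrative sketch (not invoked): SourceMap serializes decoded mappings back
// to the VLQ form and can emit a base64 data URI suitable for an inline
// //# sourceMappingURL comment. The file names here are hypothetical.
function exampleSourceMap() {
    const map = new SourceMap$1({
        file: 'out.js',
        sources: ['in.js'],
        sourcesContent: [null],
        names: [],
        mappings: [[[0, 0, 0, 0]]], // encodes to 'AAAA'
    });
    return { json: map.toString(), inlineUrl: map.toUrl() };
}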
function guessIndent(code) {
const lines = code.split('\n');
const tabbed = lines.filter((line) => /^\t+/.test(line));
const spaced = lines.filter((line) => /^ {2,}/.test(line));
if (tabbed.length === 0 && spaced.length === 0) {
return null;
}
// More lines tabbed than spaced? Assume tabs, and
// default to tabs in the case of a tie (or nothing
// to go on)
if (tabbed.length >= spaced.length) {
return '\t';
}
// Otherwise, we need to guess the multiple
const min = spaced.reduce((previous, current) => {
const numSpaces = /^ +/.exec(current)[0].length;
return Math.min(numSpaces, previous);
}, Infinity);
return new Array(min + 1).join(' ');
}
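// Illustrative sketch (not invoked): guessIndent prefers tabs on a tie and
// otherwise uses the smallest run of leading spaces it sees.
function exampleGuessIndent() {
    const tabs = guessIndent('\tfoo\n\tbar');     // '\t'
    const spaces = guessIndent('  foo\n    bar'); // '  ' (minimum of 2 and 4)
    const unknown = guessIndent('foo\nbar');      // null
    return { tabs, spaces, unknown };
}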
function getRelativePath(from, to) {
const fromParts = from.split(/[/\\]/);
const toParts = to.split(/[/\\]/);
fromParts.pop(); // get dirname
while (fromParts[0] === toParts[0]) {
fromParts.shift();
toParts.shift();
}
if (fromParts.length) {
let i = fromParts.length;
while (i--) fromParts[i] = '..';
}
return fromParts.concat(toParts).join('/');
}
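// Illustrative sketch (not invoked): getRelativePath strips the shared leading
// directories and climbs out of what remains of the `from` directory with '..'
// segments.
function exampleGetRelativePath() {
    const sibling = getRelativePath('src/a.js', 'src/lib/b.js'); // 'lib/b.js'
    const cousin = getRelativePath('a/b/c.js', 'd/e.js');        // '../../d/e.js'
    return { sibling, cousin };
}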
const toString$1 = Object.prototype.toString;
function isObject$2(thing) {
return toString$1.call(thing) === '[object Object]';
}
function getLocator(source) {
const originalLines = source.split('\n');
const lineOffsets = [];
for (let i = 0, pos = 0; i < originalLines.length; i++) {
lineOffsets.push(pos);
pos += originalLines[i].length + 1;
}
return function locate(index) {
let i = 0;
let j = lineOffsets.length;
while (i < j) {
const m = (i + j) >> 1;
if (index < lineOffsets[m]) {
j = m;
} else {
i = m + 1;
}
}
const line = i - 1;
const column = index - lineOffsets[line];
return { line, column };
};
}
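// Illustrative sketch (not invoked): getLocator precomputes the offset of each
// line, then binary-searches those offsets to turn a character index into a
// 0-based { line, column } pair.
function exampleGetLocator() {
    const locate = getLocator('ab\ncd');
    const first = locate(1);  // { line: 0, column: 1 } -> 'b'
    const second = locate(4); // { line: 1, column: 1 } -> 'd'
    return { first, second };
}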
const wordRegex = /\w/;
class Mappings {
constructor(hires) {
this.hires = hires;
this.generatedCodeLine = 0;
this.generatedCodeColumn = 0;
this.raw = [];
this.rawSegments = this.raw[this.generatedCodeLine] = [];
this.pending = null;
}
addEdit(sourceIndex, content, loc, nameIndex) {
if (content.length) {
const contentLengthMinusOne = content.length - 1;
let contentLineEnd = content.indexOf('\n', 0);
let previousContentLineEnd = -1;
// Loop through each line in the content and add a segment, but stop if the last line is empty,
// else code afterwards would fill one line too many
while (contentLineEnd >= 0 && contentLengthMinusOne > contentLineEnd) {
const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column];
if (nameIndex >= 0) {
segment.push(nameIndex);
}
this.rawSegments.push(segment);
this.generatedCodeLine += 1;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
this.generatedCodeColumn = 0;
previousContentLineEnd = contentLineEnd;
contentLineEnd = content.indexOf('\n', contentLineEnd + 1);
}
const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column];
if (nameIndex >= 0) {
segment.push(nameIndex);
}
this.rawSegments.push(segment);
this.advance(content.slice(previousContentLineEnd + 1));
} else if (this.pending) {
this.rawSegments.push(this.pending);
this.advance(content);
}
this.pending = null;
}
addUneditedChunk(sourceIndex, chunk, original, loc, sourcemapLocations) {
let originalCharIndex = chunk.start;
let first = true;
// when iterating each char, check if it's in a word boundary
let charInHiresBoundary = false;
while (originalCharIndex < chunk.end) {
if (this.hires || first || sourcemapLocations.has(originalCharIndex)) {
const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column];
if (this.hires === 'boundary') {
// in hires "boundary", group segments per word boundary than per char
if (wordRegex.test(original[originalCharIndex])) {
// for the first char found in a boundary, start the boundary by pushing a segment
if (!charInHiresBoundary) {
this.rawSegments.push(segment);
charInHiresBoundary = true;
}
} else {
// for non-word char, end the boundary by pushing a segment
this.rawSegments.push(segment);
charInHiresBoundary = false;
}
} else {
this.rawSegments.push(segment);
}
}
if (original[originalCharIndex] === '\n') {
loc.line += 1;
loc.column = 0;
this.generatedCodeLine += 1;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
this.generatedCodeColumn = 0;
first = true;
} else {
loc.column += 1;
this.generatedCodeColumn += 1;
first = false;
}
originalCharIndex += 1;
}
this.pending = null;
}
advance(str) {
if (!str) return;
const lines = str.split('\n');
if (lines.length > 1) {
for (let i = 0; i < lines.length - 1; i++) {
this.generatedCodeLine++;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
}
this.generatedCodeColumn = 0;
}
this.generatedCodeColumn += lines[lines.length - 1].length;
}
}
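// Note on the data produced above: `raw` holds decoded source map segments, one array per
// generated line, each segment shaped as [generatedColumn, sourceIndex, originalLine, originalColumn]
// with an optional fifth nameIndex entry; generateMap/SourceMap$1 encode this structure later.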
const n$1 = '\n';
const warned = {
insertLeft: false,
insertRight: false,
storeName: false,
};
class MagicString {
constructor(string, options = {}) {
const chunk = new Chunk(0, string.length, string);
Object.defineProperties(this, {
original: { writable: true, value: string },
outro: { writable: true, value: '' },
intro: { writable: true, value: '' },
firstChunk: { writable: true, value: chunk },
lastChunk: { writable: true, value: chunk },
lastSearchedChunk: { writable: true, value: chunk },
byStart: { writable: true, value: {} },
byEnd: { writable: true, value: {} },
filename: { writable: true, value: options.filename },
indentExclusionRanges: { writable: true, value: options.indentExclusionRanges },
sourcemapLocations: { writable: true, value: new BitSet() },
storedNames: { writable: true, value: {} },
indentStr: { writable: true, value: undefined },
ignoreList: { writable: true, value: options.ignoreList },
});
this.byStart[0] = chunk;
this.byEnd[string.length] = chunk;
}
addSourcemapLocation(char) {
this.sourcemapLocations.add(char);
}
append(content) {
if (typeof content !== 'string') throw new TypeError('outro content must be a string');
this.outro += content;
return this;
}
appendLeft(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byEnd[index];
if (chunk) {
chunk.appendLeft(content);
} else {
this.intro += content;
}
return this;
}
appendRight(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byStart[index];
if (chunk) {
chunk.appendRight(content);
} else {
this.outro += content;
}
return this;
}
clone() {
const cloned = new MagicString(this.original, { filename: this.filename });
let originalChunk = this.firstChunk;
let clonedChunk = (cloned.firstChunk = cloned.lastSearchedChunk = originalChunk.clone());
while (originalChunk) {
cloned.byStart[clonedChunk.start] = clonedChunk;
cloned.byEnd[clonedChunk.end] = clonedChunk;
const nextOriginalChunk = originalChunk.next;
const nextClonedChunk = nextOriginalChunk && nextOriginalChunk.clone();
if (nextClonedChunk) {
clonedChunk.next = nextClonedChunk;
nextClonedChunk.previous = clonedChunk;
clonedChunk = nextClonedChunk;
}
originalChunk = nextOriginalChunk;
}
cloned.lastChunk = clonedChunk;
if (this.indentExclusionRanges) {
cloned.indentExclusionRanges = this.indentExclusionRanges.slice();
}
cloned.sourcemapLocations = new BitSet(this.sourcemapLocations);
cloned.intro = this.intro;
cloned.outro = this.outro;
return cloned;
}
generateDecodedMap(options) {
options = options || {};
const sourceIndex = 0;
const names = Object.keys(this.storedNames);
const mappings = new Mappings(options.hires);
const locate = getLocator(this.original);
if (this.intro) {
mappings.advance(this.intro);
}
this.firstChunk.eachNext((chunk) => {
const loc = locate(chunk.start);
if (chunk.intro.length) mappings.advance(chunk.intro);
if (chunk.edited) {
mappings.addEdit(
sourceIndex,
chunk.content,
loc,
chunk.storeName ? names.indexOf(chunk.original) : -1,
);
} else {
mappings.addUneditedChunk(sourceIndex, chunk, this.original, loc, this.sourcemapLocations);
}
if (chunk.outro.length) mappings.advance(chunk.outro);
});
return {
file: options.file ? options.file.split(/[/\\]/).pop() : undefined,
sources: [
options.source ? getRelativePath(options.file || '', options.source) : options.file || '',
],
sourcesContent: options.includeContent ? [this.original] : undefined,
names,
mappings: mappings.raw,
x_google_ignoreList: this.ignoreList ? [sourceIndex] : undefined,
};
}
generateMap(options) {
return new SourceMap$1(this.generateDecodedMap(options));
}
_ensureindentStr() {
if (this.indentStr === undefined) {
this.indentStr = guessIndent(this.original);
}
}
_getRawIndentString() {
this._ensureindentStr();
return this.indentStr;
}
getIndentString() {
this._ensureindentStr();
return this.indentStr === null ? '\t' : this.indentStr;
}
indent(indentStr, options) {
const pattern = /^[^\r\n]/gm;
if (isObject$2(indentStr)) {
options = indentStr;
indentStr = undefined;
}
if (indentStr === undefined) {
this._ensureindentStr();
indentStr = this.indentStr || '\t';
}
if (indentStr === '') return this; // noop
options = options || {};
// Process exclusion ranges
const isExcluded = {};
if (options.exclude) {
const exclusions =
typeof options.exclude[0] === 'number' ? [options.exclude] : options.exclude;
exclusions.forEach((exclusion) => {
for (let i = exclusion[0]; i < exclusion[1]; i += 1) {
isExcluded[i] = true;
}
});
}
let shouldIndentNextCharacter = options.indentStart !== false;
const replacer = (match) => {
if (shouldIndentNextCharacter) return `${indentStr}${match}`;
shouldIndentNextCharacter = true;
return match;
};
this.intro = this.intro.replace(pattern, replacer);
let charIndex = 0;
let chunk = this.firstChunk;
while (chunk) {
const end = chunk.end;
if (chunk.edited) {
if (!isExcluded[charIndex]) {
chunk.content = chunk.content.replace(pattern, replacer);
if (chunk.content.length) {
shouldIndentNextCharacter = chunk.content[chunk.content.length - 1] === '\n';
}
}
} else {
charIndex = chunk.start;
while (charIndex < end) {
if (!isExcluded[charIndex]) {
const char = this.original[charIndex];
if (char === '\n') {
shouldIndentNextCharacter = true;
} else if (char !== '\r' && shouldIndentNextCharacter) {
shouldIndentNextCharacter = false;
if (charIndex === chunk.start) {
chunk.prependRight(indentStr);
} else {
this._splitChunk(chunk, charIndex);
chunk = chunk.next;
chunk.prependRight(indentStr);
}
}
}
charIndex += 1;
}
}
charIndex = chunk.end;
chunk = chunk.next;
}
this.outro = this.outro.replace(pattern, replacer);
return this;
}
insert() {
throw new Error(
'magicString.insert(...) is deprecated. Use prependRight(...) or appendLeft(...)',
);
}
insertLeft(index, content) {
if (!warned.insertLeft) {
console.warn(
'magicString.insertLeft(...) is deprecated. Use magicString.appendLeft(...) instead',
); // eslint-disable-line no-console
warned.insertLeft = true;
}
return this.appendLeft(index, content);
}
insertRight(index, content) {
if (!warned.insertRight) {
console.warn(
'magicString.insertRight(...) is deprecated. Use magicString.prependRight(...) instead',
); // eslint-disable-line no-console
warned.insertRight = true;
}
return this.prependRight(index, content);
}
move(start, end, index) {
if (index >= start && index <= end) throw new Error('Cannot move a selection inside itself');
this._split(start);
this._split(end);
this._split(index);
const first = this.byStart[start];
const last = this.byEnd[end];
const oldLeft = first.previous;
const oldRight = last.next;
const newRight = this.byStart[index];
if (!newRight && last === this.lastChunk) return this;
const newLeft = newRight ? newRight.previous : this.lastChunk;
if (oldLeft) oldLeft.next = oldRight;
if (oldRight) oldRight.previous = oldLeft;
if (newLeft) newLeft.next = first;
if (newRight) newRight.previous = last;
if (!first.previous) this.firstChunk = last.next;
if (!last.next) {
this.lastChunk = first.previous;
this.lastChunk.next = null;
}
first.previous = newLeft;
last.next = newRight || null;
if (!newLeft) this.firstChunk = first;
if (!newRight) this.lastChunk = last;
return this;
}
overwrite(start, end, content, options) {
options = options || {};
return this.update(start, end, content, { ...options, overwrite: !options.contentOnly });
}
update(start, end, content, options) {
if (typeof content !== 'string') throw new TypeError('replacement content must be a string');
if (this.original.length !== 0) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
}
if (end > this.original.length) throw new Error('end is out of bounds');
if (start === end)
throw new Error(
'Cannot overwrite a zero-length range – use appendLeft or prependRight instead',
);
this._split(start);
this._split(end);
if (options === true) {
if (!warned.storeName) {
console.warn(
'The final argument to magicString.overwrite(...) should be an options object. See https://github.com/rich-harris/magic-string',
); // eslint-disable-line no-console
warned.storeName = true;
}
options = { storeName: true };
}
const storeName = options !== undefined ? options.storeName : false;
const overwrite = options !== undefined ? options.overwrite : false;
if (storeName) {
const original = this.original.slice(start, end);
Object.defineProperty(this.storedNames, original, {
writable: true,
value: true,
enumerable: true,
});
}
const first = this.byStart[start];
const last = this.byEnd[end];
if (first) {
let chunk = first;
while (chunk !== last) {
if (chunk.next !== this.byStart[chunk.end]) {
throw new Error('Cannot overwrite across a split point');
}
chunk = chunk.next;
chunk.edit('', false);
}
first.edit(content, storeName, !overwrite);
} else {
// must be inserting at the end
const newChunk = new Chunk(start, end, '').edit(content, storeName);
// TODO last chunk in the array may not be the last chunk, if it's moved...
last.next = newChunk;
newChunk.previous = last;
}
return this;
}
prepend(content) {
if (typeof content !== 'string') throw new TypeError('intro content must be a string');
this.intro = content + this.intro;
return this;
}
prependLeft(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byEnd[index];
if (chunk) {
chunk.prependLeft(content);
} else {
this.intro = content + this.intro;
}
return this;
}
prependRight(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byStart[index];
if (chunk) {
chunk.prependRight(content);
} else {
this.outro = content + this.outro;
}
return this;
}
remove(start, end) {
if (this.original.length !== 0) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
}
if (start === end) return this;
if (start < 0 || end > this.original.length) throw new Error('Character is out of bounds');
if (start > end) throw new Error('end must be greater than start');
this._split(start);
this._split(end);
let chunk = this.byStart[start];
while (chunk) {
chunk.intro = '';
chunk.outro = '';
chunk.edit('');
chunk = end > chunk.end ? this.byStart[chunk.end] : null;
}
return this;
}
reset(start, end) {
if (this.original.length !== 0) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
}
if (start === end) return this;
if (start < 0 || end > this.original.length) throw new Error('Character is out of bounds');
if (start > end) throw new Error('end must be greater than start');
this._split(start);
this._split(end);
let chunk = this.byStart[start];
while (chunk) {
chunk.reset();
chunk = end > chunk.end ? this.byStart[chunk.end] : null;
}
return this;
}
lastChar() {
if (this.outro.length) return this.outro[this.outro.length - 1];
let chunk = this.lastChunk;
do {
if (chunk.outro.length) return chunk.outro[chunk.outro.length - 1];
if (chunk.content.length) return chunk.content[chunk.content.length - 1];
if (chunk.intro.length) return chunk.intro[chunk.intro.length - 1];
} while ((chunk = chunk.previous));
if (this.intro.length) return this.intro[this.intro.length - 1];
return '';
}
lastLine() {
let lineIndex = this.outro.lastIndexOf(n$1);
if (lineIndex !== -1) return this.outro.substr(lineIndex + 1);
let lineStr = this.outro;
let chunk = this.lastChunk;
do {
if (chunk.outro.length > 0) {
lineIndex = chunk.outro.lastIndexOf(n$1);
if (lineIndex !== -1) return chunk.outro.substr(lineIndex + 1) + lineStr;
lineStr = chunk.outro + lineStr;
}
if (chunk.content.length > 0) {
lineIndex = chunk.content.lastIndexOf(n$1);
if (lineIndex !== -1) return chunk.content.substr(lineIndex + 1) + lineStr;
lineStr = chunk.content + lineStr;
}
if (chunk.intro.length > 0) {
lineIndex = chunk.intro.lastIndexOf(n$1);
if (lineIndex !== -1) return chunk.intro.substr(lineIndex + 1) + lineStr;
lineStr = chunk.intro + lineStr;
}
} while ((chunk = chunk.previous));
lineIndex = this.intro.lastIndexOf(n$1);
if (lineIndex !== -1) return this.intro.substr(lineIndex + 1) + lineStr;
return this.intro + lineStr;
}
slice(start = 0, end = this.original.length) {
if (this.original.length !== 0) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
}
let result = '';
// find start chunk
let chunk = this.firstChunk;
while (chunk && (chunk.start > start || chunk.end <= start)) {
// found end chunk before start
if (chunk.start < end && chunk.end >= end) {
return result;
}
chunk = chunk.next;
}
if (chunk && chunk.edited && chunk.start !== start)
throw new Error(`Cannot use replaced character ${start} as slice start anchor.`);
const startChunk = chunk;
while (chunk) {
if (chunk.intro && (startChunk !== chunk || chunk.start === start)) {
result += chunk.intro;
}
const containsEnd = chunk.start < end && chunk.end >= end;
if (containsEnd && chunk.edited && chunk.end !== end)
throw new Error(`Cannot use replaced character ${end} as slice end anchor.`);
const sliceStart = startChunk === chunk ? start - chunk.start : 0;
const sliceEnd = containsEnd ? chunk.content.length + end - chunk.end : chunk.content.length;
result += chunk.content.slice(sliceStart, sliceEnd);
if (chunk.outro && (!containsEnd || chunk.end === end)) {
result += chunk.outro;
}
if (containsEnd) {
break;
}
chunk = chunk.next;
}
return result;
}
// TODO deprecate this? not really very useful
snip(start, end) {
const clone = this.clone();
clone.remove(0, start);
clone.remove(end, clone.original.length);
return clone;
}
_split(index) {
if (this.byStart[index] || this.byEnd[index]) return;
let chunk = this.lastSearchedChunk;
const searchForward = index > chunk.end;
while (chunk) {
if (chunk.contains(index)) return this._splitChunk(chunk, index);
chunk = searchForward ? this.byStart[chunk.end] : this.byEnd[chunk.start];
}
}
_splitChunk(chunk, index) {
if (chunk.edited && chunk.content.length) {
// zero-length edited chunks are a special case (overlapping replacements)
const loc = getLocator(this.original)(index);
throw new Error(
`Cannot split a chunk that has already been edited (${loc.line}:${loc.column} – "${chunk.original}")`,
);
}
const newChunk = chunk.split(index);
this.byEnd[index] = chunk;
this.byStart[index] = newChunk;
this.byEnd[newChunk.end] = newChunk;
if (chunk === this.lastChunk) this.lastChunk = newChunk;
this.lastSearchedChunk = chunk;
return true;
}
toString() {
let str = this.intro;
let chunk = this.firstChunk;
while (chunk) {
str += chunk.toString();
chunk = chunk.next;
}
return str + this.outro;
}
isEmpty() {
let chunk = this.firstChunk;
do {
if (
(chunk.intro.length && chunk.intro.trim()) ||
(chunk.content.length && chunk.content.trim()) ||
(chunk.outro.length && chunk.outro.trim())
)
return false;
} while ((chunk = chunk.next));
return true;
}
length() {
let chunk = this.firstChunk;
let length = 0;
do {
length += chunk.intro.length + chunk.content.length + chunk.outro.length;
} while ((chunk = chunk.next));
return length;
}
trimLines() {
return this.trim('[\\r\\n]');
}
trim(charType) {
return this.trimStart(charType).trimEnd(charType);
}
trimEndAborted(charType) {
const rx = new RegExp((charType || '\\s') + '+$');
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
let chunk = this.lastChunk;
do {
const end = chunk.end;
const aborted = chunk.trimEnd(rx);
// if chunk was trimmed, we have a new lastChunk
if (chunk.end !== end) {
if (this.lastChunk === chunk) {
this.lastChunk = chunk.next;
}
this.byEnd[chunk.end] = chunk;
this.byStart[chunk.next.start] = chunk.next;
this.byEnd[chunk.next.end] = chunk.next;
}
if (aborted) return true;
chunk = chunk.previous;
} while (chunk);
return false;
}
trimEnd(charType) {
this.trimEndAborted(charType);
return this;
}
trimStartAborted(charType) {
const rx = new RegExp('^' + (charType || '\\s') + '+');
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
let chunk = this.firstChunk;
do {
const end = chunk.end;
const aborted = chunk.trimStart(rx);
if (chunk.end !== end) {
// special case...
if (chunk === this.lastChunk) this.lastChunk = chunk.next;
this.byEnd[chunk.end] = chunk;
this.byStart[chunk.next.start] = chunk.next;
this.byEnd[chunk.next.end] = chunk.next;
}
if (aborted) return true;
chunk = chunk.next;
} while (chunk);
return false;
}
trimStart(charType) {
this.trimStartAborted(charType);
return this;
}
hasChanged() {
return this.original !== this.toString();
}
_replaceRegexp(searchValue, replacement) {
function getReplacement(match, str) {
if (typeof replacement === 'string') {
return replacement.replace(/\$(\$|&|\d+)/g, (_, i) => {
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace#specifying_a_string_as_a_parameter
if (i === '$') return '$';
if (i === '&') return match[0];
const num = +i;
if (num < match.length) return match[+i];
return `$${i}`;
});
} else {
return replacement(...match, match.index, str, match.groups);
}
}
function matchAll(re, str) {
let match;
const matches = [];
while ((match = re.exec(str))) {
matches.push(match);
}
return matches;
}
if (searchValue.global) {
const matches = matchAll(searchValue, this.original);
matches.forEach((match) => {
if (match.index != null) {
const replacement = getReplacement(match, this.original);
if (replacement !== match[0]) {
this.overwrite(
match.index,
match.index + match[0].length,
replacement
);
}
}
});
} else {
const match = this.original.match(searchValue);
if (match && match.index != null) {
const replacement = getReplacement(match, this.original);
if (replacement !== match[0]) {
this.overwrite(
match.index,
match.index + match[0].length,
replacement
);
}
}
}
return this;
}
_replaceString(string, replacement) {
const { original } = this;
const index = original.indexOf(string);
if (index !== -1) {
this.overwrite(index, index + string.length, replacement);
}
return this;
}
replace(searchValue, replacement) {
if (typeof searchValue === 'string') {
return this._replaceString(searchValue, replacement);
}
return this._replaceRegexp(searchValue, replacement);
}
_replaceAllString(string, replacement) {
const { original } = this;
const stringLength = string.length;
for (
let index = original.indexOf(string);
index !== -1;
index = original.indexOf(string, index + stringLength)
) {
const previous = original.slice(index, index + stringLength);
if (previous !== replacement)
this.overwrite(index, index + stringLength, replacement);
}
return this;
}
replaceAll(searchValue, replacement) {
if (typeof searchValue === 'string') {
return this._replaceAllString(searchValue, replacement);
}
if (!searchValue.global) {
throw new TypeError(
'MagicString.prototype.replaceAll called with a non-global RegExp argument',
);
}
return this._replaceRegexp(searchValue, replacement);
}
}
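// Minimal MagicString usage sketch (illustrative values; the class is only consumed
// programmatically elsewhere in this bundle):
//   const s = new MagicString('const answer = 42;');
//   s.overwrite(6, 12, 'result');   // replace the identifier "answer"
//   s.prepend('// generated\n');    // added to the intro, before the first chunk
//   s.toString();                   // '// generated\nconst result = 42;'
//   const map = s.generateMap({ hires: true, includeContent: true });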
function isReference(node, parent) {
if (node.type === 'MemberExpression') {
return !node.computed && isReference(node.object, node);
}
if (node.type === 'Identifier') {
if (!parent)
return true;
switch (parent.type) {
// disregard `bar` in `foo.bar`
case 'MemberExpression': return parent.computed || node === parent.object;
// disregard the `foo` in `class {foo(){}}` but keep it in `class {[foo](){}}`
case 'MethodDefinition': return parent.computed;
// disregard the `foo` in `class {foo=bar}` but keep it in `class {[foo]=bar}` and `class {bar=foo}`
case 'FieldDefinition': return parent.computed || node === parent.value;
// disregard the `bar` in `{ bar: foo }`, but keep it in `{ [bar]: foo }`
case 'Property': return parent.computed || node === parent.value;
// disregard the `bar` in `export { foo as bar }` or
// the foo in `import { foo as bar }`
case 'ExportSpecifier':
case 'ImportSpecifier': return node === parent.local;
// disregard the `foo` in `foo: while (...) { ... break foo; ... continue foo;}`
case 'LabeledStatement':
case 'BreakStatement':
case 'ContinueStatement': return false;
default: return true;
}
}
return false;
}
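// Illustrative cases for isReference: in `foo.bar` only `foo` counts as a reference; in
// `{ [key]: value }` both `key` and `value` do, while in `{ key: value }` only `value` does;
// labels and break/continue targets never do.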
var version$2 = "26.0.1";
var peerDependencies = {
rollup: "^2.68.0||^3.0.0||^4.0.0"
};
function tryParse(parse, code, id) {
try {
return parse(code, { allowReturnOutsideFunction: true });
} catch (err) {
err.message += ` in ${id}`;
throw err;
}
}
const firstpassGlobal = /\b(?:require|module|exports|global)\b/;
const firstpassNoGlobal = /\b(?:require|module|exports)\b/;
function hasCjsKeywords(code, ignoreGlobal) {
const firstpass = ignoreGlobal ? firstpassNoGlobal : firstpassGlobal;
return firstpass.test(code);
}
/* eslint-disable no-underscore-dangle */
function analyzeTopLevelStatements(parse, code, id) {
const ast = tryParse(parse, code, id);
let isEsModule = false;
let hasDefaultExport = false;
let hasNamedExports = false;
for (const node of ast.body) {
switch (node.type) {
case 'ExportDefaultDeclaration':
isEsModule = true;
hasDefaultExport = true;
break;
case 'ExportNamedDeclaration':
isEsModule = true;
if (node.declaration) {
hasNamedExports = true;
} else {
for (const specifier of node.specifiers) {
if (specifier.exported.name === 'default') {
hasDefaultExport = true;
} else {
hasNamedExports = true;
}
}
}
break;
case 'ExportAllDeclaration':
isEsModule = true;
if (node.exported && node.exported.name === 'default') {
hasDefaultExport = true;
} else {
hasNamedExports = true;
}
break;
case 'ImportDeclaration':
isEsModule = true;
break;
}
}
return { isEsModule, hasDefaultExport, hasNamedExports, ast };
}
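// Illustrative result (assumed source, not part of the bundle): for
// `export default 1; export const a = 2;` this returns
// { isEsModule: true, hasDefaultExport: true, hasNamedExports: true, ast }.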
/* eslint-disable import/prefer-default-export */
function deconflict(scopes, globals, identifier) {
let i = 1;
let deconflicted = makeLegalIdentifier(identifier);
const hasConflicts = () =>
scopes.some((scope) => scope.contains(deconflicted)) || globals.has(deconflicted);
while (hasConflicts()) {
deconflicted = makeLegalIdentifier(`${identifier}_${i}`);
i += 1;
}
for (const scope of scopes) {
scope.declarations[deconflicted] = true;
}
return deconflicted;
}
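// Sketch of the behaviour (names assumed): if 'foo' is already declared in any of the given
// scopes or present in `globals`, deconflict tries 'foo_1', 'foo_2', ... until a free legal
// identifier is found, then records it in every scope's declarations.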
function getName(id) {
const name = makeLegalIdentifier(basename$1(id, extname(id)));
if (name !== 'index') {
return name;
}
return makeLegalIdentifier(basename$1(dirname$1(id)));
}
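// Example (illustrative ids): getName('/node_modules/lodash/index.js') falls back to the
// directory name and yields 'lodash'; getName('/src/my-module.js') yields the legalized
// file name, roughly 'my_module' after makeLegalIdentifier.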
function normalizePathSlashes(path) {
return path.replace(/\\/g, '/');
}
const getVirtualPathForDynamicRequirePath = (path, commonDir) =>
`/${normalizePathSlashes(relative$1(commonDir, path))}`;
function capitalize(name) {
return name[0].toUpperCase() + name.slice(1);
}
function getStrictRequiresFilter({ strictRequires }) {
switch (strictRequires) {
case true:
return { strictRequiresFilter: () => true, detectCyclesAndConditional: false };
// eslint-disable-next-line no-undefined
case undefined:
case 'auto':
case 'debug':
case null:
return { strictRequiresFilter: () => false, detectCyclesAndConditional: true };
case false:
return { strictRequiresFilter: () => false, detectCyclesAndConditional: false };
default:
if (typeof strictRequires === 'string' || Array.isArray(strictRequires)) {
return {
strictRequiresFilter: createFilter$1(strictRequires),
detectCyclesAndConditional: false
};
}
throw new Error('Unexpected value for "strictRequires" option.');
}
}
function getPackageEntryPoint(dirPath) {
let entryPoint = 'index.js';
try {
if (existsSync(join$1(dirPath, 'package.json'))) {
entryPoint =
JSON.parse(readFileSync(join$1(dirPath, 'package.json'), { encoding: 'utf8' })).main ||
entryPoint;
}
} catch (ignored) {
// ignored
}
return entryPoint;
}
function isDirectory$1(path) {
try {
if (statSync$1(path).isDirectory()) return true;
} catch (ignored) {
// Nothing to do here
}
return false;
}
function getDynamicRequireModules(patterns, dynamicRequireRoot) {
const dynamicRequireModules = new Map();
const dirNames = new Set();
for (const pattern of !patterns || Array.isArray(patterns) ? patterns || [] : [patterns]) {
const isNegated = pattern.startsWith('!');
const modifyMap = (targetPath, resolvedPath) =>
isNegated
? dynamicRequireModules.delete(targetPath)
: dynamicRequireModules.set(targetPath, resolvedPath);
for (const path of glob$1
.sync(isNegated ? pattern.substr(1) : pattern)
.sort((a, b) => a.localeCompare(b, 'en'))) {
const resolvedPath = resolve$3(path);
const requirePath = normalizePathSlashes(resolvedPath);
if (isDirectory$1(resolvedPath)) {
dirNames.add(resolvedPath);
const modulePath = resolve$3(join$1(resolvedPath, getPackageEntryPoint(path)));
modifyMap(requirePath, modulePath);
modifyMap(normalizePathSlashes(modulePath), modulePath);
} else {
dirNames.add(dirname$1(resolvedPath));
modifyMap(requirePath, resolvedPath);
}
}
}
return {
commonDir: dirNames.size ? getCommonDir([...dirNames, dynamicRequireRoot]) : null,
dynamicRequireModules
};
}
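// Sketch of the result (pattern and paths assumed): for dynamicRequireTargets like
// ['node_modules/some-pkg'], dynamicRequireModules maps both the normalized directory path
// and its resolved entry point (package.json "main", falling back to index.js) to that entry
// file, and commonDir is the shared ancestor of all matched directories and dynamicRequireRoot.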
const FAILED_REQUIRE_ERROR = `throw new Error('Could not dynamically require "' + path + '". Please configure the dynamicRequireTargets or/and ignoreDynamicRequires option of @rollup/plugin-commonjs appropriately for this require call to work.');`;
const COMMONJS_REQUIRE_EXPORT = 'commonjsRequire';
const CREATE_COMMONJS_REQUIRE_EXPORT = 'createCommonjsRequire';
function getDynamicModuleRegistry(
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
ignoreDynamicRequires
) {
if (!isDynamicRequireModulesEnabled) {
return `export function ${COMMONJS_REQUIRE_EXPORT}(path) {
${FAILED_REQUIRE_ERROR}
}`;
}
const dynamicModuleImports = [...dynamicRequireModules.values()]
.map(
(id, index) =>
`import ${
id.endsWith('.json') ? `json${index}` : `{ __require as require${index} }`
} from ${JSON.stringify(id)};`
)
.join('\n');
const dynamicModuleProps = [...dynamicRequireModules.keys()]
.map(
(id, index) =>
`\t\t${JSON.stringify(getVirtualPathForDynamicRequirePath(id, commonDir))}: ${
id.endsWith('.json') ? `function () { return json${index}; }` : `require${index}`
}`
)
.join(',\n');
return `${dynamicModuleImports}
var dynamicModules;
function getDynamicModules() {
return dynamicModules || (dynamicModules = {
${dynamicModuleProps}
});
}
export function ${CREATE_COMMONJS_REQUIRE_EXPORT}(originalModuleDir) {
function handleRequire(path) {
var resolvedPath = commonjsResolve(path, originalModuleDir);
if (resolvedPath !== null) {
return getDynamicModules()[resolvedPath]();
}
${ignoreDynamicRequires ? 'return require(path);' : FAILED_REQUIRE_ERROR}
}
handleRequire.resolve = function (path) {
var resolvedPath = commonjsResolve(path, originalModuleDir);
if (resolvedPath !== null) {
return resolvedPath;
}
return require.resolve(path);
}
return handleRequire;
}
function commonjsResolve (path, originalModuleDir) {
var shouldTryNodeModules = isPossibleNodeModulesPath(path);
path = normalize(path);
var relPath;
if (path[0] === '/') {
originalModuleDir = '';
}
var modules = getDynamicModules();
var checkedExtensions = ['', '.js', '.json'];
while (true) {
if (!shouldTryNodeModules) {
relPath = normalize(originalModuleDir + '/' + path);
} else {
relPath = normalize(originalModuleDir + '/node_modules/' + path);
}
if (relPath.endsWith('/..')) {
break; // Travelled too far up, avoid infinite loop
}
for (var extensionIndex = 0; extensionIndex < checkedExtensions.length; extensionIndex++) {
var resolvedPath = relPath + checkedExtensions[extensionIndex];
if (modules[resolvedPath]) {
return resolvedPath;
}
}
if (!shouldTryNodeModules) break;
var nextDir = normalize(originalModuleDir + '/..');
if (nextDir === originalModuleDir) break;
originalModuleDir = nextDir;
}
return null;
}
function isPossibleNodeModulesPath (modulePath) {
var c0 = modulePath[0];
if (c0 === '/' || c0 === '\\\\') return false;
var c1 = modulePath[1], c2 = modulePath[2];
if ((c0 === '.' && (!c1 || c1 === '/' || c1 === '\\\\')) ||
(c0 === '.' && c1 === '.' && (!c2 || c2 === '/' || c2 === '\\\\'))) return false;
if (c1 === ':' && (c2 === '/' || c2 === '\\\\')) return false;
return true;
}
function normalize (path) {
path = path.replace(/\\\\/g, '/');
var parts = path.split('/');
var slashed = parts[0] === '';
for (var i = 1; i < parts.length; i++) {
if (parts[i] === '.' || parts[i] === '') {
parts.splice(i--, 1);
}
}
for (var i = 1; i < parts.length; i++) {
if (parts[i] !== '..') continue;
if (i > 0 && parts[i - 1] !== '..' && parts[i - 1] !== '.') {
parts.splice(--i, 2);
i--;
}
}
path = parts.join('/');
if (slashed && path[0] !== '/') path = '/' + path;
else if (path.length === 0) path = '.';
return path;
}`;
}
const isWrappedId = (id, suffix) => id.endsWith(suffix);
const wrapId = (id, suffix) => `\0${id}${suffix}`;
const unwrapId = (wrappedId, suffix) => wrappedId.slice(1, -suffix.length);
const PROXY_SUFFIX = '?commonjs-proxy';
const WRAPPED_SUFFIX = '?commonjs-wrapped';
const EXTERNAL_SUFFIX = '?commonjs-external';
const EXPORTS_SUFFIX = '?commonjs-exports';
const MODULE_SUFFIX = '?commonjs-module';
const ENTRY_SUFFIX = '?commonjs-entry';
const ES_IMPORT_SUFFIX = '?commonjs-es-import';
const DYNAMIC_MODULES_ID = '\0commonjs-dynamic-modules';
const HELPERS_ID = '\0commonjsHelpers.js';
const IS_WRAPPED_COMMONJS = 'withRequireFunction';
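// Round-trip sketch for the suffix helpers (illustrative id): wrapId('/src/dep.js', PROXY_SUFFIX)
// yields '\0/src/dep.js?commonjs-proxy'; isWrappedId checks the suffix, and unwrapId strips the
// leading '\0' plus the suffix to recover '/src/dep.js'.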
// `x['default']` is used instead of `x.default` for backward compatibility with ES3 browsers.
// Minifiers like uglify will usually transpile it back if compatibility with ES3 is not enabled.
// This could be improved by inspecting Rollup's "generatedCode" option
const HELPERS = `
export var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
export function getDefaultExportFromCjs (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
export function getDefaultExportFromNamespaceIfPresent (n) {
return n && Object.prototype.hasOwnProperty.call(n, 'default') ? n['default'] : n;
}
export function getDefaultExportFromNamespaceIfNotNamed (n) {
return n && Object.prototype.hasOwnProperty.call(n, 'default') && Object.keys(n).length === 1 ? n['default'] : n;
}
export function getAugmentedNamespace(n) {
if (n.__esModule) return n;
var f = n.default;
if (typeof f == "function") {
var a = function a () {
if (this instanceof a) {
return Reflect.construct(f, arguments, this.constructor);
}
return f.apply(this, arguments);
};
a.prototype = f.prototype;
} else a = {};
Object.defineProperty(a, '__esModule', {value: true});
Object.keys(n).forEach(function (k) {
var d = Object.getOwnPropertyDescriptor(n, k);
Object.defineProperty(a, k, d.get ? d : {
enumerable: true,
get: function () {
return n[k];
}
});
});
return a;
}
`;
function getHelpersModule() {
return HELPERS;
}
function getUnknownRequireProxy(id, requireReturnsDefault) {
if (requireReturnsDefault === true || id.endsWith('.json')) {
return `export { default } from ${JSON.stringify(id)};`;
}
const name = getName(id);
const exported =
requireReturnsDefault === 'auto'
? `import { getDefaultExportFromNamespaceIfNotNamed } from "${HELPERS_ID}"; export default /*@__PURE__*/getDefaultExportFromNamespaceIfNotNamed(${name});`
: requireReturnsDefault === 'preferred'
? `import { getDefaultExportFromNamespaceIfPresent } from "${HELPERS_ID}"; export default /*@__PURE__*/getDefaultExportFromNamespaceIfPresent(${name});`
: !requireReturnsDefault
? `import { getAugmentedNamespace } from "${HELPERS_ID}"; export default /*@__PURE__*/getAugmentedNamespace(${name});`
: `export default ${name};`;
return `import * as ${name} from ${JSON.stringify(id)}; ${exported}`;
}
async function getStaticRequireProxy(id, requireReturnsDefault, loadModule) {
const name = getName(id);
const {
meta: { commonjs: commonjsMeta }
} = await loadModule({ id });
if (!commonjsMeta) {
return getUnknownRequireProxy(id, requireReturnsDefault);
}
if (commonjsMeta.isCommonJS) {
return `export { __moduleExports as default } from ${JSON.stringify(id)};`;
}
if (!requireReturnsDefault) {
return `import { getAugmentedNamespace } from "${HELPERS_ID}"; import * as ${name} from ${JSON.stringify(
id
)}; export default /*@__PURE__*/getAugmentedNamespace(${name});`;
}
if (
requireReturnsDefault !== true &&
(requireReturnsDefault === 'namespace' ||
!commonjsMeta.hasDefaultExport ||
(requireReturnsDefault === 'auto' && commonjsMeta.hasNamedExports))
) {
return `import * as ${name} from ${JSON.stringify(id)}; export default ${name};`;
}
return `export { default } from ${JSON.stringify(id)};`;
}
function getEntryProxy(id, defaultIsModuleExports, getModuleInfo, shebang) {
const {
meta: { commonjs: commonjsMeta },
hasDefaultExport
} = getModuleInfo(id);
if (!commonjsMeta || commonjsMeta.isCommonJS !== IS_WRAPPED_COMMONJS) {
const stringifiedId = JSON.stringify(id);
let code = `export * from ${stringifiedId};`;
if (hasDefaultExport) {
code += `export { default } from ${stringifiedId};`;
}
return shebang + code;
}
const result = getEsImportProxy(id, defaultIsModuleExports);
return {
...result,
code: shebang + result.code
};
}
function getEsImportProxy(id, defaultIsModuleExports) {
const name = getName(id);
const exportsName = `${name}Exports`;
const requireModule = `require${capitalize(name)}`;
let code =
`import { getDefaultExportFromCjs } from "${HELPERS_ID}";\n` +
`import { __require as ${requireModule} } from ${JSON.stringify(id)};\n` +
`var ${exportsName} = ${requireModule}();\n` +
`export { ${exportsName} as __moduleExports };`;
if (defaultIsModuleExports === true) {
code += `\nexport { ${exportsName} as default };`;
} else {
code += `export default /*@__PURE__*/getDefaultExportFromCjs(${exportsName});`;
}
return {
code,
syntheticNamedExports: '__moduleExports'
};
}
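// Roughly the code emitted by getEsImportProxy for an assumed id 'dep.js' (name 'dep'):
//   import { getDefaultExportFromCjs } from "\0commonjsHelpers.js";
//   import { __require as requireDep } from "dep.js";
//   var depExports = requireDep();
//   export { depExports as __moduleExports };
//   export default /*@__PURE__*/getDefaultExportFromCjs(depExports);
// together with syntheticNamedExports: '__moduleExports'.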
/* eslint-disable no-param-reassign, no-undefined */
function getCandidatesForExtension(resolved, extension) {
return [resolved + extension, `${resolved}${sep$1}index${extension}`];
}
function getCandidates(resolved, extensions) {
return extensions.reduce(
(paths, extension) => paths.concat(getCandidatesForExtension(resolved, extension)),
[resolved]
);
}
function resolveExtensions(importee, importer, extensions) {
// not our problem
if (importee[0] !== '.' || !importer) return undefined;
const resolved = resolve$3(dirname$1(importer), importee);
const candidates = getCandidates(resolved, extensions);
for (let i = 0; i < candidates.length; i += 1) {
try {
const stats = statSync$1(candidates[i]);
if (stats.isFile()) return { id: candidates[i] };
} catch (err) {
/* noop */
}
}
return undefined;
}
function getResolveId(extensions, isPossibleCjsId) {
const currentlyResolving = new Map();
return {
/**
* This is a Map of importers to Sets of require sources currently being
* resolved by resolveRequireSourcesAndUpdateMeta
*/
currentlyResolving,
async resolveId(importee, importer, resolveOptions) {
const customOptions = resolveOptions.custom;
// All logic below is specific to ES imports.
// Also, if we do not skip this logic for requires that are resolved while
// transforming a commonjs file, it can easily lead to deadlocks.
if (
customOptions &&
customOptions['node-resolve'] &&
customOptions['node-resolve'].isRequire
) {
return null;
}
const currentlyResolvingForParent = currentlyResolving.get(importer);
if (currentlyResolvingForParent && currentlyResolvingForParent.has(importee)) {
this.warn({
code: 'THIS_RESOLVE_WITHOUT_OPTIONS',
message:
'It appears a plugin has implemented a "resolveId" hook that uses "this.resolve" without forwarding the third "options" parameter of "resolveId". This is problematic as it can lead to wrong module resolutions especially for the node-resolve plugin and in certain cases cause early exit errors for the commonjs plugin.\nIn rare cases, this warning can appear if the same file is both imported and required from the same mixed ES/CommonJS module, in which case it can be ignored.',
url: 'https://rollupjs.org/guide/en/#resolveid'
});
return null;
}
if (isWrappedId(importee, WRAPPED_SUFFIX)) {
return unwrapId(importee, WRAPPED_SUFFIX);
}
if (
importee.endsWith(ENTRY_SUFFIX) ||
isWrappedId(importee, MODULE_SUFFIX) ||
isWrappedId(importee, EXPORTS_SUFFIX) ||
isWrappedId(importee, PROXY_SUFFIX) ||
isWrappedId(importee, ES_IMPORT_SUFFIX) ||
isWrappedId(importee, EXTERNAL_SUFFIX) ||
importee.startsWith(HELPERS_ID) ||
importee === DYNAMIC_MODULES_ID
) {
return importee;
}
if (importer) {
if (
importer === DYNAMIC_MODULES_ID ||
// Proxies are only importing resolved ids, no need to resolve again
isWrappedId(importer, PROXY_SUFFIX) ||
isWrappedId(importer, ES_IMPORT_SUFFIX) ||
importer.endsWith(ENTRY_SUFFIX)
) {
return importee;
}
if (isWrappedId(importer, EXTERNAL_SUFFIX)) {
// We need to return null for unresolved imports so that the proper warning is shown
if (
!(await this.resolve(
importee,
importer,
Object.assign({ skipSelf: true }, resolveOptions)
))
) {
return null;
}
// For other external imports, we need to make sure they are handled as external
return { id: importee, external: true };
}
}
if (importee.startsWith('\0')) {
return null;
}
// If this is an entry point or ESM import, we need to figure out if the importee is wrapped and
// if that is the case, we need to add a proxy.
const resolved =
(await this.resolve(
importee,
importer,
Object.assign({ skipSelf: true }, resolveOptions)
)) || resolveExtensions(importee, importer, extensions);
// Make sure that even if other plugins resolve again, we ignore our own proxies
if (
!resolved ||
resolved.external ||
resolved.id.endsWith(ENTRY_SUFFIX) ||
isWrappedId(resolved.id, ES_IMPORT_SUFFIX) ||
!isPossibleCjsId(resolved.id)
) {
return resolved;
}
const moduleInfo = await this.load(resolved);
const {
meta: { commonjs: commonjsMeta }
} = moduleInfo;
if (commonjsMeta) {
const { isCommonJS } = commonjsMeta;
if (isCommonJS) {
if (resolveOptions.isEntry) {
moduleInfo.moduleSideEffects = true;
// We must not precede entry proxies with a `\0` as that will mess up relative external resolution
return resolved.id + ENTRY_SUFFIX;
}
if (isCommonJS === IS_WRAPPED_COMMONJS) {
return { id: wrapId(resolved.id, ES_IMPORT_SUFFIX), meta: { commonjs: { resolved } } };
}
}
}
return resolved;
}
};
}
function getRequireResolver(extensions, detectCyclesAndConditional, currentlyResolving) {
const knownCjsModuleTypes = Object.create(null);
const requiredIds = Object.create(null);
const unconditionallyRequiredIds = Object.create(null);
const dependencies = Object.create(null);
const getDependencies = (id) => dependencies[id] || (dependencies[id] = new Set());
const isCyclic = (id) => {
const dependenciesToCheck = new Set(getDependencies(id));
for (const dependency of dependenciesToCheck) {
if (dependency === id) {
return true;
}
for (const childDependency of getDependencies(dependency)) {
dependenciesToCheck.add(childDependency);
}
}
return false;
};
// Once a module is listed here, its type (wrapped or not) is fixed and may
// not change for the rest of the current build, so that already transformed
// modules are not broken.
const fullyAnalyzedModules = Object.create(null);
const getTypeForFullyAnalyzedModule = (id) => {
const knownType = knownCjsModuleTypes[id];
if (knownType !== true || !detectCyclesAndConditional || fullyAnalyzedModules[id]) {
return knownType;
}
if (isCyclic(id)) {
return (knownCjsModuleTypes[id] = IS_WRAPPED_COMMONJS);
}
return knownType;
};
const setInitialParentType = (id, initialCommonJSType) => {
// Fully analyzed modules may never change type
if (fullyAnalyzedModules[id]) {
return;
}
knownCjsModuleTypes[id] = initialCommonJSType;
if (
detectCyclesAndConditional &&
knownCjsModuleTypes[id] === true &&
requiredIds[id] &&
!unconditionallyRequiredIds[id]
) {
knownCjsModuleTypes[id] = IS_WRAPPED_COMMONJS;
}
};
const analyzeRequiredModule = async (parentId, resolved, isConditional, loadModule) => {
const childId = resolved.id;
requiredIds[childId] = true;
if (!(isConditional || knownCjsModuleTypes[parentId] === IS_WRAPPED_COMMONJS)) {
unconditionallyRequiredIds[childId] = true;
}
getDependencies(parentId).add(childId);
if (!isCyclic(childId)) {
// This makes sure the current transform handler waits for all direct
// dependencies to be loaded and transformed, and therefore for all
// transitive CommonJS dependencies to be loaded as well, so that all
// cycles have been found and knownCjsModuleTypes is reliable.
await loadModule(resolved);
}
};
const getTypeForImportedModule = async (resolved, loadModule) => {
if (resolved.id in knownCjsModuleTypes) {
// This handles cyclic ES dependencies
return knownCjsModuleTypes[resolved.id];
}
const {
meta: { commonjs }
} = await loadModule(resolved);
return (commonjs && commonjs.isCommonJS) || false;
};
return {
getWrappedIds: () =>
Object.keys(knownCjsModuleTypes).filter(
(id) => knownCjsModuleTypes[id] === IS_WRAPPED_COMMONJS
),
isRequiredId: (id) => requiredIds[id],
async shouldTransformCachedModule({
id: parentId,
resolvedSources,
meta: { commonjs: parentMeta }
}) {
// We explicitly track ES modules to handle circular imports
if (!(parentMeta && parentMeta.isCommonJS)) knownCjsModuleTypes[parentId] = false;
if (isWrappedId(parentId, ES_IMPORT_SUFFIX)) return false;
const parentRequires = parentMeta && parentMeta.requires;
if (parentRequires) {
setInitialParentType(parentId, parentMeta.initialCommonJSType);
await Promise.all(
parentRequires.map(({ resolved, isConditional }) =>
analyzeRequiredModule(parentId, resolved, isConditional, this.load)
)
);
if (getTypeForFullyAnalyzedModule(parentId) !== parentMeta.isCommonJS) {
return true;
}
for (const {
resolved: { id }
} of parentRequires) {
if (getTypeForFullyAnalyzedModule(id) !== parentMeta.isRequiredCommonJS[id]) {
return true;
}
}
// Now that we decided to go with the cached copy, neither the parent
// module nor any of its children may change types anymore
fullyAnalyzedModules[parentId] = true;
for (const {
resolved: { id }
} of parentRequires) {
fullyAnalyzedModules[id] = true;
}
}
const parentRequireSet = new Set((parentRequires || []).map(({ resolved: { id } }) => id));
return (
await Promise.all(
Object.keys(resolvedSources)
.map((source) => resolvedSources[source])
.filter(({ id, external }) => !(external || parentRequireSet.has(id)))
.map(async (resolved) => {
if (isWrappedId(resolved.id, ES_IMPORT_SUFFIX)) {
return (
(await getTypeForImportedModule(
(
await this.load({ id: resolved.id })
).meta.commonjs.resolved,
this.load
)) !== IS_WRAPPED_COMMONJS
);
}
return (await getTypeForImportedModule(resolved, this.load)) === IS_WRAPPED_COMMONJS;
})
)
).some((shouldTransform) => shouldTransform);
},
/* eslint-disable no-param-reassign */
resolveRequireSourcesAndUpdateMeta:
(rollupContext) => async (parentId, isParentCommonJS, parentMeta, sources) => {
parentMeta.initialCommonJSType = isParentCommonJS;
parentMeta.requires = [];
parentMeta.isRequiredCommonJS = Object.create(null);
setInitialParentType(parentId, isParentCommonJS);
const currentlyResolvingForParent = currentlyResolving.get(parentId) || new Set();
currentlyResolving.set(parentId, currentlyResolvingForParent);
const requireTargets = await Promise.all(
sources.map(async ({ source, isConditional }) => {
// Never analyze or proxy internal modules
if (source.startsWith('\0')) {
return { id: source, allowProxy: false };
}
currentlyResolvingForParent.add(source);
const resolved =
(await rollupContext.resolve(source, parentId, {
skipSelf: false,
custom: { 'node-resolve': { isRequire: true } }
})) || resolveExtensions(source, parentId, extensions);
currentlyResolvingForParent.delete(source);
if (!resolved) {
return { id: wrapId(source, EXTERNAL_SUFFIX), allowProxy: false };
}
const childId = resolved.id;
if (resolved.external) {
return { id: wrapId(childId, EXTERNAL_SUFFIX), allowProxy: false };
}
parentMeta.requires.push({ resolved, isConditional });
await analyzeRequiredModule(parentId, resolved, isConditional, rollupContext.load);
return { id: childId, allowProxy: true };
})
);
parentMeta.isCommonJS = getTypeForFullyAnalyzedModule(parentId);
fullyAnalyzedModules[parentId] = true;
return requireTargets.map(({ id: dependencyId, allowProxy }, index) => {
// eslint-disable-next-line no-multi-assign
const isCommonJS = (parentMeta.isRequiredCommonJS[dependencyId] =
getTypeForFullyAnalyzedModule(dependencyId));
fullyAnalyzedModules[dependencyId] = true;
return {
source: sources[index].source,
id: allowProxy
? isCommonJS === IS_WRAPPED_COMMONJS
? wrapId(dependencyId, WRAPPED_SUFFIX)
: wrapId(dependencyId, PROXY_SUFFIX)
: dependencyId,
isCommonJS
};
});
},
isCurrentlyResolving(source, parentId) {
const currentlyResolvingForParent = currentlyResolving.get(parentId);
return currentlyResolvingForParent && currentlyResolvingForParent.has(source);
}
};
}
function validateVersion(actualVersion, peerDependencyVersion, name) {
const versionRegexp = /\^(\d+\.\d+\.\d+)/g;
let minMajor = Infinity;
let minMinor = Infinity;
let minPatch = Infinity;
let foundVersion;
// eslint-disable-next-line no-cond-assign
while ((foundVersion = versionRegexp.exec(peerDependencyVersion))) {
const [foundMajor, foundMinor, foundPatch] = foundVersion[1].split('.').map(Number);
if (foundMajor < minMajor) {
minMajor = foundMajor;
minMinor = foundMinor;
minPatch = foundPatch;
}
}
if (!actualVersion) {
throw new Error(
`Insufficient ${name} version: "@rollup/plugin-commonjs" requires at least ${name}@${minMajor}.${minMinor}.${minPatch}.`
);
}
const [major, minor, patch] = actualVersion.split('.').map(Number);
if (
major < minMajor ||
(major === minMajor && (minor < minMinor || (minor === minMinor && patch < minPatch)))
) {
throw new Error(
`Insufficient ${name} version: "@rollup/plugin-commonjs" requires at least ${name}@${minMajor}.${minMinor}.${minPatch} but found ${name}@${actualVersion}.`
);
}
}
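// Example (versions assumed): validateVersion('2.60.0', '^2.68.0||^3.0.0||^4.0.0', 'rollup')
// picks 2.68.0 as the lowest acceptable version from the peer range and throws, while
// '3.1.0' or '4.0.2' passes validation.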
const operators = {
'==': (x) => equals(x.left, x.right, false),
'!=': (x) => not(operators['=='](x)),
'===': (x) => equals(x.left, x.right, true),
'!==': (x) => not(operators['==='](x)),
'!': (x) => isFalsy(x.argument),
'&&': (x) => isTruthy(x.left) && isTruthy(x.right),
'||': (x) => isTruthy(x.left) || isTruthy(x.right)
};
function not(value) {
return value === null ? value : !value;
}
function equals(a, b, strict) {
if (a.type !== b.type) return null;
// eslint-disable-next-line eqeqeq
if (a.type === 'Literal') return strict ? a.value === b.value : a.value == b.value;
return null;
}
function isTruthy(node) {
if (!node) return false;
if (node.type === 'Literal') return !!node.value;
if (node.type === 'ParenthesizedExpression') return isTruthy(node.expression);
if (node.operator in operators) return operators[node.operator](node);
return null;
}
function isFalsy(node) {
return not(isTruthy(node));
}
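// Illustrative partial evaluation: for the AST of `'production' === 'production'` isTruthy
// returns true; for `someVar && false` the unknown identifier makes it return null,
// signalling "statically unknown" rather than a definite false.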
function getKeypath(node) {
const parts = [];
while (node.type === 'MemberExpression') {
if (node.computed) return null;
parts.unshift(node.property.name);
// eslint-disable-next-line no-param-reassign
node = node.object;
}
if (node.type !== 'Identifier') return null;
const { name } = node;
parts.unshift(name);
return { name, keypath: parts.join('.') };
}
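// Example (illustrative expression): for `process.env.NODE_ENV`, getKeypath returns
// { name: 'process', keypath: 'process.env.NODE_ENV' }; any computed access such as
// `process['env']` makes it return null instead.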
const KEY_COMPILED_ESM = '__esModule';
function getDefineCompiledEsmType(node) {
const definedPropertyWithExports = getDefinePropertyCallName(node, 'exports');
const definedProperty =
definedPropertyWithExports || getDefinePropertyCallName(node, 'module.exports');
if (definedProperty && definedProperty.key === KEY_COMPILED_ESM) {
return isTruthy(definedProperty.value)
? definedPropertyWithExports
? 'exports'
: 'module'
: false;
}
return false;
}
function getDefinePropertyCallName(node, targetName) {
const {
callee: { object, property }
} = node;
if (!object || object.type !== 'Identifier' || object.name !== 'Object') return;
if (!property || property.type !== 'Identifier' || property.name !== 'defineProperty') return;
if (node.arguments.length !== 3) return;
const targetNames = targetName.split('.');
const [target, key, value] = node.arguments;
if (targetNames.length === 1) {
if (target.type !== 'Identifier' || target.name !== targetNames[0]) {
return;
}
}
if (targetNames.length === 2) {
if (
target.type !== 'MemberExpression' ||
target.object.name !== targetNames[0] ||
target.property.name !== targetNames[1]
) {
return;
}
}
if (value.type !== 'ObjectExpression' || !value.properties) return;
const valueProperty = value.properties.find((p) => p.key && p.key.name === 'value');
if (!valueProperty || !valueProperty.value) return;
// eslint-disable-next-line consistent-return
return { key: key.value, value: valueProperty.value };
}
function isShorthandProperty(parent) {
return parent && parent.type === 'Property' && parent.shorthand;
}
function wrapCode(magicString, uses, moduleName, exportsName, indentExclusionRanges) {
const args = [];
const passedArgs = [];
if (uses.module) {
args.push('module');
passedArgs.push(moduleName);
}
if (uses.exports) {
args.push('exports');
passedArgs.push(uses.module ? `${moduleName}.exports` : exportsName);
}
magicString
.trim()
.indent('\t', { exclude: indentExclusionRanges })
.prepend(`(function (${args.join(', ')}) {\n`)
// For some reason, this line is only indented correctly when using a
// require-wrapper if we have this leading space
.append(` \n} (${passedArgs.join(', ')}));`);
}
function rewriteExportsAndGetExportsBlock(
magicString,
moduleName,
exportsName,
exportedExportsName,
wrapped,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsAssignmentsByName,
topLevelAssignments,
defineCompiledEsmExpressions,
deconflictedExportNames,
code,
HELPERS_NAME,
exportMode,
defaultIsModuleExports,
usesRequireWrapper,
requireName
) {
const exports = [];
const exportDeclarations = [];
if (usesRequireWrapper) {
getExportsWhenUsingRequireWrapper(
magicString,
wrapped,
exportMode,
exports,
moduleExportsAssignments,
exportsAssignmentsByName,
moduleName,
exportsName,
requireName,
defineCompiledEsmExpressions
);
} else if (exportMode === 'replace') {
getExportsForReplacedModuleExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsName,
defaultIsModuleExports,
HELPERS_NAME
);
} else {
if (exportMode === 'module') {
exportDeclarations.push(`var ${exportedExportsName} = ${moduleName}.exports`);
exports.push(`${exportedExportsName} as __moduleExports`);
} else {
exports.push(`${exportsName} as __moduleExports`);
}
if (wrapped) {
exportDeclarations.push(
getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME)
);
} else {
getExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
exportsAssignmentsByName,
deconflictedExportNames,
topLevelAssignments,
moduleName,
exportsName,
exportedExportsName,
defineCompiledEsmExpressions,
HELPERS_NAME,
defaultIsModuleExports,
exportMode
);
}
}
if (exports.length) {
exportDeclarations.push(`export { ${exports.join(', ')} }`);
}
return `\n\n${exportDeclarations.join(';\n')};`;
}
function getExportsWhenUsingRequireWrapper(
magicString,
wrapped,
exportMode,
exports,
moduleExportsAssignments,
exportsAssignmentsByName,
moduleName,
exportsName,
requireName,
defineCompiledEsmExpressions
) {
exports.push(`${requireName} as __require`);
if (wrapped) return;
if (exportMode === 'replace') {
rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, exportsName);
} else {
rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, `${moduleName}.exports`);
// Collect and rewrite named exports
for (const [exportName, { nodes }] of exportsAssignmentsByName) {
for (const { node, type } of nodes) {
magicString.overwrite(
node.start,
node.left.end,
`${
exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName
}.${exportName}`
);
}
}
replaceDefineCompiledEsmExpressionsAndGetIfRestorable(
defineCompiledEsmExpressions,
magicString,
exportMode,
moduleName,
exportsName
);
}
}
function getExportsForReplacedModuleExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsName,
defaultIsModuleExports,
HELPERS_NAME
) {
for (const { left } of moduleExportsAssignments) {
magicString.overwrite(left.start, left.end, exportsName);
}
magicString.prependRight(firstTopLevelModuleExportsAssignment.left.start, 'var ');
exports.push(`${exportsName} as __moduleExports`);
exportDeclarations.push(
getDefaultExportDeclaration(exportsName, defaultIsModuleExports, HELPERS_NAME)
);
}
function getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME) {
return `export default ${
defaultIsModuleExports === true
? exportedExportsName
: defaultIsModuleExports === false
? `${exportedExportsName}.default`
: `/*@__PURE__*/${HELPERS_NAME}.getDefaultExportFromCjs(${exportedExportsName})`
}`;
}
function getExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
exportsAssignmentsByName,
deconflictedExportNames,
topLevelAssignments,
moduleName,
exportsName,
exportedExportsName,
defineCompiledEsmExpressions,
HELPERS_NAME,
defaultIsModuleExports,
exportMode
) {
let deconflictedDefaultExportName;
// Collect and rewrite module.exports assignments
for (const { left } of moduleExportsAssignments) {
magicString.overwrite(left.start, left.end, `${moduleName}.exports`);
}
// Collect and rewrite named exports
for (const [exportName, { nodes }] of exportsAssignmentsByName) {
const deconflicted = deconflictedExportNames[exportName];
let needsDeclaration = true;
for (const { node, type } of nodes) {
let replacement = `${deconflicted} = ${
exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName
}.${exportName}`;
if (needsDeclaration && topLevelAssignments.has(node)) {
replacement = `var ${replacement}`;
needsDeclaration = false;
}
magicString.overwrite(node.start, node.left.end, replacement);
}
if (needsDeclaration) {
magicString.prepend(`var ${deconflicted};\n`);
}
if (exportName === 'default') {
deconflictedDefaultExportName = deconflicted;
} else {
exports.push(exportName === deconflicted ? exportName : `${deconflicted} as ${exportName}`);
}
}
const isRestorableCompiledEsm = replaceDefineCompiledEsmExpressionsAndGetIfRestorable(
defineCompiledEsmExpressions,
magicString,
exportMode,
moduleName,
exportsName
);
if (
defaultIsModuleExports === false ||
(defaultIsModuleExports === 'auto' &&
isRestorableCompiledEsm &&
moduleExportsAssignments.length === 0)
) {
// If there is no deconflictedDefaultExportName, then we use the namespace as
// fallback because there can be no "default" property on the namespace
exports.push(`${deconflictedDefaultExportName || exportedExportsName} as default`);
} else if (
defaultIsModuleExports === true ||
(!isRestorableCompiledEsm && moduleExportsAssignments.length === 0)
) {
exports.push(`${exportedExportsName} as default`);
} else {
exportDeclarations.push(
getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME)
);
}
}
function rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, exportsName) {
for (const { left } of moduleExportsAssignments) {
magicString.overwrite(left.start, left.end, exportsName);
}
}
function replaceDefineCompiledEsmExpressionsAndGetIfRestorable(
defineCompiledEsmExpressions,
magicString,
exportMode,
moduleName,
exportsName
) {
let isRestorableCompiledEsm = false;
for (const { node, type } of defineCompiledEsmExpressions) {
isRestorableCompiledEsm = true;
const moduleExportsExpression =
node.type === 'CallExpression' ? node.arguments[0] : node.left.object;
magicString.overwrite(
moduleExportsExpression.start,
moduleExportsExpression.end,
exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName
);
}
return isRestorableCompiledEsm;
}
function isRequireExpression(node, scope) {
if (!node) return false;
if (node.type !== 'CallExpression') return false;
// Weird case of `require()` or `module.require()` without arguments
if (node.arguments.length === 0) return false;
return isRequire(node.callee, scope);
}
function isRequire(node, scope) {
return (
(node.type === 'Identifier' && node.name === 'require' && !scope.contains('require')) ||
(node.type === 'MemberExpression' && isModuleRequire(node, scope))
);
}
function isModuleRequire({ object, property }, scope) {
return (
object.type === 'Identifier' &&
object.name === 'module' &&
property.type === 'Identifier' &&
property.name === 'require' &&
!scope.contains('module')
);
}
function hasDynamicArguments(node) {
return (
node.arguments.length > 1 ||
(node.arguments[0].type !== 'Literal' &&
(node.arguments[0].type !== 'TemplateLiteral' || node.arguments[0].expressions.length > 0))
);
}
const reservedMethod = { resolve: true, cache: true, main: true };
function isNodeRequirePropertyAccess(parent) {
return parent && parent.property && reservedMethod[parent.property.name];
}
function getRequireStringArg(node) {
return node.arguments[0].type === 'Literal'
? node.arguments[0].value
: node.arguments[0].quasis[0].value.cooked;
}
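// Illustrative examples of how the helpers above classify require calls, given a
// parsed CallExpression `node`:
//   require('./a.js')        -> hasDynamicArguments(node) === false, getRequireStringArg(node) === './a.js'
//   require(`./a.js`)        -> template literal without expressions, treated like a string literal
//   require(base + '/a.js')  -> hasDynamicArguments(node) === true, handled as a dynamic require
//   require.resolve('./a')   -> isNodeRequirePropertyAccess(parent) is truthy ('resolve', 'cache' and 'main' are reserved)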
function getRequireHandlers() {
const requireExpressions = [];
function addRequireExpression(
sourceId,
node,
scope,
usesReturnValue,
isInsideTryBlock,
isInsideConditional,
toBeRemoved
) {
requireExpressions.push({
sourceId,
node,
scope,
usesReturnValue,
isInsideTryBlock,
isInsideConditional,
toBeRemoved
});
}
async function rewriteRequireExpressionsAndGetImportBlock(
magicString,
topLevelDeclarations,
reassignedNames,
helpersName,
dynamicRequireName,
moduleName,
exportsName,
id,
exportMode,
resolveRequireSourcesAndUpdateMeta,
needsRequireWrapper,
isEsModule,
isDynamicRequireModulesEnabled,
getIgnoreTryCatchRequireStatementMode,
commonjsMeta
) {
const imports = [];
imports.push(`import * as ${helpersName} from "${HELPERS_ID}"`);
if (dynamicRequireName) {
imports.push(
`import { ${
isDynamicRequireModulesEnabled ? CREATE_COMMONJS_REQUIRE_EXPORT : COMMONJS_REQUIRE_EXPORT
} as ${dynamicRequireName} } from "${DYNAMIC_MODULES_ID}"`
);
}
if (exportMode === 'module') {
imports.push(
`import { __module as ${moduleName} } from ${JSON.stringify(wrapId(id, MODULE_SUFFIX))}`,
`var ${exportsName} = ${moduleName}.exports`
);
} else if (exportMode === 'exports') {
imports.push(
`import { __exports as ${exportsName} } from ${JSON.stringify(wrapId(id, EXPORTS_SUFFIX))}`
);
}
const requiresBySource = collectSources(requireExpressions);
const requireTargets = await resolveRequireSourcesAndUpdateMeta(
id,
needsRequireWrapper ? IS_WRAPPED_COMMONJS : !isEsModule,
commonjsMeta,
Object.keys(requiresBySource).map((source) => {
return {
source,
isConditional: requiresBySource[source].every((require) => require.isInsideConditional)
};
})
);
processRequireExpressions(
imports,
requireTargets,
requiresBySource,
getIgnoreTryCatchRequireStatementMode,
magicString
);
return imports.length ? `${imports.join(';\n')};\n\n` : '';
}
return {
addRequireExpression,
rewriteRequireExpressionsAndGetImportBlock
};
}
function collectSources(requireExpressions) {
const requiresBySource = Object.create(null);
for (const requireExpression of requireExpressions) {
const { sourceId } = requireExpression;
if (!requiresBySource[sourceId]) {
requiresBySource[sourceId] = [];
}
const requires = requiresBySource[sourceId];
requires.push(requireExpression);
}
return requiresBySource;
}
function processRequireExpressions(
imports,
requireTargets,
requiresBySource,
getIgnoreTryCatchRequireStatementMode,
magicString
) {
const generateRequireName = getGenerateRequireName();
for (const { source, id: resolvedId, isCommonJS } of requireTargets) {
const requires = requiresBySource[source];
const name = generateRequireName(requires);
let usesRequired = false;
let needsImport = false;
for (const { node, usesReturnValue, toBeRemoved, isInsideTryBlock } of requires) {
const { canConvertRequire, shouldRemoveRequire } =
isInsideTryBlock && isWrappedId(resolvedId, EXTERNAL_SUFFIX)
? getIgnoreTryCatchRequireStatementMode(source)
: { canConvertRequire: true, shouldRemoveRequire: false };
if (shouldRemoveRequire) {
if (usesReturnValue) {
magicString.overwrite(node.start, node.end, 'undefined');
} else {
magicString.remove(toBeRemoved.start, toBeRemoved.end);
}
} else if (canConvertRequire) {
needsImport = true;
if (isCommonJS === IS_WRAPPED_COMMONJS) {
magicString.overwrite(node.start, node.end, `${name}()`);
} else if (usesReturnValue) {
usesRequired = true;
magicString.overwrite(node.start, node.end, name);
} else {
magicString.remove(toBeRemoved.start, toBeRemoved.end);
}
}
}
if (needsImport) {
if (isCommonJS === IS_WRAPPED_COMMONJS) {
imports.push(`import { __require as ${name} } from ${JSON.stringify(resolvedId)}`);
} else {
imports.push(`import ${usesRequired ? `${name} from ` : ''}${JSON.stringify(resolvedId)}`);
}
}
}
}
function getGenerateRequireName() {
let uid = 0;
return (requires) => {
let name;
const hasNameConflict = ({ scope }) => scope.contains(name);
do {
name = `require$$${uid}`;
uid += 1;
} while (requires.some(hasNameConflict));
return name;
};
}
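// Illustrative example: the generator above hands out sequential fallback names per
// module, e.g. `require$$0`, `require$$1`, ..., skipping a candidate whenever one of
// the requiring scopes already contains a binding with that name.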
/* eslint-disable no-param-reassign, no-shadow, no-underscore-dangle, no-continue */
const exportsPattern = /^(?:module\.)?exports(?:\.([a-zA-Z_$][a-zA-Z_$0-9]*))?$/;
const functionType = /^(?:FunctionDeclaration|FunctionExpression|ArrowFunctionExpression)$/;
// There are three different types of CommonJS modules, described by their
// "exportMode":
// - exports: Only assignments to (module.)exports properties
// - replace: A single assignment to module.exports itself
// - module: Anything else
// Special cases:
// - usesRequireWrapper
// - isWrapped
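// Illustrative shapes for the three exportModes above:
//   exports: `exports.foo = 1; module.exports.bar = 2;` (only property assignments)
//   replace: `module.exports = { foo: 1 };` as the single assignment to module.exports
//   module:  anything else, e.g. `module.exports = fn; module.exports.extra = 1;`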
async function transformCommonjs(
parse,
code,
id,
isEsModule,
ignoreGlobal,
ignoreRequire,
ignoreDynamicRequires,
getIgnoreTryCatchRequireStatementMode,
sourceMap,
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
astCache,
defaultIsModuleExports,
needsRequireWrapper,
resolveRequireSourcesAndUpdateMeta,
isRequired,
checkDynamicRequire,
commonjsMeta
) {
const ast = astCache || tryParse(parse, code, id);
const magicString = new MagicString(code);
const uses = {
module: false,
exports: false,
global: false,
require: false
};
const virtualDynamicRequirePath =
isDynamicRequireModulesEnabled && getVirtualPathForDynamicRequirePath(dirname$1(id), commonDir);
let scope = attachScopes(ast, 'scope');
let lexicalDepth = 0;
let programDepth = 0;
let classBodyDepth = 0;
let currentTryBlockEnd = null;
let shouldWrap = false;
const globals = new Set();
// A conditionalNode is a node for which execution is not guaranteed. If such a node is a require
// or contains nested requires, those should be handled as function calls unless there is an
// unconditional require elsewhere.
let currentConditionalNodeEnd = null;
const conditionalNodes = new Set();
const { addRequireExpression, rewriteRequireExpressionsAndGetImportBlock } = getRequireHandlers();
// See which names are assigned to. This is necessary to prevent
// illegally replacing `var foo = require('foo')` with `import foo from 'foo'`,
// where `foo` is later reassigned. (This happens in the wild. CommonJS, sigh)
const reassignedNames = new Set();
const topLevelDeclarations = [];
const skippedNodes = new Set();
const moduleAccessScopes = new Set([scope]);
const exportsAccessScopes = new Set([scope]);
const moduleExportsAssignments = [];
let firstTopLevelModuleExportsAssignment = null;
const exportsAssignmentsByName = new Map();
const topLevelAssignments = new Set();
const topLevelDefineCompiledEsmExpressions = [];
const replacedGlobal = [];
const replacedDynamicRequires = [];
const importedVariables = new Set();
const indentExclusionRanges = [];
walk$3(ast, {
enter(node, parent) {
if (skippedNodes.has(node)) {
this.skip();
return;
}
if (currentTryBlockEnd !== null && node.start > currentTryBlockEnd) {
currentTryBlockEnd = null;
}
if (currentConditionalNodeEnd !== null && node.start > currentConditionalNodeEnd) {
currentConditionalNodeEnd = null;
}
if (currentConditionalNodeEnd === null && conditionalNodes.has(node)) {
currentConditionalNodeEnd = node.end;
}
programDepth += 1;
if (node.scope) ({ scope } = node);
if (functionType.test(node.type)) lexicalDepth += 1;
if (sourceMap) {
magicString.addSourcemapLocation(node.start);
magicString.addSourcemapLocation(node.end);
}
// eslint-disable-next-line default-case
switch (node.type) {
case 'AssignmentExpression':
if (node.left.type === 'MemberExpression') {
const flattened = getKeypath(node.left);
if (!flattened || scope.contains(flattened.name)) return;
const exportsPatternMatch = exportsPattern.exec(flattened.keypath);
if (!exportsPatternMatch || flattened.keypath === 'exports') return;
const [, exportName] = exportsPatternMatch;
uses[flattened.name] = true;
// we're dealing with `module.exports = ...` or `[module.]exports.foo = ...`
if (flattened.keypath === 'module.exports') {
moduleExportsAssignments.push(node);
if (programDepth > 3) {
moduleAccessScopes.add(scope);
} else if (!firstTopLevelModuleExportsAssignment) {
firstTopLevelModuleExportsAssignment = node;
}
} else if (exportName === KEY_COMPILED_ESM) {
if (programDepth > 3) {
shouldWrap = true;
} else {
// The "type" is either "module" or "exports" to discern
// assignments to module.exports vs exports if needed
topLevelDefineCompiledEsmExpressions.push({ node, type: flattened.name });
}
} else {
const exportsAssignments = exportsAssignmentsByName.get(exportName) || {
nodes: [],
scopes: new Set()
};
exportsAssignments.nodes.push({ node, type: flattened.name });
exportsAssignments.scopes.add(scope);
exportsAccessScopes.add(scope);
exportsAssignmentsByName.set(exportName, exportsAssignments);
if (programDepth <= 3) {
topLevelAssignments.add(node);
}
}
skippedNodes.add(node.left);
} else {
for (const name of extractAssignedNames(node.left)) {
reassignedNames.add(name);
}
}
return;
case 'CallExpression': {
const defineCompiledEsmType = getDefineCompiledEsmType(node);
if (defineCompiledEsmType) {
if (programDepth === 3 && parent.type === 'ExpressionStatement') {
// skip special handling for [module.]exports until we know we render this
skippedNodes.add(node.arguments[0]);
topLevelDefineCompiledEsmExpressions.push({ node, type: defineCompiledEsmType });
} else {
shouldWrap = true;
}
return;
}
// Transform require.resolve
if (
isDynamicRequireModulesEnabled &&
node.callee.object &&
isRequire(node.callee.object, scope) &&
node.callee.property.name === 'resolve'
) {
checkDynamicRequire(node.start);
uses.require = true;
const requireNode = node.callee.object;
replacedDynamicRequires.push(requireNode);
skippedNodes.add(node.callee);
return;
}
if (!isRequireExpression(node, scope)) {
const keypath = getKeypath(node.callee);
if (keypath && importedVariables.has(keypath.name)) {
// Heuristic to deoptimize requires after a required function has been called
currentConditionalNodeEnd = Infinity;
}
return;
}
skippedNodes.add(node.callee);
uses.require = true;
if (hasDynamicArguments(node)) {
if (isDynamicRequireModulesEnabled) {
checkDynamicRequire(node.start);
}
if (!ignoreDynamicRequires) {
replacedDynamicRequires.push(node.callee);
}
return;
}
const requireStringArg = getRequireStringArg(node);
if (!ignoreRequire(requireStringArg)) {
const usesReturnValue = parent.type !== 'ExpressionStatement';
const toBeRemoved =
parent.type === 'ExpressionStatement' &&
(!currentConditionalNodeEnd ||
// We should completely remove requires directly in a try-catch
// so that Rollup can remove the try-catch
(currentTryBlockEnd !== null && currentTryBlockEnd < currentConditionalNodeEnd))
? parent
: node;
addRequireExpression(
requireStringArg,
node,
scope,
usesReturnValue,
currentTryBlockEnd !== null,
currentConditionalNodeEnd !== null,
toBeRemoved
);
if (parent.type === 'VariableDeclarator' && parent.id.type === 'Identifier') {
for (const name of extractAssignedNames(parent.id)) {
importedVariables.add(name);
}
}
}
return;
}
case 'ClassBody':
classBodyDepth += 1;
return;
case 'ConditionalExpression':
case 'IfStatement':
// skip dead branches
if (isFalsy(node.test)) {
skippedNodes.add(node.consequent);
} else if (isTruthy(node.test)) {
if (node.alternate) {
skippedNodes.add(node.alternate);
}
} else {
conditionalNodes.add(node.consequent);
if (node.alternate) {
conditionalNodes.add(node.alternate);
}
}
return;
case 'ArrowFunctionExpression':
case 'FunctionDeclaration':
case 'FunctionExpression':
// requires in functions should be conditional unless the function is an IIFE
if (
currentConditionalNodeEnd === null &&
!(parent.type === 'CallExpression' && parent.callee === node)
) {
currentConditionalNodeEnd = node.end;
}
return;
case 'Identifier': {
const { name } = node;
if (
!isReference(node, parent) ||
scope.contains(name) ||
(parent.type === 'PropertyDefinition' && parent.key === node)
)
return;
switch (name) {
case 'require':
uses.require = true;
if (isNodeRequirePropertyAccess(parent)) {
return;
}
if (!ignoreDynamicRequires) {
if (isShorthandProperty(parent)) {
// as key and value are the same object, isReference regards
// both as references, so we need to skip now
skippedNodes.add(parent.value);
magicString.prependRight(node.start, 'require: ');
}
replacedDynamicRequires.push(node);
}
return;
case 'module':
case 'exports':
shouldWrap = true;
uses[name] = true;
return;
case 'global':
uses.global = true;
if (!ignoreGlobal) {
replacedGlobal.push(node);
}
return;
case 'define':
magicString.overwrite(node.start, node.end, 'undefined', {
storeName: true
});
return;
default:
globals.add(name);
return;
}
}
case 'LogicalExpression':
// skip dead branches
if (node.operator === '&&') {
if (isFalsy(node.left)) {
skippedNodes.add(node.right);
} else if (!isTruthy(node.left)) {
conditionalNodes.add(node.right);
}
} else if (node.operator === '||') {
if (isTruthy(node.left)) {
skippedNodes.add(node.right);
} else if (!isFalsy(node.left)) {
conditionalNodes.add(node.right);
}
}
return;
case 'MemberExpression':
if (!isDynamicRequireModulesEnabled && isModuleRequire(node, scope)) {
uses.require = true;
replacedDynamicRequires.push(node);
skippedNodes.add(node.object);
skippedNodes.add(node.property);
}
return;
case 'ReturnStatement':
// if top-level return, we need to wrap it
if (lexicalDepth === 0) {
shouldWrap = true;
}
return;
case 'ThisExpression':
// rewrite top-level `this` as `commonjsHelpers.commonjsGlobal`
if (lexicalDepth === 0 && !classBodyDepth) {
uses.global = true;
if (!ignoreGlobal) {
replacedGlobal.push(node);
}
}
return;
case 'TryStatement':
if (currentTryBlockEnd === null) {
currentTryBlockEnd = node.block.end;
}
if (currentConditionalNodeEnd === null) {
currentConditionalNodeEnd = node.end;
}
return;
case 'UnaryExpression':
// rewrite `typeof module`, `typeof module.exports` and `typeof exports` (https://github.com/rollup/rollup-plugin-commonjs/issues/151)
if (node.operator === 'typeof') {
const flattened = getKeypath(node.argument);
if (!flattened) return;
if (scope.contains(flattened.name)) return;
if (
!isEsModule &&
(flattened.keypath === 'module.exports' ||
flattened.keypath === 'module' ||
flattened.keypath === 'exports')
) {
magicString.overwrite(node.start, node.end, `'object'`, {
storeName: false
});
}
}
return;
case 'VariableDeclaration':
if (!scope.parent) {
topLevelDeclarations.push(node);
}
return;
case 'TemplateElement':
if (node.value.raw.includes('\n')) {
indentExclusionRanges.push([node.start, node.end]);
}
}
},
leave(node) {
programDepth -= 1;
if (node.scope) scope = scope.parent;
if (functionType.test(node.type)) lexicalDepth -= 1;
if (node.type === 'ClassBody') classBodyDepth -= 1;
}
});
const nameBase = getName(id);
const exportsName = deconflict([...exportsAccessScopes], globals, nameBase);
const moduleName = deconflict([...moduleAccessScopes], globals, `${nameBase}Module`);
const requireName = deconflict([scope], globals, `require${capitalize(nameBase)}`);
const isRequiredName = deconflict([scope], globals, `hasRequired${capitalize(nameBase)}`);
const helpersName = deconflict([scope], globals, 'commonjsHelpers');
const dynamicRequireName =
replacedDynamicRequires.length > 0 &&
deconflict(
[scope],
globals,
isDynamicRequireModulesEnabled ? CREATE_COMMONJS_REQUIRE_EXPORT : COMMONJS_REQUIRE_EXPORT
);
const deconflictedExportNames = Object.create(null);
for (const [exportName, { scopes }] of exportsAssignmentsByName) {
deconflictedExportNames[exportName] = deconflict([...scopes], globals, exportName);
}
for (const node of replacedGlobal) {
magicString.overwrite(node.start, node.end, `${helpersName}.commonjsGlobal`, {
storeName: true
});
}
for (const node of replacedDynamicRequires) {
magicString.overwrite(
node.start,
node.end,
isDynamicRequireModulesEnabled
? `${dynamicRequireName}(${JSON.stringify(virtualDynamicRequirePath)})`
: dynamicRequireName,
{
contentOnly: true,
storeName: true
}
);
}
// We cannot wrap ES/mixed modules
shouldWrap = !isEsModule && (shouldWrap || (uses.exports && moduleExportsAssignments.length > 0));
if (
!(
shouldWrap ||
isRequired ||
needsRequireWrapper ||
uses.module ||
uses.exports ||
uses.require ||
topLevelDefineCompiledEsmExpressions.length > 0
) &&
(ignoreGlobal || !uses.global)
) {
return { meta: { commonjs: { isCommonJS: false } } };
}
let leadingComment = '';
if (code.startsWith('/*')) {
const commentEnd = code.indexOf('*/', 2) + 2;
leadingComment = `${code.slice(0, commentEnd)}\n`;
magicString.remove(0, commentEnd).trim();
}
let shebang = '';
if (code.startsWith('#!')) {
const shebangEndPosition = code.indexOf('\n') + 1;
shebang = code.slice(0, shebangEndPosition);
magicString.remove(0, shebangEndPosition).trim();
}
const exportMode = isEsModule
? 'none'
: shouldWrap
? uses.module
? 'module'
: 'exports'
: firstTopLevelModuleExportsAssignment
? exportsAssignmentsByName.size === 0 && topLevelDefineCompiledEsmExpressions.length === 0
? 'replace'
: 'module'
: moduleExportsAssignments.length === 0
? 'exports'
: 'module';
const exportedExportsName =
exportMode === 'module' ? deconflict([], globals, `${nameBase}Exports`) : exportsName;
const importBlock = await rewriteRequireExpressionsAndGetImportBlock(
magicString,
topLevelDeclarations,
reassignedNames,
helpersName,
dynamicRequireName,
moduleName,
exportsName,
id,
exportMode,
resolveRequireSourcesAndUpdateMeta,
needsRequireWrapper,
isEsModule,
isDynamicRequireModulesEnabled,
getIgnoreTryCatchRequireStatementMode,
commonjsMeta
);
const usesRequireWrapper = commonjsMeta.isCommonJS === IS_WRAPPED_COMMONJS;
const exportBlock = isEsModule
? ''
: rewriteExportsAndGetExportsBlock(
magicString,
moduleName,
exportsName,
exportedExportsName,
shouldWrap,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsAssignmentsByName,
topLevelAssignments,
topLevelDefineCompiledEsmExpressions,
deconflictedExportNames,
code,
helpersName,
exportMode,
defaultIsModuleExports,
usesRequireWrapper,
requireName
);
if (shouldWrap) {
wrapCode(magicString, uses, moduleName, exportsName, indentExclusionRanges);
}
if (usesRequireWrapper) {
magicString.trim().indent('\t', {
exclude: indentExclusionRanges
});
const exported = exportMode === 'module' ? `${moduleName}.exports` : exportsName;
magicString.prepend(
`var ${isRequiredName};
function ${requireName} () {
\tif (${isRequiredName}) return ${exported};
\t${isRequiredName} = 1;
`
).append(`
\treturn ${exported};
}`);
if (exportMode === 'replace') {
magicString.prepend(`var ${exportsName};\n`);
}
}
magicString
.trim()
.prepend(shebang + leadingComment + importBlock)
.append(exportBlock);
return {
code: magicString.toString(),
map: sourceMap ? magicString.generateMap() : null,
syntheticNamedExports: isEsModule || usesRequireWrapper ? false : '__moduleExports',
meta: { commonjs: { ...commonjsMeta, shebang } }
};
}
const PLUGIN_NAME = 'commonjs';
function commonjs(options = {}) {
const {
ignoreGlobal,
ignoreDynamicRequires,
requireReturnsDefault: requireReturnsDefaultOption,
defaultIsModuleExports: defaultIsModuleExportsOption,
esmExternals
} = options;
const extensions = options.extensions || ['.js'];
const filter = createFilter$1(options.include, options.exclude);
const isPossibleCjsId = (id) => {
const extName = extname(id);
return extName === '.cjs' || (extensions.includes(extName) && filter(id));
};
const { strictRequiresFilter, detectCyclesAndConditional } = getStrictRequiresFilter(options);
const getRequireReturnsDefault =
typeof requireReturnsDefaultOption === 'function'
? requireReturnsDefaultOption
: () => requireReturnsDefaultOption;
let esmExternalIds;
const isEsmExternal =
typeof esmExternals === 'function'
? esmExternals
: Array.isArray(esmExternals)
? ((esmExternalIds = new Set(esmExternals)), (id) => esmExternalIds.has(id))
: () => esmExternals;
const getDefaultIsModuleExports =
typeof defaultIsModuleExportsOption === 'function'
? defaultIsModuleExportsOption
: () =>
typeof defaultIsModuleExportsOption === 'boolean' ? defaultIsModuleExportsOption : 'auto';
const dynamicRequireRoot =
typeof options.dynamicRequireRoot === 'string'
? resolve$3(options.dynamicRequireRoot)
: process.cwd();
const { commonDir, dynamicRequireModules } = getDynamicRequireModules(
options.dynamicRequireTargets,
dynamicRequireRoot
);
const isDynamicRequireModulesEnabled = dynamicRequireModules.size > 0;
const ignoreRequire =
typeof options.ignore === 'function'
? options.ignore
: Array.isArray(options.ignore)
? (id) => options.ignore.includes(id)
: () => false;
const getIgnoreTryCatchRequireStatementMode = (id) => {
const mode =
typeof options.ignoreTryCatch === 'function'
? options.ignoreTryCatch(id)
: Array.isArray(options.ignoreTryCatch)
? options.ignoreTryCatch.includes(id)
: typeof options.ignoreTryCatch !== 'undefined'
? options.ignoreTryCatch
: true;
return {
canConvertRequire: mode !== 'remove' && mode !== true,
shouldRemoveRequire: mode === 'remove'
};
};
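// Illustrative mapping of the ignoreTryCatch option to the mode object above:
//   true (default) -> { canConvertRequire: false, shouldRemoveRequire: false } (leave the require call as-is)
//   false          -> { canConvertRequire: true,  shouldRemoveRequire: false } (convert it to an import)
//   'remove'       -> { canConvertRequire: false, shouldRemoveRequire: true }  (drop it, or replace its value with `undefined`)
//   ['fs']         -> behaves like `true` for 'fs' and like `false` for every other id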
const { currentlyResolving, resolveId } = getResolveId(extensions, isPossibleCjsId);
const sourceMap = options.sourceMap !== false;
// Initialized in buildStart
let requireResolver;
function transformAndCheckExports(code, id) {
const normalizedId = normalizePathSlashes(id);
const { isEsModule, hasDefaultExport, hasNamedExports, ast } = analyzeTopLevelStatements(
this.parse,
code,
id
);
const commonjsMeta = this.getModuleInfo(id).meta.commonjs || {};
if (hasDefaultExport) {
commonjsMeta.hasDefaultExport = true;
}
if (hasNamedExports) {
commonjsMeta.hasNamedExports = true;
}
if (
!dynamicRequireModules.has(normalizedId) &&
(!(hasCjsKeywords(code, ignoreGlobal) || requireResolver.isRequiredId(id)) ||
(isEsModule && !options.transformMixedEsModules))
) {
commonjsMeta.isCommonJS = false;
return { meta: { commonjs: commonjsMeta } };
}
const needsRequireWrapper =
!isEsModule && (dynamicRequireModules.has(normalizedId) || strictRequiresFilter(id));
const checkDynamicRequire = (position) => {
const normalizedDynamicRequireRoot = normalizePathSlashes(dynamicRequireRoot);
if (normalizedId.indexOf(normalizedDynamicRequireRoot) !== 0) {
this.error(
{
code: 'DYNAMIC_REQUIRE_OUTSIDE_ROOT',
normalizedId,
normalizedDynamicRequireRoot,
message: `"${normalizedId}" contains dynamic require statements but it is not within the current dynamicRequireRoot "${normalizedDynamicRequireRoot}". You should set dynamicRequireRoot to "${dirname$1(
normalizedId
)}" or one of its parent directories.`
},
position
);
}
};
return transformCommonjs(
this.parse,
code,
id,
isEsModule,
ignoreGlobal || isEsModule,
ignoreRequire,
ignoreDynamicRequires && !isDynamicRequireModulesEnabled,
getIgnoreTryCatchRequireStatementMode,
sourceMap,
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
ast,
getDefaultIsModuleExports(id),
needsRequireWrapper,
requireResolver.resolveRequireSourcesAndUpdateMeta(this),
requireResolver.isRequiredId(id),
checkDynamicRequire,
commonjsMeta
);
}
return {
name: PLUGIN_NAME,
version: version$2,
options(rawOptions) {
// We inject the resolver at the beginning so that catch-all resolvers like node-resolve
// do not prevent our plugin from resolving entry points or proxies.
const plugins = Array.isArray(rawOptions.plugins)
? [...rawOptions.plugins]
: rawOptions.plugins
? [rawOptions.plugins]
: [];
plugins.unshift({
name: 'commonjs--resolver',
resolveId
});
return { ...rawOptions, plugins };
},
buildStart({ plugins }) {
validateVersion(this.meta.rollupVersion, peerDependencies.rollup, 'rollup');
const nodeResolve = plugins.find(({ name }) => name === 'node-resolve');
if (nodeResolve) {
validateVersion(nodeResolve.version, '^13.0.6', '@rollup/plugin-node-resolve');
}
if (options.namedExports != null) {
this.warn(
'The namedExports option from "@rollup/plugin-commonjs" is deprecated. Named exports are now handled automatically.'
);
}
requireResolver = getRequireResolver(
extensions,
detectCyclesAndConditional,
currentlyResolving
);
},
buildEnd() {
if (options.strictRequires === 'debug') {
const wrappedIds = requireResolver.getWrappedIds();
if (wrappedIds.length) {
this.warn({
code: 'WRAPPED_IDS',
ids: wrappedIds,
message: `The commonjs plugin automatically wrapped the following files:\n[\n${wrappedIds
.map((id) => `\t${JSON.stringify(relative$1(process.cwd(), id))}`)
.join(',\n')}\n]`
});
} else {
this.warn({
code: 'WRAPPED_IDS',
ids: wrappedIds,
message: 'The commonjs plugin did not wrap any files.'
});
}
}
},
load(id) {
if (id === HELPERS_ID) {
return getHelpersModule();
}
if (isWrappedId(id, MODULE_SUFFIX)) {
const name = getName(unwrapId(id, MODULE_SUFFIX));
return {
code: `var ${name} = {exports: {}}; export {${name} as __module}`,
meta: { commonjs: { isCommonJS: false } }
};
}
if (isWrappedId(id, EXPORTS_SUFFIX)) {
const name = getName(unwrapId(id, EXPORTS_SUFFIX));
return {
code: `var ${name} = {}; export {${name} as __exports}`,
meta: { commonjs: { isCommonJS: false } }
};
}
if (isWrappedId(id, EXTERNAL_SUFFIX)) {
const actualId = unwrapId(id, EXTERNAL_SUFFIX);
return getUnknownRequireProxy(
actualId,
isEsmExternal(actualId) ? getRequireReturnsDefault(actualId) : true
);
}
// entry suffix is just appended to not mess up relative external resolution
if (id.endsWith(ENTRY_SUFFIX)) {
const actualId = id.slice(0, -ENTRY_SUFFIX.length);
const {
meta: { commonjs: commonjsMeta }
} = this.getModuleInfo(actualId);
const shebang = commonjsMeta?.shebang ?? '';
return getEntryProxy(
actualId,
getDefaultIsModuleExports(actualId),
this.getModuleInfo,
shebang
);
}
if (isWrappedId(id, ES_IMPORT_SUFFIX)) {
const actualId = unwrapId(id, ES_IMPORT_SUFFIX);
return getEsImportProxy(actualId, getDefaultIsModuleExports(actualId));
}
if (id === DYNAMIC_MODULES_ID) {
return getDynamicModuleRegistry(
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
ignoreDynamicRequires
);
}
if (isWrappedId(id, PROXY_SUFFIX)) {
const actualId = unwrapId(id, PROXY_SUFFIX);
return getStaticRequireProxy(actualId, getRequireReturnsDefault(actualId), this.load);
}
return null;
},
shouldTransformCachedModule(...args) {
return requireResolver.shouldTransformCachedModule.call(this, ...args);
},
transform(code, id) {
if (!isPossibleCjsId(id)) return null;
try {
return transformAndCheckExports.call(this, code, id);
} catch (err) {
return this.error(err, err.pos);
}
}
};
}
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
* Matches the parts of a URL:
* 1. Scheme, including ":", guaranteed.
* 2. User/password, including "@", optional.
* 3. Host, guaranteed.
* 4. Port, including ":", optional.
* 5. Path, including "/", optional.
* 6. Query, including "?", optional.
* 7. Hash, including "#", optional.
*/
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
*
* 1. Host, optional.
* 2. Path, which may include "/", guaranteed.
* 3. Query, including "?", optional.
* 4. Hash, including "#", optional.
*/
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
function isAbsoluteUrl(input) {
return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
return input.startsWith('//');
}
function isAbsolutePath(input) {
return input.startsWith('/');
}
function isFileUrl(input) {
return input.startsWith('file:');
}
function isRelative(input) {
return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
const match = urlRegex.exec(input);
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
const match = fileRegex.exec(input);
const path = match[2];
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
return {
scheme,
user,
host,
port,
path,
query,
hash,
type: 7 /* Absolute */,
};
}
function parseUrl$3(input) {
if (isSchemeRelativeUrl(input)) {
const url = parseAbsoluteUrl('http:' + input);
url.scheme = '';
url.type = 6 /* SchemeRelative */;
return url;
}
if (isAbsolutePath(input)) {
const url = parseAbsoluteUrl('http://foo.com' + input);
url.scheme = '';
url.host = '';
url.type = 5 /* AbsolutePath */;
return url;
}
if (isFileUrl(input))
return parseFileUrl(input);
if (isAbsoluteUrl(input))
return parseAbsoluteUrl(input);
const url = parseAbsoluteUrl('http://foo.com/' + input);
url.scheme = '';
url.host = '';
url.type = input
? input.startsWith('?')
? 3 /* Query */
: input.startsWith('#')
? 2 /* Hash */
: 4 /* RelativePath */
: 1 /* Empty */;
return url;
}
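// Illustrative results of parseUrl$3 (type codes follow the numeric literals above):
//   'https://host/a.js' -> type 7 (Absolute)
//   '//host/a.js'       -> type 6 (SchemeRelative)
//   '/a.js'             -> type 5 (AbsolutePath)
//   './a.js'            -> type 4 (RelativePath)
//   '?x=1' -> type 3 (Query), '#frag' -> type 2 (Hash), '' -> type 1 (Empty)
//   'file:///a.js'      -> parsed via parseFileUrl with scheme 'file:'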
function stripPathFilename(path) {
// If a path ends with a parent directory "..", then it's a relative path with excess parent
// paths. It's not a file, so we can't strip it.
if (path.endsWith('/..'))
return path;
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
function mergePaths(url, base) {
normalizePath$4(base, base.type);
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
// path).
if (url.path === '/') {
url.path = base.path;
}
else {
// Resolution happens relative to the base path's directory, not the file.
url.path = stripPathFilename(base.path) + url.path;
}
}
/**
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
* "foo/.". We need to normalize to a standard representation.
*/
function normalizePath$4(url, type) {
const rel = type <= 4 /* RelativePath */;
const pieces = url.path.split('/');
// We need to preserve the first piece always, so that we output a leading slash. The item at
// pieces[0] is an empty string.
let pointer = 1;
// Positive is the number of real directories we've output, used for popping a parent directory.
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
let positive = 0;
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
// real directory, we won't need to append, unless the other conditions happen again.
let addTrailingSlash = false;
for (let i = 1; i < pieces.length; i++) {
const piece = pieces[i];
// An empty directory, could be a trailing slash, or just a double "//" in the path.
if (!piece) {
addTrailingSlash = true;
continue;
}
// If we encounter a real directory, then we don't need to append anymore.
addTrailingSlash = false;
// A current directory, which we can always drop.
if (piece === '.')
continue;
// A parent directory, we need to see if there are any real directories we can pop. Else, we
// have an excess of parents, and we'll need to keep the "..".
if (piece === '..') {
if (positive) {
addTrailingSlash = true;
positive--;
pointer--;
}
else if (rel) {
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
pieces[pointer++] = piece;
}
continue;
}
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
// any popped or dropped directories.
pieces[pointer++] = piece;
positive++;
}
let path = '';
for (let i = 1; i < pointer; i++) {
path += '/' + pieces[i];
}
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
path += '/';
}
url.path = path;
}
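// Illustrative normalizations for a relative path (type <= 4); note that paths here
// always carry a leading "/", which resolve$2 strips again for relative results:
//   'foo//bar/./baz' -> '/foo/bar/baz' (empty and "." segments dropped)
//   'foo/bar/..'     -> '/foo/'        (a parent pops a real directory and keeps the slash)
//   '../foo'         -> '/../foo'      (excess parents are kept for relative paths)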
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
function resolve$2(input, base) {
if (!input && !base)
return '';
const url = parseUrl$3(input);
let inputType = url.type;
if (base && inputType !== 7 /* Absolute */) {
const baseUrl = parseUrl$3(base);
const baseType = baseUrl.type;
switch (inputType) {
case 1 /* Empty */:
url.hash = baseUrl.hash;
// fall through
case 2 /* Hash */:
url.query = baseUrl.query;
// fall through
case 3 /* Query */:
case 4 /* RelativePath */:
mergePaths(url, baseUrl);
// fall through
case 5 /* AbsolutePath */:
// The host, user, and port are joined, you can't copy one without the others.
url.user = baseUrl.user;
url.host = baseUrl.host;
url.port = baseUrl.port;
// fall through
case 6 /* SchemeRelative */:
// The input doesn't have a scheme, so we need to copy at least that over.
url.scheme = baseUrl.scheme;
}
if (baseType > inputType)
inputType = baseType;
}
normalizePath$4(url, inputType);
const queryHash = url.query + url.hash;
switch (inputType) {
// This is impossible, because of the empty checks at the start of the function.
// case UrlType.Empty:
case 2 /* Hash */:
case 3 /* Query */:
return queryHash;
case 4 /* RelativePath */: {
// The first char is always a "/", but the result must be relative, so we strip it.
const path = url.path.slice(1);
if (!path)
return queryHash || '.';
if (isRelative(base || input) && !isRelative(path)) {
// If base started with a leading ".", or there is no base and input started with a ".",
// then we need to ensure that the relative path starts with a ".". We don't know if
// relative starts with a "..", though, so check before prepending.
return './' + path + queryHash;
}
return path + queryHash;
}
case 5 /* AbsolutePath */:
return url.path + queryHash;
default:
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
}
}
function resolve$1(input, base) {
// The base is always treated as a directory, if it's not empty.
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
if (base && !base.endsWith('/'))
base += '/';
return resolve$2(input, base);
}
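// Illustrative resolutions (the base is treated as a directory):
//   resolve$1('bar.js', 'https://host/src/')    -> 'https://host/src/bar.js'
//   resolve$1('../bar.js', 'https://host/src/') -> 'https://host/bar.js'
//   resolve$1('/abs.js', 'https://host/src/')   -> 'https://host/abs.js'
//   resolve$1('bar.js', '')                     -> 'bar.js'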
/**
* Removes everything after the last "/", but leaves the slash.
*/
function stripFilename(path) {
if (!path)
return '';
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
const COLUMN$1 = 0;
const SOURCES_INDEX$1 = 1;
const SOURCE_LINE$1 = 2;
const SOURCE_COLUMN$1 = 3;
const NAMES_INDEX$1 = 4;
function maybeSort(mappings, owned) {
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length)
return mappings;
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
// not, we do not want to modify the consumer's input array.
if (!owned)
mappings = mappings.slice();
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
mappings[i] = sortSegments(mappings[i], owned);
}
return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
for (let i = start; i < mappings.length; i++) {
if (!isSorted(mappings[i]))
return i;
}
return mappings.length;
}
function isSorted(line) {
for (let j = 1; j < line.length; j++) {
if (line[j][COLUMN$1] < line[j - 1][COLUMN$1]) {
return false;
}
}
return true;
}
function sortSegments(line, owned) {
if (!owned)
line = line.slice();
return line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[COLUMN$1] - b[COLUMN$1];
}
let found = false;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
function binarySearch(haystack, needle, low, high) {
while (low <= high) {
const mid = low + ((high - low) >> 1);
const cmp = haystack[mid][COLUMN$1] - needle;
if (cmp === 0) {
found = true;
return mid;
}
if (cmp < 0) {
low = mid + 1;
}
else {
high = mid - 1;
}
}
found = false;
return low - 1;
}
function upperBound(haystack, needle, index) {
for (let i = index + 1; i < haystack.length; index = i++) {
if (haystack[i][COLUMN$1] !== needle)
break;
}
return index;
}
function lowerBound(haystack, needle, index) {
for (let i = index - 1; i >= 0; index = i--) {
if (haystack[i][COLUMN$1] !== needle)
break;
}
return index;
}
function memoizedState() {
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1,
};
}
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
function memoizedBinarySearch(haystack, needle, state, key) {
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle;
return lastIndex;
}
if (needle >= lastNeedle) {
// lastIndex may be -1 if the previous needle was not found.
low = lastIndex === -1 ? 0 : lastIndex;
}
else {
high = lastIndex;
}
}
state.lastKey = key;
state.lastNeedle = needle;
return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
class TraceMap {
constructor(map, mapUrl) {
const isString = typeof map === 'string';
if (!isString && map._decodedMemo)
return map;
const parsed = (isString ? JSON.parse(map) : map);
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
this.version = version;
this.file = file;
this.names = names || [];
this.sourceRoot = sourceRoot;
this.sources = sources;
this.sourcesContent = sourcesContent;
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
const from = resolve$1(sourceRoot || '', stripFilename(mapUrl));
this.resolvedSources = sources.map((s) => resolve$1(s || '', from));
const { mappings } = parsed;
if (typeof mappings === 'string') {
this._encoded = mappings;
this._decoded = undefined;
}
else {
this._encoded = undefined;
this._decoded = maybeSort(mappings, isString);
}
this._decodedMemo = memoizedState();
this._bySources = undefined;
this._bySourceMemos = undefined;
}
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast$2(map) {
return map;
}
/**
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
*/
function encodedMappings(map) {
var _a;
var _b;
return ((_a = (_b = cast$2(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode$1(cast$2(map)._decoded)));
}
/**
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
*/
function decodedMappings(map) {
var _a;
return ((_a = cast$2(map))._decoded || (_a._decoded = decode(cast$2(map)._encoded)));
}
/**
* A low-level API to find the segment associated with a generated line/column (think, from a
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
*/
function traceSegment(map, line, column) {
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return null;
const segments = decoded[line];
const index = traceSegmentInternal(segments, cast$2(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
return index === -1 ? null : segments[index];
}
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
function originalPositionFor$1(map, needle) {
let { line, column, bias } = needle;
line--;
if (line < 0)
throw new Error(LINE_GTR_ZERO);
if (column < 0)
throw new Error(COL_GTR_EQ_ZERO);
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return OMapping(null, null, null, null);
const segments = decoded[line];
const index = traceSegmentInternal(segments, cast$2(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
if (index === -1)
return OMapping(null, null, null, null);
const segment = segments[index];
if (segment.length === 1)
return OMapping(null, null, null, null);
const { names, resolvedSources } = map;
return OMapping(resolvedSources[segment[SOURCES_INDEX$1]], segment[SOURCE_LINE$1] + 1, segment[SOURCE_COLUMN$1], segment.length === 5 ? names[segment[NAMES_INDEX$1]] : null);
}
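// Illustrative usage with a minimal, hypothetical sourcemap (mappings 'AAAA' maps
// generated 1:0 to source 1:0 of the first source):
//   const tracer = new TraceMap(
//     { version: 3, sources: ['input.js'], names: [], mappings: 'AAAA' },
//     ''
//   );
//   originalPositionFor$1(tracer, { line: 1, column: 0 });
//   // -> { source: 'input.js', line: 1, column: 0, name: null }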
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function decodedMap(map) {
return clone(map, decodedMappings(map));
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function encodedMap(map) {
return clone(map, encodedMappings(map));
}
function clone(map, mappings) {
return {
version: map.version,
file: map.file,
names: map.names,
sourceRoot: map.sourceRoot,
sources: map.sources,
sourcesContent: map.sourcesContent,
mappings,
ignoreList: map.ignoreList || map.x_google_ignoreList,
};
}
function OMapping(source, line, column, name) {
return { source, line, column, name };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
let index = memoizedBinarySearch(segments, column, memo, line);
if (found) {
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
}
else if (bias === LEAST_UPPER_BOUND)
index++;
if (index === -1 || index === segments.length)
return -1;
return index;
}
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
class SetArray {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
* with public access modifiers.
*/
function cast$1(set) {
return set;
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
function get(setarr, key) {
return cast$1(setarr)._indexes[key];
}
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
function put(setarr, key) {
// The key may or may not be present. If it is present, it's a number.
const index = get(setarr, key);
if (index !== undefined)
return index;
const { array, _indexes: indexes } = cast$1(setarr);
const length = array.push(key);
return (indexes[key] = length - 1);
}
/**
* Removes the key, if it exists in the set.
*/
function remove(setarr, key) {
const index = get(setarr, key);
if (index === undefined)
return;
const { array, _indexes: indexes } = cast$1(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = undefined;
array.pop();
}
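// Illustrative usage of the SetArray helpers above:
//   const names = new SetArray();
//   put(names, 'foo');    // -> 0
//   put(names, 'bar');    // -> 1
//   put(names, 'foo');    // -> 0 (already present, same index)
//   get(names, 'bar');    // -> 1
//   remove(names, 'foo'); // names.array is now ['bar'] and get(names, 'bar') === 0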
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const NO_NAME = -1;
/**
* Provides the state to generate a sourcemap.
*/
class GenMapping {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map) {
return map;
}
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name);
};
/**
* Adds/removes the content of the source file to the source map.
*/
function setSourceContent(map, source, content) {
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
const index = put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
const index = put(sources, source);
if (index === sourcesContent.length)
sourcesContent[index] = null;
if (ignore)
put(ignoreList, index);
else
remove(ignoreList, index);
}
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toDecodedMap(map) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || undefined,
names: names.array,
sourceRoot: map.sourceRoot || undefined,
sources: sources.array,
sourcesContent,
mappings,
ignoreList: ignoreList.array,
};
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toEncodedMap(map) {
const decoded = toDecodedMap(map);
return Object.assign(Object.assign({}, decoded), { mappings: encode$1(decoded.mappings) });
}
// This split declaration is only so that terser can eliminate the static initialization block.
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
const line = getLine(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipSourceless(line, index))
return;
return insert(line, index, [genColumn]);
}
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length)
sourcesContent[sourcesIndex] = null;
if (skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(line, index, name
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
}
function getLine(mappings, index) {
for (let i = mappings.length; i <= index; i++) {
mappings[i] = [];
}
return mappings[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN])
break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0)
break;
}
if (len < length)
mappings.length = len;
}
function skipSourceless(line, index) {
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
// doesn't generate any useful information.
if (index === 0)
return true;
const prev = line[index - 1];
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
// generate any new information. Else, this segment will end the source/named segment and point to
// a sourceless position, which is useful.
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
// A source/named segment at the start of a line gives position at that genColumn
if (index === 0)
return false;
const prev = line[index - 1];
// If the previous segment is sourceless, then we're transitioning to a source.
if (prev.length === 1)
return false;
// If the previous segment maps to the exact same source position, then this segment doesn't
// provide any new position information.
return (sourcesIndex === prev[SOURCES_INDEX] &&
sourceLine === prev[SOURCE_LINE] &&
sourceColumn === prev[SOURCE_COLUMN] &&
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
}
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
return { source, line, column, name, content, ignore };
}
function Source(map, sources, source, content, ignore) {
return {
map,
sources,
source,
content,
ignore,
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map, sources) {
return Source(map, sources, '', null, false);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content, ignore) {
return Source(null, EMPTY_SOURCES, source, content, ignore);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
const gen = new GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column; there's no source
// information to gather from them.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null)
continue;
}
const { column, line, name, content, source, ignore } = traced;
maybeAddSegment(gen, i, genCol, source, line, column, name);
if (source && content != null)
setSourceContent(gen, source, content);
if (ignore)
setIgnore(gen, source, true);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
}
const segment = traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null)
return null;
// 1-length segments only move the current generated column; there's no source
// information to gather from them.
if (segment.length === 1)
return SOURCELESS_MAPPING;
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value) {
if (Array.isArray(value))
return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new TraceMap(m, ''));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?');
}
}
let tree = build$2(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build$2(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent, ignoreList } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx = {
importer,
depth,
source: sourceFile || '',
content: undefined,
ignore: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content, ignore } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap)
return build$2(new TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
return OriginalSource(source, sourceContent, ignored);
});
return MapSource(map, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
class SourceMap {
constructor(map, options) {
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.ignoreList = out.ignoreList;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
}
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader, options) {
const opts = { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
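// Illustrative usage with hypothetical maps: flatten a transformed file's map through
// an intermediate map so the result points at the original sources.
//   const remapped = remapping(transformedMap, (file) => {
//     // Return the next sourcemap in the chain for `file`, or null/undefined to
//     // treat it as an original, unmodified source file.
//     return file === 'intermediate.js' ? intermediateMap : null;
//   });
//   // `remapped` is a SourceMap instance; JSON.stringify(remapped) yields a v3 map.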
var src$3 = {exports: {}};
var browser$3 = {exports: {}};
/**
* Helpers.
*/
var ms$1;
var hasRequiredMs$1;
function requireMs$1 () {
if (hasRequiredMs$1) return ms$1;
hasRequiredMs$1 = 1;
var s = 1000;
var m = s * 60;
var h = m * 60;
var d = h * 24;
var w = d * 7;
var y = d * 365.25;
/**
* Parse or format the given `val`.
*
* Options:
*
* - `long` verbose formatting [false]
*
* @param {String|Number} val
* @param {Object} [options]
* @throws {Error} throw an error if val is not a non-empty string or a number
* @return {String|Number}
* @api public
*/
ms$1 = function(val, options) {
options = options || {};
var type = typeof val;
if (type === 'string' && val.length > 0) {
return parse(val);
} else if (type === 'number' && isFinite(val)) {
return options.long ? fmtLong(val) : fmtShort(val);
}
throw new Error(
'val is not a non-empty string or a valid number. val=' +
JSON.stringify(val)
);
};
/**
* Parse the given `str` and return milliseconds.
*
* @param {String} str
* @return {Number}
* @api private
*/
function parse(str) {
str = String(str);
if (str.length > 100) {
return;
}
var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
str
);
if (!match) {
return;
}
var n = parseFloat(match[1]);
var type = (match[2] || 'ms').toLowerCase();
switch (type) {
case 'years':
case 'year':
case 'yrs':
case 'yr':
case 'y':
return n * y;
case 'weeks':
case 'week':
case 'w':
return n * w;
case 'days':
case 'day':
case 'd':
return n * d;
case 'hours':
case 'hour':
case 'hrs':
case 'hr':
case 'h':
return n * h;
case 'minutes':
case 'minute':
case 'mins':
case 'min':
case 'm':
return n * m;
case 'seconds':
case 'second':
case 'secs':
case 'sec':
case 's':
return n * s;
case 'milliseconds':
case 'millisecond':
case 'msecs':
case 'msec':
case 'ms':
return n;
default:
return undefined;
}
}
/**
* Short format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtShort(ms) {
var msAbs = Math.abs(ms);
if (msAbs >= d) {
return Math.round(ms / d) + 'd';
}
if (msAbs >= h) {
return Math.round(ms / h) + 'h';
}
if (msAbs >= m) {
return Math.round(ms / m) + 'm';
}
if (msAbs >= s) {
return Math.round(ms / s) + 's';
}
return ms + 'ms';
}
/**
* Long format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtLong(ms) {
var msAbs = Math.abs(ms);
if (msAbs >= d) {
return plural(ms, msAbs, d, 'day');
}
if (msAbs >= h) {
return plural(ms, msAbs, h, 'hour');
}
if (msAbs >= m) {
return plural(ms, msAbs, m, 'minute');
}
if (msAbs >= s) {
return plural(ms, msAbs, s, 'second');
}
return ms + ' ms';
}
/**
* Pluralization helper.
*/
function plural(ms, msAbs, n, name) {
var isPlural = msAbs >= n * 1.5;
return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');
}
return ms$1;
}
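/*
 * Illustrative values (comment only), following the conversion table above:
 *   ms$1('2h')                  // 7200000
 *   ms$1('1.5 days')            // 129600000
 *   ms$1(60000)                 // '1m'
 *   ms$1(60000, { long: true }) // '1 minute'
 */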
var common$b;
var hasRequiredCommon;
function requireCommon () {
if (hasRequiredCommon) return common$b;
hasRequiredCommon = 1;
/**
* This is the common logic for both the Node.js and web browser
* implementations of `debug()`.
*/
function setup(env) {
createDebug.debug = createDebug;
createDebug.default = createDebug;
createDebug.coerce = coerce;
createDebug.disable = disable;
createDebug.enable = enable;
createDebug.enabled = enabled;
createDebug.humanize = requireMs$1();
createDebug.destroy = destroy;
Object.keys(env).forEach(key => {
createDebug[key] = env[key];
});
/**
* The currently active debug mode names, and names to skip.
*/
createDebug.names = [];
createDebug.skips = [];
/**
* Map of special "%n" handling functions, for the debug "format" argument.
*
* Valid key names are a single lower- or upper-case letter, e.g. "n" or "N".
*/
createDebug.formatters = {};
/**
* Selects a color for a debug namespace
* @param {String} namespace The namespace string for the debug instance to be colored
* @return {Number|String} An ANSI color code for the given namespace
* @api private
*/
function selectColor(namespace) {
let hash = 0;
for (let i = 0; i < namespace.length; i++) {
hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
hash |= 0; // Convert to 32bit integer
}
return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
}
createDebug.selectColor = selectColor;
/**
* Create a debugger with the given `namespace`.
*
* @param {String} namespace
* @return {Function}
* @api public
*/
function createDebug(namespace) {
let prevTime;
let enableOverride = null;
let namespacesCache;
let enabledCache;
function debug(...args) {
// Disabled?
if (!debug.enabled) {
return;
}
const self = debug;
// Set `diff` timestamp
const curr = Number(new Date());
const ms = curr - (prevTime || curr);
self.diff = ms;
self.prev = prevTime;
self.curr = curr;
prevTime = curr;
args[0] = createDebug.coerce(args[0]);
if (typeof args[0] !== 'string') {
// Anything else let's inspect with %O
args.unshift('%O');
}
// Apply any `formatters` transformations
let index = 0;
args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {
// If we encounter an escaped % then don't increase the array index
if (match === '%%') {
return '%';
}
index++;
const formatter = createDebug.formatters[format];
if (typeof formatter === 'function') {
const val = args[index];
match = formatter.call(self, val);
// Now we need to remove `args[index]` since it's inlined in the `format`
args.splice(index, 1);
index--;
}
return match;
});
// Apply env-specific formatting (colors, etc.)
createDebug.formatArgs.call(self, args);
const logFn = self.log || createDebug.log;
logFn.apply(self, args);
}
debug.namespace = namespace;
debug.useColors = createDebug.useColors();
debug.color = createDebug.selectColor(namespace);
debug.extend = extend;
debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.
Object.defineProperty(debug, 'enabled', {
enumerable: true,
configurable: false,
get: () => {
if (enableOverride !== null) {
return enableOverride;
}
if (namespacesCache !== createDebug.namespaces) {
namespacesCache = createDebug.namespaces;
enabledCache = createDebug.enabled(namespace);
}
return enabledCache;
},
set: v => {
enableOverride = v;
}
});
// Env-specific initialization logic for debug instances
if (typeof createDebug.init === 'function') {
createDebug.init(debug);
}
return debug;
}
function extend(namespace, delimiter) {
const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);
newDebug.log = this.log;
return newDebug;
}
/**
* Enables a debug mode by namespaces. This can include modes
* separated by a colon and wildcards.
*
* @param {String} namespaces
* @api public
*/
function enable(namespaces) {
createDebug.save(namespaces);
createDebug.namespaces = namespaces;
createDebug.names = [];
createDebug.skips = [];
let i;
const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
const len = split.length;
for (i = 0; i < len; i++) {
if (!split[i]) {
// ignore empty strings
continue;
}
namespaces = split[i].replace(/\*/g, '.*?');
if (namespaces[0] === '-') {
createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));
} else {
createDebug.names.push(new RegExp('^' + namespaces + '$'));
}
}
}
/**
* Disable debug output.
*
* @return {String} namespaces
* @api public
*/
function disable() {
const namespaces = [
...createDebug.names.map(toNamespace),
...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)
].join(',');
createDebug.enable('');
return namespaces;
}
/**
* Returns true if the given mode name is enabled, false otherwise.
*
* @param {String} name
* @return {Boolean}
* @api public
*/
function enabled(name) {
if (name[name.length - 1] === '*') {
return true;
}
let i;
let len;
for (i = 0, len = createDebug.skips.length; i < len; i++) {
if (createDebug.skips[i].test(name)) {
return false;
}
}
for (i = 0, len = createDebug.names.length; i < len; i++) {
if (createDebug.names[i].test(name)) {
return true;
}
}
return false;
}
/**
* Convert regexp to namespace
*
* @param {RegExp} regexp
* @return {String} namespace
* @api private
*/
function toNamespace(regexp) {
return regexp.toString()
.substring(2, regexp.toString().length - 2)
.replace(/\.\*\?$/, '*');
}
/**
* Coerce `val`.
*
* @param {Mixed} val
* @return {Mixed}
* @api private
*/
function coerce(val) {
if (val instanceof Error) {
return val.stack || val.message;
}
return val;
}
/**
* XXX DO NOT USE. This is a temporary stub function.
* XXX It WILL be removed in the next major release.
*/
function destroy() {
console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
}
createDebug.enable(createDebug.load());
return createDebug;
}
common$b = setup;
return common$b;
}
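/*
 * Illustrative usage (comment only): the factory produced by `setup` is the usual
 * `debug` API once an environment adapter (browser or Node.js, defined below) is
 * wired in. The namespaces shown are hypothetical:
 *
 *   const log = createDebug('vite:resolve');
 *   createDebug.enable('vite:*,-vite:hmr'); // all vite namespaces except vite:hmr
 *   log('resolved %s in %dms', '/src/main.ts', 4);
 */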
/* eslint-env browser */
var hasRequiredBrowser$1;
function requireBrowser$1 () {
if (hasRequiredBrowser$1) return browser$3.exports;
hasRequiredBrowser$1 = 1;
(function (module, exports) {
/**
* This is the web browser implementation of `debug()`.
*/
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.storage = localstorage();
exports.destroy = (() => {
let warned = false;
return () => {
if (!warned) {
warned = true;
console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
}
};
})();
/**
* Colors.
*/
exports.colors = [
'#0000CC',
'#0000FF',
'#0033CC',
'#0033FF',
'#0066CC',
'#0066FF',
'#0099CC',
'#0099FF',
'#00CC00',
'#00CC33',
'#00CC66',
'#00CC99',
'#00CCCC',
'#00CCFF',
'#3300CC',
'#3300FF',
'#3333CC',
'#3333FF',
'#3366CC',
'#3366FF',
'#3399CC',
'#3399FF',
'#33CC00',
'#33CC33',
'#33CC66',
'#33CC99',
'#33CCCC',
'#33CCFF',
'#6600CC',
'#6600FF',
'#6633CC',
'#6633FF',
'#66CC00',
'#66CC33',
'#9900CC',
'#9900FF',
'#9933CC',
'#9933FF',
'#99CC00',
'#99CC33',
'#CC0000',
'#CC0033',
'#CC0066',
'#CC0099',
'#CC00CC',
'#CC00FF',
'#CC3300',
'#CC3333',
'#CC3366',
'#CC3399',
'#CC33CC',
'#CC33FF',
'#CC6600',
'#CC6633',
'#CC9900',
'#CC9933',
'#CCCC00',
'#CCCC33',
'#FF0000',
'#FF0033',
'#FF0066',
'#FF0099',
'#FF00CC',
'#FF00FF',
'#FF3300',
'#FF3333',
'#FF3366',
'#FF3399',
'#FF33CC',
'#FF33FF',
'#FF6600',
'#FF6633',
'#FF9900',
'#FF9933',
'#FFCC00',
'#FFCC33'
];
/**
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
* and the Firebug extension (any Firefox version) are known
* to support "%c" CSS customizations.
*
* TODO: add a `localStorage` variable to explicitly enable/disable colors
*/
// eslint-disable-next-line complexity
function useColors() {
// NB: In an Electron preload script, document will be defined but not fully
// initialized. Since we know we're in Chrome, we'll just detect this case
// explicitly
if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
return true;
}
// Internet Explorer and Edge do not support colors.
if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
return false;
}
let m;
// Is webkit? http://stackoverflow.com/a/16459606/376773
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
// Is firebug? http://stackoverflow.com/a/398120/376773
(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
// Is firefox >= v31?
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
(typeof navigator !== 'undefined' && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31) ||
// Double check webkit in userAgent just in case we are in a worker
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/));
}
/**
* Colorize log arguments if enabled.
*
* @api public
*/
function formatArgs(args) {
args[0] = (this.useColors ? '%c' : '') +
this.namespace +
(this.useColors ? ' %c' : ' ') +
args[0] +
(this.useColors ? '%c ' : ' ') +
'+' + module.exports.humanize(this.diff);
if (!this.useColors) {
return;
}
const c = 'color: ' + this.color;
args.splice(1, 0, c, 'color: inherit');
// The final "%c" is somewhat tricky, because there could be other
// arguments passed either before or after the %c, so we need to
// figure out the correct index to insert the CSS into
let index = 0;
let lastC = 0;
args[0].replace(/%[a-zA-Z%]/g, match => {
if (match === '%%') {
return;
}
index++;
if (match === '%c') {
// We only are interested in the *last* %c
// (the user may have provided their own)
lastC = index;
}
});
args.splice(lastC, 0, c);
}
/**
* Invokes `console.debug()` when available, falling back to `console.log()`.
* No-op when neither is available.
*
* @api public
*/
exports.log = console.debug || console.log || (() => {});
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
try {
if (namespaces) {
exports.storage.setItem('debug', namespaces);
} else {
exports.storage.removeItem('debug');
}
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
let r;
try {
r = exports.storage.getItem('debug');
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
}
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
if (!r && typeof process !== 'undefined' && 'env' in process) {
r = process.env.DEBUG;
}
return r;
}
/**
* Attempts to return the global `localStorage` object.
*
* This is necessary because safari throws
* when a user disables cookies/localstorage
* and you attempt to access it.
*
* @return {LocalStorage}
* @api private
*/
function localstorage() {
try {
// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
// The Browser also has localStorage in the global context.
return localStorage;
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
}
}
module.exports = requireCommon()(exports);
const {formatters} = module.exports;
/**
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
*/
formatters.j = function (v) {
try {
return JSON.stringify(v);
} catch (error) {
return '[UnexpectedJSONParseError]: ' + error.message;
}
};
} (browser$3, browser$3.exports));
return browser$3.exports;
}
var node$1 = {exports: {}};
/**
* Module dependencies.
*/
var hasRequiredNode$1;
function requireNode$1 () {
if (hasRequiredNode$1) return node$1.exports;
hasRequiredNode$1 = 1;
(function (module, exports) {
const tty = require$$0$3;
const util = require$$0$5;
/**
* This is the Node.js implementation of `debug()`.
*/
exports.init = init;
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.destroy = util.deprecate(
() => {},
'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'
);
/**
* Colors.
*/
exports.colors = [6, 2, 3, 4, 5, 1];
try {
// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
// eslint-disable-next-line import/no-extraneous-dependencies
const supportsColor = require('supports-color');
if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
exports.colors = [
20,
21,
26,
27,
32,
33,
38,
39,
40,
41,
42,
43,
44,
45,
56,
57,
62,
63,
68,
69,
74,
75,
76,
77,
78,
79,
80,
81,
92,
93,
98,
99,
112,
113,
128,
129,
134,
135,
148,
149,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
178,
179,
184,
185,
196,
197,
198,
199,
200,
201,
202,
203,
204,
205,
206,
207,
208,
209,
214,
215,
220,
221
];
}
} catch (error) {
// Swallow - we only care if `supports-color` is available; it doesn't have to be.
}
/**
* Build up the default `inspectOpts` object from the environment variables.
*
* $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
*/
exports.inspectOpts = Object.keys(process.env).filter(key => {
return /^debug_/i.test(key);
}).reduce((obj, key) => {
// Camel-case
const prop = key
.substring(6)
.toLowerCase()
.replace(/_([a-z])/g, (_, k) => {
return k.toUpperCase();
});
// Coerce string value into JS value
let val = process.env[key];
if (/^(yes|on|true|enabled)$/i.test(val)) {
val = true;
} else if (/^(no|off|false|disabled)$/i.test(val)) {
val = false;
} else if (val === 'null') {
val = null;
} else {
val = Number(val);
}
obj[prop] = val;
return obj;
}, {});
/**
* Is stderr a TTY? Colored output is enabled when `true`.
*/
function useColors() {
return 'colors' in exports.inspectOpts ?
Boolean(exports.inspectOpts.colors) :
tty.isatty(process.stderr.fd);
}
/**
* Adds ANSI color escape codes if enabled.
*
* @api public
*/
function formatArgs(args) {
const {namespace: name, useColors} = this;
if (useColors) {
const c = this.color;
const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c);
const prefix = ` ${colorCode};1m${name} \u001B[0m`;
args[0] = prefix + args[0].split('\n').join('\n' + prefix);
args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m');
} else {
args[0] = getDate() + name + ' ' + args[0];
}
}
function getDate() {
if (exports.inspectOpts.hideDate) {
return '';
}
return new Date().toISOString() + ' ';
}
/**
* Invokes `util.formatWithOptions()` with the specified arguments and writes to stderr.
*/
function log(...args) {
return process.stderr.write(util.formatWithOptions(exports.inspectOpts, ...args) + '\n');
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
if (namespaces) {
process.env.DEBUG = namespaces;
} else {
// If you set a process.env field to null or undefined, it gets cast to the
// string 'null' or 'undefined'. Just delete instead.
delete process.env.DEBUG;
}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
return process.env.DEBUG;
}
/**
* Init logic for `debug` instances.
*
* Create a new `inspectOpts` object in case `useColors` is set
* differently for a particular `debug` instance.
*/
function init(debug) {
debug.inspectOpts = {};
const keys = Object.keys(exports.inspectOpts);
for (let i = 0; i < keys.length; i++) {
debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
}
}
module.exports = requireCommon()(exports);
const {formatters} = module.exports;
/**
* Map %o to `util.inspect()`, all on a single line.
*/
formatters.o = function (v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts)
.split('\n')
.map(str => str.trim())
.join(' ');
};
/**
* Map %O to `util.inspect()`, allowing multiple lines if needed.
*/
formatters.O = function (v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts);
};
} (node$1, node$1.exports));
return node$1.exports;
}
/**
* Detect an Electron renderer / NW.js process, which runs on Node.js but
* should be treated as a browser.
*/
if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {
src$3.exports = requireBrowser$1();
} else {
src$3.exports = requireNode$1();
}
var srcExports$1 = src$3.exports;
var debug$i = /*@__PURE__*/getDefaultExportFromCjs(srcExports$1);
let pnp;
if (process.versions.pnp) {
try {
pnp = createRequire$1(import.meta.url)("pnpapi");
} catch {
}
}
function invalidatePackageData(packageCache, pkgPath) {
const pkgDir = normalizePath$3(path$n.dirname(pkgPath));
packageCache.forEach((pkg, cacheKey) => {
if (pkg.dir === pkgDir) {
packageCache.delete(cacheKey);
}
});
}
function resolvePackageData(pkgName, basedir, preserveSymlinks = false, packageCache) {
if (pnp) {
const cacheKey = getRpdCacheKey(pkgName, basedir, preserveSymlinks);
if (packageCache?.has(cacheKey)) return packageCache.get(cacheKey);
try {
const pkg = pnp.resolveToUnqualified(pkgName, basedir, {
considerBuiltins: false
});
if (!pkg) return null;
const pkgData = loadPackageData(path$n.join(pkg, "package.json"));
packageCache?.set(cacheKey, pkgData);
return pkgData;
} catch {
return null;
}
}
const originalBasedir = basedir;
while (basedir) {
if (packageCache) {
const cached = getRpdCache(
packageCache,
pkgName,
basedir,
originalBasedir,
preserveSymlinks
);
if (cached) return cached;
}
const pkg = path$n.join(basedir, "node_modules", pkgName, "package.json");
try {
if (fs__default.existsSync(pkg)) {
const pkgPath = preserveSymlinks ? pkg : safeRealpathSync(pkg);
const pkgData = loadPackageData(pkgPath);
if (packageCache) {
setRpdCache(
packageCache,
pkgData,
pkgName,
basedir,
originalBasedir,
preserveSymlinks
);
}
return pkgData;
}
} catch {
}
const nextBasedir = path$n.dirname(basedir);
if (nextBasedir === basedir) break;
basedir = nextBasedir;
}
return null;
}
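// Illustrative behaviour (comment only): resolvePackageData walks up from `basedir`
// looking for node_modules/<pkgName>/package.json. With a hypothetical basedir:
//   resolvePackageData('rollup', '/repo/src')
//   // checks /repo/src/node_modules/rollup/package.json, then /repo/node_modules/...,
//   // and so on up to the filesystem root, returning the parsed data or null.
// Results are cached per directory when a packageCache is provided.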
function findNearestPackageData(basedir, packageCache) {
const originalBasedir = basedir;
while (basedir) {
if (packageCache) {
const cached = getFnpdCache(packageCache, basedir, originalBasedir);
if (cached) return cached;
}
const pkgPath = path$n.join(basedir, "package.json");
if (tryStatSync(pkgPath)?.isFile()) {
try {
const pkgData = loadPackageData(pkgPath);
if (packageCache) {
setFnpdCache(packageCache, pkgData, basedir, originalBasedir);
}
return pkgData;
} catch {
}
}
const nextBasedir = path$n.dirname(basedir);
if (nextBasedir === basedir) break;
basedir = nextBasedir;
}
return null;
}
function findNearestMainPackageData(basedir, packageCache) {
const nearestPackage = findNearestPackageData(basedir, packageCache);
return nearestPackage && (nearestPackage.data.name ? nearestPackage : findNearestMainPackageData(
path$n.dirname(nearestPackage.dir),
packageCache
));
}
function loadPackageData(pkgPath) {
const data = JSON.parse(fs__default.readFileSync(pkgPath, "utf-8"));
const pkgDir = normalizePath$3(path$n.dirname(pkgPath));
const { sideEffects } = data;
let hasSideEffects;
if (typeof sideEffects === "boolean") {
hasSideEffects = () => sideEffects;
} else if (Array.isArray(sideEffects)) {
if (sideEffects.length <= 0) {
hasSideEffects = () => false;
} else {
const finalPackageSideEffects = sideEffects.map((sideEffect) => {
if (sideEffect.includes("/")) {
return sideEffect;
}
return `**/${sideEffect}`;
});
hasSideEffects = createFilter(finalPackageSideEffects, null, {
resolve: pkgDir
});
}
} else {
hasSideEffects = () => null;
}
const pkg = {
dir: pkgDir,
data,
hasSideEffects,
webResolvedImports: {},
nodeResolvedImports: {},
setResolvedCache(key, entry, targetWeb) {
if (targetWeb) {
pkg.webResolvedImports[key] = entry;
} else {
pkg.nodeResolvedImports[key] = entry;
}
},
getResolvedCache(key, targetWeb) {
if (targetWeb) {
return pkg.webResolvedImports[key];
} else {
return pkg.nodeResolvedImports[key];
}
}
};
return pkg;
}
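// Illustrative note (comment only) on the `sideEffects` normalization above: a
// hypothetical package.json with `"sideEffects": ["polyfill.js"]` is matched as the
// glob `**/polyfill.js`, `"sideEffects": false` makes hasSideEffects() return false,
// and a missing field leaves the answer as null (unknown).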
function watchPackageDataPlugin(packageCache) {
const watchQueue = /* @__PURE__ */ new Set();
const watchedDirs = /* @__PURE__ */ new Set();
const watchFileStub = (id) => {
watchQueue.add(id);
};
let watchFile = watchFileStub;
const setPackageData = packageCache.set.bind(packageCache);
packageCache.set = (id, pkg) => {
if (!isInNodeModules$1(pkg.dir) && !watchedDirs.has(pkg.dir)) {
watchedDirs.add(pkg.dir);
watchFile(path$n.join(pkg.dir, "package.json"));
}
return setPackageData(id, pkg);
};
return {
name: "vite:watch-package-data",
buildStart() {
watchFile = this.addWatchFile.bind(this);
watchQueue.forEach(watchFile);
watchQueue.clear();
},
buildEnd() {
watchFile = watchFileStub;
},
watchChange(id) {
if (id.endsWith("/package.json")) {
invalidatePackageData(packageCache, path$n.normalize(id));
}
}
};
}
function getRpdCache(packageCache, pkgName, basedir, originalBasedir, preserveSymlinks) {
const cacheKey = getRpdCacheKey(pkgName, basedir, preserveSymlinks);
const pkgData = packageCache.get(cacheKey);
if (pkgData) {
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getRpdCacheKey(pkgName, dir, preserveSymlinks), pkgData);
});
return pkgData;
}
}
function setRpdCache(packageCache, pkgData, pkgName, basedir, originalBasedir, preserveSymlinks) {
packageCache.set(getRpdCacheKey(pkgName, basedir, preserveSymlinks), pkgData);
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getRpdCacheKey(pkgName, dir, preserveSymlinks), pkgData);
});
}
function getRpdCacheKey(pkgName, basedir, preserveSymlinks) {
return `rpd_${pkgName}_${basedir}_${preserveSymlinks}`;
}
function getFnpdCache(packageCache, basedir, originalBasedir) {
const cacheKey = getFnpdCacheKey(basedir);
const pkgData = packageCache.get(cacheKey);
if (pkgData) {
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getFnpdCacheKey(dir), pkgData);
});
return pkgData;
}
}
function setFnpdCache(packageCache, pkgData, basedir, originalBasedir) {
packageCache.set(getFnpdCacheKey(basedir), pkgData);
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getFnpdCacheKey(dir), pkgData);
});
}
function getFnpdCacheKey(basedir) {
return `fnpd_${basedir}`;
}
function traverseBetweenDirs(longerDir, shorterDir, cb) {
while (longerDir !== shorterDir) {
cb(longerDir);
longerDir = path$n.dirname(longerDir);
}
}
const createFilter = createFilter$1;
const replaceSlashOrColonRE = /[/:]/g;
const replaceDotRE = /\./g;
const replaceNestedIdRE = /\s*>\s*/g;
const replaceHashRE = /#/g;
const flattenId = (id) => {
const flatId = limitFlattenIdLength(
id.replace(replaceSlashOrColonRE, "_").replace(replaceDotRE, "__").replace(replaceNestedIdRE, "___").replace(replaceHashRE, "____")
);
return flatId;
};
const FLATTEN_ID_HASH_LENGTH = 8;
const FLATTEN_ID_MAX_FILE_LENGTH = 170;
const limitFlattenIdLength = (id, limit = FLATTEN_ID_MAX_FILE_LENGTH) => {
if (id.length <= limit) {
return id;
}
return id.slice(0, limit - (FLATTEN_ID_HASH_LENGTH + 1)) + "_" + getHash(id);
};
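// Illustrative outputs (comment only), applying the replacements above in order:
//   flattenId('@vue/shared')      // '@vue_shared'      ('/' -> '_')
//   flattenId('lodash.merge')     // 'lodash__merge'    ('.' -> '__')
//   flattenId('pkg > nested/dep') // 'pkg___nested_dep' (' > ' -> '___')
// Ids longer than 170 characters are truncated and suffixed with '_' plus an 8-char hash.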
const normalizeId = (id) => id.replace(replaceNestedIdRE, " > ");
const NODE_BUILTIN_NAMESPACE = "node:";
const NPM_BUILTIN_NAMESPACE = "npm:";
const BUN_BUILTIN_NAMESPACE = "bun:";
const nodeBuiltins = builtinModules.filter((id) => !id.includes(":"));
function isBuiltin(id) {
if (process.versions.deno && id.startsWith(NPM_BUILTIN_NAMESPACE)) return true;
if (process.versions.bun && id.startsWith(BUN_BUILTIN_NAMESPACE)) return true;
return isNodeBuiltin(id);
}
function isNodeBuiltin(id) {
if (id.startsWith(NODE_BUILTIN_NAMESPACE)) return true;
return nodeBuiltins.includes(id);
}
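// Illustrative results (comment only):
//   isNodeBuiltin('node:fs') // true (node: namespace)
//   isNodeBuiltin('fs')      // true (listed in builtinModules)
//   isNodeBuiltin('lodash')  // false
// isBuiltin() additionally accepts npm: ids under Deno and bun: ids under Bun.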
function isInNodeModules$1(id) {
return id.includes("node_modules");
}
function moduleListContains(moduleList, id) {
return moduleList?.some(
(m) => m === id || id.startsWith(withTrailingSlash(m))
);
}
function isOptimizable(id, optimizeDeps) {
const { extensions } = optimizeDeps;
return OPTIMIZABLE_ENTRY_RE.test(id) || (extensions?.some((ext) => id.endsWith(ext)) ?? false);
}
const bareImportRE = /^(?![a-zA-Z]:)[\w@](?!.*:\/\/)/;
const deepImportRE = /^([^@][^/]*)\/|^(@[^/]+\/[^/]+)\//;
const _require$1 = createRequire$1(import.meta.url);
function resolveDependencyVersion(dep, pkgRelativePath = "../../package.json") {
const pkgPath = path$n.resolve(_require$1.resolve(dep), pkgRelativePath);
return JSON.parse(fs__default.readFileSync(pkgPath, "utf-8")).version;
}
const rollupVersion = resolveDependencyVersion("rollup");
const filter = process.env.VITE_DEBUG_FILTER;
const DEBUG = process.env.DEBUG;
function createDebugger(namespace, options = {}) {
const log = debug$i(namespace);
const { onlyWhenFocused } = options;
let enabled = log.enabled;
if (enabled && onlyWhenFocused) {
const ns = typeof onlyWhenFocused === "string" ? onlyWhenFocused : namespace;
enabled = !!DEBUG?.includes(ns);
}
if (enabled) {
return (...args) => {
if (!filter || args.some((a) => a?.includes?.(filter))) {
log(...args);
}
};
}
}
function testCaseInsensitiveFS() {
if (!CLIENT_ENTRY.endsWith("client.mjs")) {
throw new Error(
`cannot test case insensitive FS, CLIENT_ENTRY const doesn't contain client.mjs`
);
}
if (!fs__default.existsSync(CLIENT_ENTRY)) {
throw new Error(
"cannot test case insensitive FS, CLIENT_ENTRY does not point to an existing file: " + CLIENT_ENTRY
);
}
return fs__default.existsSync(CLIENT_ENTRY.replace("client.mjs", "cLiEnT.mjs"));
}
const urlCanParse = (
// eslint-disable-next-line n/no-unsupported-features/node-builtins
URL$3.canParse ?? // URL.canParse is supported from Node.js 18.17.0+, 20.0.0+
((path2, base) => {
try {
new URL$3(path2, base);
return true;
} catch {
return false;
}
})
);
const isCaseInsensitiveFS = testCaseInsensitiveFS();
const VOLUME_RE = /^[A-Z]:/i;
function normalizePath$3(id) {
return path$n.posix.normalize(isWindows$3 ? slash$1(id) : id);
}
function fsPathFromId(id) {
const fsPath = normalizePath$3(
id.startsWith(FS_PREFIX) ? id.slice(FS_PREFIX.length) : id
);
return fsPath[0] === "/" || VOLUME_RE.test(fsPath) ? fsPath : `/${fsPath}`;
}
function fsPathFromUrl(url) {
return fsPathFromId(cleanUrl(url));
}
function isParentDirectory(dir, file) {
dir = withTrailingSlash(dir);
return file.startsWith(dir) || isCaseInsensitiveFS && file.toLowerCase().startsWith(dir.toLowerCase());
}
function isSameFileUri(file1, file2) {
return file1 === file2 || isCaseInsensitiveFS && file1.toLowerCase() === file2.toLowerCase();
}
const externalRE = /^(https?:)?\/\//;
const isExternalUrl = (url) => externalRE.test(url);
const dataUrlRE = /^\s*data:/i;
const isDataUrl = (url) => dataUrlRE.test(url);
const virtualModuleRE = /^virtual-module:.*/;
const virtualModulePrefix = "virtual-module:";
const knownJsSrcRE = /\.(?:[jt]sx?|m[jt]s|vue|marko|svelte|astro|imba|mdx)(?:$|\?)/;
const isJSRequest = (url) => {
url = cleanUrl(url);
if (knownJsSrcRE.test(url)) {
return true;
}
if (!path$n.extname(url) && url[url.length - 1] !== "/") {
return true;
}
return false;
};
const knownTsRE = /\.(?:ts|mts|cts|tsx)(?:$|\?)/;
const isTsRequest = (url) => knownTsRE.test(url);
const importQueryRE = /(\?|&)import=?(?:&|$)/;
const directRequestRE$1 = /(\?|&)direct=?(?:&|$)/;
const internalPrefixes = [
FS_PREFIX,
VALID_ID_PREFIX,
CLIENT_PUBLIC_PATH,
ENV_PUBLIC_PATH
];
const InternalPrefixRE = new RegExp(`^(?:${internalPrefixes.join("|")})`);
const trailingSeparatorRE = /[?&]$/;
const isImportRequest = (url) => importQueryRE.test(url);
const isInternalRequest = (url) => InternalPrefixRE.test(url);
function removeImportQuery(url) {
return url.replace(importQueryRE, "$1").replace(trailingSeparatorRE, "");
}
function removeDirectQuery(url) {
return url.replace(directRequestRE$1, "$1").replace(trailingSeparatorRE, "");
}
const urlRE = /(\?|&)url(?:&|$)/;
const rawRE = /(\?|&)raw(?:&|$)/;
function removeUrlQuery(url) {
return url.replace(urlRE, "$1").replace(trailingSeparatorRE, "");
}
const replacePercentageRE = /%/g;
function injectQuery(url, queryToInject) {
const resolvedUrl = new URL$3(
url.replace(replacePercentageRE, "%25"),
"relative:///"
);
const { search, hash } = resolvedUrl;
let pathname = cleanUrl(url);
pathname = isWindows$3 ? slash$1(pathname) : pathname;
return `${pathname}?${queryToInject}${search ? `&` + search.slice(1) : ""}${hash ?? ""}`;
}
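// Illustrative results (comment only):
//   injectQuery('/src/app.vue?vue&type=style', 'direct') // '/src/app.vue?direct&vue&type=style'
//   injectQuery('/src/main.ts', 'import')                 // '/src/main.ts?import'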
const timestampRE = /\bt=\d{13}&?\b/;
function removeTimestampQuery(url) {
return url.replace(timestampRE, "").replace(trailingSeparatorRE, "");
}
async function asyncReplace(input, re, replacer) {
let match;
let remaining = input;
let rewritten = "";
while (match = re.exec(remaining)) {
rewritten += remaining.slice(0, match.index);
rewritten += await replacer(match);
remaining = remaining.slice(match.index + match[0].length);
}
rewritten += remaining;
return rewritten;
}
function timeFrom(start, subtract = 0) {
const time = performance$1.now() - start - subtract;
const timeString = (time.toFixed(2) + `ms`).padEnd(5, " ");
if (time < 10) {
return colors$1.green(timeString);
} else if (time < 50) {
return colors$1.yellow(timeString);
} else {
return colors$1.red(timeString);
}
}
function prettifyUrl(url, root) {
url = removeTimestampQuery(url);
const isAbsoluteFile = url.startsWith(root);
if (isAbsoluteFile || url.startsWith(FS_PREFIX)) {
const file = path$n.posix.relative(
root,
isAbsoluteFile ? url : fsPathFromId(url)
);
return colors$1.dim(file);
} else {
return colors$1.dim(url);
}
}
function isObject$1(value) {
return Object.prototype.toString.call(value) === "[object Object]";
}
function isDefined(value) {
return value != null;
}
function tryStatSync(file) {
try {
return fs__default.statSync(file, { throwIfNoEntry: false });
} catch {
}
}
function lookupFile(dir, fileNames) {
while (dir) {
for (const fileName of fileNames) {
const fullPath = path$n.join(dir, fileName);
if (tryStatSync(fullPath)?.isFile()) return fullPath;
}
const parentDir = path$n.dirname(dir);
if (parentDir === dir) return;
dir = parentDir;
}
}
function isFilePathESM(filePath, packageCache) {
if (/\.m[jt]s$/.test(filePath)) {
return true;
} else if (/\.c[jt]s$/.test(filePath)) {
return false;
} else {
try {
const pkg = findNearestPackageData(path$n.dirname(filePath), packageCache);
return pkg?.data.type === "module";
} catch {
return false;
}
}
}
const splitRE = /\r?\n/g;
const range = 2;
function pad$1(source, n = 2) {
const lines = source.split(splitRE);
return lines.map((l) => ` `.repeat(n) + l).join("\n");
}
function posToNumber(source, pos) {
if (typeof pos === "number") return pos;
const lines = source.split(splitRE);
const { line, column } = pos;
let start = 0;
for (let i = 0; i < line - 1 && i < lines.length; i++) {
start += lines[i].length + 1;
}
return start + column;
}
function numberToPos(source, offset) {
if (typeof offset !== "number") return offset;
if (offset > source.length) {
throw new Error(
`offset is longer than source length! offset ${offset} > length ${source.length}`
);
}
const lines = source.split(splitRE);
let counted = 0;
let line = 0;
let column = 0;
for (; line < lines.length; line++) {
const lineLength = lines[line].length + 1;
if (counted + lineLength >= offset) {
column = offset - counted + 1;
break;
}
counted += lineLength;
}
return { line: line + 1, column };
}
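// Illustrative round trip (comment only), for source = 'ab\ncd':
//   posToNumber('ab\ncd', { line: 2, column: 1 }) // 4 (3 chars for line 1 incl. '\n', plus the column)
//   numberToPos('ab\ncd', 4)                      // { line: 2, column: 2 }
// Note the asymmetry: the column reported here comes back one higher than the input column.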
function generateCodeFrame(source, start = 0, end) {
start = Math.max(posToNumber(source, start), 0);
end = Math.min(
end !== void 0 ? posToNumber(source, end) : start,
source.length
);
const lines = source.split(splitRE);
let count = 0;
const res = [];
for (let i = 0; i < lines.length; i++) {
count += lines[i].length;
if (count >= start) {
for (let j = i - range; j <= i + range || end > count; j++) {
if (j < 0 || j >= lines.length) continue;
const line = j + 1;
res.push(
`${line}${" ".repeat(Math.max(3 - String(line).length, 0))}| ${lines[j]}`
);
const lineLength = lines[j].length;
if (j === i) {
const pad2 = Math.max(start - (count - lineLength), 0);
const length = Math.max(
1,
end > count ? lineLength - pad2 : end - start
);
res.push(` | ` + " ".repeat(pad2) + "^".repeat(length));
} else if (j > i) {
if (end > count) {
const length = Math.max(Math.min(end - count, lineLength), 1);
res.push(` | ` + "^".repeat(length));
}
count += lineLength + 1;
}
}
break;
}
count++;
}
return res.join("\n");
}
function isFileReadable(filename) {
if (!tryStatSync(filename)) {
return false;
}
try {
fs__default.accessSync(filename, fs__default.constants.R_OK);
return true;
} catch {
return false;
}
}
const splitFirstDirRE = /(.+?)[\\/](.+)/;
function emptyDir(dir, skip) {
const skipInDir = [];
let nested = null;
if (skip?.length) {
for (const file of skip) {
if (path$n.dirname(file) !== ".") {
const matched = splitFirstDirRE.exec(file);
if (matched) {
nested ??= /* @__PURE__ */ new Map();
const [, nestedDir, skipPath] = matched;
let nestedSkip = nested.get(nestedDir);
if (!nestedSkip) {
nestedSkip = [];
nested.set(nestedDir, nestedSkip);
}
if (!nestedSkip.includes(skipPath)) {
nestedSkip.push(skipPath);
}
}
} else {
skipInDir.push(file);
}
}
}
for (const file of fs__default.readdirSync(dir)) {
if (skipInDir.includes(file)) {
continue;
}
if (nested?.has(file)) {
emptyDir(path$n.resolve(dir, file), nested.get(file));
} else {
fs__default.rmSync(path$n.resolve(dir, file), { recursive: true, force: true });
}
}
}
function copyDir(srcDir, destDir) {
fs__default.mkdirSync(destDir, { recursive: true });
for (const file of fs__default.readdirSync(srcDir)) {
const srcFile = path$n.resolve(srcDir, file);
if (srcFile === destDir) {
continue;
}
const destFile = path$n.resolve(destDir, file);
const stat = fs__default.statSync(srcFile);
if (stat.isDirectory()) {
copyDir(srcFile, destFile);
} else {
fs__default.copyFileSync(srcFile, destFile);
}
}
}
const ERR_SYMLINK_IN_RECURSIVE_READDIR = "ERR_SYMLINK_IN_RECURSIVE_READDIR";
async function recursiveReaddir(dir) {
if (!fs__default.existsSync(dir)) {
return [];
}
let dirents;
try {
dirents = await fsp.readdir(dir, { withFileTypes: true });
} catch (e) {
if (e.code === "EACCES") {
return [];
}
throw e;
}
if (dirents.some((dirent) => dirent.isSymbolicLink())) {
const err = new Error(
"Symbolic links are not supported in recursiveReaddir"
);
err.code = ERR_SYMLINK_IN_RECURSIVE_READDIR;
throw err;
}
const files = await Promise.all(
dirents.map((dirent) => {
const res = path$n.resolve(dir, dirent.name);
return dirent.isDirectory() ? recursiveReaddir(res) : normalizePath$3(res);
})
);
return files.flat(1);
}
let safeRealpathSync = isWindows$3 ? windowsSafeRealPathSync : fs__default.realpathSync.native;
const windowsNetworkMap = /* @__PURE__ */ new Map();
function windowsMappedRealpathSync(path2) {
const realPath = fs__default.realpathSync.native(path2);
if (realPath.startsWith("\\\\")) {
for (const [network, volume] of windowsNetworkMap) {
if (realPath.startsWith(network)) return realPath.replace(network, volume);
}
}
return realPath;
}
const parseNetUseRE = /^\w* +(\w:) +([^ ]+)\s/;
let firstSafeRealPathSyncRun = false;
function windowsSafeRealPathSync(path2) {
if (!firstSafeRealPathSyncRun) {
optimizeSafeRealPathSync();
firstSafeRealPathSyncRun = true;
}
return fs__default.realpathSync(path2);
}
function optimizeSafeRealPathSync() {
const nodeVersion = process.versions.node.split(".").map(Number);
if (nodeVersion[0] < 18 || nodeVersion[0] === 18 && nodeVersion[1] < 10) {
safeRealpathSync = fs__default.realpathSync;
return;
}
try {
fs__default.realpathSync.native(path$n.resolve("./"));
} catch (error) {
if (error.message.includes("EISDIR: illegal operation on a directory")) {
safeRealpathSync = fs__default.realpathSync;
return;
}
}
exec("net use", (error, stdout) => {
if (error) return;
const lines = stdout.split("\n");
for (const line of lines) {
const m = parseNetUseRE.exec(line);
if (m) windowsNetworkMap.set(m[2], m[1]);
}
if (windowsNetworkMap.size === 0) {
safeRealpathSync = fs__default.realpathSync.native;
} else {
safeRealpathSync = windowsMappedRealpathSync;
}
});
}
function ensureWatchedFile(watcher, file, root) {
if (file && // only need to watch if out of root
!file.startsWith(withTrailingSlash(root)) && // some rollup plugins use null bytes for private resolved Ids
!file.includes("\0") && fs__default.existsSync(file)) {
watcher.add(path$n.resolve(file));
}
}
const escapedSpaceCharacters = /(?: |\\t|\\n|\\f|\\r)+/g;
const imageSetUrlRE = /^(?:[\w\-]+\(.*?\)|'.*?'|".*?"|\S*)/;
function joinSrcset(ret) {
return ret.map(({ url, descriptor }) => url + (descriptor ? ` ${descriptor}` : "")).join(", ");
}
function splitSrcSetDescriptor(srcs) {
return splitSrcSet(srcs).map((s) => {
const src = s.replace(escapedSpaceCharacters, " ").trim();
const url = imageSetUrlRE.exec(src)?.[0] ?? "";
return {
url,
descriptor: src.slice(url.length).trim()
};
}).filter(({ url }) => !!url);
}
function processSrcSet(srcs, replacer) {
return Promise.all(
splitSrcSetDescriptor(srcs).map(async ({ url, descriptor }) => ({
url: await replacer({ url, descriptor }),
descriptor
}))
).then(joinSrcset);
}
function processSrcSetSync(srcs, replacer) {
return joinSrcset(
splitSrcSetDescriptor(srcs).map(({ url, descriptor }) => ({
url: replacer({ url, descriptor }),
descriptor
}))
);
}
const cleanSrcSetRE = /(?:url|image|gradient|cross-fade)\([^)]*\)|"([^"]|(?<=\\)")*"|'([^']|(?<=\\)')*'|data:\w+\/[\w.+\-]+;base64,[\w+/=]+|\?\S+,/g;
function splitSrcSet(srcs) {
const parts = [];
const cleanedSrcs = srcs.replace(cleanSrcSetRE, blankReplacer);
let startIndex = 0;
let splitIndex;
do {
splitIndex = cleanedSrcs.indexOf(",", startIndex);
parts.push(
srcs.slice(startIndex, splitIndex !== -1 ? splitIndex : void 0)
);
startIndex = splitIndex + 1;
} while (splitIndex !== -1);
return parts;
}
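// Illustrative behaviour (comment only) of the srcset helpers above:
//   splitSrcSetDescriptor('hero.png 1x, hero@2x.png 2x')
//   // [{ url: 'hero.png', descriptor: '1x' }, { url: 'hero@2x.png', descriptor: '2x' }]
//   processSrcSetSync('hero.png 1x', ({ url }) => '/assets/' + url) // '/assets/hero.png 1x'
// splitSrcSet first blanks out url(...)/quoted/data: segments so commas inside them don't split entries.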
const windowsDriveRE = /^[A-Z]:/;
const replaceWindowsDriveRE = /^([A-Z]):\//;
const linuxAbsolutePathRE = /^\/[^/]/;
function escapeToLinuxLikePath(path2) {
if (windowsDriveRE.test(path2)) {
return path2.replace(replaceWindowsDriveRE, "/windows/$1/");
}
if (linuxAbsolutePathRE.test(path2)) {
return `/linux${path2}`;
}
return path2;
}
const revertWindowsDriveRE = /^\/windows\/([A-Z])\//;
function unescapeToLinuxLikePath(path2) {
if (path2.startsWith("/linux/")) {
return path2.slice("/linux".length);
}
if (path2.startsWith("/windows/")) {
return path2.replace(revertWindowsDriveRE, "$1:/");
}
return path2;
}
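// Illustrative mapping (comment only), used by combineSourcemaps below so Windows
// drive letters survive the remapping step:
//   escapeToLinuxLikePath('C:/project/src/a.ts')           // '/windows/C/project/src/a.ts'
//   escapeToLinuxLikePath('/home/user/a.ts')                // '/linux/home/user/a.ts'
//   unescapeToLinuxLikePath('/windows/C/project/src/a.ts')  // 'C:/project/src/a.ts'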
const nullSourceMap = {
names: [],
sources: [],
mappings: "",
version: 3
};
function combineSourcemaps(filename, sourcemapList) {
if (sourcemapList.length === 0 || sourcemapList.every((m) => m.sources.length === 0)) {
return { ...nullSourceMap };
}
sourcemapList = sourcemapList.map((sourcemap) => {
const newSourcemaps = { ...sourcemap };
newSourcemaps.sources = sourcemap.sources.map(
(source) => source ? escapeToLinuxLikePath(source) : null
);
if (sourcemap.sourceRoot) {
newSourcemaps.sourceRoot = escapeToLinuxLikePath(sourcemap.sourceRoot);
}
return newSourcemaps;
});
let map;
let mapIndex = 1;
const useArrayInterface = sourcemapList.slice(0, -1).find((m) => m.sources.length !== 1) === void 0;
if (useArrayInterface) {
map = remapping(sourcemapList, () => null);
} else {
map = remapping(sourcemapList[0], function loader(sourcefile) {
const mapForSources = sourcemapList.slice(mapIndex).find((s) => s.sources.includes(sourcefile));
if (mapForSources) {
mapIndex++;
return mapForSources;
}
return null;
});
}
if (!map.file) {
delete map.file;
}
map.sources = map.sources.map(
(source) => source ? unescapeToLinuxLikePath(source) : source
);
map.file = filename;
return map;
}
function unique(arr) {
return Array.from(new Set(arr));
}
async function getLocalhostAddressIfDiffersFromDNS() {
const [nodeResult, dnsResult] = await Promise.all([
promises.lookup("localhost"),
promises.lookup("localhost", { verbatim: true })
]);
const isSame = nodeResult.family === dnsResult.family && nodeResult.address === dnsResult.address;
return isSame ? void 0 : nodeResult.address;
}
function diffDnsOrderChange(oldUrls, newUrls) {
return !(oldUrls === newUrls || oldUrls && newUrls && arrayEqual(oldUrls.local, newUrls.local) && arrayEqual(oldUrls.network, newUrls.network));
}
async function resolveHostname(optionsHost) {
let host;
if (optionsHost === void 0 || optionsHost === false) {
host = "localhost";
} else if (optionsHost === true) {
host = void 0;
} else {
host = optionsHost;
}
let name = host === void 0 || wildcardHosts.has(host) ? "localhost" : host;
if (host === "localhost") {
const localhostAddr = await getLocalhostAddressIfDiffersFromDNS();
if (localhostAddr) {
name = localhostAddr;
}
}
return { host, name };
}
async function resolveServerUrls(server, options, config) {
const address = server.address();
const isAddressInfo = (x) => x?.address;
if (!isAddressInfo(address)) {
return { local: [], network: [] };
}
const local = [];
const network = [];
const hostname = await resolveHostname(options.host);
const protocol = options.https ? "https" : "http";
const port = address.port;
const base = config.rawBase === "./" || config.rawBase === "" ? "/" : config.rawBase;
if (hostname.host !== void 0 && !wildcardHosts.has(hostname.host)) {
let hostnameName = hostname.name;
if (hostnameName.includes(":")) {
hostnameName = `[${hostnameName}]`;
}
const address2 = `${protocol}://${hostnameName}:${port}${base}`;
if (loopbackHosts.has(hostname.host)) {
local.push(address2);
} else {
network.push(address2);
}
} else {
Object.values(os$5.networkInterfaces()).flatMap((nInterface) => nInterface ?? []).filter(
(detail) => detail && detail.address && (detail.family === "IPv4" || // @ts-expect-error Node 18.0 - 18.3 returns number
detail.family === 4)
).forEach((detail) => {
let host = detail.address.replace("127.0.0.1", hostname.name);
if (host.includes(":")) {
host = `[${host}]`;
}
const url = `${protocol}://${host}:${port}${base}`;
if (detail.address.includes("127.0.0.1")) {
local.push(url);
} else {
network.push(url);
}
});
}
return { local, network };
}
function arraify(target) {
return Array.isArray(target) ? target : [target];
}
const multilineCommentsRE = /\/\*[^*]*\*+(?:[^/*][^*]*\*+)*\//g;
const singlelineCommentsRE = /\/\/.*/g;
const requestQuerySplitRE = /\?(?!.*[/|}])/;
const requestQueryMaybeEscapedSplitRE = /\\?\?(?!.*[/|}])/;
const blankReplacer = (match) => " ".repeat(match.length);
function getHash(text, length = 8) {
const h = createHash$2("sha256").update(text).digest("hex").substring(0, length);
if (length <= 64) return h;
return h.padEnd(length, "_");
}
const _dirname = path$n.dirname(fileURLToPath(import.meta.url));
const requireResolveFromRootWithFallback = (root, id) => {
const found = resolvePackageData(id, root) || resolvePackageData(id, _dirname);
if (!found) {
const error = new Error(`${JSON.stringify(id)} not found.`);
error.code = "MODULE_NOT_FOUND";
throw error;
}
return _require$1.resolve(id, { paths: [root, _dirname] });
};
function emptyCssComments(raw) {
return raw.replace(multilineCommentsRE, blankReplacer);
}
function backwardCompatibleWorkerPlugins(plugins) {
if (Array.isArray(plugins)) {
return plugins;
}
if (typeof plugins === "function") {
return plugins();
}
return [];
}
function mergeConfigRecursively(defaults, overrides, rootPath) {
const merged = { ...defaults };
for (const key in overrides) {
const value = overrides[key];
if (value == null) {
continue;
}
const existing = merged[key];
if (existing == null) {
merged[key] = value;
continue;
}
if (key === "alias" && (rootPath === "resolve" || rootPath === "")) {
merged[key] = mergeAlias(existing, value);
continue;
} else if (key === "assetsInclude" && rootPath === "") {
merged[key] = [].concat(existing, value);
continue;
} else if (key === "noExternal" && rootPath === "ssr" && (existing === true || value === true)) {
merged[key] = true;
continue;
} else if (key === "plugins" && rootPath === "worker") {
merged[key] = () => [
...backwardCompatibleWorkerPlugins(existing),
...backwardCompatibleWorkerPlugins(value)
];
continue;
} else if (key === "server" && rootPath === "server.hmr") {
merged[key] = value;
continue;
}
if (Array.isArray(existing) || Array.isArray(value)) {
merged[key] = [...arraify(existing), ...arraify(value)];
continue;
}
if (isObject$1(existing) && isObject$1(value)) {
merged[key] = mergeConfigRecursively(
existing,
value,
rootPath ? `${rootPath}.${key}` : key
);
continue;
}
merged[key] = value;
}
return merged;
}
function mergeConfig(defaults, overrides, isRoot = true) {
if (typeof defaults === "function" || typeof overrides === "function") {
throw new Error(`Cannot merge config in form of callback`);
}
return mergeConfigRecursively(defaults, overrides, isRoot ? "" : ".");
}
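// Illustrative merge behaviour (comment only), following the rules above (p1/p2 are
// hypothetical plugin objects):
//   mergeConfig({ resolve: { alias: { a: '/a' } } }, { resolve: { alias: { b: '/b' } } })
//   // -> { resolve: { alias: { a: '/a', b: '/b' } } }
//   mergeConfig({ plugins: [p1] }, { plugins: [p2] }) // plugins arrays are concatenated
// Override values that are null/undefined are skipped rather than clobbering defaults.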
function mergeAlias(a, b) {
if (!a) return b;
if (!b) return a;
if (isObject$1(a) && isObject$1(b)) {
return { ...a, ...b };
}
return [...normalizeAlias(b), ...normalizeAlias(a)];
}
function normalizeAlias(o = []) {
return Array.isArray(o) ? o.map(normalizeSingleAlias) : Object.keys(o).map(
(find) => normalizeSingleAlias({
find,
replacement: o[find]
})
);
}
function normalizeSingleAlias({
find,
replacement,
customResolver
}) {
if (typeof find === "string" && find[find.length - 1] === "/" && replacement[replacement.length - 1] === "/") {
find = find.slice(0, find.length - 1);
replacement = replacement.slice(0, replacement.length - 1);
}
const alias = {
find,
replacement
};
if (customResolver) {
alias.customResolver = customResolver;
}
return alias;
}
function transformStableResult(s, id, config) {
return {
code: s.toString(),
map: config.command === "build" && config.build.sourcemap ? s.generateMap({ hires: "boundary", source: id }) : null
};
}
async function asyncFlatten(arr) {
do {
arr = (await Promise.all(arr)).flat(Infinity);
} while (arr.some((v) => v?.then));
return arr;
}
function stripBomTag(content) {
if (content.charCodeAt(0) === 65279) {
return content.slice(1);
}
return content;
}
const windowsDrivePathPrefixRE = /^[A-Za-z]:[/\\]/;
const isNonDriveRelativeAbsolutePath = (p) => {
if (!isWindows$3) return p[0] === "/";
return windowsDrivePathPrefixRE.test(p);
};
function shouldServeFile(filePath, root) {
if (!isCaseInsensitiveFS) return true;
return hasCorrectCase(filePath, root);
}
function hasCorrectCase(file, assets) {
if (file === assets) return true;
const parent = path$n.dirname(file);
if (fs__default.readdirSync(parent).includes(path$n.basename(file))) {
return hasCorrectCase(parent, assets);
}
return false;
}
function joinUrlSegments(a, b) {
if (!a || !b) {
return a || b || "";
}
if (a[a.length - 1] === "/") {
a = a.substring(0, a.length - 1);
}
if (b[0] !== "/") {
b = "/" + b;
}
return a + b;
}
function removeLeadingSlash(str) {
return str[0] === "/" ? str.slice(1) : str;
}
function stripBase(path2, base) {
if (path2 === base) {
return "/";
}
const devBase = withTrailingSlash(base);
return path2.startsWith(devBase) ? path2.slice(devBase.length - 1) : path2;
}
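// Illustrative results (comment only):
//   joinUrlSegments('/base/', 'assets/app.js') // '/base/assets/app.js'
//   stripBase('/base/assets/app.js', '/base')  // '/assets/app.js'
//   stripBase('/base', '/base')                // '/'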
function arrayEqual(a, b) {
if (a === b) return true;
if (a.length !== b.length) return false;
for (let i = 0; i < a.length; i++) {
if (a[i] !== b[i]) return false;
}
return true;
}
function evalValue(rawValue) {
const fn = new Function(`
var console, exports, global, module, process, require
return (
${rawValue}
)
`);
return fn();
}
function getNpmPackageName(importPath) {
const parts = importPath.split("/");
if (parts[0][0] === "@") {
if (!parts[1]) return null;
return `${parts[0]}/${parts[1]}`;
} else {
return parts[0];
}
}
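// Illustrative results (comment only):
//   getNpmPackageName('lodash/merge')       // 'lodash'
//   getNpmPackageName('@vitejs/plugin-vue') // '@vitejs/plugin-vue'
//   getNpmPackageName('@scope')             // null (scoped id missing its package part)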
const escapeRegexRE = /[-/\\^$*+?.()|[\]{}]/g;
function escapeRegex(str) {
return str.replace(escapeRegexRE, "\\$&");
}
function getPackageManagerCommand(type = "install") {
const packageManager = process.env.npm_config_user_agent?.split(" ")[0].split("/")[0] || "npm";
switch (type) {
case "install":
return packageManager === "npm" ? "npm install" : `${packageManager} add`;
case "uninstall":
return packageManager === "npm" ? "npm uninstall" : `${packageManager} remove`;
case "update":
return packageManager === "yarn" ? "yarn upgrade" : `${packageManager} update`;
default:
throw new TypeError(`Unknown command type: ${type}`);
}
}
function isDevServer(server) {
return "pluginContainer" in server;
}
function promiseWithResolvers() {
let resolve;
let reject;
const promise = new Promise((_resolve, _reject) => {
resolve = _resolve;
reject = _reject;
});
return { promise, resolve, reject };
}
function createSerialPromiseQueue() {
let previousTask;
return {
async run(f) {
const thisTask = f();
const depTasks = Promise.all([previousTask, thisTask]);
previousTask = depTasks;
const [, result] = await depTasks;
if (previousTask === depTasks) {
previousTask = void 0;
}
return result;
}
};
}
function sortObjectKeys(obj) {
const sorted = {};
for (const key of Object.keys(obj).sort()) {
sorted[key] = obj[key];
}
return sorted;
}
function displayTime(time) {
if (time < 1e3) {
return `${time}ms`;
}
time = time / 1e3;
if (time < 60) {
return `${time.toFixed(2)}s`;
}
const mins = parseInt((time / 60).toString());
const seconds = time % 60;
return `${mins}m${seconds < 1 ? "" : ` ${seconds.toFixed(0)}s`}`;
}
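// Illustrative outputs (comment only):
//   displayTime(850)   // '850ms'
//   displayTime(2340)  // '2.34s'
//   displayTime(96000) // '1m 36s'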
function encodeURIPath(uri) {
if (uri.startsWith("data:")) return uri;
const filePath = cleanUrl(uri);
const postfix = filePath !== uri ? uri.slice(filePath.length) : "";
return encodeURI(filePath) + postfix;
}
function partialEncodeURIPath(uri) {
if (uri.startsWith("data:")) return uri;
const filePath = cleanUrl(uri);
const postfix = filePath !== uri ? uri.slice(filePath.length) : "";
return filePath.replaceAll("%", "%25") + postfix;
}
const setupSIGTERMListener = (callback) => {
process.once("SIGTERM", callback);
if (process.env.CI !== "true") {
process.stdin.on("end", callback);
}
};
const teardownSIGTERMListener = (callback) => {
process.off("SIGTERM", callback);
if (process.env.CI !== "true") {
process.stdin.off("end", callback);
}
};
const LogLevels = {
silent: 0,
error: 1,
warn: 2,
info: 3
};
let lastType;
let lastMsg;
let sameCount = 0;
function clearScreen() {
const repeatCount = process.stdout.rows - 2;
const blank = repeatCount > 0 ? "\n".repeat(repeatCount) : "";
console.log(blank);
readline.cursorTo(process.stdout, 0, 0);
readline.clearScreenDown(process.stdout);
}
let timeFormatter;
function getTimeFormatter() {
timeFormatter ??= new Intl.DateTimeFormat(void 0, {
hour: "numeric",
minute: "numeric",
second: "numeric"
});
return timeFormatter;
}
function createLogger(level = "info", options = {}) {
if (options.customLogger) {
return options.customLogger;
}
const loggedErrors = /* @__PURE__ */ new WeakSet();
const { prefix = "[vite]", allowClearScreen = true } = options;
const thresh = LogLevels[level];
const canClearScreen = allowClearScreen && process.stdout.isTTY && !process.env.CI;
const clear = canClearScreen ? clearScreen : () => {
};
function format(type, msg, options2 = {}) {
if (options2.timestamp) {
let tag = "";
if (type === "info") {
tag = colors$1.cyan(colors$1.bold(prefix));
} else if (type === "warn") {
tag = colors$1.yellow(colors$1.bold(prefix));
} else {
tag = colors$1.red(colors$1.bold(prefix));
}
return `${colors$1.dim(getTimeFormatter().format(/* @__PURE__ */ new Date()))} ${tag} ${msg}`;
} else {
return msg;
}
}
function output(type, msg, options2 = {}) {
if (thresh >= LogLevels[type]) {
const method = type === "info" ? "log" : type;
if (options2.error) {
loggedErrors.add(options2.error);
}
if (canClearScreen) {
if (type === lastType && msg === lastMsg) {
sameCount++;
clear();
console[method](
format(type, msg, options2),
colors$1.yellow(`(x${sameCount + 1})`)
);
} else {
sameCount = 0;
lastMsg = msg;
lastType = type;
if (options2.clear) {
clear();
}
console[method](format(type, msg, options2));
}
} else {
console[method](format(type, msg, options2));
}
}
}
const warnedMessages = /* @__PURE__ */ new Set();
const logger = {
hasWarned: false,
info(msg, opts) {
output("info", msg, opts);
},
warn(msg, opts) {
logger.hasWarned = true;
output("warn", msg, opts);
},
warnOnce(msg, opts) {
if (warnedMessages.has(msg)) return;
logger.hasWarned = true;
output("warn", msg, opts);
warnedMessages.add(msg);
},
error(msg, opts) {
logger.hasWarned = true;
output("error", msg, opts);
},
clearScreen(type) {
if (thresh >= LogLevels[type]) {
clear();
}
},
hasErrorLogged(error) {
return loggedErrors.has(error);
}
};
return logger;
}
function printServerUrls(urls, optionsHost, info) {
const colorUrl = (url) => colors$1.cyan(url.replace(/:(\d+)\//, (_, port) => `:${colors$1.bold(port)}/`));
for (const url of urls.local) {
info(` ${colors$1.green("\u279C")} ${colors$1.bold("Local")}: ${colorUrl(url)}`);
}
for (const url of urls.network) {
info(` ${colors$1.green("\u279C")} ${colors$1.bold("Network")}: ${colorUrl(url)}`);
}
if (urls.network.length === 0 && optionsHost === void 0) {
info(
colors$1.dim(` ${colors$1.green("\u279C")} ${colors$1.bold("Network")}: use `) + colors$1.bold("--host") + colors$1.dim(" to expose")
);
}
}
const groups = [
{ name: "Assets", color: colors$1.green },
{ name: "CSS", color: colors$1.magenta },
{ name: "JS", color: colors$1.cyan }
];
const COMPRESSIBLE_ASSETS_RE = /\.(?:html|json|svg|txt|xml|xhtml)$/;
function buildReporterPlugin(config) {
const compress = promisify$4(gzip);
const chunkLimit = config.build.chunkSizeWarningLimit;
const numberFormatter = new Intl.NumberFormat("en", {
maximumFractionDigits: 2,
minimumFractionDigits: 2
});
const displaySize = (bytes) => {
return `${numberFormatter.format(bytes / 1e3)} kB`;
};
const tty = process.stdout.isTTY && !process.env.CI;
const shouldLogInfo = LogLevels[config.logLevel || "info"] >= LogLevels.info;
let hasTransformed = false;
let hasRenderedChunk = false;
let hasCompressChunk = false;
let transformedCount = 0;
let chunkCount = 0;
let compressedCount = 0;
async function getCompressedSize(code) {
if (config.build.ssr || !config.build.reportCompressedSize) {
return null;
}
if (shouldLogInfo && !hasCompressChunk) {
if (!tty) {
config.logger.info("computing gzip size...");
} else {
writeLine("computing gzip size (0)...");
}
hasCompressChunk = true;
}
const compressed = await compress(
typeof code === "string" ? code : Buffer.from(code)
);
compressedCount++;
if (shouldLogInfo && tty) {
writeLine(`computing gzip size (${compressedCount})...`);
}
return compressed.length;
}
const logTransform = throttle((id) => {
writeLine(
`transforming (${transformedCount}) ${colors$1.dim(
path$n.relative(config.root, id)
)}`
);
});
return {
name: "vite:reporter",
transform(_, id) {
transformedCount++;
if (shouldLogInfo) {
if (!tty) {
if (!hasTransformed) {
config.logger.info(`transforming...`);
}
} else {
if (id.includes(`?`)) return;
logTransform(id);
}
hasTransformed = true;
}
return null;
},
buildStart() {
transformedCount = 0;
},
buildEnd() {
if (shouldLogInfo) {
if (tty) {
clearLine$1();
}
config.logger.info(
`${colors$1.green(`\u2713`)} ${transformedCount} modules transformed.`
);
}
},
renderStart() {
chunkCount = 0;
compressedCount = 0;
},
renderChunk(code, chunk, options) {
if (!options.inlineDynamicImports) {
for (const id of chunk.moduleIds) {
const module = this.getModuleInfo(id);
if (!module) continue;
if (module.importers.length && module.dynamicImporters.length) {
const detectedIneffectiveDynamicImport = module.dynamicImporters.some(
(id2) => !isInNodeModules$1(id2) && chunk.moduleIds.includes(id2)
);
if (detectedIneffectiveDynamicImport) {
this.warn(
`
(!) ${module.id} is dynamically imported by ${module.dynamicImporters.join(
", "
)} but also statically imported by ${module.importers.join(
", "
)}, dynamic import will not move module into another chunk.
`
);
}
}
}
}
chunkCount++;
if (shouldLogInfo) {
if (!tty) {
if (!hasRenderedChunk) {
config.logger.info("rendering chunks...");
}
} else {
writeLine(`rendering chunks (${chunkCount})...`);
}
hasRenderedChunk = true;
}
return null;
},
generateBundle() {
if (shouldLogInfo && tty) clearLine$1();
},
async writeBundle({ dir: outDir }, output) {
let hasLargeChunks = false;
if (shouldLogInfo) {
const entries = (await Promise.all(
Object.values(output).map(
async (chunk) => {
if (chunk.type === "chunk") {
return {
name: chunk.fileName,
group: "JS",
size: chunk.code.length,
compressedSize: await getCompressedSize(chunk.code),
mapSize: chunk.map ? chunk.map.toString().length : null
};
} else {
if (chunk.fileName.endsWith(".map")) return null;
const isCSS = chunk.fileName.endsWith(".css");
const isCompressible = isCSS || COMPRESSIBLE_ASSETS_RE.test(chunk.fileName);
return {
name: chunk.fileName,
group: isCSS ? "CSS" : "Assets",
size: chunk.source.length,
mapSize: null,
// Rollup doesn't support CSS maps?
compressedSize: isCompressible ? await getCompressedSize(chunk.source) : null
};
}
}
)
)).filter(isDefined);
if (tty) clearLine$1();
let longest = 0;
let biggestSize = 0;
let biggestMap = 0;
let biggestCompressSize = 0;
for (const entry of entries) {
if (entry.name.length > longest) longest = entry.name.length;
if (entry.size > biggestSize) biggestSize = entry.size;
if (entry.mapSize && entry.mapSize > biggestMap) {
biggestMap = entry.mapSize;
}
if (entry.compressedSize && entry.compressedSize > biggestCompressSize) {
biggestCompressSize = entry.compressedSize;
}
}
const sizePad = displaySize(biggestSize).length;
const mapPad = displaySize(biggestMap).length;
const compressPad = displaySize(biggestCompressSize).length;
const relativeOutDir = normalizePath$3(
path$n.relative(
config.root,
path$n.resolve(config.root, outDir ?? config.build.outDir)
)
);
const assetsDir = path$n.join(config.build.assetsDir, "/");
for (const group of groups) {
const filtered = entries.filter((e) => e.group === group.name);
if (!filtered.length) continue;
for (const entry of filtered.sort((a, z) => a.size - z.size)) {
const isLarge = group.name === "JS" && entry.size / 1e3 > chunkLimit;
if (isLarge) hasLargeChunks = true;
const sizeColor = isLarge ? colors$1.yellow : colors$1.dim;
let log = colors$1.dim(withTrailingSlash(relativeOutDir));
log += !config.build.lib && entry.name.startsWith(withTrailingSlash(assetsDir)) ? colors$1.dim(assetsDir) + group.color(
entry.name.slice(assetsDir.length).padEnd(longest + 2 - assetsDir.length)
) : group.color(entry.name.padEnd(longest + 2));
log += colors$1.bold(
sizeColor(displaySize(entry.size).padStart(sizePad))
);
if (entry.compressedSize) {
log += colors$1.dim(
` \u2502 gzip: ${displaySize(entry.compressedSize).padStart(
compressPad
)}`
);
}
if (entry.mapSize) {
log += colors$1.dim(
` \u2502 map: ${displaySize(entry.mapSize).padStart(mapPad)}`
);
}
config.logger.info(log);
}
}
} else {
hasLargeChunks = Object.values(output).some((chunk) => {
return chunk.type === "chunk" && chunk.code.length / 1e3 > chunkLimit;
});
}
if (hasLargeChunks && config.build.minify && !config.build.lib && !config.build.ssr) {
config.logger.warn(
colors$1.yellow(
`
(!) Some chunks are larger than ${chunkLimit} kB after minification. Consider:
- Using dynamic import() to code-split the application
- Use build.rollupOptions.output.manualChunks to improve chunking: https://rollupjs.org/configuration-options/#output-manualchunks
- Adjust chunk size limit for this warning via build.chunkSizeWarningLimit.`
)
);
}
}
};
}
function writeLine(output) {
clearLine$1();
if (output.length < process.stdout.columns) {
process.stdout.write(output);
} else {
process.stdout.write(output.substring(0, process.stdout.columns - 1));
}
}
function clearLine$1() {
process.stdout.clearLine(0);
process.stdout.cursorTo(0);
}
function throttle(fn) {
let timerHandle = null;
return (...args) => {
if (timerHandle) return;
fn(...args);
timerHandle = setTimeout(() => {
timerHandle = null;
}, 100);
};
}
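// Illustrative sketch (not part of the bundle): this throttle is leading-edge only — the first
// call runs immediately and any call within the following 100 ms is dropped, not deferred.
// It is used above to cap the "transforming (n) ..." progress line at roughly 10 updates/second.
//
//   const log = throttle((n) => console.log(n));
//   log(1);                        // logs 1 immediately
//   log(2);                        // dropped (inside the 100 ms window)
//   setTimeout(() => log(3), 150); // logs 3 (window has elapsed)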
const POSIX_SEP_RE = new RegExp('\\' + path$n.posix.sep, 'g');
const NATIVE_SEP_RE = new RegExp('\\' + path$n.sep, 'g');
/** @type {Map<string,RegExp>} */
const PATTERN_REGEX_CACHE = new Map();
const GLOB_ALL_PATTERN = `**/*`;
const TS_EXTENSIONS = ['.ts', '.tsx', '.mts', '.cts'];
const JS_EXTENSIONS = ['.js', '.jsx', '.mjs', '.cjs'];
const TSJS_EXTENSIONS = TS_EXTENSIONS.concat(JS_EXTENSIONS);
const TS_EXTENSIONS_RE_GROUP = `\\.(?:${TS_EXTENSIONS.map((ext) => ext.substring(1)).join('|')})`;
const TSJS_EXTENSIONS_RE_GROUP = `\\.(?:${TSJS_EXTENSIONS.map((ext) => ext.substring(1)).join(
'|'
)})`;
const IS_POSIX = path$n.posix.sep === path$n.sep;
/**
* @template T
* @returns {{resolve:(result:T)=>void, reject:(error:any)=>void, promise: Promise<T>}}
*/
function makePromise() {
let resolve, reject;
const promise = new Promise((res, rej) => {
resolve = res;
reject = rej;
});
return { promise, resolve, reject };
}
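// Illustrative sketch (not part of the bundle): makePromise returns a "deferred" — the promise
// together with its resolve/reject handles — so the pending promise can be stored (e.g. in a
// cache) before the work that settles it has started; find()/findUp() below rely on this.
//
//   const { promise, resolve } = makePromise();
//   someCache.set('key', promise);     // hypothetical consumer holding the pending promise
//   resolve('/project/tsconfig.json'); // settle it later, once the lookup finishes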
/**
* @param {string} filename
* @param {import('./cache.js').TSConfckCache} [cache]
* @returns {Promise<string|void>}
*/
async function resolveTSConfigJson(filename, cache) {
if (path$n.extname(filename) !== '.json') {
return; // ignore files that are not json
}
const tsconfig = path$n.resolve(filename);
if (cache && (cache.hasParseResult(tsconfig) || cache.hasParseResult(filename))) {
return tsconfig;
}
return promises$1.stat(tsconfig).then((stat) => {
if (stat.isFile() || stat.isFIFO()) {
return tsconfig;
} else {
throw new Error(`${filename} exists but is not a regular file.`);
}
});
}
/**
*
* @param {string} dir an absolute directory path
* @returns {boolean} true if the dir path includes a node_modules segment
*/
const isInNodeModules = IS_POSIX
? (dir) => dir.includes('/node_modules/')
: (dir) => dir.match(/[/\\]node_modules[/\\]/);
/**
* convert posix separator to native separator
*
* eg.
* windows: C:/foo/bar -> c:\foo\bar
* linux: /foo/bar -> /foo/bar
*
* @param {string} filename with posix separators
* @returns {string} filename with native separators
*/
const posix2native = IS_POSIX
? (filename) => filename
: (filename) => filename.replace(POSIX_SEP_RE, path$n.sep);
/**
* convert native separator to posix separator
*
* eg.
* windows: C:\foo\bar -> c:/foo/bar
* linux: /foo/bar -> /foo/bar
*
* @param {string} filename - filename with native separators
* @returns {string} filename with posix separators
*/
const native2posix = IS_POSIX
? (filename) => filename
: (filename) => filename.replace(NATIVE_SEP_RE, path$n.posix.sep);
/**
* converts params to native separator, resolves path and converts native back to posix
*
* needed on windows to handle posix paths in tsconfig
*
* @param dir {string|null} directory to resolve from
* @param filename {string} filename or pattern to resolve
* @returns string
*/
const resolve2posix = IS_POSIX
? (dir, filename) => (dir ? path$n.resolve(dir, filename) : path$n.resolve(filename))
: (dir, filename) =>
native2posix(
dir
? path$n.resolve(posix2native(dir), posix2native(filename))
: path$n.resolve(posix2native(filename))
);
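// Illustrative sketch (not part of the bundle): on Windows these helpers round-trip between the
// posix-style paths used inside tsconfig files and the native separators that path.resolve
// expects; on posix systems posix2native/native2posix are identity functions.
//
//   // on win32 (hypothetical values):
//   posix2native('C:/foo/bar');       // -> 'C:\\foo\\bar'
//   native2posix('C:\\foo\\bar');     // -> 'C:/foo/bar'
//   resolve2posix('C:/foo', './baz'); // -> 'C:/foo/baz'
//   // on linux/macOS resolve2posix is simply path.resolve(dir, filename)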
/**
*
* @param {import('./public.d.ts').TSConfckParseResult} result
* @param {import('./public.d.ts').TSConfckParseOptions} [options]
* @returns {string[]}
*/
function resolveReferencedTSConfigFiles(result, options) {
const dir = path$n.dirname(result.tsconfigFile);
return result.tsconfig.references.map((ref) => {
const refPath = ref.path.endsWith('.json')
? ref.path
: path$n.join(ref.path, options?.configName ?? 'tsconfig.json');
return resolve2posix(dir, refPath);
});
}
/**
* @param {string} filename
* @param {import('./public.d.ts').TSConfckParseResult} result
* @returns {import('./public.d.ts').TSConfckParseResult}
*/
function resolveSolutionTSConfig(filename, result) {
const allowJs = result.tsconfig.compilerOptions?.allowJs;
const extensions = allowJs ? TSJS_EXTENSIONS : TS_EXTENSIONS;
if (
result.referenced &&
extensions.some((ext) => filename.endsWith(ext)) &&
!isIncluded(filename, result)
) {
const solutionTSConfig = result.referenced.find((referenced) =>
isIncluded(filename, referenced)
);
if (solutionTSConfig) {
return solutionTSConfig;
}
}
return result;
}
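// Illustrative sketch (not part of the bundle): with a "solution style" root tsconfig that only
// lists project references, a source file is redirected to the referenced config that includes it
// (hypothetical layout):
//
//   // /project/tsconfig.json:     { "files": [], "references": [{ "path": "./tsconfig.app.json" }] }
//   // /project/tsconfig.app.json: { "include": ["src"] }
//   resolveSolutionTSConfig('/project/src/main.ts', rootResult);
//   // -> the parse result of tsconfig.app.json, found via rootResult.referenced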
/**
*
* @param {string} filename
* @param {import('./public.d.ts').TSConfckParseResult} result
* @returns {boolean}
*/
function isIncluded(filename, result) {
const dir = native2posix(path$n.dirname(result.tsconfigFile));
const files = (result.tsconfig.files || []).map((file) => resolve2posix(dir, file));
const absoluteFilename = resolve2posix(null, filename);
if (files.includes(filename)) {
return true;
}
const allowJs = result.tsconfig.compilerOptions?.allowJs;
const isIncluded = isGlobMatch(
absoluteFilename,
dir,
result.tsconfig.include || (result.tsconfig.files ? [] : [GLOB_ALL_PATTERN]),
allowJs
);
if (isIncluded) {
const isExcluded = isGlobMatch(absoluteFilename, dir, result.tsconfig.exclude || [], allowJs);
return !isExcluded;
}
return false;
}
/**
* test filenames against glob patterns in tsconfig
*
* @param filename {string} posix style absolute path to filename to test
* @param dir {string} posix style absolute path to directory of tsconfig containing patterns
* @param patterns {string[]} glob patterns to match against
* @param allowJs {boolean} allowJs setting in tsconfig to include js extensions in checks
* @returns {boolean} true when at least one pattern matches filename
*/
function isGlobMatch(filename, dir, patterns, allowJs) {
const extensions = allowJs ? TSJS_EXTENSIONS : TS_EXTENSIONS;
return patterns.some((pattern) => {
// filename must end with part of pattern that comes after last wildcard
let lastWildcardIndex = pattern.length;
let hasWildcard = false;
let hasExtension = false;
let hasSlash = false;
let lastSlashIndex = -1;
for (let i = pattern.length - 1; i > -1; i--) {
const c = pattern[i];
if (!hasWildcard) {
if (c === '*' || c === '?') {
lastWildcardIndex = i;
hasWildcard = true;
}
}
if (!hasSlash) {
if (c === '.') {
hasExtension = true;
} else if (c === '/') {
lastSlashIndex = i;
hasSlash = true;
}
}
if (hasWildcard && hasSlash) {
break;
}
}
if (!hasExtension && (!hasWildcard || lastWildcardIndex < lastSlashIndex)) {
// add implicit glob
pattern += `${pattern.endsWith('/') ? '' : '/'}${GLOB_ALL_PATTERN}`;
lastWildcardIndex = pattern.length - 1;
hasWildcard = true;
}
// if pattern does not end with wildcard, filename must end with pattern after last wildcard
if (
lastWildcardIndex < pattern.length - 1 &&
!filename.endsWith(pattern.slice(lastWildcardIndex + 1))
) {
return false;
}
// if pattern ends with *, filename must end with a default extension
if (pattern.endsWith('*') && !extensions.some((ext) => filename.endsWith(ext))) {
return false;
}
// for **/* , filename must start with the dir
if (pattern === GLOB_ALL_PATTERN) {
return filename.startsWith(`${dir}/`);
}
const resolvedPattern = resolve2posix(dir, pattern);
// filename must start with part of pattern that comes before first wildcard
let firstWildcardIndex = -1;
for (let i = 0; i < resolvedPattern.length; i++) {
if (resolvedPattern[i] === '*' || resolvedPattern[i] === '?') {
firstWildcardIndex = i;
hasWildcard = true;
break;
}
}
if (
firstWildcardIndex > 1 &&
!filename.startsWith(resolvedPattern.slice(0, firstWildcardIndex - 1))
) {
return false;
}
if (!hasWildcard) {
// no wildcard in pattern, filename must be equal to resolved pattern
return filename === resolvedPattern;
} else if (
firstWildcardIndex + GLOB_ALL_PATTERN.length ===
resolvedPattern.length - (pattern.length - 1 - lastWildcardIndex) &&
resolvedPattern.slice(firstWildcardIndex, firstWildcardIndex + GLOB_ALL_PATTERN.length) ===
GLOB_ALL_PATTERN
) {
// singular glob-all pattern and we already validated prefix and suffix matches
return true;
}
// complex pattern, use regex to check it
if (PATTERN_REGEX_CACHE.has(resolvedPattern)) {
return PATTERN_REGEX_CACHE.get(resolvedPattern).test(filename);
}
const regex = pattern2regex(resolvedPattern, allowJs);
PATTERN_REGEX_CACHE.set(resolvedPattern, regex);
return regex.test(filename);
});
}
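// Illustrative sketch (not part of the bundle): how the matcher above treats typical tsconfig
// "include" patterns, assuming posix-style absolute filenames as the JSDoc requires.
//
//   const dir = '/project';
//   isGlobMatch('/project/src/a.ts', dir, ['src'], false);       // true  (implicit '/**/*' appended)
//   isGlobMatch('/project/src/a.js', dir, ['src/**/*'], false);  // false (.js only matches with allowJs)
//   isGlobMatch('/project/src/a.js', dir, ['src/**/*'], true);   // true
//   isGlobMatch('/project/dist/a.ts', dir, ['src/**/*'], false); // false (outside the pattern prefix)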
/**
* @param {string} resolvedPattern
* @param {boolean} allowJs
* @returns {RegExp}
*/
function pattern2regex(resolvedPattern, allowJs) {
let regexStr = '^';
for (let i = 0; i < resolvedPattern.length; i++) {
const char = resolvedPattern[i];
if (char === '?') {
regexStr += '[^\\/]';
continue;
}
if (char === '*') {
if (resolvedPattern[i + 1] === '*' && resolvedPattern[i + 2] === '/') {
i += 2;
regexStr += '(?:[^\\/]*\\/)*'; // zero or more path segments
continue;
}
regexStr += '[^\\/]*';
continue;
}
if ('/.+^${}()|[]\\'.includes(char)) {
regexStr += `\\`;
}
regexStr += char;
}
// add known file endings if pattern ends on *
if (resolvedPattern.endsWith('*')) {
regexStr += allowJs ? TSJS_EXTENSIONS_RE_GROUP : TS_EXTENSIONS_RE_GROUP;
}
regexStr += '$';
return new RegExp(regexStr);
}
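// Illustrative sketch (not part of the bundle): pattern2regex compiles an already-resolved glob
// into a RegExp; '**/' becomes "zero or more path segments", '*' becomes "anything but '/'",
// and a trailing '*' additionally requires one of the known TS (or TS+JS) extensions.
//
//   const re = pattern2regex('/p/src/**/*', false);
//   re.test('/p/src/a.ts');         // true
//   re.test('/p/src/nested/b.tsx'); // true
//   re.test('/p/src/a.js');         // false (allowJs was false)
//   re.test('/p/other/a.ts');       // false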
/**
* replace tokens like ${configDir}
* @param {any} tsconfig
* @param {string} configDir
* @returns {any}
*/
function replaceTokens(tsconfig, configDir) {
return JSON.parse(
JSON.stringify(tsconfig)
// replace ${configDir}
.replaceAll(/"\${configDir}/g, `"${native2posix(configDir)}`)
);
}
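// Illustrative sketch (not part of the bundle): the ${configDir} template variable is substituted
// by textually rewriting the serialized config, so only string values starting with the token are
// affected.
//
//   replaceTokens({ compilerOptions: { outDir: '${configDir}/dist' } }, '/project');
//   // -> { compilerOptions: { outDir: '/project/dist' } }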
/**
* find the closest tsconfig.json file
*
* @param {string} filename - path to file to find tsconfig for (absolute or relative to cwd)
* @param {import('./public.d.ts').TSConfckFindOptions} [options] - options
* @returns {Promise<string|null>} absolute path to closest tsconfig.json or null if not found
*/
async function find(filename, options) {
let dir = path$n.dirname(path$n.resolve(filename));
if (options?.ignoreNodeModules && isInNodeModules(dir)) {
return null;
}
const cache = options?.cache;
const configName = options?.configName ?? 'tsconfig.json';
if (cache?.hasConfigPath(dir, configName)) {
return cache.getConfigPath(dir, configName);
}
const { /** @type {Promise<string|null>} */ promise, resolve, reject } = makePromise();
if (options?.root && !path$n.isAbsolute(options.root)) {
options.root = path$n.resolve(options.root);
}
findUp(dir, { promise, resolve, reject }, options);
return promise;
}
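// Illustrative sketch (not part of the bundle): find() walks up from the file's directory,
// resolving to the first tsconfig.json it encounters and to null once the configured root (or the
// filesystem root) has been checked without success.
//
//   const tsconfigPath = await find('/project/src/main.ts', {
//     cache: new TSConfckCache(), // class defined further down in this chunk
//     root: '/project',           // do not walk above this directory
//   });
//   // -> '/project/tsconfig.json' or null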
/**
*
* @param {string} dir
* @param {{promise:Promise<string|null>,resolve:(result:string|null)=>void,reject:(err:any)=>void}} madePromise
* @param {import('./public.d.ts').TSConfckFindOptions} [options] - options
*/
function findUp(dir, { resolve, reject, promise }, options) {
const { cache, root, configName } = options ?? {};
if (cache) {
if (cache.hasConfigPath(dir, configName)) {
let cached;
try {
cached = cache.getConfigPath(dir, configName);
} catch (e) {
reject(e);
return;
}
if (cached?.then) {
cached.then(resolve).catch(reject);
} else {
resolve(cached);
}
} else {
cache.setConfigPath(dir, promise, configName);
}
}
const tsconfig = path$n.join(dir, options?.configName ?? 'tsconfig.json');
fs__default.stat(tsconfig, (err, stats) => {
if (stats && (stats.isFile() || stats.isFIFO())) {
resolve(tsconfig);
} else if (err?.code !== 'ENOENT') {
reject(err);
} else {
let parent;
if (root === dir || (parent = path$n.dirname(dir)) === dir) {
resolve(null);
} else {
findUp(parent, { promise, resolve, reject }, options);
}
}
});
}
/*
this file contains code from strip-bom and strip-json-comments by Sindre Sorhus
https://github.com/sindresorhus/strip-json-comments/blob/v4.0.0/index.js
https://github.com/sindresorhus/strip-bom/blob/v5.0.0/index.js
licensed under MIT, see ../LICENSE
*/
/**
* convert content of tsconfig.json to regular json
*
* @param {string} tsconfigJson - content of tsconfig.json
* @returns {string} content as regular json, comments and dangling commas have been replaced with whitespace
*/
function toJson(tsconfigJson) {
const stripped = stripDanglingComma(stripJsonComments(stripBom(tsconfigJson)));
if (stripped.trim() === '') {
// only whitespace left after stripping, return empty object so that JSON.parse still works
return '{}';
} else {
return stripped;
}
}
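// Illustrative sketch (not part of the bundle): toJson accepts the JSONC dialect used by
// tsconfig.json (comments and trailing commas) and returns plain JSON.
//
//   JSON.parse(toJson(`{
//     // comments and dangling commas are fine here
//     "compilerOptions": { "strict": true, },
//   }`));
//   // -> { compilerOptions: { strict: true } }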
/**
* replace dangling commas from pseudo-json string with single space
* implementation heavily inspired by strip-json-comments
*
* @param {string} pseudoJson
* @returns {string}
*/
function stripDanglingComma(pseudoJson) {
let insideString = false;
let offset = 0;
let result = '';
let danglingCommaPos = null;
for (let i = 0; i < pseudoJson.length; i++) {
const currentCharacter = pseudoJson[i];
if (currentCharacter === '"') {
const escaped = isEscaped(pseudoJson, i);
if (!escaped) {
insideString = !insideString;
}
}
if (insideString) {
danglingCommaPos = null;
continue;
}
if (currentCharacter === ',') {
danglingCommaPos = i;
continue;
}
if (danglingCommaPos) {
if (currentCharacter === '}' || currentCharacter === ']') {
result += pseudoJson.slice(offset, danglingCommaPos) + ' ';
offset = danglingCommaPos + 1;
danglingCommaPos = null;
} else if (!currentCharacter.match(/\s/)) {
danglingCommaPos = null;
}
}
}
return result + pseudoJson.substring(offset);
}
// start strip-json-comments
/**
*
* @param {string} jsonString
* @param {number} quotePosition
* @returns {boolean}
*/
function isEscaped(jsonString, quotePosition) {
let index = quotePosition - 1;
let backslashCount = 0;
while (jsonString[index] === '\\') {
index -= 1;
backslashCount += 1;
}
return Boolean(backslashCount % 2);
}
/**
*
* @param {string} string
* @param {number?} start
* @param {number?} end
*/
function strip(string, start, end) {
return string.slice(start, end).replace(/\S/g, ' ');
}
const singleComment = Symbol('singleComment');
const multiComment = Symbol('multiComment');
/**
* @param {string} jsonString
* @returns {string}
*/
function stripJsonComments(jsonString) {
let isInsideString = false;
/** @type {false | symbol} */
let isInsideComment = false;
let offset = 0;
let result = '';
for (let index = 0; index < jsonString.length; index++) {
const currentCharacter = jsonString[index];
const nextCharacter = jsonString[index + 1];
if (!isInsideComment && currentCharacter === '"') {
const escaped = isEscaped(jsonString, index);
if (!escaped) {
isInsideString = !isInsideString;
}
}
if (isInsideString) {
continue;
}
if (!isInsideComment && currentCharacter + nextCharacter === '//') {
result += jsonString.slice(offset, index);
offset = index;
isInsideComment = singleComment;
index++;
} else if (isInsideComment === singleComment && currentCharacter + nextCharacter === '\r\n') {
index++;
isInsideComment = false;
result += strip(jsonString, offset, index);
offset = index;
} else if (isInsideComment === singleComment && currentCharacter === '\n') {
isInsideComment = false;
result += strip(jsonString, offset, index);
offset = index;
} else if (!isInsideComment && currentCharacter + nextCharacter === '/*') {
result += jsonString.slice(offset, index);
offset = index;
isInsideComment = multiComment;
index++;
} else if (isInsideComment === multiComment && currentCharacter + nextCharacter === '*/') {
index++;
isInsideComment = false;
result += strip(jsonString, offset, index + 1);
offset = index + 1;
}
}
return result + (isInsideComment ? strip(jsonString.slice(offset)) : jsonString.slice(offset));
}
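// Illustrative sketch (not part of the bundle): comments are not removed but overwritten with
// spaces (see strip() above), so the returned string has the same length as the input and
// character offsets of the remaining JSON stay valid for error reporting.
//
//   JSON.parse(stripJsonComments('{ "a": 1 /* note */ }')); // -> { a: 1 }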
// end strip-json-comments
// start strip-bom
/**
* @param {string} string
* @returns {string}
*/
function stripBom(string) {
// Catches EFBBBF (UTF-8 BOM) because the buffer-to-string
// conversion translates it to FEFF (UTF-16 BOM).
if (string.charCodeAt(0) === 0xfeff) {
return string.slice(1);
}
return string;
}
// end strip-bom
const not_found_result = {
tsconfigFile: null,
tsconfig: {}
};
/**
* parse the closest tsconfig.json file
*
* @param {string} filename - path to a tsconfig .json or a source file or directory (absolute or relative to cwd)
* @param {import('./public.d.ts').TSConfckParseOptions} [options] - options
* @returns {Promise<import('./public.d.ts').TSConfckParseResult>}
* @throws {TSConfckParseError}
*/
async function parse$e(filename, options) {
/** @type {import('./cache.js').TSConfckCache} */
const cache = options?.cache;
if (cache?.hasParseResult(filename)) {
return getParsedDeep(filename, cache, options);
}
const {
resolve,
reject,
/** @type {Promise}*/
promise
} = makePromise();
cache?.setParseResult(filename, promise, true);
try {
let tsconfigFile =
(await resolveTSConfigJson(filename, cache)) || (await find(filename, options));
if (!tsconfigFile) {
resolve(not_found_result);
return promise;
}
let result;
if (filename !== tsconfigFile && cache?.hasParseResult(tsconfigFile)) {
result = await getParsedDeep(tsconfigFile, cache, options);
} else {
result = await parseFile$1(tsconfigFile, cache, filename === tsconfigFile);
await Promise.all([parseExtends(result, cache), parseReferences(result, options)]);
}
result.tsconfig = replaceTokens(result.tsconfig, path$n.dirname(tsconfigFile));
resolve(resolveSolutionTSConfig(filename, result));
} catch (e) {
reject(e);
}
return promise;
}
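// Illustrative sketch (not part of the bundle): typical usage of the parser above, mirroring what
// loadTsconfigJsonForFile() further down in this chunk does.
//
//   const cache = new TSConfckCache();
//   const { tsconfigFile, tsconfig } = await parse$e('/project/src/main.ts', {
//     cache,
//     ignoreNodeModules: true,
//   });
//   // tsconfigFile -> '/project/tsconfig.json' (or null when nothing was found)
//   // tsconfig     -> merged config with "extends" flattened and ${configDir} replaced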
/**
* ensure extends and references are parsed
*
* @param {string} filename - cached file
* @param {import('./cache.js').TSConfckCache} cache - cache
* @param {import('./public.d.ts').TSConfckParseOptions} options - options
*/
async function getParsedDeep(filename, cache, options) {
const result = await cache.getParseResult(filename);
if (
(result.tsconfig.extends && !result.extended) ||
(result.tsconfig.references && !result.referenced)
) {
const promise = Promise.all([
parseExtends(result, cache),
parseReferences(result, options)
]).then(() => result);
cache.setParseResult(filename, promise, true);
return promise;
}
return result;
}
/**
*
* @param {string} tsconfigFile - path to tsconfig file
* @param {import('./cache.js').TSConfckCache} [cache] - cache
* @param {boolean} [skipCache] - skip cache
* @returns {Promise<import('./public.d.ts').TSConfckParseResult>}
*/
async function parseFile$1(tsconfigFile, cache, skipCache) {
if (
!skipCache &&
cache?.hasParseResult(tsconfigFile) &&
!cache.getParseResult(tsconfigFile)._isRootFile_
) {
return cache.getParseResult(tsconfigFile);
}
const promise = promises$1
.readFile(tsconfigFile, 'utf-8')
.then(toJson)
.then((json) => {
const parsed = JSON.parse(json);
applyDefaults(parsed, tsconfigFile);
return {
tsconfigFile,
tsconfig: normalizeTSConfig(parsed, path$n.dirname(tsconfigFile))
};
})
.catch((e) => {
throw new TSConfckParseError(
`parsing ${tsconfigFile} failed: ${e}`,
'PARSE_FILE',
tsconfigFile,
e
);
});
if (
!skipCache &&
(!cache?.hasParseResult(tsconfigFile) || !cache.getParseResult(tsconfigFile)._isRootFile_)
) {
cache?.setParseResult(tsconfigFile, promise);
}
return promise;
}
/**
* normalize to match the output of ts.parseJsonConfigFileContent
*
* @param {any} tsconfig - typescript tsconfig output
* @param {string} dir - directory
*/
function normalizeTSConfig(tsconfig, dir) {
// set baseUrl to absolute path
const baseUrl = tsconfig.compilerOptions?.baseUrl;
if (baseUrl && !baseUrl.startsWith('${') && !path$n.isAbsolute(baseUrl)) {
tsconfig.compilerOptions.baseUrl = resolve2posix(dir, baseUrl);
}
return tsconfig;
}
/**
*
* @param {import('./public.d.ts').TSConfckParseResult} result
* @param {import('./public.d.ts').TSConfckParseOptions} [options]
* @returns {Promise<void>}
*/
async function parseReferences(result, options) {
if (!result.tsconfig.references) {
return;
}
const referencedFiles = resolveReferencedTSConfigFiles(result, options);
const referenced = await Promise.all(
referencedFiles.map((file) => parseFile$1(file, options?.cache))
);
await Promise.all(referenced.map((ref) => parseExtends(ref, options?.cache)));
referenced.forEach((ref) => {
ref.solution = result;
});
result.referenced = referenced;
}
/**
* @param {import('./public.d.ts').TSConfckParseResult} result
* @param {import('./cache.js').TSConfckCache} [cache]
* @returns {Promise<void>}
*/
async function parseExtends(result, cache) {
if (!result.tsconfig.extends) {
return;
}
// use result as first element in extended
// but dereference tsconfig so that mergeExtended can modify the original without affecting extended[0]
/** @type {import('./public.d.ts').TSConfckParseResult[]} */
const extended = [
{ tsconfigFile: result.tsconfigFile, tsconfig: JSON.parse(JSON.stringify(result.tsconfig)) }
];
// flatten extends graph into extended
let pos = 0;
/** @type {string[]} */
const extendsPath = [];
let currentBranchDepth = 0;
while (pos < extended.length) {
const extending = extended[pos];
extendsPath.push(extending.tsconfigFile);
if (extending.tsconfig.extends) {
// keep following this branch
currentBranchDepth += 1;
/** @type {string[]} */
let resolvedExtends;
if (!Array.isArray(extending.tsconfig.extends)) {
resolvedExtends = [resolveExtends(extending.tsconfig.extends, extending.tsconfigFile)];
} else {
// reverse because typescript 5.0 treats ['a','b','c'] as c extends b extends a
resolvedExtends = extending.tsconfig.extends
.reverse()
.map((ex) => resolveExtends(ex, extending.tsconfigFile));
}
const circularExtends = resolvedExtends.find((tsconfigFile) =>
extendsPath.includes(tsconfigFile)
);
if (circularExtends) {
const circle = extendsPath.concat([circularExtends]).join(' -> ');
throw new TSConfckParseError(
`Circular dependency in "extends": ${circle}`,
'EXTENDS_CIRCULAR',
result.tsconfigFile
);
}
// add new extends to the list directly after current
extended.splice(
pos + 1,
0,
...(await Promise.all(resolvedExtends.map((file) => parseFile$1(file, cache))))
);
} else {
// reached a leaf, backtrack to the last branching point and continue
extendsPath.splice(-currentBranchDepth);
currentBranchDepth = 0;
}
pos = pos + 1;
}
result.extended = extended;
// skip first as it is the original config
for (const ext of result.extended.slice(1)) {
extendTSConfig(result, ext);
}
}
/**
*
* @param {string} extended
* @param {string} from
* @returns {string}
*/
function resolveExtends(extended, from) {
if (extended === '..') {
// see #149
extended = '../tsconfig.json';
}
const req = createRequire$2(from);
let error;
try {
return req.resolve(extended);
} catch (e) {
error = e;
}
if (extended[0] !== '.' && !path$n.isAbsolute(extended)) {
try {
return req.resolve(`${extended}/tsconfig.json`);
} catch (e) {
error = e;
}
}
throw new TSConfckParseError(
`failed to resolve "extends":"${extended}" in ${from}`,
'EXTENDS_RESOLVE',
from,
error
);
}
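// Illustrative sketch (not part of the bundle): "extends" values are resolved with Node's module
// resolution relative to the extending file, so relative paths and installed packages both work
// (hypothetical layout):
//
//   resolveExtends('./tsconfig.base.json', '/project/tsconfig.json');
//   // -> '/project/tsconfig.base.json' (throws TSConfckParseError if the file does not exist)
//   resolveExtends('@tsconfig/node18', '/project/tsconfig.json');
//   // -> the tsconfig.json shipped by that package, if it is installed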
// references, extends and custom keys are not carried over
const EXTENDABLE_KEYS = [
'compilerOptions',
'files',
'include',
'exclude',
'watchOptions',
'compileOnSave',
'typeAcquisition',
'buildOptions'
];
/**
*
* @param {import('./public.d.ts').TSConfckParseResult} extending
* @param {import('./public.d.ts').TSConfckParseResult} extended
* @returns void
*/
function extendTSConfig(extending, extended) {
const extendingConfig = extending.tsconfig;
const extendedConfig = extended.tsconfig;
const relativePath = native2posix(
path$n.relative(path$n.dirname(extending.tsconfigFile), path$n.dirname(extended.tsconfigFile))
);
for (const key of Object.keys(extendedConfig).filter((key) => EXTENDABLE_KEYS.includes(key))) {
if (key === 'compilerOptions') {
if (!extendingConfig.compilerOptions) {
extendingConfig.compilerOptions = {};
}
for (const option of Object.keys(extendedConfig.compilerOptions)) {
if (Object.prototype.hasOwnProperty.call(extendingConfig.compilerOptions, option)) {
continue; // already set
}
extendingConfig.compilerOptions[option] = rebaseRelative(
option,
extendedConfig.compilerOptions[option],
relativePath
);
}
} else if (extendingConfig[key] === undefined) {
if (key === 'watchOptions') {
extendingConfig.watchOptions = {};
for (const option of Object.keys(extendedConfig.watchOptions)) {
extendingConfig.watchOptions[option] = rebaseRelative(
option,
extendedConfig.watchOptions[option],
relativePath
);
}
} else {
extendingConfig[key] = rebaseRelative(key, extendedConfig[key], relativePath);
}
}
}
}
const REBASE_KEYS = [
// root
'files',
'include',
'exclude',
// compilerOptions
'baseUrl',
'rootDir',
'rootDirs',
'typeRoots',
'outDir',
'outFile',
'declarationDir',
// watchOptions
'excludeDirectories',
'excludeFiles'
];
/** @typedef {string | string[]} PathValue */
/**
*
* @param {string} key
* @param {PathValue} value
* @param {string} prependPath
* @returns {PathValue}
*/
function rebaseRelative(key, value, prependPath) {
if (!REBASE_KEYS.includes(key)) {
return value;
}
if (Array.isArray(value)) {
return value.map((x) => rebasePath(x, prependPath));
} else {
return rebasePath(value, prependPath);
}
}
/**
*
* @param {string} value
* @param {string} prependPath
* @returns {string}
*/
function rebasePath(value, prependPath) {
if (path$n.isAbsolute(value) || value.startsWith('${configDir}')) {
return value;
} else {
// relative paths use posix syntax in tsconfig
return path$n.posix.normalize(path$n.posix.join(prependPath, value));
}
}
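// Illustrative sketch (not part of the bundle): when a config is inherited from a different
// directory, path-like fields are re-based onto the extending config's directory.
//
//   // extending /project/app/tsconfig.json from /project/tsconfig.base.json => relativePath '..'
//   rebaseRelative('outDir', 'dist', '..');        // -> '../dist'
//   rebaseRelative('include', ['src/**/*'], '..'); // -> ['../src/**/*']
//   rebaseRelative('strict', true, '..');          // -> true (not a path key, passed through)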
class TSConfckParseError extends Error {
/**
* error code
* @type {string}
*/
code;
/**
* error cause
* @type { Error | undefined}
*/
cause;
/**
* absolute path of tsconfig file where the error happened
* @type {string}
*/
tsconfigFile;
/**
*
* @param {string} message - error message
* @param {string} code - error code
* @param {string} tsconfigFile - path to tsconfig file
* @param {Error?} cause - cause of this error
*/
constructor(message, code, tsconfigFile, cause) {
super(message);
// Set the prototype explicitly.
Object.setPrototypeOf(this, TSConfckParseError.prototype);
this.name = TSConfckParseError.name;
this.code = code;
this.cause = cause;
this.tsconfigFile = tsconfigFile;
}
}
/**
*
* @param {any} tsconfig
* @param {string} tsconfigFile
*/
function applyDefaults(tsconfig, tsconfigFile) {
if (isJSConfig(tsconfigFile)) {
tsconfig.compilerOptions = {
...DEFAULT_JSCONFIG_COMPILER_OPTIONS,
...tsconfig.compilerOptions
};
}
}
const DEFAULT_JSCONFIG_COMPILER_OPTIONS = {
allowJs: true,
maxNodeModuleJsDepth: 2,
allowSyntheticDefaultImports: true,
skipLibCheck: true,
noEmit: true
};
/**
* @param {string} configFileName
*/
function isJSConfig(configFileName) {
return path$n.basename(configFileName) === 'jsconfig.json';
}
/** @template T */
class TSConfckCache {
/**
* clear cache, use this if you have a long running process and tsconfig files have been added, changed or deleted
*/
clear() {
this.#configPaths.clear();
this.#parsed.clear();
}
/**
* has cached closest config for files in dir
* @param {string} dir
* @param {string} [configName=tsconfig.json]
* @returns {boolean}
*/
hasConfigPath(dir, configName = 'tsconfig.json') {
return this.#configPaths.has(`${dir}/${configName}`);
}
/**
* get cached closest tsconfig for files in dir
* @param {string} dir
* @param {string} [configName=tsconfig.json]
* @returns {Promise<string|null>|string|null}
* @throws {unknown} if cached value is an error
*/
getConfigPath(dir, configName = 'tsconfig.json') {
const key = `${dir}/${configName}`;
const value = this.#configPaths.get(key);
if (value == null || value.length || value.then) {
return value;
} else {
throw value;
}
}
/**
* has parsed tsconfig for file
* @param {string} file
* @returns {boolean}
*/
hasParseResult(file) {
return this.#parsed.has(file);
}
/**
* get parsed tsconfig for file
* @param {string} file
* @returns {Promise<T>|T}
* @throws {unknown} if cached value is an error
*/
getParseResult(file) {
const value = this.#parsed.get(file);
if (value.then || value.tsconfig) {
return value;
} else {
throw value; // cached error, rethrow
}
}
/**
* @internal
* @private
* @param {string} file
* @param {Promise<T>} result
* @param {boolean} [isRootFile] flag marking a file that invoked the parse() api directly, used to distinguish it from entries that were only cached by parseFile()
*/
setParseResult(file, result, isRootFile = false) {
// _isRootFile_ is a temporary property on the Promise result, used to prevent a deadlock with the cache
Object.defineProperty(result, '_isRootFile_', {
value: isRootFile,
writable: false,
enumerable: false,
configurable: false
});
this.#parsed.set(file, result);
result
.then((parsed) => {
if (this.#parsed.get(file) === result) {
this.#parsed.set(file, parsed);
}
})
.catch((e) => {
if (this.#parsed.get(file) === result) {
this.#parsed.set(file, e);
}
});
}
/**
* @internal
* @private
* @param {string} dir
* @param {Promise<string|null>} configPath
* @param {string} [configName=tsconfig.json]
*/
setConfigPath(dir, configPath, configName = 'tsconfig.json') {
const key = `${dir}/${configName}`;
this.#configPaths.set(key, configPath);
configPath
.then((path) => {
if (this.#configPaths.get(key) === configPath) {
this.#configPaths.set(key, path);
}
})
.catch((e) => {
if (this.#configPaths.get(key) === configPath) {
this.#configPaths.set(key, e);
}
});
}
/**
* map directories to their closest tsconfig.json
* @internal
* @private
* @type {Map<string,(Promise<string|null>|string|null)>}
*/
#configPaths = new Map();
/**
* map files to their parsed tsconfig result
* @internal
* @private
* @type {Map<string,(Promise<T>|T)>}
*/
#parsed = new Map();
}
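// Illustrative sketch (not part of the bundle): the cache stores the pending promise first and
// swaps it for the settled value (or the error) afterwards, so concurrent lookups for files in
// the same directory share one tsconfig.json search.
//
//   const cache = new TSConfckCache();
//   await Promise.all([
//     parse$e('/project/src/a.ts', { cache }),
//     parse$e('/project/src/b.ts', { cache }), // reuses the cached config path for /project/src
//   ]);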
const debug$h = createDebugger("vite:esbuild");
const IIFE_BEGIN_RE = /(?:const|var)\s+\S+\s*=\s*function\([^()]*\)\s*\{\s*"use strict";/;
const validExtensionRE = /\.\w+$/;
const jsxExtensionsRE = /\.(?:j|t)sx\b/;
const defaultEsbuildSupported = {
"dynamic-import": true,
"import-meta": true
};
let server;
async function transformWithEsbuild(code, filename, options, inMap) {
let loader = options?.loader;
if (!loader) {
const ext = path$n.extname(validExtensionRE.test(filename) ? filename : cleanUrl(filename)).slice(1);
if (ext === "cjs" || ext === "mjs") {
loader = "js";
} else if (ext === "cts" || ext === "mts") {
loader = "ts";
} else {
loader = ext;
}
}
let tsconfigRaw = options?.tsconfigRaw;
if (typeof tsconfigRaw !== "string") {
const meaningfulFields = [
"alwaysStrict",
"experimentalDecorators",
"importsNotUsedAsValues",
"jsx",
"jsxFactory",
"jsxFragmentFactory",
"jsxImportSource",
"preserveValueImports",
"target",
"useDefineForClassFields",
"verbatimModuleSyntax"
];
const compilerOptionsForFile = {};
if (loader === "ts" || loader === "tsx") {
const loadedTsconfig = await loadTsconfigJsonForFile(filename);
const loadedCompilerOptions = loadedTsconfig.compilerOptions ?? {};
for (const field of meaningfulFields) {
if (field in loadedCompilerOptions) {
compilerOptionsForFile[field] = loadedCompilerOptions[field];
}
}
}
const compilerOptions = {
...compilerOptionsForFile,
...tsconfigRaw?.compilerOptions
};
if (compilerOptions.useDefineForClassFields === void 0 && compilerOptions.target === void 0) {
compilerOptions.useDefineForClassFields = false;
}
if (options) {
options.jsx && (compilerOptions.jsx = void 0);
options.jsxFactory && (compilerOptions.jsxFactory = void 0);
options.jsxFragment && (compilerOptions.jsxFragmentFactory = void 0);
options.jsxImportSource && (compilerOptions.jsxImportSource = void 0);
}
tsconfigRaw = {
...tsconfigRaw,
compilerOptions
};
}
const resolvedOptions = {
sourcemap: true,
// ensure source file name contains full query
sourcefile: filename,
...options,
loader,
tsconfigRaw
};
delete resolvedOptions.include;
delete resolvedOptions.exclude;
delete resolvedOptions.jsxInject;
try {
const result = await transform$1(code, resolvedOptions);
let map;
if (inMap && resolvedOptions.sourcemap) {
const nextMap = JSON.parse(result.map);
nextMap.sourcesContent = [];
map = combineSourcemaps(filename, [
nextMap,
inMap
]);
} else {
map = resolvedOptions.sourcemap && resolvedOptions.sourcemap !== "inline" ? JSON.parse(result.map) : { mappings: "" };
}
return {
...result,
map
};
} catch (e) {
debug$h?.(`esbuild error with options used: `, resolvedOptions);
if (e.errors) {
e.frame = "";
e.errors.forEach((m) => {
if (m.text === "Experimental decorators are not currently enabled" || m.text === "Parameter decorators only work when experimental decorators are enabled") {
m.text += '. Vite 5 now uses esbuild 0.18 and you need to enable them by adding "experimentalDecorators": true in your "tsconfig.json" file.';
}
e.frame += `
` + prettifyMessage(m, code);
});
e.loc = e.errors[0].location;
}
throw e;
}
}
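// Illustrative sketch (not part of the bundle): transformWithEsbuild is also part of Vite's public
// API; the loader is inferred from the file extension and, for TS sources, relevant
// compilerOptions are read from the nearest tsconfig.json.
//
//   const result = await transformWithEsbuild('const n: number = 1', 'example.ts');
//   // result.code is roughly 'const n = 1;\n' (types stripped), result.map is the source map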
function esbuildPlugin(config) {
const options = config.esbuild;
const { jsxInject, include, exclude, ...esbuildTransformOptions } = options;
const filter = createFilter(include || /\.(m?ts|[jt]sx)$/, exclude || /\.js$/);
const transformOptions = {
target: "esnext",
charset: "utf8",
...esbuildTransformOptions,
minify: false,
minifyIdentifiers: false,
minifySyntax: false,
minifyWhitespace: false,
treeShaking: false,
// keepNames is not needed when minify is disabled.
// Also transforming multiple times with keepNames enabled breaks
// tree-shaking. (#9164)
keepNames: false,
supported: {
...defaultEsbuildSupported,
...esbuildTransformOptions.supported
}
};
return {
name: "vite:esbuild",
configureServer(_server) {
server = _server;
server.watcher.on("add", reloadOnTsconfigChange).on("change", reloadOnTsconfigChange).on("unlink", reloadOnTsconfigChange);
},
buildEnd() {
server = null;
},
async transform(code, id) {
if (filter(id) || filter(cleanUrl(id))) {
const result = await transformWithEsbuild(code, id, transformOptions);
if (result.warnings.length) {
result.warnings.forEach((m) => {
this.warn(prettifyMessage(m, code));
});
}
if (jsxInject && jsxExtensionsRE.test(id)) {
result.code = jsxInject + ";" + result.code;
}
return {
code: result.code,
map: result.map
};
}
}
};
}
const rollupToEsbuildFormatMap = {
es: "esm",
cjs: "cjs",
// passing `var Lib = (() => {})()` to esbuild with format = "iife"
// will turn it into `(() => { var Lib = (() => {})() })()`,
// so we remove the format config to tell esbuild not to do this
//
// although esbuild doesn't change the format, there is still a possibility
// that `{ treeShaking: true }` removes a top-level no-side-effect variable
// like `var Lib = 1`, which becomes `` after esbuild transforms it,
// but thankfully rollup does not do this optimization now
iife: void 0
};
const buildEsbuildPlugin = (config) => {
return {
name: "vite:esbuild-transpile",
async renderChunk(code, chunk, opts) {
if (opts.__vite_skip_esbuild__) {
return null;
}
const options = resolveEsbuildTranspileOptions(config, opts.format);
if (!options) {
return null;
}
const res = await transformWithEsbuild(code, chunk.fileName, options);
if (config.build.lib) {
const esbuildCode = res.code;
const contentIndex = opts.format === "iife" ? Math.max(esbuildCode.search(IIFE_BEGIN_RE), 0) : opts.format === "umd" ? esbuildCode.indexOf(`(function(`) : 0;
if (contentIndex > 0) {
const esbuildHelpers = esbuildCode.slice(0, contentIndex);
res.code = esbuildCode.slice(contentIndex).replace(`"use strict";`, `"use strict";` + esbuildHelpers);
}
}
return res;
}
};
};
function resolveEsbuildTranspileOptions(config, format) {
const target = config.build.target;
const minify = config.build.minify === "esbuild";
if ((!target || target === "esnext") && !minify) {
return null;
}
const isEsLibBuild = config.build.lib && format === "es";
const esbuildOptions = config.esbuild || {};
const options = {
charset: "utf8",
...esbuildOptions,
loader: "js",
target: target || void 0,
format: rollupToEsbuildFormatMap[format],
supported: {
...defaultEsbuildSupported,
...esbuildOptions.supported
}
};
if (!minify) {
return {
...options,
minify: false,
minifyIdentifiers: false,
minifySyntax: false,
minifyWhitespace: false,
treeShaking: false
};
}
if (options.minifyIdentifiers != null || options.minifySyntax != null || options.minifyWhitespace != null) {
if (isEsLibBuild) {
return {
...options,
minify: false,
minifyIdentifiers: options.minifyIdentifiers ?? true,
minifySyntax: options.minifySyntax ?? true,
minifyWhitespace: false,
treeShaking: true
};
} else {
return {
...options,
minify: false,
minifyIdentifiers: options.minifyIdentifiers ?? true,
minifySyntax: options.minifySyntax ?? true,
minifyWhitespace: options.minifyWhitespace ?? true,
treeShaking: true
};
}
}
if (isEsLibBuild) {
return {
...options,
minify: false,
minifyIdentifiers: true,
minifySyntax: true,
minifyWhitespace: false,
treeShaking: true
};
} else {
return {
...options,
minify: true,
treeShaking: true
};
}
}
function prettifyMessage(m, code) {
let res = colors$1.yellow(m.text);
if (m.location) {
res += `
` + generateCodeFrame(code, m.location);
}
return res + `
`;
}
let tsconfckCache;
async function loadTsconfigJsonForFile(filename) {
try {
if (!tsconfckCache) {
tsconfckCache = new TSConfckCache();
}
const result = await parse$e(filename, {
cache: tsconfckCache,
ignoreNodeModules: true
});
if (server && result.tsconfigFile) {
ensureWatchedFile(server.watcher, result.tsconfigFile, server.config.root);
}
return result.tsconfig;
} catch (e) {
if (e instanceof TSConfckParseError) {
if (server && e.tsconfigFile) {
ensureWatchedFile(server.watcher, e.tsconfigFile, server.config.root);
}
}
throw e;
}
}
async function reloadOnTsconfigChange(changedFile) {
if (!server) return;
if (path$n.basename(changedFile) === "tsconfig.json" || changedFile.endsWith(".json") && tsconfckCache?.hasParseResult(changedFile)) {
server.config.logger.info(
`changed tsconfig file detected: ${changedFile} - Clearing cache and forcing full-reload to ensure TypeScript is compiled with updated config values.`,
{ clear: server.config.clearScreen, timestamp: true }
);
server.moduleGraph.invalidateAll();
tsconfckCache?.clear();
if (server) {
server.hot.send({
type: "full-reload",
path: "*"
});
}
}
}
// src/realWorker.ts
var Worker = class {
/** @internal */
_code;
/** @internal */
_parentFunctions;
/** @internal */
_max;
/** @internal */
_pool;
/** @internal */
_idlePool;
/** @internal */
_queue;
constructor(fn, options = {}) {
this._code = genWorkerCode(fn, options.parentFunctions ?? {});
this._parentFunctions = options.parentFunctions ?? {};
const defaultMax = Math.max(
1,
// os.availableParallelism is available from Node.js 18.14.0
(os$5.availableParallelism?.() ?? os$5.cpus().length) - 1
);
this._max = options.max || defaultMax;
this._pool = [];
this._idlePool = [];
this._queue = [];
}
async run(...args) {
const worker = await this._getAvailableWorker();
return new Promise((resolve, reject) => {
worker.currentResolve = resolve;
worker.currentReject = reject;
worker.postMessage({ type: "run", args });
});
}
stop() {
this._pool.forEach((w) => w.unref());
this._queue.forEach(
([, reject]) => reject(
new Error("Main worker pool stopped before a worker was available.")
)
);
this._pool = [];
this._idlePool = [];
this._queue = [];
}
/** @internal */
async _getAvailableWorker() {
if (this._idlePool.length) {
return this._idlePool.shift();
}
if (this._pool.length < this._max) {
const worker = new Worker$1(this._code, { eval: true });
worker.on("message", async (args) => {
if (args.type === "run") {
if ("result" in args) {
worker.currentResolve && worker.currentResolve(args.result);
worker.currentResolve = null;
} else {
if (args.error instanceof ReferenceError) {
args.error.message += ". Maybe you forgot to pass the function to parentFunction?";
}
worker.currentReject && worker.currentReject(args.error);
worker.currentReject = null;
}
this._assignDoneWorker(worker);
} else if (args.type === "parentFunction") {
try {
const result = await this._parentFunctions[args.name](...args.args);
worker.postMessage({ type: "parentFunction", id: args.id, result });
} catch (e) {
worker.postMessage({
type: "parentFunction",
id: args.id,
error: e
});
}
}
});
worker.on("error", (err) => {
worker.currentReject && worker.currentReject(err);
worker.currentReject = null;
});
worker.on("exit", (code) => {
const i = this._pool.indexOf(worker);
if (i > -1)
this._pool.splice(i, 1);
if (code !== 0 && worker.currentReject) {
worker.currentReject(
new Error(`Worker stopped with non-0 exit code ${code}`)
);
worker.currentReject = null;
}
});
this._pool.push(worker);
return worker;
}
let resolve;
let reject;
const onWorkerAvailablePromise = new Promise((r, rj) => {
resolve = r;
reject = rj;
});
this._queue.push([resolve, reject]);
return onWorkerAvailablePromise;
}
/** @internal */
_assignDoneWorker(worker) {
if (this._queue.length) {
const [resolve] = this._queue.shift();
resolve(worker);
return;
}
this._idlePool.push(worker);
}
};
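// Illustrative sketch (not part of the bundle): the pool takes a factory returning the actual work
// function; the factory is stringified and evaluated inside each worker thread (see genWorkerCode
// below), so it must be self-contained apart from the declared parentFunctions.
//
//   const pool = new Worker(() => async (a, b) => a + b, { max: 2 });
//   await pool.run(1, 2); // -> 3, computed in a worker thread
//   pool.stop();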
function genWorkerCode(fn, parentFunctions) {
const createParentFunctionCaller = (parentPort) => {
let id = 0;
const resolvers = /* @__PURE__ */ new Map();
const call = (key) => async (...args) => {
id++;
let resolve, reject;
const promise = new Promise((res, rej) => {
resolve = res;
reject = rej;
});
resolvers.set(id, { resolve, reject });
parentPort.postMessage({ type: "parentFunction", id, name: key, args });
return await promise;
};
const receive = (id2, args) => {
if (resolvers.has(id2)) {
const { resolve, reject } = resolvers.get(id2);
resolvers.delete(id2);
if ("result" in args) {
resolve(args.result);
} else {
reject(args.error);
}
}
};
return { call, receive };
};
return `
const { parentPort } = require('worker_threads')
const parentFunctionCaller = (${createParentFunctionCaller.toString()})(parentPort)
const doWork = (() => {
${Object.keys(parentFunctions).map(
(key) => `const ${key} = parentFunctionCaller.call(${JSON.stringify(key)});`
).join("\n")}
return (${fn.toString()})()
})()
parentPort.on('message', async (args) => {
if (args.type === 'run') {
try {
const res = await doWork(...args.args)
parentPort.postMessage({ type: 'run', result: res })
} catch (e) {
parentPort.postMessage({ type: 'run', error: e })
}
} else if (args.type === 'parentFunction') {
parentFunctionCaller.receive(args.id, args)
}
})
`;
}
var FakeWorker = class {
/** @internal */
_fn;
constructor(fn, options = {}) {
const argsAndCode = genFakeWorkerArgsAndCode(
fn,
options.parentFunctions ?? {}
);
const require2 = createRequire$1(import.meta.url);
this._fn = new Function(...argsAndCode)(require2, options.parentFunctions);
}
async run(...args) {
try {
return await this._fn(...args);
} catch (err) {
if (err instanceof ReferenceError) {
err.message += ". Maybe you forgot to pass the function to parentFunction?";
}
throw err;
}
}
stop() {
}
};
function genFakeWorkerArgsAndCode(fn, parentFunctions) {
return [
"require",
"parentFunctions",
`
${Object.keys(parentFunctions).map((key) => `const ${key} = parentFunctions[${JSON.stringify(key)}];`).join("\n")}
return (${fn.toString()})()
`
];
}
// src/workerWithFallback.ts
var WorkerWithFallback = class {
/** @internal */
_disableReal;
/** @internal */
_realWorker;
/** @internal */
_fakeWorker;
/** @internal */
_shouldUseFake;
constructor(fn, options) {
this._disableReal = options.max !== void 0 && options.max <= 0;
this._realWorker = new Worker(fn, options);
this._fakeWorker = new FakeWorker(fn, options);
this._shouldUseFake = options.shouldUseFake;
}
async run(...args) {
const useFake = this._disableReal || this._shouldUseFake(...args);
return this[useFake ? "_fakeWorker" : "_realWorker"].run(...args);
}
stop() {
this._realWorker.stop();
this._fakeWorker.stop();
}
};
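// Illustrative sketch (not part of the bundle): WorkerWithFallback decides per call whether to use
// the thread pool or the in-process FakeWorker.
//
//   const w = new WorkerWithFallback(() => async (s) => s.length, {
//     max: 4,
//     shouldUseFake: (s) => s.length < 1000, // hypothetical heuristic: tiny inputs run inline
//   });
//   await w.run('abc'); // -> 3, handled by the FakeWorker because shouldUseFake returned true
//   w.stop();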
let terserPath;
const loadTerserPath = (root) => {
if (terserPath) return terserPath;
try {
terserPath = requireResolveFromRootWithFallback(root, "terser");
} catch (e) {
if (e.code === "MODULE_NOT_FOUND") {
throw new Error(
"terser not found. Since Vite v3, terser has become an optional dependency. You need to install it."
);
} else {
const message = new Error(`terser failed to load:
${e.message}`);
message.stack = e.stack + "\n" + message.stack;
throw message;
}
}
return terserPath;
};
function terserPlugin(config) {
const { maxWorkers, ...terserOptions } = config.build.terserOptions;
const makeWorker = () => new Worker(
() => async (terserPath2, code, options) => {
const terser = require(terserPath2);
return terser.minify(code, options);
},
{
max: maxWorkers
}
);
let worker;
return {
name: "vite:terser",
async renderChunk(code, _chunk, outputOptions) {
if (config.build.minify !== "terser" && // @ts-expect-error injected by @vitejs/plugin-legacy
!outputOptions.__vite_force_terser__) {
return null;
}
if (config.build.lib && outputOptions.format === "es") {
return null;
}
worker ||= makeWorker();
const terserPath2 = loadTerserPath(config.root);
const res = await worker.run(terserPath2, code, {
safari10: true,
...terserOptions,
sourceMap: !!outputOptions.sourcemap,
module: outputOptions.format.startsWith("es"),
toplevel: outputOptions.format === "cjs"
});
return {
code: res.code,
map: res.map
};
},
closeBundle() {
worker?.stop();
}
};
}
const mimes = {
"3g2": "video/3gpp2",
"3gp": "video/3gpp",
"3gpp": "video/3gpp",
"3mf": "model/3mf",
"aac": "audio/aac",
"ac": "application/pkix-attr-cert",
"adp": "audio/adpcm",
"adts": "audio/aac",
"ai": "application/postscript",
"aml": "application/automationml-aml+xml",
"amlx": "application/automationml-amlx+zip",
"amr": "audio/amr",
"apng": "image/apng",
"appcache": "text/cache-manifest",
"appinstaller": "application/appinstaller",
"appx": "application/appx",
"appxbundle": "application/appxbundle",
"asc": "application/pgp-keys",
"atom": "application/atom+xml",
"atomcat": "application/atomcat+xml",
"atomdeleted": "application/atomdeleted+xml",
"atomsvc": "application/atomsvc+xml",
"au": "audio/basic",
"avci": "image/avci",
"avcs": "image/avcs",
"avif": "image/avif",
"aw": "application/applixware",
"bdoc": "application/bdoc",
"bin": "application/octet-stream",
"bmp": "image/bmp",
"bpk": "application/octet-stream",
"btf": "image/prs.btif",
"btif": "image/prs.btif",
"buffer": "application/octet-stream",
"ccxml": "application/ccxml+xml",
"cdfx": "application/cdfx+xml",
"cdmia": "application/cdmi-capability",
"cdmic": "application/cdmi-container",
"cdmid": "application/cdmi-domain",
"cdmio": "application/cdmi-object",
"cdmiq": "application/cdmi-queue",
"cer": "application/pkix-cert",
"cgm": "image/cgm",
"cjs": "application/node",
"class": "application/java-vm",
"coffee": "text/coffeescript",
"conf": "text/plain",
"cpl": "application/cpl+xml",
"cpt": "application/mac-compactpro",
"crl": "application/pkix-crl",
"css": "text/css",
"csv": "text/csv",
"cu": "application/cu-seeme",
"cwl": "application/cwl",
"cww": "application/prs.cww",
"davmount": "application/davmount+xml",
"dbk": "application/docbook+xml",
"deb": "application/octet-stream",
"def": "text/plain",
"deploy": "application/octet-stream",
"dib": "image/bmp",
"disposition-notification": "message/disposition-notification",
"dist": "application/octet-stream",
"distz": "application/octet-stream",
"dll": "application/octet-stream",
"dmg": "application/octet-stream",
"dms": "application/octet-stream",
"doc": "application/msword",
"dot": "application/msword",
"dpx": "image/dpx",
"drle": "image/dicom-rle",
"dsc": "text/prs.lines.tag",
"dssc": "application/dssc+der",
"dtd": "application/xml-dtd",
"dump": "application/octet-stream",
"dwd": "application/atsc-dwd+xml",
"ear": "application/java-archive",
"ecma": "application/ecmascript",
"elc": "application/octet-stream",
"emf": "image/emf",
"eml": "message/rfc822",
"emma": "application/emma+xml",
"emotionml": "application/emotionml+xml",
"eps": "application/postscript",
"epub": "application/epub+zip",
"exe": "application/octet-stream",
"exi": "application/exi",
"exp": "application/express",
"exr": "image/aces",
"ez": "application/andrew-inset",
"fdf": "application/fdf",
"fdt": "application/fdt+xml",
"fits": "image/fits",
"g3": "image/g3fax",
"gbr": "application/rpki-ghostbusters",
"geojson": "application/geo+json",
"gif": "image/gif",
"glb": "model/gltf-binary",
"gltf": "model/gltf+json",
"gml": "application/gml+xml",
"gpx": "application/gpx+xml",
"gram": "application/srgs",
"grxml": "application/srgs+xml",
"gxf": "application/gxf",
"gz": "application/gzip",
"h261": "video/h261",
"h263": "video/h263",
"h264": "video/h264",
"heic": "image/heic",
"heics": "image/heic-sequence",
"heif": "image/heif",
"heifs": "image/heif-sequence",
"hej2": "image/hej2k",
"held": "application/atsc-held+xml",
"hjson": "application/hjson",
"hlp": "application/winhlp",
"hqx": "application/mac-binhex40",
"hsj2": "image/hsj2",
"htm": "text/html",
"html": "text/html",
"ics": "text/calendar",
"ief": "image/ief",
"ifb": "text/calendar",
"iges": "model/iges",
"igs": "model/iges",
"img": "application/octet-stream",
"in": "text/plain",
"ini": "text/plain",
"ink": "application/inkml+xml",
"inkml": "application/inkml+xml",
"ipfix": "application/ipfix",
"iso": "application/octet-stream",
"its": "application/its+xml",
"jade": "text/jade",
"jar": "application/java-archive",
"jhc": "image/jphc",
"jls": "image/jls",
"jp2": "image/jp2",
"jpe": "image/jpeg",
"jpeg": "image/jpeg",
"jpf": "image/jpx",
"jpg": "image/jpeg",
"jpg2": "image/jp2",
"jpgm": "image/jpm",
"jpgv": "video/jpeg",
"jph": "image/jph",
"jpm": "image/jpm",
"jpx": "image/jpx",
"js": "text/javascript",
"json": "application/json",
"json5": "application/json5",
"jsonld": "application/ld+json",
"jsonml": "application/jsonml+json",
"jsx": "text/jsx",
"jt": "model/jt",
"jxr": "image/jxr",
"jxra": "image/jxra",
"jxrs": "image/jxrs",
"jxs": "image/jxs",
"jxsc": "image/jxsc",
"jxsi": "image/jxsi",
"jxss": "image/jxss",
"kar": "audio/midi",
"ktx": "image/ktx",
"ktx2": "image/ktx2",
"less": "text/less",
"lgr": "application/lgr+xml",
"list": "text/plain",
"litcoffee": "text/coffeescript",
"log": "text/plain",
"lostxml": "application/lost+xml",
"lrf": "application/octet-stream",
"m1v": "video/mpeg",
"m21": "application/mp21",
"m2a": "audio/mpeg",
"m2v": "video/mpeg",
"m3a": "audio/mpeg",
"m4a": "audio/mp4",
"m4p": "application/mp4",
"m4s": "video/iso.segment",
"ma": "application/mathematica",
"mads": "application/mads+xml",
"maei": "application/mmt-aei+xml",
"man": "text/troff",
"manifest": "text/cache-manifest",
"map": "application/json",
"mar": "application/octet-stream",
"markdown": "text/markdown",
"mathml": "application/mathml+xml",
"mb": "application/mathematica",
"mbox": "application/mbox",
"md": "text/markdown",
"mdx": "text/mdx",
"me": "text/troff",
"mesh": "model/mesh",
"meta4": "application/metalink4+xml",
"metalink": "application/metalink+xml",
"mets": "application/mets+xml",
"mft": "application/rpki-manifest",
"mid": "audio/midi",
"midi": "audio/midi",
"mime": "message/rfc822",
"mj2": "video/mj2",
"mjp2": "video/mj2",
"mjs": "text/javascript",
"mml": "text/mathml",
"mods": "application/mods+xml",
"mov": "video/quicktime",
"mp2": "audio/mpeg",
"mp21": "application/mp21",
"mp2a": "audio/mpeg",
"mp3": "audio/mpeg",
"mp4": "video/mp4",
"mp4a": "audio/mp4",
"mp4s": "application/mp4",
"mp4v": "video/mp4",
"mpd": "application/dash+xml",
"mpe": "video/mpeg",
"mpeg": "video/mpeg",
"mpf": "application/media-policy-dataset+xml",
"mpg": "video/mpeg",
"mpg4": "video/mp4",
"mpga": "audio/mpeg",
"mpp": "application/dash-patch+xml",
"mrc": "application/marc",
"mrcx": "application/marcxml+xml",
"ms": "text/troff",
"mscml": "application/mediaservercontrol+xml",
"msh": "model/mesh",
"msi": "application/octet-stream",
"msix": "application/msix",
"msixbundle": "application/msixbundle",
"msm": "application/octet-stream",
"msp": "application/octet-stream",
"mtl": "model/mtl",
"musd": "application/mmt-usd+xml",
"mxf": "application/mxf",
"mxmf": "audio/mobile-xmf",
"mxml": "application/xv+xml",
"n3": "text/n3",
"nb": "application/mathematica",
"nq": "application/n-quads",
"nt": "application/n-triples",
"obj": "model/obj",
"oda": "application/oda",
"oga": "audio/ogg",
"ogg": "audio/ogg",
"ogv": "video/ogg",
"ogx": "application/ogg",
"omdoc": "application/omdoc+xml",
"onepkg": "application/onenote",
"onetmp": "application/onenote",
"onetoc": "application/onenote",
"onetoc2": "application/onenote",
"opf": "application/oebps-package+xml",
"opus": "audio/ogg",
"otf": "font/otf",
"owl": "application/rdf+xml",
"oxps": "application/oxps",
"p10": "application/pkcs10",
"p7c": "application/pkcs7-mime",
"p7m": "application/pkcs7-mime",
"p7s": "application/pkcs7-signature",
"p8": "application/pkcs8",
"pdf": "application/pdf",
"pfr": "application/font-tdpfr",
"pgp": "application/pgp-encrypted",
"pkg": "application/octet-stream",
"pki": "application/pkixcmp",
"pkipath": "application/pkix-pkipath",
"pls": "application/pls+xml",
"png": "image/png",
"prc": "model/prc",
"prf": "application/pics-rules",
"provx": "application/provenance+xml",
"ps": "application/postscript",
"pskcxml": "application/pskc+xml",
"pti": "image/prs.pti",
"qt": "video/quicktime",
"raml": "application/raml+yaml",
"rapd": "application/route-apd+xml",
"rdf": "application/rdf+xml",
"relo": "application/p2p-overlay+xml",
"rif": "application/reginfo+xml",
"rl": "application/resource-lists+xml",
"rld": "application/resource-lists-diff+xml",
"rmi": "audio/midi",
"rnc": "application/relax-ng-compact-syntax",
"rng": "application/xml",
"roa": "application/rpki-roa",
"roff": "text/troff",
"rq": "application/sparql-query",
"rs": "application/rls-services+xml",
"rsat": "application/atsc-rsat+xml",
"rsd": "application/rsd+xml",
"rsheet": "application/urc-ressheet+xml",
"rss": "application/rss+xml",
"rtf": "text/rtf",
"rtx": "text/richtext",
"rusd": "application/route-usd+xml",
"s3m": "audio/s3m",
"sbml": "application/sbml+xml",
"scq": "application/scvp-cv-request",
"scs": "application/scvp-cv-response",
"sdp": "application/sdp",
"senmlx": "application/senml+xml",
"sensmlx": "application/sensml+xml",
"ser": "application/java-serialized-object",
"setpay": "application/set-payment-initiation",
"setreg": "application/set-registration-initiation",
"sgi": "image/sgi",
"sgm": "text/sgml",
"sgml": "text/sgml",
"shex": "text/shex",
"shf": "application/shf+xml",
"shtml": "text/html",
"sieve": "application/sieve",
"sig": "application/pgp-signature",
"sil": "audio/silk",
"silo": "model/mesh",
"siv": "application/sieve",
"slim": "text/slim",
"slm": "text/slim",
"sls": "application/route-s-tsid+xml",
"smi": "application/smil+xml",
"smil": "application/smil+xml",
"snd": "audio/basic",
"so": "application/octet-stream",
"spdx": "text/spdx",
"spp": "application/scvp-vp-response",
"spq": "application/scvp-vp-request",
"spx": "audio/ogg",
"sql": "application/sql",
"sru": "application/sru+xml",
"srx": "application/sparql-results+xml",
"ssdl": "application/ssdl+xml",
"ssml": "application/ssml+xml",
"stk": "application/hyperstudio",
"stl": "model/stl",
"stpx": "model/step+xml",
"stpxz": "model/step-xml+zip",
"stpz": "model/step+zip",
"styl": "text/stylus",
"stylus": "text/stylus",
"svg": "image/svg+xml",
"svgz": "image/svg+xml",
"swidtag": "application/swid+xml",
"t": "text/troff",
"t38": "image/t38",
"td": "application/urc-targetdesc+xml",
"tei": "application/tei+xml",
"teicorpus": "application/tei+xml",
"text": "text/plain",
"tfi": "application/thraud+xml",
"tfx": "image/tiff-fx",
"tif": "image/tiff",
"tiff": "image/tiff",
"toml": "application/toml",
"tr": "text/troff",
"trig": "application/trig",
"ts": "video/mp2t",
"tsd": "application/timestamped-data",
"tsv": "text/tab-separated-values",
"ttc": "font/collection",
"ttf": "font/ttf",
"ttl": "text/turtle",
"ttml": "application/ttml+xml",
"txt": "text/plain",
"u3d": "model/u3d",
"u8dsn": "message/global-delivery-status",
"u8hdr": "message/global-headers",
"u8mdn": "message/global-disposition-notification",
"u8msg": "message/global",
"ubj": "application/ubjson",
"uri": "text/uri-list",
"uris": "text/uri-list",
"urls": "text/uri-list",
"vcard": "text/vcard",
"vrml": "model/vrml",
"vtt": "text/vtt",
"vxml": "application/voicexml+xml",
"war": "application/java-archive",
"wasm": "application/wasm",
"wav": "audio/wav",
"weba": "audio/webm",
"webm": "video/webm",
"webmanifest": "application/manifest+json",
"webp": "image/webp",
"wgsl": "text/wgsl",
"wgt": "application/widget",
"wif": "application/watcherinfo+xml",
"wmf": "image/wmf",
"woff": "font/woff",
"woff2": "font/woff2",
"wrl": "model/vrml",
"wsdl": "application/wsdl+xml",
"wspolicy": "application/wspolicy+xml",
"x3d": "model/x3d+xml",
"x3db": "model/x3d+fastinfoset",
"x3dbz": "model/x3d+binary",
"x3dv": "model/x3d-vrml",
"x3dvz": "model/x3d+vrml",
"x3dz": "model/x3d+xml",
"xaml": "application/xaml+xml",
"xav": "application/xcap-att+xml",
"xca": "application/xcap-caps+xml",
"xcs": "application/calendar+xml",
"xdf": "application/xcap-diff+xml",
"xdssc": "application/dssc+xml",
"xel": "application/xcap-el+xml",
"xenc": "application/xenc+xml",
"xer": "application/patch-ops-error+xml",
"xfdf": "application/xfdf",
"xht": "application/xhtml+xml",
"xhtml": "application/xhtml+xml",
"xhvml": "application/xv+xml",
"xlf": "application/xliff+xml",
"xm": "audio/xm",
"xml": "text/xml",
"xns": "application/xcap-ns+xml",
"xop": "application/xop+xml",
"xpl": "application/xproc+xml",
"xsd": "application/xml",
"xsf": "application/prs.xsf+xml",
"xsl": "application/xml",
"xslt": "application/xml",
"xspf": "application/xspf+xml",
"xvm": "application/xv+xml",
"xvml": "application/xv+xml",
"yaml": "text/yaml",
"yang": "application/yang",
"yin": "application/yin+xml",
"yml": "text/yaml",
"zip": "application/zip"
};
function lookup(extn) {
let tmp = ('' + extn).trim().toLowerCase();
let idx = tmp.lastIndexOf('.');
return mimes[!~idx ? tmp : tmp.substring(++idx)];
}
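// Illustrative usage, not part of the original bundle: `lookup` resolves either a full
// file name or a bare extension against the `mimes` table above, so lookup("photo.PNG")
// and lookup("png") both return "image/png", while an unknown extension returns undefined.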
const publicFilesMap = /* @__PURE__ */ new WeakMap();
async function initPublicFiles(config) {
let fileNames;
try {
fileNames = await recursiveReaddir(config.publicDir);
} catch (e) {
if (e.code === ERR_SYMLINK_IN_RECURSIVE_READDIR) {
return;
}
throw e;
}
const publicFiles = new Set(
fileNames.map((fileName) => fileName.slice(config.publicDir.length))
);
publicFilesMap.set(config, publicFiles);
return publicFiles;
}
function getPublicFiles(config) {
return publicFilesMap.get(config);
}
function checkPublicFile(url, config) {
const { publicDir } = config;
if (!publicDir || url[0] !== "/") {
return;
}
const fileName = cleanUrl(url);
const publicFiles = getPublicFiles(config);
if (publicFiles) {
return publicFiles.has(fileName) ? normalizePath$3(path$n.join(publicDir, fileName)) : void 0;
}
const publicFile = normalizePath$3(path$n.join(publicDir, fileName));
if (!publicFile.startsWith(withTrailingSlash(publicDir))) {
return;
}
return fs__default.existsSync(publicFile) ? publicFile : void 0;
}
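// Note on the check above (illustrative, assuming publicDir = "/proj/public"): only root-relative
// URLs are considered; a request such as "/favicon.svg?import" is stripped of its query by cleanUrl
// and then either matched against the pre-scanned public file set or, as a fallback, resolved on
// disk, where the startsWith guard keeps "/../secret"-style URLs from escaping publicDir.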
const assetUrlRE = /__VITE_ASSET__([\w$]+)__(?:\$_(.*?)__)?/g;
const jsSourceMapRE = /\.[cm]?js\.map$/;
const assetCache = /* @__PURE__ */ new WeakMap();
const generatedAssets = /* @__PURE__ */ new WeakMap();
function registerCustomMime() {
mimes["ico"] = "image/x-icon";
mimes["flac"] = "audio/flac";
mimes["eot"] = "application/vnd.ms-fontobject";
}
function renderAssetUrlInJS(ctx, config, chunk, opts, code) {
const toRelativeRuntime = createToImportMetaURLBasedRelativeRuntime(
opts.format,
config.isWorker
);
let match;
let s;
assetUrlRE.lastIndex = 0;
while (match = assetUrlRE.exec(code)) {
s ||= new MagicString(code);
const [full, referenceId, postfix = ""] = match;
const file = ctx.getFileName(referenceId);
chunk.viteMetadata.importedAssets.add(cleanUrl(file));
const filename = file + postfix;
const replacement = toOutputFilePathInJS(
filename,
"asset",
chunk.fileName,
"js",
config,
toRelativeRuntime
);
const replacementString = typeof replacement === "string" ? JSON.stringify(encodeURIPath(replacement)).slice(1, -1) : `"+${replacement.runtime}+"`;
s.update(match.index, match.index + full.length, replacementString);
}
const publicAssetUrlMap = publicAssetUrlCache.get(config);
publicAssetUrlRE.lastIndex = 0;
while (match = publicAssetUrlRE.exec(code)) {
s ||= new MagicString(code);
const [full, hash] = match;
const publicUrl = publicAssetUrlMap.get(hash).slice(1);
const replacement = toOutputFilePathInJS(
publicUrl,
"public",
chunk.fileName,
"js",
config,
toRelativeRuntime
);
const replacementString = typeof replacement === "string" ? JSON.stringify(encodeURIPath(replacement)).slice(1, -1) : `"+${replacement.runtime}+"`;
s.update(match.index, match.index + full.length, replacementString);
}
return s;
}
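// Explanatory note: the two loops above rewrite the build-time placeholders
// __VITE_ASSET__<referenceId>__ (optionally followed by $_<postfix>__) and
// __VITE_PUBLIC_ASSET__<hash>__ that earlier hooks embedded in JS chunks, replacing each
// with either the final output path as a string literal or, when the URL must be computed
// relative to import.meta.url, a spliced-in runtime expression of the form "+expr+".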
function assetPlugin(config) {
registerCustomMime();
let moduleGraph;
return {
name: "vite:asset",
buildStart() {
assetCache.set(config, /* @__PURE__ */ new Map());
generatedAssets.set(config, /* @__PURE__ */ new Map());
},
configureServer(server) {
moduleGraph = server.moduleGraph;
},
resolveId(id) {
if (!config.assetsInclude(cleanUrl(id)) && !urlRE.test(id)) {
return;
}
const publicFile = checkPublicFile(id, config);
if (publicFile) {
return id;
}
},
async load(id) {
if (id[0] === "\0") {
return;
}
if (rawRE.test(id)) {
const file = checkPublicFile(id, config) || cleanUrl(id);
this.addWatchFile(file);
return `export default ${JSON.stringify(
await fsp.readFile(file, "utf-8")
)}`;
}
if (!urlRE.test(id) && !config.assetsInclude(cleanUrl(id))) {
return;
}
id = removeUrlQuery(id);
let url = await fileToUrl$1(id, config, this);
if (moduleGraph) {
const mod = moduleGraph.getModuleById(id);
if (mod && mod.lastHMRTimestamp > 0) {
url = injectQuery(url, `t=${mod.lastHMRTimestamp}`);
}
}
return {
code: `export default ${JSON.stringify(encodeURIPath(url))}`,
// Force rollup to keep this module from being shared between other entry points if it's an entrypoint.
// If the resulting chunk is empty, it will be removed in generateBundle.
moduleSideEffects: config.command === "build" && this.getModuleInfo(id)?.isEntry ? "no-treeshake" : false,
meta: config.command === "build" ? { "vite:asset": true } : void 0
};
},
renderChunk(code, chunk, opts) {
const s = renderAssetUrlInJS(this, config, chunk, opts, code);
if (s) {
return {
code: s.toString(),
map: config.build.sourcemap ? s.generateMap({ hires: "boundary" }) : null
};
} else {
return null;
}
},
generateBundle(_, bundle) {
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === "chunk" && chunk.isEntry && chunk.moduleIds.length === 1 && config.assetsInclude(chunk.moduleIds[0]) && this.getModuleInfo(chunk.moduleIds[0])?.meta["vite:asset"]) {
delete bundle[file];
}
}
if (config.command === "build" && config.build.ssr && !config.build.ssrEmitAssets) {
for (const file in bundle) {
if (bundle[file].type === "asset" && !file.endsWith("ssr-manifest.json") && !jsSourceMapRE.test(file)) {
delete bundle[file];
}
}
}
}
};
}
async function fileToUrl$1(id, config, ctx) {
if (config.command === "serve") {
return fileToDevUrl(id, config);
} else {
return fileToBuiltUrl(id, config, ctx);
}
}
function fileToDevUrl(id, config, skipBase = false) {
let rtn;
if (checkPublicFile(id, config)) {
rtn = id;
} else if (id.startsWith(withTrailingSlash(config.root))) {
rtn = "/" + path$n.posix.relative(config.root, id);
} else {
rtn = path$n.posix.join(FS_PREFIX, id);
}
if (skipBase) {
return rtn;
}
const base = joinUrlSegments(config.server?.origin ?? "", config.decodedBase);
return joinUrlSegments(base, removeLeadingSlash(rtn));
}
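// Illustrative mapping (assuming root = "/proj" and base = "/"): a file inside the root such as
// "/proj/src/logo.svg" becomes "/src/logo.svg", while a file outside the root is routed through
// the FS_PREFIX escape hatch ("/@fs/" in Vite's constants), e.g. "/@fs/tmp/logo.svg"; the server
// origin and base are then prepended unless skipBase is set.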
function getPublicAssetFilename(hash, config) {
return publicAssetUrlCache.get(config)?.get(hash);
}
const publicAssetUrlCache = /* @__PURE__ */ new WeakMap();
const publicAssetUrlRE = /__VITE_PUBLIC_ASSET__([a-z\d]{8})__/g;
function publicFileToBuiltUrl(url, config) {
if (config.command !== "build") {
return joinUrlSegments(config.decodedBase, url);
}
const hash = getHash(url);
let cache = publicAssetUrlCache.get(config);
if (!cache) {
cache = /* @__PURE__ */ new Map();
publicAssetUrlCache.set(config, cache);
}
if (!cache.get(hash)) {
cache.set(hash, url);
}
return `__VITE_PUBLIC_ASSET__${hash}__`;
}
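// Note: in dev the public URL is returned directly with the base prepended, but during build the
// file is not processed by Rollup, so the function hands back an opaque placeholder such as
// __VITE_PUBLIC_ASSET__<hash>__ (eight characters matching [a-z\d]{8}, derived from the URL string);
// renderAssetUrlInJS later swaps the placeholder for the configured base plus the original URL.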
const GIT_LFS_PREFIX = Buffer$1.from("version https://git-lfs.github.com");
function isGitLfsPlaceholder(content) {
if (content.length < GIT_LFS_PREFIX.length) return false;
return GIT_LFS_PREFIX.compare(content, 0, GIT_LFS_PREFIX.length) === 0;
}
async function fileToBuiltUrl(id, config, pluginContext, skipPublicCheck = false, forceInline) {
if (!skipPublicCheck && checkPublicFile(id, config)) {
return publicFileToBuiltUrl(id, config);
}
const cache = assetCache.get(config);
const cached = cache.get(id);
if (cached) {
return cached;
}
const file = cleanUrl(id);
const content = await fsp.readFile(file);
let url;
if (shouldInline(config, file, id, content, pluginContext, forceInline)) {
if (config.build.lib && isGitLfsPlaceholder(content)) {
config.logger.warn(
colors$1.yellow(`Inlined file ${id} was not downloaded via Git LFS`)
);
}
if (file.endsWith(".svg")) {
url = svgToDataURL(content);
} else {
const mimeType = lookup(file) ?? "application/octet-stream";
url = `data:${mimeType};base64,${content.toString("base64")}`;
}
} else {
const { search, hash } = parse$h(id);
const postfix = (search || "") + (hash || "");
const originalFileName = normalizePath$3(path$n.relative(config.root, file));
const referenceId = pluginContext.emitFile({
type: "asset",
// Ignore directory structure for asset file names
name: path$n.basename(file),
originalFileName,
source: content
});
generatedAssets.get(config).set(referenceId, { originalFileName });
url = `__VITE_ASSET__${referenceId}__${postfix ? `$_${postfix}__` : ``}`;
}
cache.set(id, url);
return url;
}
async function urlToBuiltUrl(url, importer, config, pluginContext, forceInline) {
if (checkPublicFile(url, config)) {
return publicFileToBuiltUrl(url, config);
}
const file = url[0] === "/" ? path$n.join(config.root, url) : path$n.join(path$n.dirname(importer), url);
return fileToBuiltUrl(
file,
config,
pluginContext,
// skip public check since we just did it above
true,
forceInline
);
}
const shouldInline = (config, file, id, content, pluginContext, forceInline) => {
if (config.build.lib) return true;
if (pluginContext.getModuleInfo(id)?.isEntry) return false;
if (forceInline !== void 0) return forceInline;
let limit;
if (typeof config.build.assetsInlineLimit === "function") {
const userShouldInline = config.build.assetsInlineLimit(file, content);
if (userShouldInline != null) return userShouldInline;
limit = DEFAULT_ASSETS_INLINE_LIMIT;
} else {
limit = Number(config.build.assetsInlineLimit);
}
if (file.endsWith(".html")) return false;
if (file.endsWith(".svg") && id.includes("#")) return false;
return content.length < limit && !isGitLfsPlaceholder(content);
};
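// Summary of the inlining rules above (illustrative): library builds always inline, entry modules
// never inline, an explicit forceInline flag wins next, and otherwise an asset is inlined when its
// byte length is below build.assetsInlineLimit (DEFAULT_ASSETS_INLINE_LIMIT, 4096 bytes in Vite),
// except .html files, .svg files referenced with a "#" fragment, and Git LFS placeholders, which
// are always emitted as separate files.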
const nestedQuotesRE = /"[^"']*'[^"]*"|'[^'"]*"[^']*'/;
function svgToDataURL(content) {
const stringContent = content.toString();
if (stringContent.includes("<text") || stringContent.includes("<foreignObject") || nestedQuotesRE.test(stringContent)) {
return `data:image/svg+xml;base64,${content.toString("base64")}`;
} else {
return "data:image/svg+xml," + stringContent.trim().replaceAll(/>\s+</g, "><").replaceAll('"', "'").replaceAll("%", "%25").replaceAll("#", "%23").replaceAll("<", "%3c").replaceAll(">", "%3e").replaceAll(/\s+/g, "%20");
}
}
const endsWithJSRE = /\.[cm]?js$/;
function manifestPlugin(config) {
const manifest = {};
let outputCount;
return {
name: "vite:manifest",
buildStart() {
outputCount = 0;
},
generateBundle({ format }, bundle) {
function getChunkName(chunk) {
return getChunkOriginalFileName(chunk, config.root, format);
}
function getInternalImports(imports) {
const filteredImports = [];
for (const file of imports) {
if (bundle[file] === void 0) {
continue;
}
filteredImports.push(getChunkName(bundle[file]));
}
return filteredImports;
}
function createChunk(chunk) {
const manifestChunk = {
file: chunk.fileName,
name: chunk.name
};
if (chunk.facadeModuleId) {
manifestChunk.src = getChunkName(chunk);
}
if (chunk.isEntry) {
manifestChunk.isEntry = true;
}
if (chunk.isDynamicEntry) {
manifestChunk.isDynamicEntry = true;
}
if (chunk.imports.length) {
const internalImports = getInternalImports(chunk.imports);
if (internalImports.length > 0) {
manifestChunk.imports = internalImports;
}
}
if (chunk.dynamicImports.length) {
const internalImports = getInternalImports(chunk.dynamicImports);
if (internalImports.length > 0) {
manifestChunk.dynamicImports = internalImports;
}
}
if (chunk.viteMetadata?.importedCss.size) {
manifestChunk.css = [...chunk.viteMetadata.importedCss];
}
if (chunk.viteMetadata?.importedAssets.size) {
manifestChunk.assets = [...chunk.viteMetadata.importedAssets];
}
return manifestChunk;
}
function createAsset(asset, src, isEntry) {
const manifestChunk = {
file: asset.fileName,
src
};
if (isEntry) manifestChunk.isEntry = true;
return manifestChunk;
}
const assets = generatedAssets.get(config);
const entryCssAssetFileNames = /* @__PURE__ */ new Set();
for (const [id, asset] of assets.entries()) {
if (asset.isEntry) {
try {
const fileName = this.getFileName(id);
entryCssAssetFileNames.add(fileName);
} catch (error) {
assets.delete(id);
}
}
}
const fileNameToAsset = /* @__PURE__ */ new Map();
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === "chunk") {
manifest[getChunkName(chunk)] = createChunk(chunk);
} else if (chunk.type === "asset" && typeof chunk.name === "string") {
const src = chunk.originalFileName ?? chunk.name;
const isEntry = entryCssAssetFileNames.has(chunk.fileName);
const asset = createAsset(chunk, src, isEntry);
const file2 = manifest[src]?.file;
if (file2 && endsWithJSRE.test(file2)) continue;
manifest[src] = asset;
fileNameToAsset.set(chunk.fileName, asset);
}
}
for (const [referenceId, { originalFileName }] of assets.entries()) {
if (!manifest[originalFileName]) {
const fileName = this.getFileName(referenceId);
const asset = fileNameToAsset.get(fileName);
if (asset) {
manifest[originalFileName] = asset;
}
}
}
outputCount++;
const output = config.build.rollupOptions?.output;
const outputLength = Array.isArray(output) ? output.length : 1;
if (outputCount >= outputLength) {
this.emitFile({
fileName: typeof config.build.manifest === "string" ? config.build.manifest : ".vite/manifest.json",
type: "asset",
source: JSON.stringify(sortObjectKeys(manifest), void 0, 2)
});
}
}
};
}
function getChunkOriginalFileName(chunk, root, format) {
if (chunk.facadeModuleId) {
let name = normalizePath$3(path$n.relative(root, chunk.facadeModuleId));
if (format === "system" && !chunk.name.includes("-legacy")) {
const ext = path$n.extname(name);
const endPos = ext.length !== 0 ? -ext.length : void 0;
name = name.slice(0, endPos) + `-legacy` + ext;
}
return name.replace(/\0/g, "");
} else {
return `_` + path$n.basename(chunk.fileName);
}
}
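// Illustrative manifest keys (assuming root = "/proj"): a chunk whose facade module is
// "/proj/src/main.ts" is keyed as "src/main.ts" (with "-legacy" spliced in before the extension
// for SystemJS outputs that are not already marked legacy), while a facade-less chunk falls back
// to an underscore plus its output basename, e.g. "_chunk-[hash].js".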
const dataUriRE = /^([^/]+\/[^;,]+)(;base64)?,([\s\S]*)$/;
const base64RE = /base64/i;
const dataUriPrefix = `\0/@data-uri/`;
function dataURIPlugin() {
let resolved;
return {
name: "vite:data-uri",
buildStart() {
resolved = /* @__PURE__ */ new Map();
},
resolveId(id) {
if (!id.trimStart().startsWith("data:")) {
return;
}
const uri = new URL$3(id);
if (uri.protocol !== "data:") {
return;
}
const match = dataUriRE.exec(uri.pathname);
if (!match) {
return;
}
const [, mime, format, data] = match;
if (mime !== "text/javascript") {
throw new Error(
`data URI with non-JavaScript mime type is not supported. If you're using legacy JavaScript MIME types (such as 'application/javascript'), please use 'text/javascript' instead.`
);
}
const base64 = format && base64RE.test(format.substring(1));
const content = base64 ? Buffer.from(data, "base64").toString("utf-8") : data;
resolved.set(id, content);
return dataUriPrefix + id;
},
load(id) {
if (id.startsWith(dataUriPrefix)) {
return resolved.get(id.slice(dataUriPrefix.length));
}
}
};
}
/* es-module-lexer 1.5.4 */
var ImportType;!function(A){A[A.Static=1]="Static",A[A.Dynamic=2]="Dynamic",A[A.ImportMeta=3]="ImportMeta",A[A.StaticSourcePhase=4]="StaticSourcePhase",A[A.DynamicSourcePhase=5]="DynamicSourcePhase";}(ImportType||(ImportType={}));const A=1===new Uint8Array(new Uint16Array([1]).buffer)[0];function parse$d(E,g="@"){if(!C)return init.then((()=>parse$d(E)));const I=E.length+1,w=(C.__heap_base.value||C.__heap_base)+4*I-C.memory.buffer.byteLength;w>0&&C.memory.grow(Math.ceil(w/65536));const K=C.sa(I-1);if((A?B:Q)(E,new Uint16Array(C.memory.buffer,K,I)),!C.parse())throw Object.assign(new Error(`Parse error ${g}:${E.slice(0,C.e()).split("\n").length}:${C.e()-E.lastIndexOf("\n",C.e()-1)}`),{idx:C.e()});const D=[],o=[];for(;C.ri();){const A=C.is(),Q=C.ie(),B=C.it(),g=C.ai(),I=C.id(),w=C.ss(),K=C.se();let o;C.ip()&&(o=k(E.slice(-1===I?A-1:A,-1===I?Q+1:Q))),D.push({n:o,t:B,s:A,e:Q,ss:w,se:K,d:I,a:g});}for(;C.re();){const A=C.es(),Q=C.ee(),B=C.els(),g=C.ele(),I=E.slice(A,Q),w=I[0],K=B<0?void 0:E.slice(B,g),D=K?K[0]:"";o.push({s:A,e:Q,ls:B,le:g,n:'"'===w||"'"===w?k(I):I,ln:'"'===D||"'"===D?k(K):K});}function k(A){try{return (0, eval)(A)}catch(A){}}return [D,o,!!C.f(),!!C.ms()]}function Q(A,Q){const B=A.length;let C=0;for(;C>>8;}}function B(A,Q){const B=A.length;let C=0;for(;CA.charCodeAt(0))))).then(WebAssembly.instantiate).then((({exports:A})=>{C=A;}));var E;
var convertSourceMap$1 = {};
(function (exports) {
Object.defineProperty(exports, 'commentRegex', {
get: function getCommentRegex () {
// Groups: 1: media type, 2: MIME type, 3: charset, 4: encoding, 5: data.
return /^\s*?\/[\/\*][@#]\s+?sourceMappingURL=data:(((?:application|text)\/json)(?:;charset=([^;,]+?)?)?)?(?:;(base64))?,(.*?)$/mg;
}
});
Object.defineProperty(exports, 'mapFileCommentRegex', {
get: function getMapFileCommentRegex () {
// Matches sourceMappingURL in either // or /* comment styles.
return /(?:\/\/[@#][ \t]+?sourceMappingURL=([^\s'"`]+?)[ \t]*?$)|(?:\/\*[@#][ \t]+sourceMappingURL=([^*]+?)[ \t]*?(?:\*\/){1}[ \t]*?$)/mg;
}
});
var decodeBase64;
if (typeof Buffer !== 'undefined') {
if (typeof Buffer.from === 'function') {
decodeBase64 = decodeBase64WithBufferFrom;
} else {
decodeBase64 = decodeBase64WithNewBuffer;
}
} else {
decodeBase64 = decodeBase64WithAtob;
}
function decodeBase64WithBufferFrom(base64) {
return Buffer.from(base64, 'base64').toString();
}
function decodeBase64WithNewBuffer(base64) {
if (typeof base64 === 'number') {
throw new TypeError('The value to decode must not be of type number.');
}
return new Buffer(base64, 'base64').toString();
}
function decodeBase64WithAtob(base64) {
return decodeURIComponent(escape(atob(base64)));
}
function stripComment(sm) {
return sm.split(',').pop();
}
function readFromFileMap(sm, read) {
var r = exports.mapFileCommentRegex.exec(sm);
// for some odd reason //# .. captures in 1 and /* .. */ in 2
var filename = r[1] || r[2];
try {
var sm = read(filename);
if (sm != null && typeof sm.catch === 'function') {
return sm.catch(throwError);
} else {
return sm;
}
} catch (e) {
throwError(e);
}
function throwError(e) {
throw new Error('An error occurred while trying to read the map file at ' + filename + '\n' + e.stack);
}
}
function Converter (sm, opts) {
opts = opts || {};
if (opts.hasComment) {
sm = stripComment(sm);
}
if (opts.encoding === 'base64') {
sm = decodeBase64(sm);
} else if (opts.encoding === 'uri') {
sm = decodeURIComponent(sm);
}
if (opts.isJSON || opts.encoding) {
sm = JSON.parse(sm);
}
this.sourcemap = sm;
}
Converter.prototype.toJSON = function (space) {
return JSON.stringify(this.sourcemap, null, space);
};
if (typeof Buffer !== 'undefined') {
if (typeof Buffer.from === 'function') {
Converter.prototype.toBase64 = encodeBase64WithBufferFrom;
} else {
Converter.prototype.toBase64 = encodeBase64WithNewBuffer;
}
} else {
Converter.prototype.toBase64 = encodeBase64WithBtoa;
}
function encodeBase64WithBufferFrom() {
var json = this.toJSON();
return Buffer.from(json, 'utf8').toString('base64');
}
function encodeBase64WithNewBuffer() {
var json = this.toJSON();
if (typeof json === 'number') {
throw new TypeError('The json to encode must not be of type number.');
}
return new Buffer(json, 'utf8').toString('base64');
}
function encodeBase64WithBtoa() {
var json = this.toJSON();
return btoa(unescape(encodeURIComponent(json)));
}
Converter.prototype.toURI = function () {
var json = this.toJSON();
return encodeURIComponent(json);
};
Converter.prototype.toComment = function (options) {
var encoding, content, data;
if (options != null && options.encoding === 'uri') {
encoding = '';
content = this.toURI();
} else {
encoding = ';base64';
content = this.toBase64();
}
data = 'sourceMappingURL=data:application/json;charset=utf-8' + encoding + ',' + content;
return options != null && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
};
// returns copy instead of original
Converter.prototype.toObject = function () {
return JSON.parse(this.toJSON());
};
Converter.prototype.addProperty = function (key, value) {
if (this.sourcemap.hasOwnProperty(key)) throw new Error('property "' + key + '" already exists on the sourcemap, use set property instead');
return this.setProperty(key, value);
};
Converter.prototype.setProperty = function (key, value) {
this.sourcemap[key] = value;
return this;
};
Converter.prototype.getProperty = function (key) {
return this.sourcemap[key];
};
exports.fromObject = function (obj) {
return new Converter(obj);
};
exports.fromJSON = function (json) {
return new Converter(json, { isJSON: true });
};
exports.fromURI = function (uri) {
return new Converter(uri, { encoding: 'uri' });
};
exports.fromBase64 = function (base64) {
return new Converter(base64, { encoding: 'base64' });
};
exports.fromComment = function (comment) {
var m, encoding;
comment = comment
.replace(/^\/\*/g, '//')
.replace(/\*\/$/g, '');
m = exports.commentRegex.exec(comment);
encoding = m && m[4] || 'uri';
return new Converter(comment, { encoding: encoding, hasComment: true });
};
function makeConverter(sm) {
return new Converter(sm, { isJSON: true });
}
exports.fromMapFileComment = function (comment, read) {
if (typeof read === 'string') {
throw new Error(
'String directory paths are no longer supported with `fromMapFileComment`\n' +
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
)
}
var sm = readFromFileMap(comment, read);
if (sm != null && typeof sm.then === 'function') {
return sm.then(makeConverter);
} else {
return makeConverter(sm);
}
};
// Finds last sourcemap comment in file or returns null if none was found
exports.fromSource = function (content) {
var m = content.match(exports.commentRegex);
return m ? exports.fromComment(m.pop()) : null;
};
// Finds last sourcemap file comment (a sourceMappingURL pointing at a .map file) or returns null if none was found
exports.fromMapFileSource = function (content, read) {
if (typeof read === 'string') {
throw new Error(
'String directory paths are no longer supported with `fromMapFileSource`\n' +
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
)
}
var m = content.match(exports.mapFileCommentRegex);
return m ? exports.fromMapFileComment(m.pop(), read) : null;
};
exports.removeComments = function (src) {
return src.replace(exports.commentRegex, '');
};
exports.removeMapFileComments = function (src) {
return src.replace(exports.mapFileCommentRegex, '');
};
exports.generateMapFileComment = function (file, options) {
var data = 'sourceMappingURL=' + file;
return options && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
};
} (convertSourceMap$1));
var convertSourceMap = /*@__PURE__*/getDefaultExportFromCjs(convertSourceMap$1);
const debug$g = createDebugger("vite:sourcemap", {
onlyWhenFocused: true
});
const virtualSourceRE = /^(?:dep:|browser-external:|virtual:)|\0/;
async function computeSourceRoute(map, file) {
let sourceRoot;
try {
sourceRoot = await fsp.realpath(
path$n.resolve(path$n.dirname(file), map.sourceRoot || "")
);
} catch {
}
return sourceRoot;
}
async function injectSourcesContent(map, file, logger) {
let sourceRootPromise;
const missingSources = [];
const sourcesContent = map.sourcesContent || [];
const sourcesContentPromises = [];
for (let index = 0; index < map.sources.length; index++) {
const sourcePath = map.sources[index];
if (sourcesContent[index] == null && sourcePath && !virtualSourceRE.test(sourcePath)) {
sourcesContentPromises.push(
(async () => {
sourceRootPromise ??= computeSourceRoute(map, file);
const sourceRoot = await sourceRootPromise;
let resolvedSourcePath = cleanUrl(decodeURI(sourcePath));
if (sourceRoot) {
resolvedSourcePath = path$n.resolve(sourceRoot, resolvedSourcePath);
}
sourcesContent[index] = await fsp.readFile(resolvedSourcePath, "utf-8").catch(() => {
missingSources.push(resolvedSourcePath);
return null;
});
})()
);
}
}
await Promise.all(sourcesContentPromises);
map.sourcesContent = sourcesContent;
if (missingSources.length) {
logger.warnOnce(`Sourcemap for "${file}" points to missing source files`);
debug$g?.(`Missing sources:
` + missingSources.join(`
`));
}
}
function genSourceMapUrl(map) {
if (typeof map !== "string") {
map = JSON.stringify(map);
}
return `data:application/json;base64,${Buffer.from(map).toString("base64")}`;
}
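// Example of the inline sourcemap URL produced above: the map object is stringified and
// base64-encoded into "data:application/json;base64,<encoded map>", which getCodeWithSourcemap
// then appends as a "//# sourceMappingURL=" comment for JS or a "/*# sourceMappingURL= */"
// comment for CSS.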
function getCodeWithSourcemap(type, code, map) {
if (debug$g) {
code += `
/*${JSON.stringify(map, null, 2).replace(/\*\//g, "*\\/")}*/
`;
}
if (type === "js") {
code += `
//# sourceMappingURL=${genSourceMapUrl(map)}`;
} else if (type === "css") {
code += `
/*# sourceMappingURL=${genSourceMapUrl(map)} */`;
}
return code;
}
function applySourcemapIgnoreList(map, sourcemapPath, sourcemapIgnoreList, logger) {
let { x_google_ignoreList } = map;
if (x_google_ignoreList === void 0) {
x_google_ignoreList = [];
}
for (let sourcesIndex = 0; sourcesIndex < map.sources.length; ++sourcesIndex) {
const sourcePath = map.sources[sourcesIndex];
if (!sourcePath) continue;
const ignoreList = sourcemapIgnoreList(
path$n.isAbsolute(sourcePath) ? sourcePath : path$n.resolve(path$n.dirname(sourcemapPath), sourcePath),
sourcemapPath
);
if (logger && typeof ignoreList !== "boolean") {
logger.warn("sourcemapIgnoreList function must return a boolean.");
}
if (ignoreList && !x_google_ignoreList.includes(sourcesIndex)) {
x_google_ignoreList.push(sourcesIndex);
}
}
if (x_google_ignoreList.length > 0) {
if (!map.x_google_ignoreList) map.x_google_ignoreList = x_google_ignoreList;
}
}
async function extractSourcemapFromFile(code, filePath) {
const map = (convertSourceMap.fromSource(code) || await convertSourceMap.fromMapFileSource(
code,
createConvertSourceMapReadMap(filePath)
))?.toObject();
if (map) {
return {
code: code.replace(convertSourceMap.mapFileCommentRegex, blankReplacer),
map
};
}
}
function createConvertSourceMapReadMap(originalFileName) {
return (filename) => {
return fsp.readFile(
path$n.resolve(path$n.dirname(originalFileName), filename),
"utf-8"
);
};
}
var tasks = {};
var utils$g = {};
var array$1 = {};
Object.defineProperty(array$1, "__esModule", { value: true });
array$1.splitWhen = array$1.flatten = void 0;
function flatten$1(items) {
return items.reduce((collection, item) => [].concat(collection, item), []);
}
array$1.flatten = flatten$1;
function splitWhen(items, predicate) {
const result = [[]];
let groupIndex = 0;
for (const item of items) {
if (predicate(item)) {
groupIndex++;
result[groupIndex] = [];
}
else {
result[groupIndex].push(item);
}
}
return result;
}
array$1.splitWhen = splitWhen;
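// Illustrative behaviour: splitWhen([1, 2, 0, 3, 0, 4], (n) => n === 0) yields [[1, 2], [3], [4]];
// each element matching the predicate starts a new group and the matching element itself is dropped.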
var errno$1 = {};
Object.defineProperty(errno$1, "__esModule", { value: true });
errno$1.isEnoentCodeError = void 0;
function isEnoentCodeError(error) {
return error.code === 'ENOENT';
}
errno$1.isEnoentCodeError = isEnoentCodeError;
var fs$i = {};
Object.defineProperty(fs$i, "__esModule", { value: true });
fs$i.createDirentFromStats = void 0;
let DirentFromStats$1 = class DirentFromStats {
constructor(name, stats) {
this.name = name;
this.isBlockDevice = stats.isBlockDevice.bind(stats);
this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
this.isDirectory = stats.isDirectory.bind(stats);
this.isFIFO = stats.isFIFO.bind(stats);
this.isFile = stats.isFile.bind(stats);
this.isSocket = stats.isSocket.bind(stats);
this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
}
};
function createDirentFromStats$1(name, stats) {
return new DirentFromStats$1(name, stats);
}
fs$i.createDirentFromStats = createDirentFromStats$1;
var path$i = {};
Object.defineProperty(path$i, "__esModule", { value: true });
path$i.convertPosixPathToPattern = path$i.convertWindowsPathToPattern = path$i.convertPathToPattern = path$i.escapePosixPath = path$i.escapeWindowsPath = path$i.escape = path$i.removeLeadingDotSegment = path$i.makeAbsolute = path$i.unixify = void 0;
const os$4 = require$$2;
const path$h = require$$0$4;
const IS_WINDOWS_PLATFORM = os$4.platform() === 'win32';
const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
/**
* All non-escaped special characters.
* Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters.
* Windows: (){}[], !+@ before (, ! at the beginning.
*/
const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g;
const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g;
/**
* The device path (\\.\ or \\?\).
* https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths
*/
const DOS_DEVICE_PATH_RE = /^\\\\([.?])/;
/**
* All backslashes except those escaping special characters.
* Windows: !()+@{}
* https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions
*/
const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g;
/**
* Designed to work only with simple paths: `dir\\file`.
*/
function unixify(filepath) {
return filepath.replace(/\\/g, '/');
}
path$i.unixify = unixify;
function makeAbsolute(cwd, filepath) {
return path$h.resolve(cwd, filepath);
}
path$i.makeAbsolute = makeAbsolute;
function removeLeadingDotSegment(entry) {
// We do not use `startsWith` because this is 10x slower than current implementation for some cases.
// eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
if (entry.charAt(0) === '.') {
const secondCharacter = entry.charAt(1);
if (secondCharacter === '/' || secondCharacter === '\\') {
return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);
}
}
return entry;
}
path$i.removeLeadingDotSegment = removeLeadingDotSegment;
path$i.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath;
function escapeWindowsPath(pattern) {
return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
}
path$i.escapeWindowsPath = escapeWindowsPath;
function escapePosixPath(pattern) {
return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
}
path$i.escapePosixPath = escapePosixPath;
path$i.convertPathToPattern = IS_WINDOWS_PLATFORM ? convertWindowsPathToPattern : convertPosixPathToPattern;
function convertWindowsPathToPattern(filepath) {
return escapeWindowsPath(filepath)
.replace(DOS_DEVICE_PATH_RE, '//$1')
.replace(WINDOWS_BACKSLASHES_RE, '/');
}
path$i.convertWindowsPathToPattern = convertWindowsPathToPattern;
function convertPosixPathToPattern(filepath) {
return escapePosixPath(filepath);
}
path$i.convertPosixPathToPattern = convertPosixPathToPattern;
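// Illustrative conversions: convertWindowsPathToPattern('C:\\Users\\name\\*.js') escapes the
// Windows-special glob characters, rewrites a leading \\?\ or \\.\ device prefix to //?/ or //./,
// and flips the remaining backslashes, giving 'C:/Users/name/*.js'; convertPosixPathToPattern only
// escapes the POSIX-special characters and leaves separators untouched.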
var pattern$1 = {};
/*!
* is-extglob
*
* Copyright (c) 2014-2016, Jon Schlinkert.
* Licensed under the MIT License.
*/
var isExtglob$1 = function isExtglob(str) {
if (typeof str !== 'string' || str === '') {
return false;
}
var match;
while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) {
if (match[2]) return true;
str = str.slice(match.index + match[0].length);
}
return false;
};
/*!
* is-glob
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
var isExtglob = isExtglob$1;
var chars = { '{': '}', '(': ')', '[': ']'};
var strictCheck = function(str) {
if (str[0] === '!') {
return true;
}
var index = 0;
var pipeIndex = -2;
var closeSquareIndex = -2;
var closeCurlyIndex = -2;
var closeParenIndex = -2;
var backSlashIndex = -2;
while (index < str.length) {
if (str[index] === '*') {
return true;
}
if (str[index + 1] === '?' && /[\].+)]/.test(str[index])) {
return true;
}
if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') {
if (closeSquareIndex < index) {
closeSquareIndex = str.indexOf(']', index);
}
if (closeSquareIndex > index) {
if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) {
return true;
}
backSlashIndex = str.indexOf('\\', index);
if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) {
return true;
}
}
}
if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') {
closeCurlyIndex = str.indexOf('}', index);
if (closeCurlyIndex > index) {
backSlashIndex = str.indexOf('\\', index);
if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) {
return true;
}
}
}
if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') {
closeParenIndex = str.indexOf(')', index);
if (closeParenIndex > index) {
backSlashIndex = str.indexOf('\\', index);
if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) {
return true;
}
}
}
if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') {
if (pipeIndex < index) {
pipeIndex = str.indexOf('|', index);
}
if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') {
closeParenIndex = str.indexOf(')', pipeIndex);
if (closeParenIndex > pipeIndex) {
backSlashIndex = str.indexOf('\\', pipeIndex);
if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) {
return true;
}
}
}
}
if (str[index] === '\\') {
var open = str[index + 1];
index += 2;
var close = chars[open];
if (close) {
var n = str.indexOf(close, index);
if (n !== -1) {
index = n + 1;
}
}
if (str[index] === '!') {
return true;
}
} else {
index++;
}
}
return false;
};
var relaxedCheck = function(str) {
if (str[0] === '!') {
return true;
}
var index = 0;
while (index < str.length) {
if (/[*?{}()[\]]/.test(str[index])) {
return true;
}
if (str[index] === '\\') {
var open = str[index + 1];
index += 2;
var close = chars[open];
if (close) {
var n = str.indexOf(close, index);
if (n !== -1) {
index = n + 1;
}
}
if (str[index] === '!') {
return true;
}
} else {
index++;
}
}
return false;
};
var isGlob$2 = function isGlob(str, options) {
if (typeof str !== 'string' || str === '') {
return false;
}
if (isExtglob(str)) {
return true;
}
var check = strictCheck;
// optionally relax check
if (options && options.strict === false) {
check = relaxedCheck;
}
return check(str);
};
var isGlob$1 = isGlob$2;
var pathPosixDirname = require$$0$4.posix.dirname;
var isWin32 = require$$2.platform() === 'win32';
var slash = '/';
var backslash = /\\/g;
var enclosure = /[\{\[].*[\}\]]$/;
var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/;
var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g;
/**
* @param {string} str
* @param {Object} opts
* @param {boolean} [opts.flipBackslashes=true]
* @returns {string}
*/
var globParent$2 = function globParent(str, opts) {
var options = Object.assign({ flipBackslashes: true }, opts);
// flip windows path separators
if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {
str = str.replace(backslash, slash);
}
// special case for strings ending in enclosure containing path separator
if (enclosure.test(str)) {
str += slash;
}
// preserves full path in case of trailing path separator
str += 'a';
// remove path parts that are globby
do {
str = pathPosixDirname(str);
} while (isGlob$1(str) || globby.test(str));
// remove escape chars and return result
return str.replace(escaped, '$1');
};
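// Illustrative usage of globParent: globParent('src/**/*.js'), globParent('src/{a,b}/file.js') and
// globParent('src/file.js') all return 'src'; the trailing 'a' sentinel keeps a path that already
// ends in a separator from losing its final directory before the globby tail is dirname()'d away.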
var utils$f = {};
(function (exports) {
exports.isInteger = num => {
if (typeof num === 'number') {
return Number.isInteger(num);
}
if (typeof num === 'string' && num.trim() !== '') {
return Number.isInteger(Number(num));
}
return false;
};
/**
* Find a node of the given type
*/
exports.find = (node, type) => node.nodes.find(node => node.type === type);
/**
* Returns true if the expanded range from min to max (by step) would exceed the given limit
*/
exports.exceedsLimit = (min, max, step = 1, limit) => {
if (limit === false) return false;
if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
return ((Number(max) - Number(min)) / Number(step)) >= limit;
};
/**
* Escape the given node with '\\' before node.value
*/
exports.escapeNode = (block, n = 0, type) => {
const node = block.nodes[n];
if (!node) return;
if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
if (node.escaped !== true) {
node.value = '\\' + node.value;
node.escaped = true;
}
}
};
/**
* Returns true if the given brace node should be enclosed in literal braces
*/
exports.encloseBrace = node => {
if (node.type !== 'brace') return false;
if ((node.commas >> 0 + node.ranges >> 0) === 0) {
node.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a brace node is invalid.
*/
exports.isInvalidBrace = block => {
if (block.type !== 'brace') return false;
if (block.invalid === true || block.dollar) return true;
if ((block.commas >> 0 + block.ranges >> 0) === 0) {
block.invalid = true;
return true;
}
if (block.open !== true || block.close !== true) {
block.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a node is an open or close node
*/
exports.isOpenOrClose = node => {
if (node.type === 'open' || node.type === 'close') {
return true;
}
return node.open === true || node.close === true;
};
/**
* Reduce an array of text nodes.
*/
exports.reduce = nodes => nodes.reduce((acc, node) => {
if (node.type === 'text') acc.push(node.value);
if (node.type === 'range') node.type = 'text';
return acc;
}, []);
/**
* Flatten an array
*/
exports.flatten = (...args) => {
const result = [];
const flat = arr => {
for (let i = 0; i < arr.length; i++) {
const ele = arr[i];
if (Array.isArray(ele)) {
flat(ele);
continue;
}
if (ele !== undefined) {
result.push(ele);
}
}
return result;
};
flat(args);
return result;
};
} (utils$f));
const utils$e = utils$f;
var stringify$7 = (ast, options = {}) => {
const stringify = (node, parent = {}) => {
const invalidBlock = options.escapeInvalid && utils$e.isInvalidBrace(parent);
const invalidNode = node.invalid === true && options.escapeInvalid === true;
let output = '';
if (node.value) {
if ((invalidBlock || invalidNode) && utils$e.isOpenOrClose(node)) {
return '\\' + node.value;
}
return node.value;
}
if (node.nodes) {
for (const child of node.nodes) {
output += stringify(child);
}
}
return output;
};
return stringify(ast);
};
/*!
* is-number
*
* Copyright (c) 2014-present, Jon Schlinkert.
* Released under the MIT License.
*/
var isNumber$2 = function(num) {
if (typeof num === 'number') {
return num - num === 0;
}
if (typeof num === 'string' && num.trim() !== '') {
return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);
}
return false;
};
/*!
* to-regex-range
*
* Copyright (c) 2015-present, Jon Schlinkert.
* Released under the MIT License.
*/
const isNumber$1 = isNumber$2;
const toRegexRange$1 = (min, max, options) => {
if (isNumber$1(min) === false) {
throw new TypeError('toRegexRange: expected the first argument to be a number');
}
if (max === void 0 || min === max) {
return String(min);
}
if (isNumber$1(max) === false) {
throw new TypeError('toRegexRange: expected the second argument to be a number.');
}
let opts = { relaxZeros: true, ...options };
if (typeof opts.strictZeros === 'boolean') {
opts.relaxZeros = opts.strictZeros === false;
}
let relax = String(opts.relaxZeros);
let shorthand = String(opts.shorthand);
let capture = String(opts.capture);
let wrap = String(opts.wrap);
let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;
if (toRegexRange$1.cache.hasOwnProperty(cacheKey)) {
return toRegexRange$1.cache[cacheKey].result;
}
let a = Math.min(min, max);
let b = Math.max(min, max);
if (Math.abs(a - b) === 1) {
let result = min + '|' + max;
if (opts.capture) {
return `(${result})`;
}
if (opts.wrap === false) {
return result;
}
return `(?:${result})`;
}
let isPadded = hasPadding(min) || hasPadding(max);
let state = { min, max, a, b };
let positives = [];
let negatives = [];
if (isPadded) {
state.isPadded = isPadded;
state.maxLen = String(state.max).length;
}
if (a < 0) {
let newMin = b < 0 ? Math.abs(b) : 1;
negatives = splitToPatterns(newMin, Math.abs(a), state, opts);
a = state.a = 0;
}
if (b >= 0) {
positives = splitToPatterns(a, b, state, opts);
}
state.negatives = negatives;
state.positives = positives;
state.result = collatePatterns(negatives, positives);
if (opts.capture === true) {
state.result = `(${state.result})`;
} else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {
state.result = `(?:${state.result})`;
}
toRegexRange$1.cache[cacheKey] = state;
return state.result;
};
function collatePatterns(neg, pos, options) {
let onlyNegative = filterPatterns(neg, pos, '-', false) || [];
let onlyPositive = filterPatterns(pos, neg, '', false) || [];
let intersected = filterPatterns(neg, pos, '-?', true) || [];
let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);
return subpatterns.join('|');
}
function splitToRanges(min, max) {
let nines = 1;
let zeros = 1;
let stop = countNines(min, nines);
let stops = new Set([max]);
while (min <= stop && stop <= max) {
stops.add(stop);
nines += 1;
stop = countNines(min, nines);
}
stop = countZeros(max + 1, zeros) - 1;
while (min < stop && stop <= max) {
stops.add(stop);
zeros += 1;
stop = countZeros(max + 1, zeros) - 1;
}
stops = [...stops];
stops.sort(compare);
return stops;
}
/**
* Convert a numeric range to a regex pattern fragment
* @param {Number} `start`
* @param {Number} `stop`
* @param {Object} `options`
* @return {Object} `{ pattern, count, digits }`
*/
function rangeToPattern(start, stop, options) {
if (start === stop) {
return { pattern: start, count: [], digits: 0 };
}
let zipped = zip(start, stop);
let digits = zipped.length;
let pattern = '';
let count = 0;
for (let i = 0; i < digits; i++) {
let [startDigit, stopDigit] = zipped[i];
if (startDigit === stopDigit) {
pattern += startDigit;
} else if (startDigit !== '0' || stopDigit !== '9') {
pattern += toCharacterClass(startDigit, stopDigit);
} else {
count++;
}
}
if (count) {
pattern += options.shorthand === true ? '\\d' : '[0-9]';
}
return { pattern, count: [count], digits };
}
function splitToPatterns(min, max, tok, options) {
let ranges = splitToRanges(min, max);
let tokens = [];
let start = min;
let prev;
for (let i = 0; i < ranges.length; i++) {
let max = ranges[i];
let obj = rangeToPattern(String(start), String(max), options);
let zeros = '';
if (!tok.isPadded && prev && prev.pattern === obj.pattern) {
if (prev.count.length > 1) {
prev.count.pop();
}
prev.count.push(obj.count[0]);
prev.string = prev.pattern + toQuantifier(prev.count);
start = max + 1;
continue;
}
if (tok.isPadded) {
zeros = padZeros(max, tok, options);
}
obj.string = zeros + obj.pattern + toQuantifier(obj.count);
tokens.push(obj);
start = max + 1;
prev = obj;
}
return tokens;
}
function filterPatterns(arr, comparison, prefix, intersection, options) {
let result = [];
for (let ele of arr) {
let { string } = ele;
// push patterns that appear only in this list when no intersection was requested...
if (!intersection && !contains(comparison, 'string', string)) {
result.push(prefix + string);
}
// ...or patterns shared by both lists when an intersection was requested
if (intersection && contains(comparison, 'string', string)) {
result.push(prefix + string);
}
}
return result;
}
/**
* Zip strings
*/
function zip(a, b) {
let arr = [];
for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);
return arr;
}
function compare(a, b) {
return a > b ? 1 : b > a ? -1 : 0;
}
function contains(arr, key, val) {
return arr.some(ele => ele[key] === val);
}
function countNines(min, len) {
return Number(String(min).slice(0, -len) + '9'.repeat(len));
}
function countZeros(integer, zeros) {
return integer - (integer % Math.pow(10, zeros));
}
function toQuantifier(digits) {
let [start = 0, stop = ''] = digits;
if (stop || start > 1) {
return `{${start + (stop ? ',' + stop : '')}}`;
}
return '';
}
function toCharacterClass(a, b, options) {
return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;
}
function hasPadding(str) {
return /^-?(0+)\d/.test(str);
}
function padZeros(value, tok, options) {
if (!tok.isPadded) {
return value;
}
let diff = Math.abs(tok.maxLen - String(value).length);
let relax = options.relaxZeros !== false;
switch (diff) {
case 0:
return '';
case 1:
return relax ? '0?' : '0';
case 2:
return relax ? '0{0,2}' : '00';
default: {
return relax ? `0{0,${diff}}` : `0{${diff}}`;
}
}
}
/**
* Cache
*/
toRegexRange$1.cache = {};
toRegexRange$1.clearCache = () => (toRegexRange$1.cache = {});
/**
* Expose `toRegexRange`
*/
var toRegexRange_1 = toRegexRange$1;
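// Illustrative usage: toRegexRange(5, 10) produces "(?:[5-9]|10)", and passing { capture: true }
// wraps the alternation in a capturing group instead; results are memoised in toRegexRange.cache,
// keyed by "min:max=" plus the stringified option flags.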
/*!
* fill-range
*
* Copyright (c) 2014-present, Jon Schlinkert.
* Licensed under the MIT License.
*/
const util$1 = require$$0$5;
const toRegexRange = toRegexRange_1;
const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
const transform = toNumber => {
return value => toNumber === true ? Number(value) : String(value);
};
const isValidValue = value => {
return typeof value === 'number' || (typeof value === 'string' && value !== '');
};
const isNumber = num => Number.isInteger(+num);
const zeros = input => {
let value = `${input}`;
let index = -1;
if (value[0] === '-') value = value.slice(1);
if (value === '0') return false;
while (value[++index] === '0');
return index > 0;
};
const stringify$6 = (start, end, options) => {
if (typeof start === 'string' || typeof end === 'string') {
return true;
}
return options.stringify === true;
};
const pad = (input, maxLength, toNumber) => {
if (maxLength > 0) {
let dash = input[0] === '-' ? '-' : '';
if (dash) input = input.slice(1);
input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));
}
if (toNumber === false) {
return String(input);
}
return input;
};
const toMaxLen = (input, maxLength) => {
let negative = input[0] === '-' ? '-' : '';
if (negative) {
input = input.slice(1);
maxLength--;
}
while (input.length < maxLength) input = '0' + input;
return negative ? ('-' + input) : input;
};
const toSequence = (parts, options, maxLen) => {
parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
let prefix = options.capture ? '' : '?:';
let positives = '';
let negatives = '';
let result;
if (parts.positives.length) {
positives = parts.positives.map(v => toMaxLen(String(v), maxLen)).join('|');
}
if (parts.negatives.length) {
negatives = `-(${prefix}${parts.negatives.map(v => toMaxLen(String(v), maxLen)).join('|')})`;
}
if (positives && negatives) {
result = `${positives}|${negatives}`;
} else {
result = positives || negatives;
}
if (options.wrap) {
return `(${prefix}${result})`;
}
return result;
};
const toRange = (a, b, isNumbers, options) => {
if (isNumbers) {
return toRegexRange(a, b, { wrap: false, ...options });
}
let start = String.fromCharCode(a);
if (a === b) return start;
let stop = String.fromCharCode(b);
return `[${start}-${stop}]`;
};
const toRegex = (start, end, options) => {
if (Array.isArray(start)) {
let wrap = options.wrap === true;
let prefix = options.capture ? '' : '?:';
return wrap ? `(${prefix}${start.join('|')})` : start.join('|');
}
return toRegexRange(start, end, options);
};
const rangeError = (...args) => {
return new RangeError('Invalid range arguments: ' + util$1.inspect(...args));
};
const invalidRange = (start, end, options) => {
if (options.strictRanges === true) throw rangeError([start, end]);
return [];
};
const invalidStep = (step, options) => {
if (options.strictRanges === true) {
throw new TypeError(`Expected step "${step}" to be a number`);
}
return [];
};
const fillNumbers = (start, end, step = 1, options = {}) => {
let a = Number(start);
let b = Number(end);
if (!Number.isInteger(a) || !Number.isInteger(b)) {
if (options.strictRanges === true) throw rangeError([start, end]);
return [];
}
// fix negative zero
if (a === 0) a = 0;
if (b === 0) b = 0;
let descending = a > b;
let startString = String(start);
let endString = String(end);
let stepString = String(step);
step = Math.max(Math.abs(step), 1);
let padded = zeros(startString) || zeros(endString) || zeros(stepString);
let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;
let toNumber = padded === false && stringify$6(start, end, options) === false;
let format = options.transform || transform(toNumber);
if (options.toRegex && step === 1) {
return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);
}
let parts = { negatives: [], positives: [] };
let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));
let range = [];
let index = 0;
while (descending ? a >= b : a <= b) {
if (options.toRegex === true && step > 1) {
push(a);
} else {
range.push(pad(format(a, index), maxLen, toNumber));
}
a = descending ? a - step : a + step;
index++;
}
if (options.toRegex === true) {
return step > 1
? toSequence(parts, options, maxLen)
: toRegex(range, null, { wrap: false, ...options });
}
return range;
};
const fillLetters = (start, end, step = 1, options = {}) => {
if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {
return invalidRange(start, end, options);
}
let format = options.transform || (val => String.fromCharCode(val));
let a = `${start}`.charCodeAt(0);
let b = `${end}`.charCodeAt(0);
let descending = a > b;
let min = Math.min(a, b);
let max = Math.max(a, b);
if (options.toRegex && step === 1) {
return toRange(min, max, false, options);
}
let range = [];
let index = 0;
while (descending ? a >= b : a <= b) {
range.push(format(a, index));
a = descending ? a - step : a + step;
index++;
}
if (options.toRegex === true) {
return toRegex(range, null, { wrap: false, ...options });
}
return range;
};
const fill$2 = (start, end, step, options = {}) => {
if (end == null && isValidValue(start)) {
return [start];
}
if (!isValidValue(start) || !isValidValue(end)) {
return invalidRange(start, end, options);
}
if (typeof step === 'function') {
return fill$2(start, end, 1, { transform: step });
}
if (isObject(step)) {
return fill$2(start, end, 0, step);
}
let opts = { ...options };
if (opts.capture === true) opts.wrap = true;
step = step || opts.step || 1;
if (!isNumber(step)) {
if (step != null && !isObject(step)) return invalidStep(step, opts);
return fill$2(start, end, 1, step);
}
if (isNumber(start) && isNumber(end)) {
return fillNumbers(start, end, step, opts);
}
return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);
};
var fillRange = fill$2;
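// Illustrative usage of fillRange: fillRange('1', '3') => ['1', '2', '3'], fillRange('a', 'e', 2) =>
// ['a', 'c', 'e'], and fillRange(1, 10, { toRegex: true }) returns the single pattern string
// "[1-9]|10" instead of an expanded array.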
const fill$1 = fillRange;
const utils$d = utils$f;
const compile$1 = (ast, options = {}) => {
const walk = (node, parent = {}) => {
const invalidBlock = utils$d.isInvalidBrace(parent);
const invalidNode = node.invalid === true && options.escapeInvalid === true;
const invalid = invalidBlock === true || invalidNode === true;
const prefix = options.escapeInvalid === true ? '\\' : '';
let output = '';
if (node.isOpen === true) {
return prefix + node.value;
}
if (node.isClose === true) {
return prefix + node.value;
}
if (node.type === 'open') {
return invalid ? prefix + node.value : '(';
}
if (node.type === 'close') {
return invalid ? prefix + node.value : ')';
}
if (node.type === 'comma') {
return node.prev.type === 'comma' ? '' : invalid ? node.value : '|';
}
if (node.value) {
return node.value;
}
if (node.nodes && node.ranges > 0) {
const args = utils$d.reduce(node.nodes);
const range = fill$1(...args, { ...options, wrap: false, toRegex: true, strictZeros: true });
if (range.length !== 0) {
return args.length > 1 && range.length > 1 ? `(${range})` : range;
}
}
if (node.nodes) {
for (const child of node.nodes) {
output += walk(child, node);
}
}
return output;
};
return walk(ast);
};
var compile_1 = compile$1;
const fill = fillRange;
const stringify$5 = stringify$7;
const utils$c = utils$f;
const append$1 = (queue = '', stash = '', enclose = false) => {
const result = [];
queue = [].concat(queue);
stash = [].concat(stash);
if (!stash.length) return queue;
if (!queue.length) {
return enclose ? utils$c.flatten(stash).map(ele => `{${ele}}`) : stash;
}
for (const item of queue) {
if (Array.isArray(item)) {
for (const value of item) {
result.push(append$1(value, stash, enclose));
}
} else {
for (let ele of stash) {
if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
result.push(Array.isArray(ele) ? append$1(item, ele, enclose) : item + ele);
}
}
}
return utils$c.flatten(result);
};
const expand$2 = (ast, options = {}) => {
const rangeLimit = options.rangeLimit === undefined ? 1000 : options.rangeLimit;
const walk = (node, parent = {}) => {
node.queue = [];
let p = parent;
let q = parent.queue;
while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
p = p.parent;
q = p.queue;
}
if (node.invalid || node.dollar) {
q.push(append$1(q.pop(), stringify$5(node, options)));
return;
}
if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
q.push(append$1(q.pop(), ['{}']));
return;
}
if (node.nodes && node.ranges > 0) {
const args = utils$c.reduce(node.nodes);
if (utils$c.exceedsLimit(...args, options.step, rangeLimit)) {
throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
}
let range = fill(...args, options);
if (range.length === 0) {
range = stringify$5(node, options);
}
q.push(append$1(q.pop(), range));
node.nodes = [];
return;
}
const enclose = utils$c.encloseBrace(node);
let queue = node.queue;
let block = node;
while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
block = block.parent;
queue = block.queue;
}
for (let i = 0; i < node.nodes.length; i++) {
const child = node.nodes[i];
if (child.type === 'comma' && node.type === 'brace') {
if (i === 1) queue.push('');
queue.push('');
continue;
}
if (child.type === 'close') {
q.push(append$1(q.pop(), queue, enclose));
continue;
}
if (child.value && child.type !== 'open') {
queue.push(append$1(queue.pop(), child.value));
continue;
}
if (child.nodes) {
walk(child, node);
}
}
return queue;
};
return utils$c.flatten(walk(ast));
};
var expand_1$1 = expand$2;
var constants$3 = {
MAX_LENGTH: 10000,
// Digits
CHAR_0: '0', /* 0 */
CHAR_9: '9', /* 9 */
// Alphabet chars.
CHAR_UPPERCASE_A: 'A', /* A */
CHAR_LOWERCASE_A: 'a', /* a */
CHAR_UPPERCASE_Z: 'Z', /* Z */
CHAR_LOWERCASE_Z: 'z', /* z */
CHAR_LEFT_PARENTHESES: '(', /* ( */
CHAR_RIGHT_PARENTHESES: ')', /* ) */
CHAR_ASTERISK: '*', /* * */
// Non-alphabetic chars.
CHAR_AMPERSAND: '&', /* & */
CHAR_AT: '@', /* @ */
CHAR_BACKSLASH: '\\', /* \ */
CHAR_BACKTICK: '`', /* ` */
CHAR_CARRIAGE_RETURN: '\r', /* \r */
CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
CHAR_COLON: ':', /* : */
CHAR_COMMA: ',', /* , */
CHAR_DOLLAR: '$', /* $ */
CHAR_DOT: '.', /* . */
CHAR_DOUBLE_QUOTE: '"', /* " */
CHAR_EQUAL: '=', /* = */
CHAR_EXCLAMATION_MARK: '!', /* ! */
CHAR_FORM_FEED: '\f', /* \f */
CHAR_FORWARD_SLASH: '/', /* / */
CHAR_HASH: '#', /* # */
CHAR_HYPHEN_MINUS: '-', /* - */
CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
CHAR_LEFT_CURLY_BRACE: '{', /* { */
CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
CHAR_LINE_FEED: '\n', /* \n */
CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
CHAR_PERCENT: '%', /* % */
CHAR_PLUS: '+', /* + */
CHAR_QUESTION_MARK: '?', /* ? */
CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
CHAR_RIGHT_CURLY_BRACE: '}', /* } */
CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
CHAR_SEMICOLON: ';', /* ; */
CHAR_SINGLE_QUOTE: '\'', /* ' */
CHAR_SPACE: ' ', /* */
CHAR_TAB: '\t', /* \t */
CHAR_UNDERSCORE: '_', /* _ */
CHAR_VERTICAL_LINE: '|', /* | */
CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
};
const stringify$4 = stringify$7;
/**
* Constants
*/
const {
MAX_LENGTH,
CHAR_BACKSLASH, /* \ */
CHAR_BACKTICK, /* ` */
CHAR_COMMA, /* , */
CHAR_DOT, /* . */
CHAR_LEFT_PARENTHESES, /* ( */
CHAR_RIGHT_PARENTHESES, /* ) */
CHAR_LEFT_CURLY_BRACE, /* { */
CHAR_RIGHT_CURLY_BRACE, /* } */
CHAR_LEFT_SQUARE_BRACKET, /* [ */
CHAR_RIGHT_SQUARE_BRACKET, /* ] */
CHAR_DOUBLE_QUOTE, /* " */
CHAR_SINGLE_QUOTE, /* ' */
CHAR_NO_BREAK_SPACE,
CHAR_ZERO_WIDTH_NOBREAK_SPACE
} = constants$3;
/**
* parse
*/
const parse$c = (input, options = {}) => {
if (typeof input !== 'string') {
throw new TypeError('Expected a string');
}
const opts = options || {};
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
if (input.length > max) {
throw new SyntaxError(`Input length (${input.length}) exceeds max characters (${max})`);
}
const ast = { type: 'root', input, nodes: [] };
const stack = [ast];
let block = ast;
let prev = ast;
let brackets = 0;
const length = input.length;
let index = 0;
let depth = 0;
let value;
/**
* Helpers
*/
const advance = () => input[index++];
const push = node => {
if (node.type === 'text' && prev.type === 'dot') {
prev.type = 'text';
}
if (prev && prev.type === 'text' && node.type === 'text') {
prev.value += node.value;
return;
}
block.nodes.push(node);
node.parent = block;
node.prev = prev;
prev = node;
return node;
};
push({ type: 'bos' });
while (index < length) {
block = stack[stack.length - 1];
value = advance();
/**
* Invalid chars
*/
if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
continue;
}
/**
* Escaped chars
*/
if (value === CHAR_BACKSLASH) {
push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
continue;
}
/**
* Right square bracket (literal): ']'
*/
if (value === CHAR_RIGHT_SQUARE_BRACKET) {
push({ type: 'text', value: '\\' + value });
continue;
}
/**
* Left square bracket: '['
*/
if (value === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
let next;
while (index < length && (next = advance())) {
value += next;
if (next === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
continue;
}
if (next === CHAR_BACKSLASH) {
value += advance();
continue;
}
if (next === CHAR_RIGHT_SQUARE_BRACKET) {
brackets--;
if (brackets === 0) {
break;
}
}
}
push({ type: 'text', value });
continue;
}
/**
* Parentheses
*/
if (value === CHAR_LEFT_PARENTHESES) {
block = push({ type: 'paren', nodes: [] });
stack.push(block);
push({ type: 'text', value });
continue;
}
if (value === CHAR_RIGHT_PARENTHESES) {
if (block.type !== 'paren') {
push({ type: 'text', value });
continue;
}
block = stack.pop();
push({ type: 'text', value });
block = stack[stack.length - 1];
continue;
}
/**
* Quotes: '|"|`
*/
if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
const open = value;
let next;
if (options.keepQuotes !== true) {
value = '';
}
while (index < length && (next = advance())) {
if (next === CHAR_BACKSLASH) {
value += next + advance();
continue;
}
if (next === open) {
if (options.keepQuotes === true) value += next;
break;
}
value += next;
}
push({ type: 'text', value });
continue;
}
/**
* Left curly brace: '{'
*/
if (value === CHAR_LEFT_CURLY_BRACE) {
depth++;
const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
const brace = {
type: 'brace',
open: true,
close: false,
dollar,
depth,
commas: 0,
ranges: 0,
nodes: []
};
block = push(brace);
stack.push(block);
push({ type: 'open', value });
continue;
}
/**
* Right curly brace: '}'
*/
if (value === CHAR_RIGHT_CURLY_BRACE) {
if (block.type !== 'brace') {
push({ type: 'text', value });
continue;
}
const type = 'close';
block = stack.pop();
block.close = true;
push({ type, value });
depth--;
block = stack[stack.length - 1];
continue;
}
/**
* Comma: ','
*/
if (value === CHAR_COMMA && depth > 0) {
if (block.ranges > 0) {
block.ranges = 0;
const open = block.nodes.shift();
block.nodes = [open, { type: 'text', value: stringify$4(block) }];
}
push({ type: 'comma', value });
block.commas++;
continue;
}
/**
* Dot: '.'
*/
if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
const siblings = block.nodes;
if (depth === 0 || siblings.length === 0) {
push({ type: 'text', value });
continue;
}
if (prev.type === 'dot') {
block.range = [];
prev.value += value;
prev.type = 'range';
if (block.nodes.length !== 3 && block.nodes.length !== 5) {
block.invalid = true;
block.ranges = 0;
prev.type = 'text';
continue;
}
block.ranges++;
block.args = [];
continue;
}
if (prev.type === 'range') {
siblings.pop();
const before = siblings[siblings.length - 1];
before.value += prev.value + value;
prev = before;
block.ranges--;
continue;
}
push({ type: 'dot', value });
continue;
}
/**
* Text
*/
push({ type: 'text', value });
}
// Mark imbalanced braces and brackets as invalid
do {
block = stack.pop();
if (block.type !== 'root') {
block.nodes.forEach(node => {
if (!node.nodes) {
if (node.type === 'open') node.isOpen = true;
if (node.type === 'close') node.isClose = true;
if (!node.nodes) node.type = 'text';
node.invalid = true;
}
});
// get the location of the block on parent.nodes (block's siblings)
const parent = stack[stack.length - 1];
const index = parent.nodes.indexOf(block);
// replace the (invalid) block with its nodes
parent.nodes.splice(index, 1, ...block.nodes);
}
} while (stack.length > 0);
push({ type: 'eos' });
return ast;
};
var parse_1$2 = parse$c;
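/*
 * A rough sketch of the AST shape `parse$c` produces for a simple pattern
 * (fields abridged; node ordering follows the `push` helper above):
 *
 * ```js
 * parse$c('a/{b,c}');
 * //=> {
 * //   type: 'root',
 * //   input: 'a/{b,c}',
 * //   nodes: [
 * //     { type: 'bos' },
 * //     { type: 'text', value: 'a/' },
 * //     { type: 'brace', commas: 1, ranges: 0, nodes: [...] }, // open, 'b', comma, 'c', close
 * //     { type: 'eos' }
 * //   ]
 * // }
 * ```
 */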
const stringify$3 = stringify$7;
const compile = compile_1;
const expand$1 = expand_1$1;
const parse$b = parse_1$2;
/**
* Expand the given pattern or create a regex-compatible string.
*
* ```js
* const braces = require('braces');
* console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
* console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
* ```
* @param {String} `str`
* @param {Object} `options`
* @return {Array} Returns an array of compiled or expanded patterns.
* @api public
*/
const braces$2 = (input, options = {}) => {
let output = [];
if (Array.isArray(input)) {
for (const pattern of input) {
const result = braces$2.create(pattern, options);
if (Array.isArray(result)) {
output.push(...result);
} else {
output.push(result);
}
}
} else {
output = [].concat(braces$2.create(input, options));
}
if (options && options.expand === true && options.nodupes === true) {
output = [...new Set(output)];
}
return output;
};
/**
* Parse the given `str` with the given `options`.
*
* ```js
* // braces.parse(pattern, [, options]);
* const ast = braces.parse('a/{b,c}/d');
* console.log(ast);
* ```
* @param {String} pattern Brace pattern to parse
* @param {Object} options
* @return {Object} Returns an AST
* @api public
*/
braces$2.parse = (input, options = {}) => parse$b(input, options);
/**
* Creates a braces string from an AST, or an AST node.
*
* ```js
* const braces = require('braces');
* let ast = braces.parse('foo/{a,b}/bar');
* console.log(stringify(ast.nodes[2])); //=> '{a,b}'
* ```
* @param {String|Object} `input` Brace pattern or AST.
* @param {Object} `options`
* @return {String} Returns a stringified brace pattern.
* @api public
*/
braces$2.stringify = (input, options = {}) => {
if (typeof input === 'string') {
return stringify$3(braces$2.parse(input, options), options);
}
return stringify$3(input, options);
};
/**
* Compiles a brace pattern into a regex-compatible, optimized string.
* This method is called by the main [braces](#braces) function by default.
*
* ```js
* const braces = require('braces');
* console.log(braces.compile('a/{b,c}/d'));
* //=> ['a/(b|c)/d']
* ```
* @param {String} `input` Brace pattern or AST.
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces$2.compile = (input, options = {}) => {
if (typeof input === 'string') {
input = braces$2.parse(input, options);
}
return compile(input, options);
};
/**
* Expands a brace pattern into an array. This method is called by the
* main [braces](#braces) function when `options.expand` is true. Before
* using this method it's recommended that you read the [performance notes](#performance)
* and the advantages of using [.compile](#compile) instead.
*
* ```js
* const braces = require('braces');
* console.log(braces.expand('a/{b,c}/d'));
* //=> ['a/b/d', 'a/c/d'];
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces$2.expand = (input, options = {}) => {
if (typeof input === 'string') {
input = braces$2.parse(input, options);
}
let result = expand$1(input, options);
// filter out empty strings if specified
if (options.noempty === true) {
result = result.filter(Boolean);
}
// filter out duplicates if specified
if (options.nodupes === true) {
result = [...new Set(result)];
}
return result;
};
/**
* Processes a brace pattern and returns either an expanded array
* (if `options.expand` is true) or a highly optimized regex-compatible string.
* This method is called by the main [braces](#braces) function.
*
* ```js
* const braces = require('braces');
* console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
* //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces$2.create = (input, options = {}) => {
if (input === '' || input.length < 3) {
return [input];
}
return options.expand !== true
? braces$2.compile(input, options)
: braces$2.expand(input, options);
};
/**
* Expose "braces"
*/
var braces_1 = braces$2;
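/*
 * A minimal usage sketch of the API exposed above; outputs follow the
 * documented behavior of braces and are not re-verified here:
 *
 * ```js
 * braces$2('a/{b,c}/d');                      //=> ['a/(b|c)/d'] (compiled by default)
 * braces$2('a/{b,c}/d', { expand: true });    //=> ['a/b/d', 'a/c/d']
 * braces$2.expand(braces$2.parse('x{1..3}')); //=> ['x1', 'x2', 'x3']
 * ```
 */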
const util = require$$0$5;
const braces$1 = braces_1;
const picomatch$2 = picomatch$3;
const utils$b = utils$k;
const isEmptyString = v => v === '' || v === './';
const hasBraces = v => {
const index = v.indexOf('{');
return index > -1 && v.indexOf('}', index) > -1;
};
/**
* Returns an array of strings that match one or more glob patterns.
*
* ```js
* const mm = require('micromatch');
* // mm(list, patterns[, options]);
*
* console.log(mm(['a.js', 'a.txt'], ['*.js']));
* //=> [ 'a.js' ]
* ```
* @param {String|Array} `list` List of strings to match.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options)
* @return {Array} Returns an array of matches
* @summary false
* @api public
*/
const micromatch$1 = (list, patterns, options) => {
patterns = [].concat(patterns);
list = [].concat(list);
let omit = new Set();
let keep = new Set();
let items = new Set();
let negatives = 0;
let onResult = state => {
items.add(state.output);
if (options && options.onResult) {
options.onResult(state);
}
};
for (let i = 0; i < patterns.length; i++) {
let isMatch = picomatch$2(String(patterns[i]), { ...options, onResult }, true);
let negated = isMatch.state.negated || isMatch.state.negatedExtglob;
if (negated) negatives++;
for (let item of list) {
let matched = isMatch(item, true);
let match = negated ? !matched.isMatch : matched.isMatch;
if (!match) continue;
if (negated) {
omit.add(matched.output);
} else {
omit.delete(matched.output);
keep.add(matched.output);
}
}
}
let result = negatives === patterns.length ? [...items] : [...keep];
let matches = result.filter(item => !omit.has(item));
if (options && matches.length === 0) {
if (options.failglob === true) {
throw new Error(`No matches found for "${patterns.join(', ')}"`);
}
if (options.nonull === true || options.nullglob === true) {
return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns;
}
}
return matches;
};
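/*
 * A small sketch of how the omit/keep bookkeeping above plays out when a
 * negated pattern is mixed in (expected result per micromatch's documented
 * semantics):
 *
 * ```js
 * micromatch$1(['a.js', 'a.txt', 'b.js'], ['*.js', '!b.js']);
 * //=> ['a.js']  ('*.js' keeps a.js and b.js, then '!b.js' adds b.js to omit)
 * ```
 */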
/**
* Backwards compatibility
*/
micromatch$1.match = micromatch$1;
/**
* Returns a matcher function from the given glob `pattern` and `options`.
* The returned function takes a string to match as its only argument and returns
* true if the string is a match.
*
* ```js
* const mm = require('micromatch');
* // mm.matcher(pattern[, options]);
*
* const isMatch = mm.matcher('*.!(*a)');
* console.log(isMatch('a.a')); //=> false
* console.log(isMatch('a.b')); //=> true
* ```
* @param {String} `pattern` Glob pattern
* @param {Object} `options`
* @return {Function} Returns a matcher function.
* @api public
*/
micromatch$1.matcher = (pattern, options) => picomatch$2(pattern, options);
/**
* Returns true if **any** of the given glob `patterns` match the specified `string`.
*
* ```js
* const mm = require('micromatch');
* // mm.isMatch(string, patterns[, options]);
*
* console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true
* console.log(mm.isMatch('a.a', 'b.*')); //=> false
* ```
* @param {String} `str` The string to test.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `[options]` See available [options](#options).
* @return {Boolean} Returns true if any patterns match `str`
* @api public
*/
micromatch$1.isMatch = (str, patterns, options) => picomatch$2(patterns, options)(str);
/**
* Backwards compatibility
*/
micromatch$1.any = micromatch$1.isMatch;
/**
* Returns a list of strings that _**do not match any**_ of the given `patterns`.
*
* ```js
* const mm = require('micromatch');
* // mm.not(list, patterns[, options]);
*
* console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));
* //=> ['b.b', 'c.c']
* ```
* @param {Array} `list` Array of strings to match.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Array} Returns an array of strings that **do not match** the given patterns.
* @api public
*/
micromatch$1.not = (list, patterns, options = {}) => {
patterns = [].concat(patterns).map(String);
let result = new Set();
let items = [];
let onResult = state => {
if (options.onResult) options.onResult(state);
items.push(state.output);
};
let matches = new Set(micromatch$1(list, patterns, { ...options, onResult }));
for (let item of items) {
if (!matches.has(item)) {
result.add(item);
}
}
return [...result];
};
/**
* Returns true if the given `string` contains the given pattern. Similar
* to [.isMatch](#isMatch) but the pattern can match any part of the string.
*
* ```js
* var mm = require('micromatch');
* // mm.contains(string, pattern[, options]);
*
* console.log(mm.contains('aa/bb/cc', '*b'));
* //=> true
* console.log(mm.contains('aa/bb/cc', '*d'));
* //=> false
* ```
* @param {String} `str` The string to match.
* @param {String|Array} `patterns` Glob pattern to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Boolean} Returns true if any of the patterns matches any part of `str`.
* @api public
*/
micromatch$1.contains = (str, pattern, options) => {
if (typeof str !== 'string') {
throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
}
if (Array.isArray(pattern)) {
return pattern.some(p => micromatch$1.contains(str, p, options));
}
if (typeof pattern === 'string') {
if (isEmptyString(str) || isEmptyString(pattern)) {
return false;
}
if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {
return true;
}
}
return micromatch$1.isMatch(str, pattern, { ...options, contains: true });
};
/**
* Filter the keys of the given object with the given `glob` pattern
* and `options`. Does not attempt to match nested keys. If you need this feature,
* use [glob-object][] instead.
*
* ```js
* const mm = require('micromatch');
* // mm.matchKeys(object, patterns[, options]);
*
* const obj = { aa: 'a', ab: 'b', ac: 'c' };
* console.log(mm.matchKeys(obj, '*b'));
* //=> { ab: 'b' }
* ```
* @param {Object} `object` The object with keys to filter.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Object} Returns an object with only keys that match the given patterns.
* @api public
*/
micromatch$1.matchKeys = (obj, patterns, options) => {
if (!utils$b.isObject(obj)) {
throw new TypeError('Expected the first argument to be an object');
}
let keys = micromatch$1(Object.keys(obj), patterns, options);
let res = {};
for (let key of keys) res[key] = obj[key];
return res;
};
/**
* Returns true if some of the strings in the given `list` match any of the given glob `patterns`.
*
* ```js
* const mm = require('micromatch');
* // mm.some(list, patterns[, options]);
*
* console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
* // true
* console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));
* // false
* ```
* @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Boolean} Returns true if any of the `patterns` match any of the strings in `list`
* @api public
*/
micromatch$1.some = (list, patterns, options) => {
let items = [].concat(list);
for (let pattern of [].concat(patterns)) {
let isMatch = picomatch$2(String(pattern), options);
if (items.some(item => isMatch(item))) {
return true;
}
}
return false;
};
/**
* Returns true if every string in the given `list` matches
* any of the given glob `patterns`.
*
* ```js
* const mm = require('micromatch');
* // mm.every(list, patterns[, options]);
*
* console.log(mm.every('foo.js', ['foo.js']));
* // true
* console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));
* // true
* console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
* // false
* console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));
* // false
* ```
* @param {String|Array} `list` The string or array of strings to test.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Boolean} Returns true if all `patterns` match all of the strings in `list`
* @api public
*/
micromatch$1.every = (list, patterns, options) => {
let items = [].concat(list);
for (let pattern of [].concat(patterns)) {
let isMatch = picomatch$2(String(pattern), options);
if (!items.every(item => isMatch(item))) {
return false;
}
}
return true;
};
/**
* Returns true if **all** of the given `patterns` match
* the specified string.
*
* ```js
* const mm = require('micromatch');
* // mm.all(string, patterns[, options]);
*
* console.log(mm.all('foo.js', ['foo.js']));
* // true
*
* console.log(mm.all('foo.js', ['*.js', '!foo.js']));
* // false
*
* console.log(mm.all('foo.js', ['*.js', 'foo.js']));
* // true
*
* console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));
* // true
* ```
* @param {String|Array} `str` The string to test.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Boolean} Returns true if all patterns match `str`
* @api public
*/
micromatch$1.all = (str, patterns, options) => {
if (typeof str !== 'string') {
throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
}
return [].concat(patterns).every(p => picomatch$2(p, options)(str));
};
/**
* Returns an array of matches captured by `pattern` in `string`, or `null` if the pattern did not match.
*
* ```js
* const mm = require('micromatch');
* // mm.capture(pattern, string[, options]);
*
* console.log(mm.capture('test/*.js', 'test/foo.js'));
* //=> ['foo']
* console.log(mm.capture('test/*.js', 'foo/bar.css'));
* //=> null
* ```
* @param {String} `glob` Glob pattern to use for matching.
* @param {String} `input` String to match
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`.
* @api public
*/
micromatch$1.capture = (glob, input, options) => {
let posix = utils$b.isWindows(options);
let regex = picomatch$2.makeRe(String(glob), { ...options, capture: true });
let match = regex.exec(posix ? utils$b.toPosixSlashes(input) : input);
if (match) {
return match.slice(1).map(v => v === void 0 ? '' : v);
}
};
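/*
 * A short sketch of the capture behavior above. Captured groups that come back
 * `undefined` from the regex are normalized to empty strings, and when the
 * input does not match at all the function falls through without a return
 * value:
 *
 * ```js
 * micromatch$1.capture('test/*.js', 'test/foo.js'); //=> ['foo']
 * micromatch$1.capture('test/*.js', 'foo/bar.css'); //=> undefined (no match)
 * ```
 */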
/**
* Create a regular expression from the given glob `pattern`.
*
* ```js
* const mm = require('micromatch');
* // mm.makeRe(pattern[, options]);
*
* console.log(mm.makeRe('*.js'));
* //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/
* ```
* @param {String} `pattern` A glob pattern to convert to regex.
* @param {Object} `options`
* @return {RegExp} Returns a regex created from the given pattern.
* @api public
*/
micromatch$1.makeRe = (...args) => picomatch$2.makeRe(...args);
/**
* Scan a glob pattern to separate the pattern into segments. Used
* by the [split](#split) method.
*
* ```js
* const mm = require('micromatch');
* const state = mm.scan(pattern[, options]);
* ```
* @param {String} `pattern`
* @param {Object} `options`
* @return {Object} Returns an object with information about the pattern.
* @api public
*/
micromatch$1.scan = (...args) => picomatch$2.scan(...args);
/**
* Parse a glob pattern to create the source string for a regular
* expression.
*
* ```js
* const mm = require('micromatch');
* const state = mm.parse(pattern[, options]);
* ```
* @param {String} `glob`
* @param {Object} `options`
* @return {Object} Returns an object with useful properties and output to be used as regex source string.
* @api public
*/
micromatch$1.parse = (patterns, options) => {
let res = [];
for (let pattern of [].concat(patterns || [])) {
for (let str of braces$1(String(pattern), options)) {
res.push(picomatch$2.parse(str, options));
}
}
return res;
};
/**
* Process the given brace `pattern`.
*
* ```js
* const { braces } = require('micromatch');
* console.log(braces('foo/{a,b,c}/bar'));
* //=> [ 'foo/(a|b|c)/bar' ]
*
* console.log(braces('foo/{a,b,c}/bar', { expand: true }));
* //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]
* ```
* @param {String} `pattern` String with brace pattern to process.
* @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options.
* @return {Array}
* @api public
*/
micromatch$1.braces = (pattern, options) => {
if (typeof pattern !== 'string') throw new TypeError('Expected a string');
if ((options && options.nobrace === true) || !hasBraces(pattern)) {
return [pattern];
}
return braces$1(pattern, options);
};
/**
* Expand braces
*/
micromatch$1.braceExpand = (pattern, options) => {
if (typeof pattern !== 'string') throw new TypeError('Expected a string');
return micromatch$1.braces(pattern, { ...options, expand: true });
};
/**
* Expose micromatch
*/
// exposed for tests
micromatch$1.hasBraces = hasBraces;
var micromatch_1 = micromatch$1;
var micromatch$2 = /*@__PURE__*/getDefaultExportFromCjs(micromatch_1);
Object.defineProperty(pattern$1, "__esModule", { value: true });
pattern$1.removeDuplicateSlashes = pattern$1.matchAny = pattern$1.convertPatternsToRe = pattern$1.makeRe = pattern$1.getPatternParts = pattern$1.expandBraceExpansion = pattern$1.expandPatternsWithBraceExpansion = pattern$1.isAffectDepthOfReadingPattern = pattern$1.endsWithSlashGlobStar = pattern$1.hasGlobStar = pattern$1.getBaseDirectory = pattern$1.isPatternRelatedToParentDirectory = pattern$1.getPatternsOutsideCurrentDirectory = pattern$1.getPatternsInsideCurrentDirectory = pattern$1.getPositivePatterns = pattern$1.getNegativePatterns = pattern$1.isPositivePattern = pattern$1.isNegativePattern = pattern$1.convertToNegativePattern = pattern$1.convertToPositivePattern = pattern$1.isDynamicPattern = pattern$1.isStaticPattern = void 0;
const path$g = require$$0$4;
const globParent$1 = globParent$2;
const micromatch = micromatch_1;
const GLOBSTAR$1 = '**';
const ESCAPE_SYMBOL = '\\';
const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;
const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/;
const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/;
const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/;
const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./;
/**
* Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string.
* Leading slashes are excluded because a UNC path may begin with a device path (e.g. `//?/C:/`).
*/
const DOUBLE_SLASH_RE$1 = /(?!^)\/{2,}/g;
function isStaticPattern(pattern, options = {}) {
return !isDynamicPattern(pattern, options);
}
pattern$1.isStaticPattern = isStaticPattern;
function isDynamicPattern(pattern, options = {}) {
/**
* A special case with an empty string is necessary for matching patterns that start with a forward slash.
* An empty string cannot be a dynamic pattern.
* For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.
*/
if (pattern === '') {
return false;
}
/**
* When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check
* the filepath directly (without reading the directory).
*/
if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {
return true;
}
if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {
return true;
}
if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {
return true;
}
if (options.braceExpansion !== false && hasBraceExpansion(pattern)) {
return true;
}
return false;
}
pattern$1.isDynamicPattern = isDynamicPattern;
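/*
 * A few illustrative inputs for the checks above (expected results with
 * default options):
 *
 * ```js
 * isDynamicPattern('src/index.ts');  //=> false (no glob symbols)
 * isDynamicPattern('src/*.ts');      //=> true  (matches COMMON_GLOB_SYMBOLS_RE)
 * isDynamicPattern('src/{a,b}.ts');  //=> true  (brace expansion)
 * isDynamicPattern('file.ts', { caseSensitiveMatch: false }); //=> true
 * ```
 */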
function hasBraceExpansion(pattern) {
const openingBraceIndex = pattern.indexOf('{');
if (openingBraceIndex === -1) {
return false;
}
const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1);
if (closingBraceIndex === -1) {
return false;
}
const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex);
return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent);
}
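/*
 * Only braces containing a separator (`,` or `..`) count as brace expansion
 * here; a literal group without one does not:
 *
 * ```js
 * hasBraceExpansion('a/{b,c}');  //=> true
 * hasBraceExpansion('a/{1..3}'); //=> true
 * hasBraceExpansion('a/{b}');    //=> false
 * ```
 */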
function convertToPositivePattern(pattern) {
return isNegativePattern(pattern) ? pattern.slice(1) : pattern;
}
pattern$1.convertToPositivePattern = convertToPositivePattern;
function convertToNegativePattern(pattern) {
return '!' + pattern;
}
pattern$1.convertToNegativePattern = convertToNegativePattern;
function isNegativePattern(pattern) {
return pattern.startsWith('!') && pattern[1] !== '(';
}
pattern$1.isNegativePattern = isNegativePattern;
function isPositivePattern(pattern) {
return !isNegativePattern(pattern);
}
pattern$1.isPositivePattern = isPositivePattern;
function getNegativePatterns(patterns) {
return patterns.filter(isNegativePattern);
}
pattern$1.getNegativePatterns = getNegativePatterns;
function getPositivePatterns$1(patterns) {
return patterns.filter(isPositivePattern);
}
pattern$1.getPositivePatterns = getPositivePatterns$1;
/**
* Returns patterns that can be applied inside the current directory.
*
* @example
* // ['./*', '*', 'a/*']
* getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*'])
*/
function getPatternsInsideCurrentDirectory(patterns) {
return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern));
}
pattern$1.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory;
/**
* Returns patterns that need to be resolved outside (above) the current directory.
*
* @example
* // ['../*', './../*']
* getPatternsOutsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*'])
*/
function getPatternsOutsideCurrentDirectory(patterns) {
return patterns.filter(isPatternRelatedToParentDirectory);
}
pattern$1.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory;
function isPatternRelatedToParentDirectory(pattern) {
return pattern.startsWith('..') || pattern.startsWith('./..');
}
pattern$1.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory;
function getBaseDirectory(pattern) {
return globParent$1(pattern, { flipBackslashes: false });
}
pattern$1.getBaseDirectory = getBaseDirectory;
function hasGlobStar(pattern) {
return pattern.includes(GLOBSTAR$1);
}
pattern$1.hasGlobStar = hasGlobStar;
function endsWithSlashGlobStar(pattern) {
return pattern.endsWith('/' + GLOBSTAR$1);
}
pattern$1.endsWithSlashGlobStar = endsWithSlashGlobStar;
function isAffectDepthOfReadingPattern(pattern) {
const basename = path$g.basename(pattern);
return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);
}
pattern$1.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;
function expandPatternsWithBraceExpansion(patterns) {
return patterns.reduce((collection, pattern) => {
return collection.concat(expandBraceExpansion(pattern));
}, []);
}
pattern$1.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;
function expandBraceExpansion(pattern) {
const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true });
/**
* Sort the patterns by length so that the same depth patterns are processed side by side.
* `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']`
*/
patterns.sort((a, b) => a.length - b.length);
/**
* Micromatch can return an empty string in the case of patterns like `{a,}`.
*/
return patterns.filter((pattern) => pattern !== '');
}
pattern$1.expandBraceExpansion = expandBraceExpansion;
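/*
 * A sketch of the normalization above (expected outputs, following the notes
 * in the comments):
 *
 * ```js
 * expandBraceExpansion('{a,}');          //=> ['a'] (the empty expansion is dropped)
 * expandBraceExpansion('a/{b,}/{c,}/*'); //=> ['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']
 * ```
 */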
function getPatternParts(pattern, options) {
let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));
/**
* The scan method returns an empty array in some cases.
* See micromatch/picomatch#58 for more details.
*/
if (parts.length === 0) {
parts = [pattern];
}
/**
* The scan method does not return an empty first part for a pattern that starts with a forward slash.
* This is another part of micromatch/picomatch#58.
*/
if (parts[0].startsWith('/')) {
parts[0] = parts[0].slice(1);
parts.unshift('');
}
return parts;
}
pattern$1.getPatternParts = getPatternParts;
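/*
 * A sketch of the two workarounds above: when scan yields no parts the whole
 * pattern becomes the single part, and a first part that still carries a
 * leading slash is split into an empty root part plus the remainder
 * (hypothetical scan results shown):
 *
 * ```js
 * // scan returned []            -> parts become ['/lib/*']
 * // scan returned ['/lib', '*'] -> parts become ['', 'lib', '*']
 * ```
 */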
function makeRe(pattern, options) {
return micromatch.makeRe(pattern, options);
}
pattern$1.makeRe = makeRe;
function convertPatternsToRe(patterns, options) {
return patterns.map((pattern) => makeRe(pattern, options));
}
pattern$1.convertPatternsToRe = convertPatternsToRe;
function matchAny(entry, patternsRe) {
return patternsRe.some((patternRe) => patternRe.test(entry));
}
pattern$1.matchAny = matchAny;
/**
* This package only works with forward slashes as a path separator.
* Because of this, we cannot use the standard `path.normalize` method, because on Windows it would use backslashes.
*/
function removeDuplicateSlashes(pattern) {
return pattern.replace(DOUBLE_SLASH_RE$1, '/');
}
pattern$1.removeDuplicateSlashes = removeDuplicateSlashes;
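/*
 * Examples of the slash deduplication above; leading slashes are left intact,
 * so UNC/device paths keep their prefix:
 *
 * ```js
 * removeDuplicateSlashes('a//b///c');    //=> 'a/b/c'
 * removeDuplicateSlashes('//?/C:/a//b'); //=> '//?/C:/a/b'
 * ```
 */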
var stream$4 = {};
/*
* merge2
* https://github.com/teambition/merge2
*
* Copyright (c) 2014-2020 Teambition
* Licensed under the MIT license.
*/
const Stream = require$$0$6;
const PassThrough = Stream.PassThrough;
const slice = Array.prototype.slice;
var merge2_1 = merge2$1;
function merge2$1 () {
const streamsQueue = [];
const args = slice.call(arguments);
let merging = false;
let options = args[args.length - 1];
if (options && !Array.isArray(options) && options.pipe == null) {
args.pop();
} else {
options = {};
}
const doEnd = options.end !== false;
const doPipeError = options.pipeError === true;
if (options.objectMode == null) {
options.objectMode = true;
}
if (options.highWaterMark == null) {
options.highWaterMark = 64 * 1024;
}
const mergedStream = PassThrough(options);
function addStream () {
for (let i = 0, len = arguments.length; i < len; i++) {
streamsQueue.push(pauseStreams(arguments[i], options));
}
mergeStream();
return this
}
function mergeStream () {
if (merging) {
return
}
merging = true;
let streams = streamsQueue.shift();
if (!streams) {
process.nextTick(endStream);
return
}
if (!Array.isArray(streams)) {
streams = [streams];
}
let pipesCount = streams.length + 1;
function next () {
if (--pipesCount > 0) {
return
}
merging = false;
mergeStream();
}
function pipe (stream) {
function onend () {
stream.removeListener('merge2UnpipeEnd', onend);
stream.removeListener('end', onend);
if (doPipeError) {
stream.removeListener('error', onerror);
}
next();
}
function onerror (err) {
mergedStream.emit('error', err);
}
// skip ended stream
if (stream._readableState.endEmitted) {
return next()
}
stream.on('merge2UnpipeEnd', onend);
stream.on('end', onend);
if (doPipeError) {
stream.on('error', onerror);
}
stream.pipe(mergedStream, { end: false });
// compatible for old stream
stream.resume();
}
for (let i = 0; i < streams.length; i++) {
pipe(streams[i]);
}
next();
}
function endStream () {
merging = false;
// emit 'queueDrain' when all streams have been merged.
mergedStream.emit('queueDrain');
if (doEnd) {
mergedStream.end();
}
}
mergedStream.setMaxListeners(0);
mergedStream.add = addStream;
mergedStream.on('unpipe', function (stream) {
stream.emit('merge2UnpipeEnd');
});
if (args.length) {
addStream.apply(null, args);
}
return mergedStream
}
// check and pause streams for pipe.
function pauseStreams (streams, options) {
if (!Array.isArray(streams)) {
// Backwards-compat with old-style streams
if (!streams._readableState && streams.pipe) {
streams = streams.pipe(PassThrough(options));
}
if (!streams._readableState || !streams.pause || !streams.pipe) {
throw new Error('Only readable stream can be merged.')
}
streams.pause();
} else {
for (let i = 0, len = streams.length; i < len; i++) {
streams[i] = pauseStreams(streams[i], options);
}
}
return streams
}
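/*
 * A minimal usage sketch of `merge2$1` (the source streams here are
 * hypothetical readable streams):
 *
 * ```js
 * const merged = merge2$1(streamA, [streamB, streamC], { end: false });
 * merged.add(streamD);               // queue another source later
 * merged.on('queueDrain', () => {}); // fired once every queued source has merged
 * ```
 */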
Object.defineProperty(stream$4, "__esModule", { value: true });
stream$4.merge = void 0;
const merge2 = merge2_1;
function merge$1(streams) {
const mergedStream = merge2(streams);
streams.forEach((stream) => {
stream.once('error', (error) => mergedStream.emit('error', error));
});
mergedStream.once('close', () => propagateCloseEventToSources(streams));
mergedStream.once('end', () => propagateCloseEventToSources(streams));
return mergedStream;
}
stream$4.merge = merge$1;
function propagateCloseEventToSources(streams) {
streams.forEach((stream) => stream.emit('close'));
}
var string$2 = {};
Object.defineProperty(string$2, "__esModule", { value: true });
string$2.isEmpty = string$2.isString = void 0;
function isString$1(input) {
return typeof input === 'string';
}
string$2.isString = isString$1;
function isEmpty$1(input) {
return input === '';
}
string$2.isEmpty = isEmpty$1;
Object.defineProperty(utils$g, "__esModule", { value: true });
utils$g.string = utils$g.stream = utils$g.pattern = utils$g.path = utils$g.fs = utils$g.errno = utils$g.array = void 0;
const array = array$1;
utils$g.array = array;
const errno = errno$1;
utils$g.errno = errno;
const fs$h = fs$i;
utils$g.fs = fs$h;
const path$f = path$i;
utils$g.path = path$f;
const pattern = pattern$1;
utils$g.pattern = pattern;
const stream$3 = stream$4;
utils$g.stream = stream$3;
const string$1 = string$2;
utils$g.string = string$1;
Object.defineProperty(tasks, "__esModule", { value: true });
tasks.convertPatternGroupToTask = tasks.convertPatternGroupsToTasks = tasks.groupPatternsByBaseDirectory = tasks.getNegativePatternsAsPositive = tasks.getPositivePatterns = tasks.convertPatternsToTasks = tasks.generate = void 0;
const utils$a = utils$g;
function generate(input, settings) {
const patterns = processPatterns(input, settings);
const ignore = processPatterns(settings.ignore, settings);
const positivePatterns = getPositivePatterns(patterns);
const negativePatterns = getNegativePatternsAsPositive(patterns, ignore);
const staticPatterns = positivePatterns.filter((pattern) => utils$a.pattern.isStaticPattern(pattern, settings));
const dynamicPatterns = positivePatterns.filter((pattern) => utils$a.pattern.isDynamicPattern(pattern, settings));
const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);
const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);
return staticTasks.concat(dynamicTasks);
}
tasks.generate = generate;
function processPatterns(input, settings) {
let patterns = input;
/**
* A pattern like `{,*,**,a/*}` can cause problems when checking the depth of a matching entry,
* and some problems with the micromatch package (see fast-glob issues #365 and #394).
*
* To solve this, we expand all patterns containing brace expansion. This can slightly slow down
* matching when the set of patterns is large after expansion.
*/
if (settings.braceExpansion) {
patterns = utils$a.pattern.expandPatternsWithBraceExpansion(patterns);
}
/**
* If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used
* at any nesting level.
*
* We do this here because otherwise we would have to complicate the filtering logic: for example,
* the pattern would need to be changed in the filter before creating a regular expression.
* There is no need to change the patterns elsewhere in the application; only the input needs adjusting.
*/
if (settings.baseNameMatch) {
patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`);
}
/**
* This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion.
*/
return patterns.map((pattern) => utils$a.pattern.removeDuplicateSlashes(pattern));
}
/**
* Returns tasks grouped by base pattern directories.
*
* Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately.
* This is necessary because directory traversal starts at the base directory and goes deeper.
*/
function convertPatternsToTasks(positive, negative, dynamic) {
const tasks = [];
const patternsOutsideCurrentDirectory = utils$a.pattern.getPatternsOutsideCurrentDirectory(positive);
const patternsInsideCurrentDirectory = utils$a.pattern.getPatternsInsideCurrentDirectory(positive);
const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory);
const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory);
tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic));
/*
* For the sake of reducing future accesses to the file system, we merge all tasks within the current directory
* into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest.
*/
if ('.' in insideCurrentDirectoryGroup) {
tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic));
}
else {
tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic));
}
return tasks;
}
tasks.convertPatternsToTasks = convertPatternsToTasks;
function getPositivePatterns(patterns) {
return utils$a.pattern.getPositivePatterns(patterns);
}
tasks.getPositivePatterns = getPositivePatterns;
function getNegativePatternsAsPositive(patterns, ignore) {
const negative = utils$a.pattern.getNegativePatterns(patterns).concat(ignore);
const positive = negative.map(utils$a.pattern.convertToPositivePattern);
return positive;
}
tasks.getNegativePatternsAsPositive = getNegativePatternsAsPositive;
function groupPatternsByBaseDirectory(patterns) {
const group = {};
return patterns.reduce((collection, pattern) => {
const base = utils$a.pattern.getBaseDirectory(pattern);
if (base in collection) {
collection[base].push(pattern);
}
else {
collection[base] = [pattern];
}
return collection;
}, group);
}
tasks.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;
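/*
 * A sketch of the grouping used by the task builder above; base directories
 * come from glob-parent, so sibling patterns end up in the same group:
 *
 * ```js
 * groupPatternsByBaseDirectory(['src/*.js', 'src/*.ts', 'test/*.js']);
 * //=> { src: ['src/*.js', 'src/*.ts'], test: ['test/*.js'] }
 * ```
 */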
function convertPatternGroupsToTasks(positive, negative, dynamic) {
return Object.keys(positive).map((base) => {
return convertPatternGroupToTask(base, positive[base], negative, dynamic);
});
}
tasks.convertPatternGroupsToTasks = convertPatternGroupsToTasks;
function convertPatternGroupToTask(base, positive, negative, dynamic) {
return {
dynamic,
positive,
negative,
base,
patterns: [].concat(positive, negative.map(utils$a.pattern.convertToNegativePattern))
};
}
tasks.convertPatternGroupToTask = convertPatternGroupToTask;
var async$7 = {};
var async$6 = {};
var out$3 = {};
var async$5 = {};
var async$4 = {};
var out$2 = {};
var async$3 = {};
var out$1 = {};
var async$2 = {};
Object.defineProperty(async$2, "__esModule", { value: true });
async$2.read = void 0;
function read$3(path, settings, callback) {
settings.fs.lstat(path, (lstatError, lstat) => {
if (lstatError !== null) {
callFailureCallback$2(callback, lstatError);
return;
}
if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
callSuccessCallback$2(callback, lstat);
return;
}
settings.fs.stat(path, (statError, stat) => {
if (statError !== null) {
if (settings.throwErrorOnBrokenSymbolicLink) {
callFailureCallback$2(callback, statError);
return;
}
callSuccessCallback$2(callback, lstat);
return;
}
if (settings.markSymbolicLink) {
stat.isSymbolicLink = () => true;
}
callSuccessCallback$2(callback, stat);
});
});
}
async$2.read = read$3;
function callFailureCallback$2(callback, error) {
callback(error);
}
function callSuccessCallback$2(callback, result) {
callback(null, result);
}
var sync$8 = {};
Object.defineProperty(sync$8, "__esModule", { value: true });
sync$8.read = void 0;
function read$2(path, settings) {
const lstat = settings.fs.lstatSync(path);
if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
return lstat;
}
try {
const stat = settings.fs.statSync(path);
if (settings.markSymbolicLink) {
stat.isSymbolicLink = () => true;
}
return stat;
}
catch (error) {
if (!settings.throwErrorOnBrokenSymbolicLink) {
return lstat;
}
throw error;
}
}
sync$8.read = read$2;
var settings$3 = {};
var fs$g = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = require$$0__default;
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;
} (fs$g));
Object.defineProperty(settings$3, "__esModule", { value: true });
const fs$f = fs$g;
let Settings$2 = class Settings {
constructor(_options = {}) {
this._options = _options;
this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);
this.fs = fs$f.createFileSystemAdapter(this._options.fs);
this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
};
settings$3.default = Settings$2;
Object.defineProperty(out$1, "__esModule", { value: true });
out$1.statSync = out$1.stat = out$1.Settings = void 0;
const async$1 = async$2;
const sync$7 = sync$8;
const settings_1$3 = settings$3;
out$1.Settings = settings_1$3.default;
function stat$4(path, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
async$1.read(path, getSettings$2(), optionsOrSettingsOrCallback);
return;
}
async$1.read(path, getSettings$2(optionsOrSettingsOrCallback), callback);
}
out$1.stat = stat$4;
function statSync(path, optionsOrSettings) {
const settings = getSettings$2(optionsOrSettings);
return sync$7.read(path, settings);
}
out$1.statSync = statSync;
function getSettings$2(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1$3.default) {
return settingsOrOptions;
}
return new settings_1$3.default(settingsOrOptions);
}
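/*
 * A brief usage sketch of the stat helpers exported above (the paths are
 * hypothetical):
 *
 * ```js
 * out$1.stat('some/file', (error, stats) => { if (error) throw error; }); // async, default settings
 * out$1.statSync('some/link', { followSymbolicLink: false });             // sync, returns the lstat result
 * ```
 */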
/*! queue-microtask. MIT License. Feross Aboukhadijeh */
let promise;
var queueMicrotask_1 = typeof queueMicrotask === 'function'
? queueMicrotask.bind(typeof window !== 'undefined' ? window : commonjsGlobal)
// reuse resolved promise, and allocate it lazily
: cb => (promise || (promise = Promise.resolve()))
.then(cb)
.catch(err => setTimeout(() => { throw err }, 0));
/*! run-parallel. MIT License. Feross Aboukhadijeh */
var runParallel_1 = runParallel;
const queueMicrotask$1 = queueMicrotask_1;
function runParallel (tasks, cb) {
let results, pending, keys;
let isSync = true;
if (Array.isArray(tasks)) {
results = [];
pending = tasks.length;
} else {
keys = Object.keys(tasks);
results = {};
pending = keys.length;
}
function done (err) {
function end () {
if (cb) cb(err, results);
cb = null;
}
if (isSync) queueMicrotask$1(end);
else end();
}
function each (i, err, result) {
results[i] = result;
if (--pending === 0 || err) {
done(err);
}
}
if (!pending) {
// empty
done(null);
} else if (keys) {
// object
keys.forEach(function (key) {
tasks[key](function (err, result) { each(key, err, result); });
});
} else {
// array
tasks.forEach(function (task, i) {
task(function (err, result) { each(i, err, result); });
});
}
isSync = false;
}
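/*
 * A small usage sketch of `runParallel` with an array of tasks (the task
 * bodies are hypothetical):
 *
 * ```js
 * runParallel([
 *   done => done(null, 1),
 *   done => setTimeout(() => done(null, 2), 10)
 * ], (err, results) => {
 *   // err is null and results is [1, 2] once both tasks have completed
 * });
 * ```
 */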
var constants$2 = {};
Object.defineProperty(constants$2, "__esModule", { value: true });
constants$2.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;
const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');
if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) {
throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`);
}
const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);
const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);
const SUPPORTED_MAJOR_VERSION = 10;
const SUPPORTED_MINOR_VERSION = 10;
const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;
const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;
/**
* Is `true` for Node.js 10.10 and greater.
*/
constants$2.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;
var utils$9 = {};
var fs$e = {};
Object.defineProperty(fs$e, "__esModule", { value: true });
fs$e.createDirentFromStats = void 0;
class DirentFromStats {
constructor(name, stats) {
this.name = name;
this.isBlockDevice = stats.isBlockDevice.bind(stats);
this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
this.isDirectory = stats.isDirectory.bind(stats);
this.isFIFO = stats.isFIFO.bind(stats);
this.isFile = stats.isFile.bind(stats);
this.isSocket = stats.isSocket.bind(stats);
this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
}
}
function createDirentFromStats(name, stats) {
return new DirentFromStats(name, stats);
}
fs$e.createDirentFromStats = createDirentFromStats;
Object.defineProperty(utils$9, "__esModule", { value: true });
utils$9.fs = void 0;
const fs$d = fs$e;
utils$9.fs = fs$d;
var common$a = {};
Object.defineProperty(common$a, "__esModule", { value: true });
common$a.joinPathSegments = void 0;
function joinPathSegments$1(a, b, separator) {
/**
* Correctly handle cases where the first segment is a root (`/`, `C:/`) or a UNC path (`//?/C:/`).
*/
if (a.endsWith(separator)) {
return a + b;
}
return a + separator + b;
}
common$a.joinPathSegments = joinPathSegments$1;
Object.defineProperty(async$3, "__esModule", { value: true });
async$3.readdir = async$3.readdirWithFileTypes = async$3.read = void 0;
const fsStat$5 = out$1;
const rpl = runParallel_1;
const constants_1$1 = constants$2;
const utils$8 = utils$9;
const common$9 = common$a;
function read$1(directory, settings, callback) {
if (!settings.stats && constants_1$1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
readdirWithFileTypes$1(directory, settings, callback);
return;
}
readdir$3(directory, settings, callback);
}
async$3.read = read$1;
function readdirWithFileTypes$1(directory, settings, callback) {
settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {
if (readdirError !== null) {
callFailureCallback$1(callback, readdirError);
return;
}
const entries = dirents.map((dirent) => ({
dirent,
name: dirent.name,
path: common$9.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
}));
if (!settings.followSymbolicLinks) {
callSuccessCallback$1(callback, entries);
return;
}
const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));
rpl(tasks, (rplError, rplEntries) => {
if (rplError !== null) {
callFailureCallback$1(callback, rplError);
return;
}
callSuccessCallback$1(callback, rplEntries);
});
});
}
async$3.readdirWithFileTypes = readdirWithFileTypes$1;
function makeRplTaskEntry(entry, settings) {
return (done) => {
if (!entry.dirent.isSymbolicLink()) {
done(null, entry);
return;
}
settings.fs.stat(entry.path, (statError, stats) => {
if (statError !== null) {
if (settings.throwErrorOnBrokenSymbolicLink) {
done(statError);
return;
}
done(null, entry);
return;
}
entry.dirent = utils$8.fs.createDirentFromStats(entry.name, stats);
done(null, entry);
});
};
}
function readdir$3(directory, settings, callback) {
settings.fs.readdir(directory, (readdirError, names) => {
if (readdirError !== null) {
callFailureCallback$1(callback, readdirError);
return;
}
const tasks = names.map((name) => {
const path = common$9.joinPathSegments(directory, name, settings.pathSegmentSeparator);
return (done) => {
fsStat$5.stat(path, settings.fsStatSettings, (error, stats) => {
if (error !== null) {
done(error);
return;
}
const entry = {
name,
path,
dirent: utils$8.fs.createDirentFromStats(name, stats)
};
if (settings.stats) {
entry.stats = stats;
}
done(null, entry);
});
};
});
rpl(tasks, (rplError, entries) => {
if (rplError !== null) {
callFailureCallback$1(callback, rplError);
return;
}
callSuccessCallback$1(callback, entries);
});
});
}
async$3.readdir = readdir$3;
function callFailureCallback$1(callback, error) {
callback(error);
}
function callSuccessCallback$1(callback, result) {
callback(null, result);
}
var sync$6 = {};
Object.defineProperty(sync$6, "__esModule", { value: true });
sync$6.readdir = sync$6.readdirWithFileTypes = sync$6.read = void 0;
const fsStat$4 = out$1;
const constants_1 = constants$2;
const utils$7 = utils$9;
const common$8 = common$a;
function read(directory, settings) {
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
return readdirWithFileTypes(directory, settings);
}
return readdir$2(directory, settings);
}
sync$6.read = read;
function readdirWithFileTypes(directory, settings) {
const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });
return dirents.map((dirent) => {
const entry = {
dirent,
name: dirent.name,
path: common$8.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
};
if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {
try {
const stats = settings.fs.statSync(entry.path);
entry.dirent = utils$7.fs.createDirentFromStats(entry.name, stats);
}
catch (error) {
if (settings.throwErrorOnBrokenSymbolicLink) {
throw error;
}
}
}
return entry;
});
}
sync$6.readdirWithFileTypes = readdirWithFileTypes;
function readdir$2(directory, settings) {
const names = settings.fs.readdirSync(directory);
return names.map((name) => {
const entryPath = common$8.joinPathSegments(directory, name, settings.pathSegmentSeparator);
const stats = fsStat$4.statSync(entryPath, settings.fsStatSettings);
const entry = {
name,
path: entryPath,
dirent: utils$7.fs.createDirentFromStats(name, stats)
};
if (settings.stats) {
entry.stats = stats;
}
return entry;
});
}
sync$6.readdir = readdir$2;
var settings$2 = {};
var fs$c = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = require$$0__default;
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync,
readdir: fs.readdir,
readdirSync: fs.readdirSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;
} (fs$c));
Object.defineProperty(settings$2, "__esModule", { value: true });
const path$e = require$$0$4;
const fsStat$3 = out$1;
const fs$b = fs$c;
let Settings$1 = class Settings {
constructor(_options = {}) {
this._options = _options;
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
this.fs = fs$b.createFileSystemAdapter(this._options.fs);
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path$e.sep);
this.stats = this._getValue(this._options.stats, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
this.fsStatSettings = new fsStat$3.Settings({
followSymbolicLink: this.followSymbolicLinks,
fs: this.fs,
throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink
});
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
};
settings$2.default = Settings$1;
Object.defineProperty(out$2, "__esModule", { value: true });
out$2.Settings = out$2.scandirSync = out$2.scandir = void 0;
const async = async$3;
const sync$5 = sync$6;
const settings_1$2 = settings$2;
out$2.Settings = settings_1$2.default;
function scandir(path, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
async.read(path, getSettings$1(), optionsOrSettingsOrCallback);
return;
}
async.read(path, getSettings$1(optionsOrSettingsOrCallback), callback);
}
out$2.scandir = scandir;
function scandirSync(path, optionsOrSettings) {
const settings = getSettings$1(optionsOrSettings);
return sync$5.read(path, settings);
}
out$2.scandirSync = scandirSync;
function getSettings$1(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1$2.default) {
return settingsOrOptions;
}
return new settings_1$2.default(settingsOrOptions);
}
var queue = {exports: {}};
function reusify$1 (Constructor) {
var head = new Constructor();
var tail = head;
function get () {
var current = head;
if (current.next) {
head = current.next;
} else {
head = new Constructor();
tail = head;
}
current.next = null;
return current
}
function release (obj) {
tail.next = obj;
tail = obj;
}
return {
get: get,
release: release
}
}
var reusify_1 = reusify$1;
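/*
 * A minimal sketch of the object pool above; `Task` here refers to the
 * constructor defined further down in this bundle (used by `fastqueue`):
 *
 * ```js
 * const pool = reusify$1(Task);
 * const task = pool.get();  // reuses a previously released instance when one exists
 * // ... use task ...
 * pool.release(task);       // hand it back for the next get()
 * ```
 */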
/* eslint-disable no-var */
var reusify = reusify_1;
function fastqueue (context, worker, _concurrency) {
if (typeof context === 'function') {
_concurrency = worker;
worker = context;
context = null;
}
if (!(_concurrency >= 1)) {
throw new Error('fastqueue concurrency must be equal to or greater than 1')
}
var cache = reusify(Task);
var queueHead = null;
var queueTail = null;
var _running = 0;
var errorHandler = null;
var self = {
push: push,
drain: noop$4,
saturated: noop$4,
pause: pause,
paused: false,
get concurrency () {
return _concurrency
},
set concurrency (value) {
if (!(value >= 1)) {
throw new Error('fastqueue concurrency must be equal to or greater than 1')
}
_concurrency = value;
if (self.paused) return
for (; queueHead && _running < _concurrency;) {
_running++;
release();
}
},
running: running,
resume: resume,
idle: idle,
length: length,
getQueue: getQueue,
unshift: unshift,
empty: noop$4,
kill: kill,
killAndDrain: killAndDrain,
error: error
};
return self
function running () {
return _running
}
function pause () {
self.paused = true;
}
function length () {
var current = queueHead;
var counter = 0;
while (current) {
current = current.next;
counter++;
}
return counter
}
function getQueue () {
var current = queueHead;
var tasks = [];
while (current) {
tasks.push(current.value);
current = current.next;
}
return tasks
}
function resume () {
if (!self.paused) return
self.paused = false;
if (queueHead === null) {
_running++;
release();
return
}
for (; queueHead && _running < _concurrency;) {
_running++;
release();
}
}
function idle () {
return _running === 0 && self.length() === 0
}
function push (value, done) {
var current = cache.get();
current.context = context;
current.release = release;
current.value = value;
current.callback = done || noop$4;
current.errorHandler = errorHandler;
if (_running >= _concurrency || self.paused) {
if (queueTail) {
queueTail.next = current;
queueTail = current;
} else {
queueHead = current;
queueTail = current;
self.saturated();
}
} else {
_running++;
worker.call(context, current.value, current.worked);
}
}
function unshift (value, done) {
var current = cache.get();
current.context = context;
current.release = release;
current.value = value;
current.callback = done || noop$4;
current.errorHandler = errorHandler;
if (_running >= _concurrency || self.paused) {
if (queueHead) {
current.next = queueHead;
queueHead = current;
} else {
queueHead = current;
queueTail = current;
self.saturated();
}
} else {
_running++;
worker.call(context, current.value, current.worked);
}
}
function release (holder) {
if (holder) {
cache.release(holder);
}
var next = queueHead;
if (next && _running <= _concurrency) {
if (!self.paused) {
if (queueTail === queueHead) {
queueTail = null;
}
queueHead = next.next;
next.next = null;
worker.call(context, next.value, next.worked);
if (queueTail === null) {
self.empty();
}
} else {
_running--;
}
} else if (--_running === 0) {
self.drain();
}
}
function kill () {
queueHead = null;
queueTail = null;
self.drain = noop$4;
}
function killAndDrain () {
queueHead = null;
queueTail = null;
self.drain();
self.drain = noop$4;
}
function error (handler) {
errorHandler = handler;
}
}
function noop$4 () {}
function Task () {
this.value = null;
this.callback = noop$4;
this.next = null;
this.release = noop$4;
this.context = null;
this.errorHandler = null;
var self = this;
this.worked = function worked (err, result) {
var callback = self.callback;
var errorHandler = self.errorHandler;
var val = self.value;
self.value = null;
self.callback = noop$4;
if (self.errorHandler) {
errorHandler(err, val);
}
callback.call(self.context, err, result);
self.release(self);
};
}
function queueAsPromised (context, worker, _concurrency) {
if (typeof context === 'function') {
_concurrency = worker;
worker = context;
context = null;
}
function asyncWrapper (arg, cb) {
worker.call(this, arg)
.then(function (res) {
cb(null, res);
}, cb);
}
var queue = fastqueue(context, asyncWrapper, _concurrency);
var pushCb = queue.push;
var unshiftCb = queue.unshift;
queue.push = push;
queue.unshift = unshift;
queue.drained = drained;
return queue
function push (value) {
var p = new Promise(function (resolve, reject) {
pushCb(value, function (err, result) {
if (err) {
reject(err);
return
}
resolve(result);
});
});
// Let's fork the promise chain to
// make the error bubble up to the user but
// not lead to an unhandledRejection
p.catch(noop$4);
return p
}
function unshift (value) {
var p = new Promise(function (resolve, reject) {
unshiftCb(value, function (err, result) {
if (err) {
reject(err);
return
}
resolve(result);
});
});
// Let's fork the promise chain to
// make the error bubble up to the user but
// not lead to an unhandledRejection
p.catch(noop$4);
return p
}
function drained () {
if (queue.idle()) {
return new Promise(function (resolve) {
resolve();
})
}
var previousDrain = queue.drain;
var p = new Promise(function (resolve) {
queue.drain = function () {
previousDrain();
resolve();
};
});
return p
}
}
queue.exports = fastqueue;
queue.exports.promise = queueAsPromised;
var queueExports = queue.exports;
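// Illustrative sketch of the queue assembled above (declared but never invoked):
// a worker that doubles its input, run with a concurrency of 1. The worker and
// payload names are hypothetical. The promise-based variant pre-catches each
// returned promise so a failed task reaches the caller without also triggering
// an unhandledRejection.
function __exampleFastqueueUsage () {
  var q = fastqueue(function worker (task, cb) {
    cb(null, task * 2);
  }, 1);
  q.push(21, function (err, result) {
    // err === null, result === 42
  });
  var pq = queueAsPromised(function worker (task) {
    return Promise.resolve(task * 2);
  }, 1);
  return pq.push(21).then(function (result) {
    return result; // 42
  });
}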
var common$7 = {};
Object.defineProperty(common$7, "__esModule", { value: true });
common$7.joinPathSegments = common$7.replacePathSegmentSeparator = common$7.isAppliedFilter = common$7.isFatalError = void 0;
function isFatalError(settings, error) {
if (settings.errorFilter === null) {
return true;
}
return !settings.errorFilter(error);
}
common$7.isFatalError = isFatalError;
function isAppliedFilter(filter, value) {
return filter === null || filter(value);
}
common$7.isAppliedFilter = isAppliedFilter;
function replacePathSegmentSeparator(filepath, separator) {
return filepath.split(/[/\\]/).join(separator);
}
common$7.replacePathSegmentSeparator = replacePathSegmentSeparator;
function joinPathSegments(a, b, separator) {
if (a === '') {
return b;
}
/**
* Correctly handle cases where the first segment is a root (`/`, `C:/`) or a UNC path (`//?/C:/`).
*/
if (a.endsWith(separator)) {
return a + b;
}
return a + separator + b;
}
common$7.joinPathSegments = joinPathSegments;
var reader$1 = {};
Object.defineProperty(reader$1, "__esModule", { value: true });
const common$6 = common$7;
let Reader$1 = class Reader {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._root = common$6.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);
}
};
reader$1.default = Reader$1;
Object.defineProperty(async$4, "__esModule", { value: true });
const events_1 = require$$0$7;
const fsScandir$2 = out$2;
const fastq = queueExports;
const common$5 = common$7;
const reader_1$4 = reader$1;
class AsyncReader extends reader_1$4.default {
constructor(_root, _settings) {
super(_root, _settings);
this._settings = _settings;
this._scandir = fsScandir$2.scandir;
this._emitter = new events_1.EventEmitter();
this._queue = fastq(this._worker.bind(this), this._settings.concurrency);
this._isFatalError = false;
this._isDestroyed = false;
this._queue.drain = () => {
if (!this._isFatalError) {
this._emitter.emit('end');
}
};
}
read() {
this._isFatalError = false;
this._isDestroyed = false;
setImmediate(() => {
this._pushToQueue(this._root, this._settings.basePath);
});
return this._emitter;
}
get isDestroyed() {
return this._isDestroyed;
}
destroy() {
if (this._isDestroyed) {
throw new Error('The reader is already destroyed');
}
this._isDestroyed = true;
this._queue.killAndDrain();
}
onEntry(callback) {
this._emitter.on('entry', callback);
}
onError(callback) {
this._emitter.once('error', callback);
}
onEnd(callback) {
this._emitter.once('end', callback);
}
_pushToQueue(directory, base) {
const queueItem = { directory, base };
this._queue.push(queueItem, (error) => {
if (error !== null) {
this._handleError(error);
}
});
}
_worker(item, done) {
this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {
if (error !== null) {
done(error, undefined);
return;
}
for (const entry of entries) {
this._handleEntry(entry, item.base);
}
done(null, undefined);
});
}
_handleError(error) {
if (this._isDestroyed || !common$5.isFatalError(this._settings, error)) {
return;
}
this._isFatalError = true;
this._isDestroyed = true;
this._emitter.emit('error', error);
}
_handleEntry(entry, base) {
if (this._isDestroyed || this._isFatalError) {
return;
}
const fullpath = entry.path;
if (base !== undefined) {
entry.path = common$5.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
}
if (common$5.isAppliedFilter(this._settings.entryFilter, entry)) {
this._emitEntry(entry);
}
if (entry.dirent.isDirectory() && common$5.isAppliedFilter(this._settings.deepFilter, entry)) {
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
}
}
_emitEntry(entry) {
this._emitter.emit('entry', entry);
}
}
async$4.default = AsyncReader;
Object.defineProperty(async$5, "__esModule", { value: true });
const async_1$4 = async$4;
class AsyncProvider {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._reader = new async_1$4.default(this._root, this._settings);
this._storage = [];
}
read(callback) {
this._reader.onError((error) => {
callFailureCallback(callback, error);
});
this._reader.onEntry((entry) => {
this._storage.push(entry);
});
this._reader.onEnd(() => {
callSuccessCallback(callback, this._storage);
});
this._reader.read();
}
}
async$5.default = AsyncProvider;
function callFailureCallback(callback, error) {
callback(error);
}
function callSuccessCallback(callback, entries) {
callback(null, entries);
}
var stream$2 = {};
Object.defineProperty(stream$2, "__esModule", { value: true });
const stream_1$5 = require$$0$6;
const async_1$3 = async$4;
class StreamProvider {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._reader = new async_1$3.default(this._root, this._settings);
this._stream = new stream_1$5.Readable({
objectMode: true,
read: () => { },
destroy: () => {
if (!this._reader.isDestroyed) {
this._reader.destroy();
}
}
});
}
read() {
this._reader.onError((error) => {
this._stream.emit('error', error);
});
this._reader.onEntry((entry) => {
this._stream.push(entry);
});
this._reader.onEnd(() => {
this._stream.push(null);
});
this._reader.read();
return this._stream;
}
}
stream$2.default = StreamProvider;
var sync$4 = {};
var sync$3 = {};
Object.defineProperty(sync$3, "__esModule", { value: true });
const fsScandir$1 = out$2;
const common$4 = common$7;
const reader_1$3 = reader$1;
class SyncReader extends reader_1$3.default {
constructor() {
super(...arguments);
this._scandir = fsScandir$1.scandirSync;
this._storage = [];
this._queue = new Set();
}
read() {
this._pushToQueue(this._root, this._settings.basePath);
this._handleQueue();
return this._storage;
}
_pushToQueue(directory, base) {
this._queue.add({ directory, base });
}
_handleQueue() {
for (const item of this._queue.values()) {
this._handleDirectory(item.directory, item.base);
}
}
_handleDirectory(directory, base) {
try {
const entries = this._scandir(directory, this._settings.fsScandirSettings);
for (const entry of entries) {
this._handleEntry(entry, base);
}
}
catch (error) {
this._handleError(error);
}
}
_handleError(error) {
if (!common$4.isFatalError(this._settings, error)) {
return;
}
throw error;
}
_handleEntry(entry, base) {
const fullpath = entry.path;
if (base !== undefined) {
entry.path = common$4.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
}
if (common$4.isAppliedFilter(this._settings.entryFilter, entry)) {
this._pushToStorage(entry);
}
if (entry.dirent.isDirectory() && common$4.isAppliedFilter(this._settings.deepFilter, entry)) {
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
}
}
_pushToStorage(entry) {
this._storage.push(entry);
}
}
sync$3.default = SyncReader;
Object.defineProperty(sync$4, "__esModule", { value: true });
const sync_1$3 = sync$3;
class SyncProvider {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._reader = new sync_1$3.default(this._root, this._settings);
}
read() {
return this._reader.read();
}
}
sync$4.default = SyncProvider;
var settings$1 = {};
Object.defineProperty(settings$1, "__esModule", { value: true });
const path$d = require$$0$4;
const fsScandir = out$2;
class Settings {
constructor(_options = {}) {
this._options = _options;
this.basePath = this._getValue(this._options.basePath, undefined);
this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY);
this.deepFilter = this._getValue(this._options.deepFilter, null);
this.entryFilter = this._getValue(this._options.entryFilter, null);
this.errorFilter = this._getValue(this._options.errorFilter, null);
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path$d.sep);
this.fsScandirSettings = new fsScandir.Settings({
followSymbolicLinks: this._options.followSymbolicLinks,
fs: this._options.fs,
pathSegmentSeparator: this._options.pathSegmentSeparator,
stats: this._options.stats,
throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink
});
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
}
settings$1.default = Settings;
Object.defineProperty(out$3, "__esModule", { value: true });
out$3.Settings = out$3.walkStream = out$3.walkSync = out$3.walk = void 0;
const async_1$2 = async$5;
const stream_1$4 = stream$2;
const sync_1$2 = sync$4;
const settings_1$1 = settings$1;
out$3.Settings = settings_1$1.default;
function walk$2(directory, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
new async_1$2.default(directory, getSettings()).read(optionsOrSettingsOrCallback);
return;
}
new async_1$2.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);
}
out$3.walk = walk$2;
function walkSync(directory, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
const provider = new sync_1$2.default(directory, settings);
return provider.read();
}
out$3.walkSync = walkSync;
function walkStream(directory, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
const provider = new stream_1$4.default(directory, settings);
return provider.read();
}
out$3.walkStream = walkStream;
function getSettings(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1$1.default) {
return settingsOrOptions;
}
return new settings_1$1.default(settingsOrOptions);
}
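// Illustrative sketch (never invoked) of the walk API exported above: walkSync
// with the optional deepFilter/entryFilter settings. The directory name is
// hypothetical; entries carry `name`, `path` and `dirent` as produced by scandir.
function __exampleFsWalkUsage () {
  const entries = walkSync('some-directory', {
    // Skip dot-directories while descending.
    deepFilter: (entry) => !entry.name.startsWith('.'),
    // Keep only regular files in the result.
    entryFilter: (entry) => entry.dirent.isFile()
  });
  return entries.map((entry) => entry.path);
}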
var reader = {};
Object.defineProperty(reader, "__esModule", { value: true });
const path$c = require$$0$4;
const fsStat$2 = out$1;
const utils$6 = utils$g;
class Reader {
constructor(_settings) {
this._settings = _settings;
this._fsStatSettings = new fsStat$2.Settings({
followSymbolicLink: this._settings.followSymbolicLinks,
fs: this._settings.fs,
throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks
});
}
_getFullEntryPath(filepath) {
return path$c.resolve(this._settings.cwd, filepath);
}
_makeEntry(stats, pattern) {
const entry = {
name: pattern,
path: pattern,
dirent: utils$6.fs.createDirentFromStats(pattern, stats)
};
if (this._settings.stats) {
entry.stats = stats;
}
return entry;
}
_isFatalError(error) {
return !utils$6.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;
}
}
reader.default = Reader;
var stream$1 = {};
Object.defineProperty(stream$1, "__esModule", { value: true });
const stream_1$3 = require$$0$6;
const fsStat$1 = out$1;
const fsWalk$2 = out$3;
const reader_1$2 = reader;
class ReaderStream extends reader_1$2.default {
constructor() {
super(...arguments);
this._walkStream = fsWalk$2.walkStream;
this._stat = fsStat$1.stat;
}
dynamic(root, options) {
return this._walkStream(root, options);
}
static(patterns, options) {
const filepaths = patterns.map(this._getFullEntryPath, this);
const stream = new stream_1$3.PassThrough({ objectMode: true });
stream._write = (index, _enc, done) => {
return this._getEntry(filepaths[index], patterns[index], options)
.then((entry) => {
if (entry !== null && options.entryFilter(entry)) {
stream.push(entry);
}
if (index === filepaths.length - 1) {
stream.end();
}
done();
})
.catch(done);
};
for (let i = 0; i < filepaths.length; i++) {
stream.write(i);
}
return stream;
}
_getEntry(filepath, pattern, options) {
return this._getStat(filepath)
.then((stats) => this._makeEntry(stats, pattern))
.catch((error) => {
if (options.errorFilter(error)) {
return null;
}
throw error;
});
}
_getStat(filepath) {
return new Promise((resolve, reject) => {
this._stat(filepath, this._fsStatSettings, (error, stats) => {
return error === null ? resolve(stats) : reject(error);
});
});
}
}
stream$1.default = ReaderStream;
Object.defineProperty(async$6, "__esModule", { value: true });
const fsWalk$1 = out$3;
const reader_1$1 = reader;
const stream_1$2 = stream$1;
class ReaderAsync extends reader_1$1.default {
constructor() {
super(...arguments);
this._walkAsync = fsWalk$1.walk;
this._readerStream = new stream_1$2.default(this._settings);
}
dynamic(root, options) {
return new Promise((resolve, reject) => {
this._walkAsync(root, options, (error, entries) => {
if (error === null) {
resolve(entries);
}
else {
reject(error);
}
});
});
}
async static(patterns, options) {
const entries = [];
const stream = this._readerStream.static(patterns, options);
// After #235, replace it with an asynchronous iterator.
return new Promise((resolve, reject) => {
stream.once('error', reject);
stream.on('data', (entry) => entries.push(entry));
stream.once('end', () => resolve(entries));
});
}
}
async$6.default = ReaderAsync;
var provider = {};
var deep = {};
var partial = {};
var matcher = {};
Object.defineProperty(matcher, "__esModule", { value: true });
const utils$5 = utils$g;
class Matcher {
constructor(_patterns, _settings, _micromatchOptions) {
this._patterns = _patterns;
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
this._storage = [];
this._fillStorage();
}
_fillStorage() {
for (const pattern of this._patterns) {
const segments = this._getPatternSegments(pattern);
const sections = this._splitSegmentsIntoSections(segments);
this._storage.push({
complete: sections.length <= 1,
pattern,
segments,
sections
});
}
}
_getPatternSegments(pattern) {
const parts = utils$5.pattern.getPatternParts(pattern, this._micromatchOptions);
return parts.map((part) => {
const dynamic = utils$5.pattern.isDynamicPattern(part, this._settings);
if (!dynamic) {
return {
dynamic: false,
pattern: part
};
}
return {
dynamic: true,
pattern: part,
patternRe: utils$5.pattern.makeRe(part, this._micromatchOptions)
};
});
}
_splitSegmentsIntoSections(segments) {
return utils$5.array.splitWhen(segments, (segment) => segment.dynamic && utils$5.pattern.hasGlobStar(segment.pattern));
}
}
matcher.default = Matcher;
Object.defineProperty(partial, "__esModule", { value: true });
const matcher_1 = matcher;
class PartialMatcher extends matcher_1.default {
match(filepath) {
const parts = filepath.split('/');
const levels = parts.length;
const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels);
for (const pattern of patterns) {
const section = pattern.sections[0];
/**
* In this case, the pattern has a globstar and we must read all directories unconditionally,
* but only if the level has reached the end of the first group.
*
* fixtures/{a,b}/**
* ^ true/false ^ always true
*/
if (!pattern.complete && levels > section.length) {
return true;
}
const match = parts.every((part, index) => {
const segment = pattern.segments[index];
if (segment.dynamic && segment.patternRe.test(part)) {
return true;
}
if (!segment.dynamic && segment.pattern === part) {
return true;
}
return false;
});
if (match) {
return true;
}
}
return false;
}
}
partial.default = PartialMatcher;
Object.defineProperty(deep, "__esModule", { value: true });
const utils$4 = utils$g;
const partial_1 = partial;
class DeepFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
}
getFilter(basePath, positive, negative) {
const matcher = this._getMatcher(positive);
const negativeRe = this._getNegativePatternsRe(negative);
return (entry) => this._filter(basePath, entry, matcher, negativeRe);
}
_getMatcher(patterns) {
return new partial_1.default(patterns, this._settings, this._micromatchOptions);
}
_getNegativePatternsRe(patterns) {
const affectDepthOfReadingPatterns = patterns.filter(utils$4.pattern.isAffectDepthOfReadingPattern);
return utils$4.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
}
_filter(basePath, entry, matcher, negativeRe) {
if (this._isSkippedByDeep(basePath, entry.path)) {
return false;
}
if (this._isSkippedSymbolicLink(entry)) {
return false;
}
const filepath = utils$4.path.removeLeadingDotSegment(entry.path);
if (this._isSkippedByPositivePatterns(filepath, matcher)) {
return false;
}
return this._isSkippedByNegativePatterns(filepath, negativeRe);
}
_isSkippedByDeep(basePath, entryPath) {
/**
* Avoid unnecessary depth calculations when it doesn't matter.
*/
if (this._settings.deep === Infinity) {
return false;
}
return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
}
_getEntryLevel(basePath, entryPath) {
const entryPathDepth = entryPath.split('/').length;
if (basePath === '') {
return entryPathDepth;
}
const basePathDepth = basePath.split('/').length;
return entryPathDepth - basePathDepth;
}
_isSkippedSymbolicLink(entry) {
return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
}
_isSkippedByPositivePatterns(entryPath, matcher) {
return !this._settings.baseNameMatch && !matcher.match(entryPath);
}
_isSkippedByNegativePatterns(entryPath, patternsRe) {
return !utils$4.pattern.matchAny(entryPath, patternsRe);
}
}
deep.default = DeepFilter;
var entry$1 = {};
Object.defineProperty(entry$1, "__esModule", { value: true });
const utils$3 = utils$g;
class EntryFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
this.index = new Map();
}
getFilter(positive, negative) {
const positiveRe = utils$3.pattern.convertPatternsToRe(positive, this._micromatchOptions);
const negativeRe = utils$3.pattern.convertPatternsToRe(negative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true }));
return (entry) => this._filter(entry, positiveRe, negativeRe);
}
_filter(entry, positiveRe, negativeRe) {
const filepath = utils$3.path.removeLeadingDotSegment(entry.path);
if (this._settings.unique && this._isDuplicateEntry(filepath)) {
return false;
}
if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {
return false;
}
if (this._isSkippedByAbsoluteNegativePatterns(filepath, negativeRe)) {
return false;
}
const isDirectory = entry.dirent.isDirectory();
const isMatched = this._isMatchToPatterns(filepath, positiveRe, isDirectory) && !this._isMatchToPatterns(filepath, negativeRe, isDirectory);
if (this._settings.unique && isMatched) {
this._createIndexRecord(filepath);
}
return isMatched;
}
_isDuplicateEntry(filepath) {
return this.index.has(filepath);
}
_createIndexRecord(filepath) {
this.index.set(filepath, undefined);
}
_onlyFileFilter(entry) {
return this._settings.onlyFiles && !entry.dirent.isFile();
}
_onlyDirectoryFilter(entry) {
return this._settings.onlyDirectories && !entry.dirent.isDirectory();
}
_isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {
if (!this._settings.absolute) {
return false;
}
const fullpath = utils$3.path.makeAbsolute(this._settings.cwd, entryPath);
return utils$3.pattern.matchAny(fullpath, patternsRe);
}
_isMatchToPatterns(filepath, patternsRe, isDirectory) {
// Try to match files and directories against the patterns.
const isMatched = utils$3.pattern.matchAny(filepath, patternsRe);
// A pattern with a trailing slash can be used for directory matching.
// To apply such a pattern, we need to add a trailing slash to the path.
if (!isMatched && isDirectory) {
return utils$3.pattern.matchAny(filepath + '/', patternsRe);
}
return isMatched;
}
}
entry$1.default = EntryFilter;
var error$1 = {};
Object.defineProperty(error$1, "__esModule", { value: true });
const utils$2 = utils$g;
class ErrorFilter {
constructor(_settings) {
this._settings = _settings;
}
getFilter() {
return (error) => this._isNonFatalError(error);
}
_isNonFatalError(error) {
return utils$2.errno.isEnoentCodeError(error) || this._settings.suppressErrors;
}
}
error$1.default = ErrorFilter;
var entry = {};
Object.defineProperty(entry, "__esModule", { value: true });
const utils$1 = utils$g;
class EntryTransformer {
constructor(_settings) {
this._settings = _settings;
}
getTransformer() {
return (entry) => this._transform(entry);
}
_transform(entry) {
let filepath = entry.path;
if (this._settings.absolute) {
filepath = utils$1.path.makeAbsolute(this._settings.cwd, filepath);
filepath = utils$1.path.unixify(filepath);
}
if (this._settings.markDirectories && entry.dirent.isDirectory()) {
filepath += '/';
}
if (!this._settings.objectMode) {
return filepath;
}
return Object.assign(Object.assign({}, entry), { path: filepath });
}
}
entry.default = EntryTransformer;
Object.defineProperty(provider, "__esModule", { value: true });
const path$b = require$$0$4;
const deep_1 = deep;
const entry_1 = entry$1;
const error_1 = error$1;
const entry_2 = entry;
class Provider {
constructor(_settings) {
this._settings = _settings;
this.errorFilter = new error_1.default(this._settings);
this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());
this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());
this.entryTransformer = new entry_2.default(this._settings);
}
_getRootDirectory(task) {
return path$b.resolve(this._settings.cwd, task.base);
}
_getReaderOptions(task) {
const basePath = task.base === '.' ? '' : task.base;
return {
basePath,
pathSegmentSeparator: '/',
concurrency: this._settings.concurrency,
deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),
entryFilter: this.entryFilter.getFilter(task.positive, task.negative),
errorFilter: this.errorFilter.getFilter(),
followSymbolicLinks: this._settings.followSymbolicLinks,
fs: this._settings.fs,
stats: this._settings.stats,
throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,
transform: this.entryTransformer.getTransformer()
};
}
_getMicromatchOptions() {
return {
dot: this._settings.dot,
matchBase: this._settings.baseNameMatch,
nobrace: !this._settings.braceExpansion,
nocase: !this._settings.caseSensitiveMatch,
noext: !this._settings.extglob,
noglobstar: !this._settings.globstar,
posix: true,
strictSlashes: false
};
}
}
provider.default = Provider;
Object.defineProperty(async$7, "__esModule", { value: true });
const async_1$1 = async$6;
const provider_1$2 = provider;
class ProviderAsync extends provider_1$2.default {
constructor() {
super(...arguments);
this._reader = new async_1$1.default(this._settings);
}
async read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const entries = await this.api(root, task, options);
return entries.map((entry) => options.transform(entry));
}
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
async$7.default = ProviderAsync;
var stream = {};
Object.defineProperty(stream, "__esModule", { value: true });
const stream_1$1 = require$$0$6;
const stream_2 = stream$1;
const provider_1$1 = provider;
class ProviderStream extends provider_1$1.default {
constructor() {
super(...arguments);
this._reader = new stream_2.default(this._settings);
}
read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const source = this.api(root, task, options);
const destination = new stream_1$1.Readable({ objectMode: true, read: () => { } });
source
.once('error', (error) => destination.emit('error', error))
.on('data', (entry) => destination.emit('data', options.transform(entry)))
.once('end', () => destination.emit('end'));
destination
.once('close', () => source.destroy());
return destination;
}
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
stream.default = ProviderStream;
var sync$2 = {};
var sync$1 = {};
Object.defineProperty(sync$1, "__esModule", { value: true });
const fsStat = out$1;
const fsWalk = out$3;
const reader_1 = reader;
class ReaderSync extends reader_1.default {
constructor() {
super(...arguments);
this._walkSync = fsWalk.walkSync;
this._statSync = fsStat.statSync;
}
dynamic(root, options) {
return this._walkSync(root, options);
}
static(patterns, options) {
const entries = [];
for (const pattern of patterns) {
const filepath = this._getFullEntryPath(pattern);
const entry = this._getEntry(filepath, pattern, options);
if (entry === null || !options.entryFilter(entry)) {
continue;
}
entries.push(entry);
}
return entries;
}
_getEntry(filepath, pattern, options) {
try {
const stats = this._getStat(filepath);
return this._makeEntry(stats, pattern);
}
catch (error) {
if (options.errorFilter(error)) {
return null;
}
throw error;
}
}
_getStat(filepath) {
return this._statSync(filepath, this._fsStatSettings);
}
}
sync$1.default = ReaderSync;
Object.defineProperty(sync$2, "__esModule", { value: true });
const sync_1$1 = sync$1;
const provider_1 = provider;
class ProviderSync extends provider_1.default {
constructor() {
super(...arguments);
this._reader = new sync_1$1.default(this._settings);
}
read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const entries = this.api(root, task, options);
return entries.map(options.transform);
}
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
sync$2.default = ProviderSync;
var settings = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
const fs = require$$0__default;
const os = require$$2;
/**
* The `os.cpus` method can return an empty array. We expect the number of cores to be greater than zero.
* https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107
*/
const CPU_COUNT = Math.max(os.cpus().length, 1);
exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
lstatSync: fs.lstatSync,
stat: fs.stat,
statSync: fs.statSync,
readdir: fs.readdir,
readdirSync: fs.readdirSync
};
class Settings {
constructor(_options = {}) {
this._options = _options;
this.absolute = this._getValue(this._options.absolute, false);
this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);
this.braceExpansion = this._getValue(this._options.braceExpansion, true);
this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);
this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);
this.cwd = this._getValue(this._options.cwd, process.cwd());
this.deep = this._getValue(this._options.deep, Infinity);
this.dot = this._getValue(this._options.dot, false);
this.extglob = this._getValue(this._options.extglob, true);
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);
this.fs = this._getFileSystemMethods(this._options.fs);
this.globstar = this._getValue(this._options.globstar, true);
this.ignore = this._getValue(this._options.ignore, []);
this.markDirectories = this._getValue(this._options.markDirectories, false);
this.objectMode = this._getValue(this._options.objectMode, false);
this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);
this.onlyFiles = this._getValue(this._options.onlyFiles, true);
this.stats = this._getValue(this._options.stats, false);
this.suppressErrors = this._getValue(this._options.suppressErrors, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);
this.unique = this._getValue(this._options.unique, true);
if (this.onlyDirectories) {
this.onlyFiles = false;
}
if (this.stats) {
this.objectMode = true;
}
// Remove the cast to the array in the next major (#404).
this.ignore = [].concat(this.ignore);
}
_getValue(option, value) {
return option === undefined ? value : option;
}
_getFileSystemMethods(methods = {}) {
return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);
}
}
exports.default = Settings;
} (settings));
const taskManager = tasks;
const async_1 = async$7;
const stream_1 = stream;
const sync_1 = sync$2;
const settings_1 = settings;
const utils = utils$g;
async function FastGlob(source, options) {
assertPatternsInput(source);
const works = getWorks(source, async_1.default, options);
const result = await Promise.all(works);
return utils.array.flatten(result);
}
// https://github.com/typescript-eslint/typescript-eslint/issues/60
// eslint-disable-next-line no-redeclare
(function (FastGlob) {
FastGlob.glob = FastGlob;
FastGlob.globSync = sync;
FastGlob.globStream = stream;
FastGlob.async = FastGlob;
function sync(source, options) {
assertPatternsInput(source);
const works = getWorks(source, sync_1.default, options);
return utils.array.flatten(works);
}
FastGlob.sync = sync;
function stream(source, options) {
assertPatternsInput(source);
const works = getWorks(source, stream_1.default, options);
/**
* The stream returned by the provider cannot work with an asynchronous iterator.
* To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.
* This affects performance (+25%). I don't see a better solution right now.
*/
return utils.stream.merge(works);
}
FastGlob.stream = stream;
function generateTasks(source, options) {
assertPatternsInput(source);
const patterns = [].concat(source);
const settings = new settings_1.default(options);
return taskManager.generate(patterns, settings);
}
FastGlob.generateTasks = generateTasks;
function isDynamicPattern(source, options) {
assertPatternsInput(source);
const settings = new settings_1.default(options);
return utils.pattern.isDynamicPattern(source, settings);
}
FastGlob.isDynamicPattern = isDynamicPattern;
function escapePath(source) {
assertPatternsInput(source);
return utils.path.escape(source);
}
FastGlob.escapePath = escapePath;
function convertPathToPattern(source) {
assertPatternsInput(source);
return utils.path.convertPathToPattern(source);
}
FastGlob.convertPathToPattern = convertPathToPattern;
(function (posix) {
function escapePath(source) {
assertPatternsInput(source);
return utils.path.escapePosixPath(source);
}
posix.escapePath = escapePath;
function convertPathToPattern(source) {
assertPatternsInput(source);
return utils.path.convertPosixPathToPattern(source);
}
posix.convertPathToPattern = convertPathToPattern;
})(FastGlob.posix || (FastGlob.posix = {}));
(function (win32) {
function escapePath(source) {
assertPatternsInput(source);
return utils.path.escapeWindowsPath(source);
}
win32.escapePath = escapePath;
function convertPathToPattern(source) {
assertPatternsInput(source);
return utils.path.convertWindowsPathToPattern(source);
}
win32.convertPathToPattern = convertPathToPattern;
})(FastGlob.win32 || (FastGlob.win32 = {}));
})(FastGlob || (FastGlob = {}));
function getWorks(source, _Provider, options) {
const patterns = [].concat(source);
const settings = new settings_1.default(options);
const tasks = taskManager.generate(patterns, settings);
const provider = new _Provider(settings);
return tasks.map(provider.read, provider);
}
function assertPatternsInput(input) {
const source = [].concat(input);
const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));
if (!isValidSource) {
throw new TypeError('Patterns must be a string (non empty) or an array of strings');
}
}
var out = FastGlob;
var glob = /*@__PURE__*/getDefaultExportFromCjs(out);
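// Illustrative sketch (never invoked) of the fast-glob facade assembled above:
// the async, sync and stream entry points over the same patterns. The pattern
// strings here are hypothetical.
async function __exampleFastGlobUsage () {
  // Async API: resolves to a flat array of matched paths; '!' negates a pattern.
  const files = await glob(['src/**/*.js', '!**/*.test.js'], { dot: false });
  // Sync API: same result, computed synchronously.
  const filesSync = glob.sync('src/**/*.js');
  // Stream API: emits one entry per match, multiplexing one stream per task.
  glob.stream('src/**/*.js').on('data', (entry) => entry);
  return files.concat(filesSync);
}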
var src$2 = {};
// @ts-check
const path$a = require$$0$4;
const fs$a = require$$0__default;
const os$3 = require$$2;
const fsReadFileAsync = fs$a.promises.readFile;
/** @type {(name: string, sync: boolean) => string[]} */
function getDefaultSearchPlaces(name, sync) {
return [
'package.json',
`.${name}rc.json`,
`.${name}rc.js`,
`.${name}rc.cjs`,
...(sync ? [] : [`.${name}rc.mjs`]),
`.config/${name}rc`,
`.config/${name}rc.json`,
`.config/${name}rc.js`,
`.config/${name}rc.cjs`,
...(sync ? [] : [`.config/${name}rc.mjs`]),
`${name}.config.js`,
`${name}.config.cjs`,
...(sync ? [] : [`${name}.config.mjs`]),
];
}
/**
* @type {(p: string) => string}
*
* see #17
* On *nix, if cwd is not under homedir,
* the last path will be '', ('/build' -> '')
* but it should be '/' actually.
* And on Windows, this will never happen. ('C:\build' -> 'C:')
*/
function parentDir(p) {
return path$a.dirname(p) || path$a.sep;
}
/** @type {import('./index').LoaderSync} */
const jsonLoader = (_, content) => JSON.parse(content);
// Use plain require in webpack context for dynamic import
const requireFunc = typeof __webpack_require__ === "function" ? __non_webpack_require__ : __require;
/** @type {import('./index').LoadersSync} */
const defaultLoadersSync = Object.freeze({
'.js': requireFunc,
'.json': requireFunc,
'.cjs': requireFunc,
noExt: jsonLoader,
});
src$2.defaultLoadersSync = defaultLoadersSync;
/** @type {import('./index').Loader} */
const dynamicImport = async id => {
try {
const mod = await import(/* webpackIgnore: true */ id);
return mod.default;
} catch (e) {
try {
return requireFunc(id);
} catch (/** @type {any} */ requireE) {
if (
requireE.code === 'ERR_REQUIRE_ESM' ||
(requireE instanceof SyntaxError &&
requireE
.toString()
.includes('Cannot use import statement outside a module'))
) {
throw e;
}
throw requireE;
}
}
};
/** @type {import('./index').Loaders} */
const defaultLoaders = Object.freeze({
'.js': dynamicImport,
'.mjs': dynamicImport,
'.cjs': dynamicImport,
'.json': jsonLoader,
noExt: jsonLoader,
});
src$2.defaultLoaders = defaultLoaders;
/**
* @param {string} name
* @param {import('./index').Options | import('./index').OptionsSync} options
* @param {boolean} sync
* @returns {Required}
*/
function getOptions(name, options, sync) {
/** @type {Required} */
const conf = {
stopDir: os$3.homedir(),
searchPlaces: getDefaultSearchPlaces(name, sync),
ignoreEmptySearchPlaces: true,
cache: true,
transform: x => x,
packageProp: [name],
...options,
loaders: {
...(sync ? defaultLoadersSync : defaultLoaders),
...options.loaders,
},
};
conf.searchPlaces.forEach(place => {
const key = path$a.extname(place) || 'noExt';
const loader = conf.loaders[key];
if (!loader) {
throw new Error(`Missing loader for extension "${place}"`);
}
if (typeof loader !== 'function') {
throw new Error(
`Loader for extension "${place}" is not a function: Received ${typeof loader}.`,
);
}
});
return conf;
}
/** @type {(props: string | string[], obj: Record<string, unknown>) => unknown} */
function getPackageProp(props, obj) {
if (typeof props === 'string' && props in obj) return obj[props];
return (
(Array.isArray(props) ? props : props.split('.')).reduce(
(acc, prop) => (acc === undefined ? acc : acc[prop]),
obj,
) || null
);
}
/** @param {string} filepath */
function validateFilePath(filepath) {
if (!filepath) throw new Error('load must pass a non-empty string');
}
/** @type {(loader: import('./index').Loader, ext: string) => void} */
function validateLoader(loader, ext) {
if (!loader) throw new Error(`No loader specified for extension "${ext}"`);
if (typeof loader !== 'function') throw new Error('loader is not a function');
}
/** @type {<T>(enableCache: boolean) => (c: Map<string, T>, filepath: string, res: T) => T} */
const makeEmplace = enableCache => (c, filepath, res) => {
if (enableCache) c.set(filepath, res);
return res;
};
/** @type {import('./index').lilconfig} */
src$2.lilconfig = function lilconfig(name, options) {
const {
ignoreEmptySearchPlaces,
loaders,
packageProp,
searchPlaces,
stopDir,
transform,
cache,
} = getOptions(name, options ?? {}, false);
const searchCache = new Map();
const loadCache = new Map();
const emplace = makeEmplace(cache);
return {
async search(searchFrom = process.cwd()) {
/** @type {import('./index').LilconfigResult} */
const result = {
config: null,
filepath: '',
};
/** @type {Set} */
const visited = new Set();
let dir = searchFrom;
dirLoop: while (true) {
if (cache) {
const r = searchCache.get(dir);
if (r !== undefined) {
for (const p of visited) searchCache.set(p, r);
return r;
}
visited.add(dir);
}
for (const searchPlace of searchPlaces) {
const filepath = path$a.join(dir, searchPlace);
try {
await fs$a.promises.access(filepath);
} catch {
continue;
}
const content = String(await fsReadFileAsync(filepath));
const loaderKey = path$a.extname(searchPlace) || 'noExt';
const loader = loaders[loaderKey];
// handle package.json
if (searchPlace === 'package.json') {
const pkg = await loader(filepath, content);
const maybeConfig = getPackageProp(packageProp, pkg);
if (maybeConfig != null) {
result.config = maybeConfig;
result.filepath = filepath;
break dirLoop;
}
continue;
}
// handle other types of configs
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces) continue;
if (isEmpty) {
result.isEmpty = true;
result.config = undefined;
} else {
validateLoader(loader, loaderKey);
result.config = await loader(filepath, content);
}
result.filepath = filepath;
break dirLoop;
}
if (dir === stopDir || dir === parentDir(dir)) break dirLoop;
dir = parentDir(dir);
}
const transformed =
// not found
result.filepath === '' && result.config === null
? transform(null)
: transform(result);
if (cache) {
for (const p of visited) searchCache.set(p, transformed);
}
return transformed;
},
async load(filepath) {
validateFilePath(filepath);
const absPath = path$a.resolve(process.cwd(), filepath);
if (cache && loadCache.has(absPath)) {
return loadCache.get(absPath);
}
const {base, ext} = path$a.parse(absPath);
const loaderKey = ext || 'noExt';
const loader = loaders[loaderKey];
validateLoader(loader, loaderKey);
const content = String(await fsReadFileAsync(absPath));
if (base === 'package.json') {
const pkg = await loader(absPath, content);
return emplace(
loadCache,
absPath,
transform({
config: getPackageProp(packageProp, pkg),
filepath: absPath,
}),
);
}
/** @type {import('./index').LilconfigResult} */
const result = {
config: null,
filepath: absPath,
};
// handle other types of configs
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces)
return emplace(
loadCache,
absPath,
transform({
config: undefined,
filepath: absPath,
isEmpty: true,
}),
);
// cosmiconfig returns undefined for empty files
result.config = isEmpty ? undefined : await loader(absPath, content);
return emplace(
loadCache,
absPath,
transform(isEmpty ? {...result, isEmpty, config: undefined} : result),
);
},
clearLoadCache() {
if (cache) loadCache.clear();
},
clearSearchCache() {
if (cache) searchCache.clear();
},
clearCaches() {
if (cache) {
loadCache.clear();
searchCache.clear();
}
},
};
};
/** @type {import('./index').lilconfigSync} */
src$2.lilconfigSync = function lilconfigSync(name, options) {
const {
ignoreEmptySearchPlaces,
loaders,
packageProp,
searchPlaces,
stopDir,
transform,
cache,
} = getOptions(name, options ?? {}, true);
const searchCache = new Map();
const loadCache = new Map();
const emplace = makeEmplace(cache);
return {
search(searchFrom = process.cwd()) {
/** @type {import('./index').LilconfigResult} */
const result = {
config: null,
filepath: '',
};
/** @type {Set} */
const visited = new Set();
let dir = searchFrom;
dirLoop: while (true) {
if (cache) {
const r = searchCache.get(dir);
if (r !== undefined) {
for (const p of visited) searchCache.set(p, r);
return r;
}
visited.add(dir);
}
for (const searchPlace of searchPlaces) {
const filepath = path$a.join(dir, searchPlace);
try {
fs$a.accessSync(filepath);
} catch {
continue;
}
const loaderKey = path$a.extname(searchPlace) || 'noExt';
const loader = loaders[loaderKey];
const content = String(fs$a.readFileSync(filepath));
// handle package.json
if (searchPlace === 'package.json') {
const pkg = loader(filepath, content);
const maybeConfig = getPackageProp(packageProp, pkg);
if (maybeConfig != null) {
result.config = maybeConfig;
result.filepath = filepath;
break dirLoop;
}
continue;
}
// handle other types of configs
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces) continue;
if (isEmpty) {
result.isEmpty = true;
result.config = undefined;
} else {
validateLoader(loader, loaderKey);
result.config = loader(filepath, content);
}
result.filepath = filepath;
break dirLoop;
}
if (dir === stopDir || dir === parentDir(dir)) break dirLoop;
dir = parentDir(dir);
}
const transformed =
// not found
result.filepath === '' && result.config === null
? transform(null)
: transform(result);
if (cache) {
for (const p of visited) searchCache.set(p, transformed);
}
return transformed;
},
load(filepath) {
validateFilePath(filepath);
const absPath = path$a.resolve(process.cwd(), filepath);
if (cache && loadCache.has(absPath)) {
return loadCache.get(absPath);
}
const {base, ext} = path$a.parse(absPath);
const loaderKey = ext || 'noExt';
const loader = loaders[loaderKey];
validateLoader(loader, loaderKey);
const content = String(fs$a.readFileSync(absPath));
if (base === 'package.json') {
const pkg = loader(absPath, content);
return transform({
config: getPackageProp(packageProp, pkg),
filepath: absPath,
});
}
const result = {
config: null,
filepath: absPath,
};
// handle other types of configs
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces)
return emplace(
loadCache,
absPath,
transform({
filepath: absPath,
config: undefined,
isEmpty: true,
}),
);
// cosmiconfig returns undefined for empty files
result.config = isEmpty ? undefined : loader(absPath, content);
return emplace(
loadCache,
absPath,
transform(isEmpty ? {...result, isEmpty, config: undefined} : result),
);
},
clearLoadCache() {
if (cache) loadCache.clear();
},
clearSearchCache() {
if (cache) searchCache.clear();
},
clearCaches() {
if (cache) {
loadCache.clear();
searchCache.clear();
}
},
};
};
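// Illustrative sketch (never invoked) of the lilconfig API defined above:
// searching upward from the current directory, then loading an explicit file.
// The tool name and config file path are hypothetical.
async function __exampleLilconfigUsage () {
  const explorer = src$2.lilconfig('mytool', { stopDir: os$3.homedir() });
  // Walks from process.cwd() toward stopDir, checking package.json,
  // .mytoolrc.json, mytool.config.js, etc., caching visited directories.
  const found = await explorer.search();
  // Loads one specific file, picking the loader by its extension.
  const loaded = await explorer.load('mytool.config.js');
  return { found, loaded };
}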
const ALIAS = Symbol.for('yaml.alias');
const DOC = Symbol.for('yaml.document');
const MAP = Symbol.for('yaml.map');
const PAIR = Symbol.for('yaml.pair');
const SCALAR$1 = Symbol.for('yaml.scalar');
const SEQ = Symbol.for('yaml.seq');
const NODE_TYPE = Symbol.for('yaml.node.type');
const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;
const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;
const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;
const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;
const isScalar$1 = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR$1;
const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;
function isCollection$1(node) {
if (node && typeof node === 'object')
switch (node[NODE_TYPE]) {
case MAP:
case SEQ:
return true;
}
return false;
}
function isNode$1(node) {
if (node && typeof node === 'object')
switch (node[NODE_TYPE]) {
case ALIAS:
case MAP:
case SCALAR$1:
case SEQ:
return true;
}
return false;
}
const hasAnchor = (node) => (isScalar$1(node) || isCollection$1(node)) && !!node.anchor;
const BREAK$1 = Symbol('break visit');
const SKIP$1 = Symbol('skip children');
const REMOVE$1 = Symbol('remove node');
/**
* Apply a visitor to an AST node or document.
*
* Walks through the tree (depth-first) starting from `node`, calling a
* `visitor` function with three arguments:
* - `key`: For sequence values and map `Pair`, the node's index in the
* collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
* `null` for the root node.
* - `node`: The current node.
* - `path`: The ancestry of the current node.
*
* The return value of the visitor may be used to control the traversal:
* - `undefined` (default): Do nothing and continue
* - `visit.SKIP`: Do not visit the children of this node, continue with next
* sibling
* - `visit.BREAK`: Terminate traversal completely
* - `visit.REMOVE`: Remove the current node, then continue with the next one
* - `Node`: Replace the current node, then continue by visiting it
* - `number`: While iterating the items of a sequence or map, set the index
* of the next step. This is useful especially if the index of the current
* node has changed.
*
* If `visitor` is a single function, it will be called with all values
* encountered in the tree, including e.g. `null` values. Alternatively,
* separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
* `Alias` and `Scalar` node. To define the same visitor function for more than
* one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
* and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
* specific defined one will be used for each node.
*/
function visit$1(node, visitor) {
const visitor_ = initVisitor(visitor);
if (isDocument(node)) {
const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));
if (cd === REMOVE$1)
node.contents = null;
}
else
visit_(null, node, visitor_, Object.freeze([]));
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visit$1.BREAK = BREAK$1;
/** Do not visit the children of the current node */
visit$1.SKIP = SKIP$1;
/** Remove the current node */
visit$1.REMOVE = REMOVE$1;
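// Illustrative sketch (never invoked) of the visitor documented above: count
// scalar nodes and stop early once a limit is reached. The document is assumed
// to come from the YAML parser elsewhere in this bundle.
function __exampleVisitUsage (doc) {
  let scalars = 0;
  visit$1(doc, {
    // Called for every Scalar node; returning visit.BREAK stops the walk.
    Scalar (key, node) {
      scalars += 1;
      if (scalars >= 100) return visit$1.BREAK;
    }
  });
  return scalars;
}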
function visit_(key, node, visitor, path) {
const ctrl = callVisitor(key, node, visitor, path);
if (isNode$1(ctrl) || isPair(ctrl)) {
replaceNode(key, path, ctrl);
return visit_(key, ctrl, visitor, path);
}
if (typeof ctrl !== 'symbol') {
if (isCollection$1(node)) {
path = Object.freeze(path.concat(node));
for (let i = 0; i < node.items.length; ++i) {
const ci = visit_(i, node.items[i], visitor, path);
if (typeof ci === 'number')
i = ci - 1;
else if (ci === BREAK$1)
return BREAK$1;
else if (ci === REMOVE$1) {
node.items.splice(i, 1);
i -= 1;
}
}
}
else if (isPair(node)) {
path = Object.freeze(path.concat(node));
const ck = visit_('key', node.key, visitor, path);
if (ck === BREAK$1)
return BREAK$1;
else if (ck === REMOVE$1)
node.key = null;
const cv = visit_('value', node.value, visitor, path);
if (cv === BREAK$1)
return BREAK$1;
else if (cv === REMOVE$1)
node.value = null;
}
}
return ctrl;
}
/**
* Apply an async visitor to an AST node or document.
*
* Walks through the tree (depth-first) starting from `node`, calling a
* `visitor` function with three arguments:
* - `key`: For sequence values and map `Pair`, the node's index in the
* collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
* `null` for the root node.
* - `node`: The current node.
* - `path`: The ancestry of the current node.
*
* The return value of the visitor may be used to control the traversal:
* - `Promise`: Must resolve to one of the following values
* - `undefined` (default): Do nothing and continue
* - `visit.SKIP`: Do not visit the children of this node, continue with next
* sibling
* - `visit.BREAK`: Terminate traversal completely
* - `visit.REMOVE`: Remove the current node, then continue with the next one
* - `Node`: Replace the current node, then continue by visiting it
* - `number`: While iterating the items of a sequence or map, set the index
* of the next step. This is useful especially if the index of the current
* node has changed.
*
* If `visitor` is a single function, it will be called with all values
* encountered in the tree, including e.g. `null` values. Alternatively,
* separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
* `Alias` and `Scalar` node. To define the same visitor function for more than
* one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
* and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
* specific defined one will be used for each node.
*/
async function visitAsync(node, visitor) {
const visitor_ = initVisitor(visitor);
if (isDocument(node)) {
const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));
if (cd === REMOVE$1)
node.contents = null;
}
else
await visitAsync_(null, node, visitor_, Object.freeze([]));
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visitAsync.BREAK = BREAK$1;
/** Do not visit the children of the current node */
visitAsync.SKIP = SKIP$1;
/** Remove the current node */
visitAsync.REMOVE = REMOVE$1;
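// Illustrative sketch (never invoked) of the async variant documented above:
// the visitor may return a Promise, e.g. to rewrite scalar values from an
// asynchronous lookup. `lookupValue` is a hypothetical async function.
async function __exampleVisitAsyncUsage (doc, lookupValue) {
  await visitAsync(doc, {
    async Scalar (key, node) {
      node.value = await lookupValue(node.value);
    }
  });
  return doc;
}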
async function visitAsync_(key, node, visitor, path) {
const ctrl = await callVisitor(key, node, visitor, path);
if (isNode$1(ctrl) || isPair(ctrl)) {
replaceNode(key, path, ctrl);
return visitAsync_(key, ctrl, visitor, path);
}
if (typeof ctrl !== 'symbol') {
if (isCollection$1(node)) {
path = Object.freeze(path.concat(node));
for (let i = 0; i < node.items.length; ++i) {
const ci = await visitAsync_(i, node.items[i], visitor, path);
if (typeof ci === 'number')
i = ci - 1;
else if (ci === BREAK$1)
return BREAK$1;
else if (ci === REMOVE$1) {
node.items.splice(i, 1);
i -= 1;
}
}
}
else if (isPair(node)) {
path = Object.freeze(path.concat(node));
const ck = await visitAsync_('key', node.key, visitor, path);
if (ck === BREAK$1)
return BREAK$1;
else if (ck === REMOVE$1)
node.key = null;
const cv = await visitAsync_('value', node.value, visitor, path);
if (cv === BREAK$1)
return BREAK$1;
else if (cv === REMOVE$1)
node.value = null;
}
}
return ctrl;
}
function initVisitor(visitor) {
if (typeof visitor === 'object' &&
(visitor.Collection || visitor.Node || visitor.Value)) {
return Object.assign({
Alias: visitor.Node,
Map: visitor.Node,
Scalar: visitor.Node,
Seq: visitor.Node
}, visitor.Value && {
Map: visitor.Value,
Scalar: visitor.Value,
Seq: visitor.Value
}, visitor.Collection && {
Map: visitor.Collection,
Seq: visitor.Collection
}, visitor);
}
return visitor;
}
function callVisitor(key, node, visitor, path) {
if (typeof visitor === 'function')
return visitor(key, node, path);
if (isMap(node))
return visitor.Map?.(key, node, path);
if (isSeq(node))
return visitor.Seq?.(key, node, path);
if (isPair(node))
return visitor.Pair?.(key, node, path);
if (isScalar$1(node))
return visitor.Scalar?.(key, node, path);
if (isAlias(node))
return visitor.Alias?.(key, node, path);
return undefined;
}
function replaceNode(key, path, node) {
const parent = path[path.length - 1];
if (isCollection$1(parent)) {
parent.items[key] = node;
}
else if (isPair(parent)) {
if (key === 'key')
parent.key = node;
else
parent.value = node;
}
else if (isDocument(parent)) {
parent.contents = node;
}
else {
const pt = isAlias(parent) ? 'alias' : 'scalar';
throw new Error(`Cannot replace node with ${pt} parent`);
}
}
const escapeChars = {
'!': '%21',
',': '%2C',
'[': '%5B',
']': '%5D',
'{': '%7B',
'}': '%7D'
};
const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]);
class Directives {
constructor(yaml, tags) {
/**
* The directives-end/doc-start marker `---`. If `null`, a marker may still be
* included in the document's stringified representation.
*/
this.docStart = null;
/** The doc-end marker `...`. */
this.docEnd = false;
this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
this.tags = Object.assign({}, Directives.defaultTags, tags);
}
clone() {
const copy = new Directives(this.yaml, this.tags);
copy.docStart = this.docStart;
return copy;
}
/**
* During parsing, get a Directives instance for the current document and
* update the stream state according to the current version's spec.
*/
atDocument() {
const res = new Directives(this.yaml, this.tags);
switch (this.yaml.version) {
case '1.1':
this.atNextDocument = true;
break;
case '1.2':
this.atNextDocument = false;
this.yaml = {
explicit: Directives.defaultYaml.explicit,
version: '1.2'
};
this.tags = Object.assign({}, Directives.defaultTags);
break;
}
return res;
}
/**
* @param onError - May be called even if the action was successful
* @returns `true` on success
*/
add(line, onError) {
if (this.atNextDocument) {
this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
this.tags = Object.assign({}, Directives.defaultTags);
this.atNextDocument = false;
}
const parts = line.trim().split(/[ \t]+/);
const name = parts.shift();
switch (name) {
case '%TAG': {
if (parts.length !== 2) {
onError(0, '%TAG directive should contain exactly two parts');
if (parts.length < 2)
return false;
}
const [handle, prefix] = parts;
this.tags[handle] = prefix;
return true;
}
case '%YAML': {
this.yaml.explicit = true;
if (parts.length !== 1) {
onError(0, '%YAML directive should contain exactly one part');
return false;
}
const [version] = parts;
if (version === '1.1' || version === '1.2') {
this.yaml.version = version;
return true;
}
else {
const isValid = /^\d+\.\d+$/.test(version);
onError(6, `Unsupported YAML version ${version}`, isValid);
return false;
}
}
default:
onError(0, `Unknown directive ${name}`, true);
return false;
}
}
/**
* Resolves a tag, matching handles to those defined in %TAG directives.
*
* @returns Resolved tag, which may also be the non-specific tag `'!'` or a
* `'!local'` tag, or `null` if unresolvable.
*/
tagName(source, onError) {
if (source === '!')
return '!'; // non-specific tag
if (source[0] !== '!') {
onError(`Not a valid tag: ${source}`);
return null;
}
if (source[1] === '<') {
const verbatim = source.slice(2, -1);
if (verbatim === '!' || verbatim === '!!') {
onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
return null;
}
if (source[source.length - 1] !== '>')
onError('Verbatim tags must end with a >');
return verbatim;
}
const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s);
if (!suffix)
onError(`The ${source} tag has no suffix`);
const prefix = this.tags[handle];
if (prefix) {
try {
return prefix + decodeURIComponent(suffix);
}
catch (error) {
onError(String(error));
return null;
}
}
if (handle === '!')
return source; // local tag
onError(`Could not resolve tag: ${source}`);
return null;
}
/**
* Given a fully resolved tag, returns its printable string form,
* taking into account current tag prefixes and defaults.
*/
tagString(tag) {
for (const [handle, prefix] of Object.entries(this.tags)) {
if (tag.startsWith(prefix))
return handle + escapeTagName(tag.substring(prefix.length));
}
return tag[0] === '!' ? tag : `!<${tag}>`;
}
toString(doc) {
const lines = this.yaml.explicit
? [`%YAML ${this.yaml.version || '1.2'}`]
: [];
const tagEntries = Object.entries(this.tags);
let tagNames;
if (doc && tagEntries.length > 0 && isNode$1(doc.contents)) {
const tags = {};
visit$1(doc.contents, (_key, node) => {
if (isNode$1(node) && node.tag)
tags[node.tag] = true;
});
tagNames = Object.keys(tags);
}
else
tagNames = [];
for (const [handle, prefix] of tagEntries) {
if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
continue;
if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
lines.push(`%TAG ${handle} ${prefix}`);
}
return lines.join('\n');
}
}
Directives.defaultYaml = { explicit: false, version: '1.2' };
Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
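// Illustrative sketch (never invoked) of the tag resolution described above:
// with the default '!!' handle, the shorthand and the fully resolved form
// round-trip through tagName()/tagString().
function __exampleDirectivesTags () {
  const directives = new Directives();
  const onError = () => {};
  const resolved = directives.tagName('!!str', onError); // 'tag:yaml.org,2002:str'
  const printable = directives.tagString('tag:yaml.org,2002:str'); // '!!str'
  return [resolved, printable];
}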
/**
* Verify that the input string is a valid anchor.
*
* Will throw on errors.
*/
function anchorIsValid(anchor) {
if (/[\x00-\x19\s,[\]{}]/.test(anchor)) {
const sa = JSON.stringify(anchor);
const msg = `Anchor must not contain whitespace or control characters: ${sa}`;
throw new Error(msg);
}
return true;
}
function anchorNames(root) {
const anchors = new Set();
visit$1(root, {
Value(_key, node) {
if (node.anchor)
anchors.add(node.anchor);
}
});
return anchors;
}
/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
function findNewAnchor(prefix, exclude) {
for (let i = 1; true; ++i) {
const name = `${prefix}${i}`;
if (!exclude.has(name))
return name;
}
}
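// Example (illustrative sketch, hypothetical helper, never invoked):
// findNewAnchor returns the first `${prefix}N` (N starting at 1) that is not
// already present in the exclusion set.
function exampleFindNewAnchor() {
    const used = new Set(['a1', 'a2']);
    return findNewAnchor('a', used); // -> 'a3'
}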
function createNodeAnchors(doc, prefix) {
const aliasObjects = [];
const sourceObjects = new Map();
let prevAnchors = null;
return {
onAnchor: (source) => {
aliasObjects.push(source);
if (!prevAnchors)
prevAnchors = anchorNames(doc);
const anchor = findNewAnchor(prefix, prevAnchors);
prevAnchors.add(anchor);
return anchor;
},
/**
* With circular references, the source node is only resolved after all
* of its child nodes are. This is why anchors are set only after all of
* the nodes have been created.
*/
setAnchors: () => {
for (const source of aliasObjects) {
const ref = sourceObjects.get(source);
if (typeof ref === 'object' &&
ref.anchor &&
(isScalar$1(ref.node) || isCollection$1(ref.node))) {
ref.node.anchor = ref.anchor;
}
else {
const error = new Error('Failed to resolve repeated object (this should not happen)');
error.source = source;
throw error;
}
}
},
sourceObjects
};
}
/**
* Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
* in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
* 2021 edition: https://tc39.es/ecma262/#sec-json.parse
*
* Includes extensions for handling Map and Set objects.
*/
function applyReviver(reviver, obj, key, val) {
if (val && typeof val === 'object') {
if (Array.isArray(val)) {
for (let i = 0, len = val.length; i < len; ++i) {
const v0 = val[i];
const v1 = applyReviver(reviver, val, String(i), v0);
if (v1 === undefined)
delete val[i];
else if (v1 !== v0)
val[i] = v1;
}
}
else if (val instanceof Map) {
for (const k of Array.from(val.keys())) {
const v0 = val.get(k);
const v1 = applyReviver(reviver, val, k, v0);
if (v1 === undefined)
val.delete(k);
else if (v1 !== v0)
val.set(k, v1);
}
}
else if (val instanceof Set) {
for (const v0 of Array.from(val)) {
const v1 = applyReviver(reviver, val, v0, v0);
if (v1 === undefined)
val.delete(v0);
else if (v1 !== v0) {
val.delete(v0);
val.add(v1);
}
}
}
else {
for (const [k, v0] of Object.entries(val)) {
const v1 = applyReviver(reviver, val, k, v0);
if (v1 === undefined)
delete val[k];
else if (v1 !== v0)
val[k] = v1;
}
}
}
return reviver.call(obj, key, val);
}
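// Example (illustrative sketch, hypothetical helper, never invoked):
// applyReviver walks the value depth-first and lets the reviver rewrite or
// drop entries, mirroring JSON.parse's reviver semantics.
function exampleApplyReviver() {
    const data = { keep: 1, drop: 2, nested: [3, 4] };
    const reviver = (key, value) => (key === 'drop' ? undefined : value);
    return applyReviver(reviver, { '': data }, '', data);
    // -> { keep: 1, nested: [3, 4] }, the 'drop' entry has been removed
}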
/**
* Recursively convert any node or its contents to native JavaScript
*
* @param value - The input value
* @param arg - If `value` defines a `toJSON()` method, use this
* as its first argument
* @param ctx - Conversion context, originally set in Document#toJS(). If
* `{ keep: true }` is not set, output should be suitable for JSON
* stringification.
*/
function toJS(value, arg, ctx) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
if (Array.isArray(value))
return value.map((v, i) => toJS(v, String(i), ctx));
if (value && typeof value.toJSON === 'function') {
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
if (!ctx || !hasAnchor(value))
return value.toJSON(arg, ctx);
const data = { aliasCount: 0, count: 1, res: undefined };
ctx.anchors.set(value, data);
ctx.onCreate = res => {
data.res = res;
delete ctx.onCreate;
};
const res = value.toJSON(arg, ctx);
if (ctx.onCreate)
ctx.onCreate(res);
return res;
}
if (typeof value === 'bigint' && !ctx?.keep)
return Number(value);
return value;
}
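// Example (illustrative sketch, hypothetical helper, never invoked): toJS maps
// arrays element-wise and downgrades BigInt to Number unless ctx.keep is set.
function exampleToJS() {
    return [
        toJS([1n, 2n], ''),           // -> [1, 2]
        toJS(10n, '', { keep: true }) // -> 10n (BigInt preserved)
    ];
}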
class NodeBase {
constructor(type) {
Object.defineProperty(this, NODE_TYPE, { value: type });
}
/** Create a copy of this node. */
clone() {
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
if (this.range)
copy.range = this.range.slice();
return copy;
}
/** A plain JavaScript representation of this node. */
toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
if (!isDocument(doc))
throw new TypeError('A document argument is required');
const ctx = {
anchors: new Map(),
doc,
keep: true,
mapAsMap: mapAsMap === true,
mapKeyWarned: false,
maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100
};
const res = toJS(this, '', ctx);
if (typeof onAnchor === 'function')
for (const { count, res } of ctx.anchors.values())
onAnchor(res, count);
return typeof reviver === 'function'
? applyReviver(reviver, { '': res }, '', res)
: res;
}
}
class Alias extends NodeBase {
constructor(source) {
super(ALIAS);
this.source = source;
Object.defineProperty(this, 'tag', {
set() {
throw new Error('Alias nodes cannot have tags');
}
});
}
/**
* Resolve the value of this alias within `doc`, finding the last
* instance of the `source` anchor before this node.
*/
resolve(doc) {
let found = undefined;
visit$1(doc, {
Node: (_key, node) => {
if (node === this)
return visit$1.BREAK;
if (node.anchor === this.source)
found = node;
}
});
return found;
}
toJSON(_arg, ctx) {
if (!ctx)
return { source: this.source };
const { anchors, doc, maxAliasCount } = ctx;
const source = this.resolve(doc);
if (!source) {
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
throw new ReferenceError(msg);
}
let data = anchors.get(source);
if (!data) {
// Resolve anchors for Node.prototype.toJS()
toJS(source, null, ctx);
data = anchors.get(source);
}
/* istanbul ignore if */
if (!data || data.res === undefined) {
const msg = 'This should not happen: Alias anchor was not resolved?';
throw new ReferenceError(msg);
}
if (maxAliasCount >= 0) {
data.count += 1;
if (data.aliasCount === 0)
data.aliasCount = getAliasCount(doc, source, anchors);
if (data.count * data.aliasCount > maxAliasCount) {
const msg = 'Excessive alias count indicates a resource exhaustion attack';
throw new ReferenceError(msg);
}
}
return data.res;
}
toString(ctx, _onComment, _onChompKeep) {
const src = `*${this.source}`;
if (ctx) {
anchorIsValid(this.source);
if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
throw new Error(msg);
}
if (ctx.implicitKey)
return `${src} `;
}
return src;
}
}
function getAliasCount(doc, node, anchors) {
if (isAlias(node)) {
const source = node.resolve(doc);
const anchor = anchors && source && anchors.get(source);
return anchor ? anchor.count * anchor.aliasCount : 0;
}
else if (isCollection$1(node)) {
let count = 0;
for (const item of node.items) {
const c = getAliasCount(doc, item, anchors);
if (c > count)
count = c;
}
return count;
}
else if (isPair(node)) {
const kc = getAliasCount(doc, node.key, anchors);
const vc = getAliasCount(doc, node.value, anchors);
return Math.max(kc, vc);
}
return 1;
}
const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');
class Scalar extends NodeBase {
constructor(value) {
super(SCALAR$1);
this.value = value;
}
toJSON(arg, ctx) {
return ctx?.keep ? this.value : toJS(this.value, arg, ctx);
}
toString() {
return String(this.value);
}
}
Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
Scalar.PLAIN = 'PLAIN';
Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
const defaultTagPrefix = 'tag:yaml.org,2002:';
function findTagObject(value, tagName, tags) {
if (tagName) {
const match = tags.filter(t => t.tag === tagName);
const tagObj = match.find(t => !t.format) ?? match[0];
if (!tagObj)
throw new Error(`Tag ${tagName} not found`);
return tagObj;
}
return tags.find(t => t.identify?.(value) && !t.format);
}
function createNode(value, tagName, ctx) {
if (isDocument(value))
value = value.contents;
if (isNode$1(value))
return value;
if (isPair(value)) {
const map = ctx.schema[MAP].createNode?.(ctx.schema, null, ctx);
map.items.push(value);
return map;
}
if (value instanceof String ||
value instanceof Number ||
value instanceof Boolean ||
(typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere
) {
// https://tc39.es/ecma262/#sec-serializejsonproperty
value = value.valueOf();
}
const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;
// Detect duplicate references to the same object & use Alias nodes for all
// after first. The `ref` wrapper allows for circular references to resolve.
let ref = undefined;
if (aliasDuplicateObjects && value && typeof value === 'object') {
ref = sourceObjects.get(value);
if (ref) {
if (!ref.anchor)
ref.anchor = onAnchor(value);
return new Alias(ref.anchor);
}
else {
ref = { anchor: null, node: null };
sourceObjects.set(value, ref);
}
}
if (tagName?.startsWith('!!'))
tagName = defaultTagPrefix + tagName.slice(2);
let tagObj = findTagObject(value, tagName, schema.tags);
if (!tagObj) {
if (value && typeof value.toJSON === 'function') {
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
value = value.toJSON();
}
if (!value || typeof value !== 'object') {
const node = new Scalar(value);
if (ref)
ref.node = node;
return node;
}
tagObj =
value instanceof Map
? schema[MAP]
: Symbol.iterator in Object(value)
? schema[SEQ]
: schema[MAP];
}
if (onTagObj) {
onTagObj(tagObj);
delete ctx.onTagObj;
}
const node = tagObj?.createNode
? tagObj.createNode(ctx.schema, value, ctx)
: typeof tagObj?.nodeClass?.from === 'function'
? tagObj.nodeClass.from(ctx.schema, value, ctx)
: new Scalar(value);
if (tagName)
node.tag = tagName;
else if (!tagObj.default)
node.tag = tagObj.tag;
if (ref)
ref.node = node;
return node;
}
function collectionFromPath(schema, path, value) {
let v = value;
for (let i = path.length - 1; i >= 0; --i) {
const k = path[i];
if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
const a = [];
a[k] = v;
v = a;
}
else {
v = new Map([[k, v]]);
}
}
return createNode(v, undefined, {
aliasDuplicateObjects: false,
keepUndefined: false,
onAnchor: () => {
throw new Error('This should not happen, please report a bug.');
},
schema,
sourceObjects: new Map()
});
}
// Type guard is intentionally a little wrong so as to be more useful,
// as it does not cover untypable empty non-string iterables (e.g. []).
const isEmptyPath = (path) => path == null ||
(typeof path === 'object' && !!path[Symbol.iterator]().next().done);
class Collection extends NodeBase {
constructor(type, schema) {
super(type);
Object.defineProperty(this, 'schema', {
value: schema,
configurable: true,
enumerable: false,
writable: true
});
}
/**
* Create a copy of this collection.
*
* @param schema - If defined, overwrites the original's schema
*/
clone(schema) {
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
if (schema)
copy.schema = schema;
copy.items = copy.items.map(it => isNode$1(it) || isPair(it) ? it.clone(schema) : it);
if (this.range)
copy.range = this.range.slice();
return copy;
}
/**
* Adds a value to the collection. For `!!map` and `!!omap` the value must
* be a Pair instance or a `{ key, value }` object, which may not have a key
* that already exists in the map.
*/
addIn(path, value) {
if (isEmptyPath(path))
this.add(value);
else {
const [key, ...rest] = path;
const node = this.get(key, true);
if (isCollection$1(node))
node.addIn(rest, value);
else if (node === undefined && this.schema)
this.set(key, collectionFromPath(this.schema, rest, value));
else
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
}
}
/**
* Removes a value from the collection.
* @returns `true` if the item was found and removed.
*/
deleteIn(path) {
const [key, ...rest] = path;
if (rest.length === 0)
return this.delete(key);
const node = this.get(key, true);
if (isCollection$1(node))
return node.deleteIn(rest);
else
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
}
/**
* Returns item at `key`, or `undefined` if not found. By default unwraps
* scalar values from their surrounding node; to disable set `keepScalar` to
* `true` (collections are always returned intact).
*/
getIn(path, keepScalar) {
const [key, ...rest] = path;
const node = this.get(key, true);
if (rest.length === 0)
return !keepScalar && isScalar$1(node) ? node.value : node;
else
return isCollection$1(node) ? node.getIn(rest, keepScalar) : undefined;
}
hasAllNullValues(allowScalar) {
return this.items.every(node => {
if (!isPair(node))
return false;
const n = node.value;
return (n == null ||
(allowScalar &&
isScalar$1(n) &&
n.value == null &&
!n.commentBefore &&
!n.comment &&
!n.tag));
});
}
/**
* Checks if the collection includes a value with the key `key`.
*/
hasIn(path) {
const [key, ...rest] = path;
if (rest.length === 0)
return this.has(key);
const node = this.get(key, true);
return isCollection$1(node) ? node.hasIn(rest) : false;
}
/**
* Sets a value in this collection. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*/
setIn(path, value) {
const [key, ...rest] = path;
if (rest.length === 0) {
this.set(key, value);
}
else {
const node = this.get(key, true);
if (isCollection$1(node))
node.setIn(rest, value);
else if (node === undefined && this.schema)
this.set(key, collectionFromPath(this.schema, rest, value));
else
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
}
}
}
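// Example (illustrative sketch, hypothetical helper, never invoked; uses
// YAMLMap and Scalar, both defined in this chunk, and a schema-less map is
// enough for flat set/get): the *In methods walk a path of keys, so nested
// values can be read and written without unwrapping intermediate nodes.
function exampleCollectionPaths() {
    const inner = new YAMLMap();
    inner.set('port', new Scalar(8080));
    const outer = new YAMLMap();
    outer.set('server', inner);
    outer.setIn(['server', 'host'], new Scalar('localhost'));
    return outer.getIn(['server', 'port']); // -> 8080
}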
/**
* Stringifies a comment.
*
* Empty comment lines are left empty,
* lines consisting of a single space are replaced by `#`,
* and all other lines are prefixed with a `#`.
*/
const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');
function indentComment(comment, indent) {
if (/^\n+$/.test(comment))
return comment.substring(1);
return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;
}
const lineComment = (str, indent, comment) => str.endsWith('\n')
? indentComment(comment, indent)
: comment.includes('\n')
? '\n' + indentComment(comment, indent)
: (str.endsWith(' ') ? '' : ' ') + comment;
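// Example (illustrative sketch, hypothetical helper, never invoked):
// stringifyComment prefixes each non-empty line with '#'; lineComment returns
// the suffix to append after an already stringified value.
function exampleCommentHelpers() {
    return [
        stringifyComment('first\n\nsecond'),     // -> '#first\n\n#second'
        lineComment('key: value', '  ', '#note') // -> ' #note'
    ];
}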
const FOLD_FLOW = 'flow';
const FOLD_BLOCK = 'block';
const FOLD_QUOTED = 'quoted';
/**
* Tries to keep input at up to `lineWidth` characters, splitting only on spaces
* not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
* terminated with `\n` and started with `indent`.
*/
function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
if (!lineWidth || lineWidth < 0)
return text;
if (lineWidth < minContentWidth)
minContentWidth = 0;
const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
if (text.length <= endStep)
return text;
const folds = [];
const escapedFolds = {};
let end = lineWidth - indent.length;
if (typeof indentAtStart === 'number') {
if (indentAtStart > lineWidth - Math.max(2, minContentWidth))
folds.push(0);
else
end = lineWidth - indentAtStart;
}
let split = undefined;
let prev = undefined;
let overflow = false;
let i = -1;
let escStart = -1;
let escEnd = -1;
if (mode === FOLD_BLOCK) {
i = consumeMoreIndentedLines(text, i, indent.length);
if (i !== -1)
end = i + endStep;
}
for (let ch; (ch = text[(i += 1)]);) {
if (mode === FOLD_QUOTED && ch === '\\') {
escStart = i;
switch (text[i + 1]) {
case 'x':
i += 3;
break;
case 'u':
i += 5;
break;
case 'U':
i += 9;
break;
default:
i += 1;
}
escEnd = i;
}
if (ch === '\n') {
if (mode === FOLD_BLOCK)
i = consumeMoreIndentedLines(text, i, indent.length);
end = i + indent.length + endStep;
split = undefined;
}
else {
if (ch === ' ' &&
prev &&
prev !== ' ' &&
prev !== '\n' &&
prev !== '\t') {
// space surrounded by non-space can be replaced with newline + indent
const next = text[i + 1];
if (next && next !== ' ' && next !== '\n' && next !== '\t')
split = i;
}
if (i >= end) {
if (split) {
folds.push(split);
end = split + endStep;
split = undefined;
}
else if (mode === FOLD_QUOTED) {
// white-space collected at end may stretch past lineWidth
while (prev === ' ' || prev === '\t') {
prev = ch;
ch = text[(i += 1)];
overflow = true;
}
// Account for newline escape, but don't break preceding escape
const j = i > escEnd + 1 ? i - 2 : escStart - 1;
// Bail out if lineWidth & minContentWidth are shorter than an escape string
if (escapedFolds[j])
return text;
folds.push(j);
escapedFolds[j] = true;
end = j + endStep;
split = undefined;
}
else {
overflow = true;
}
}
}
prev = ch;
}
if (overflow && onOverflow)
onOverflow();
if (folds.length === 0)
return text;
if (onFold)
onFold();
let res = text.slice(0, folds[0]);
for (let i = 0; i < folds.length; ++i) {
const fold = folds[i];
const end = folds[i + 1] || text.length;
if (fold === 0)
res = `\n${indent}${text.slice(0, end)}`;
else {
if (mode === FOLD_QUOTED && escapedFolds[fold])
res += `${text[fold]}\\`;
res += `\n${indent}${text.slice(fold + 1, end)}`;
}
}
return res;
}
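// Example (illustrative sketch, hypothetical helper, never invoked):
// foldFlowLines breaks a long line on spaces so that each output line stays
// within lineWidth, re-indenting the continuation lines.
function exampleFoldFlowLines() {
    const text = 'lorem ipsum dolor sit amet consectetur adipiscing elit sed do';
    return foldFlowLines(text, '  ', FOLD_FLOW, { lineWidth: 20 });
    // -> the same words split across several lines, each continuation line
    //    starting with the '  ' indent
}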
/**
* Presumes `i + 1` is at the start of a line
* @returns index of last newline in more-indented block
*/
function consumeMoreIndentedLines(text, i, indent) {
let end = i;
let start = i + 1;
let ch = text[start];
while (ch === ' ' || ch === '\t') {
if (i < start + indent) {
ch = text[++i];
}
else {
do {
ch = text[++i];
} while (ch && ch !== '\n');
end = i;
start = i + 1;
ch = text[start];
}
}
return end;
}
const getFoldOptions = (ctx, isBlock) => ({
indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart,
lineWidth: ctx.options.lineWidth,
minContentWidth: ctx.options.minContentWidth
});
// Also checks for lines starting with %, as parsing the output as YAML 1.1 will
// presume that's starting a new document.
const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str);
function lineLengthOverLimit(str, lineWidth, indentLength) {
if (!lineWidth || lineWidth < 0)
return false;
const limit = lineWidth - indentLength;
const strLen = str.length;
if (strLen <= limit)
return false;
for (let i = 0, start = 0; i < strLen; ++i) {
if (str[i] === '\n') {
if (i - start > limit)
return true;
start = i + 1;
if (strLen - start <= limit)
return false;
}
}
return true;
}
function doubleQuotedString(value, ctx) {
const json = JSON.stringify(value);
if (ctx.options.doubleQuotedAsJSON)
return json;
const { implicitKey } = ctx;
const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;
const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
let str = '';
let start = 0;
for (let i = 0, ch = json[i]; ch; ch = json[++i]) {
if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
// space before newline needs to be escaped to not be folded
str += json.slice(start, i) + '\\ ';
i += 1;
start = i;
ch = '\\';
}
if (ch === '\\')
switch (json[i + 1]) {
case 'u':
{
str += json.slice(start, i);
const code = json.substr(i + 2, 4);
switch (code) {
case '0000':
str += '\\0';
break;
case '0007':
str += '\\a';
break;
case '000b':
str += '\\v';
break;
case '001b':
str += '\\e';
break;
case '0085':
str += '\\N';
break;
case '00a0':
str += '\\_';
break;
case '2028':
str += '\\L';
break;
case '2029':
str += '\\P';
break;
default:
if (code.substr(0, 2) === '00')
str += '\\x' + code.substr(2);
else
str += json.substr(i, 6);
}
i += 5;
start = i + 1;
}
break;
case 'n':
if (implicitKey ||
json[i + 2] === '"' ||
json.length < minMultiLineLength) {
i += 1;
}
else {
// folding will eat first newline
str += json.slice(start, i) + '\n\n';
while (json[i + 2] === '\\' &&
json[i + 3] === 'n' &&
json[i + 4] !== '"') {
str += '\n';
i += 2;
}
str += indent;
// space after newline needs to be escaped to not be folded
if (json[i + 2] === ' ')
str += '\\';
i += 1;
start = i + 1;
}
break;
default:
i += 1;
}
}
str = start ? str + json.slice(start) : json;
return implicitKey
? str
: foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx, false));
}
function singleQuotedString(value, ctx) {
if (ctx.options.singleQuote === false ||
(ctx.implicitKey && value.includes('\n')) ||
/[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline
)
return doubleQuotedString(value, ctx);
const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'";
return ctx.implicitKey
? res
: foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx, false));
}
function quotedString(value, ctx) {
const { singleQuote } = ctx.options;
let qs;
if (singleQuote === false)
qs = doubleQuotedString;
else {
const hasDouble = value.includes('"');
const hasSingle = value.includes("'");
if (hasDouble && !hasSingle)
qs = singleQuotedString;
else if (hasSingle && !hasDouble)
qs = doubleQuotedString;
else
qs = singleQuote ? singleQuotedString : doubleQuotedString;
}
return qs(value, ctx);
}
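// Example (illustrative sketch, hypothetical helper, never invoked; ctx is a
// minimal stand-in for the stringify context): quotedString prefers double
// quotes when the value itself contains a single quote.
function exampleQuotedString() {
    const ctx = {
        indent: '',
        implicitKey: true,
        options: { singleQuote: null, doubleQuotedAsJSON: false, doubleQuotedMinMultiLineLength: 40 }
    };
    return quotedString("it's", ctx); // -> the value wrapped in double quotes: "it's"
}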
// The negative lookbehind avoids a polynomial search,
// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind
let blockEndNewlines;
try {
    blockEndNewlines = new RegExp('(^|(?<!\n))\n+(?!\n|$)', 'g');
}
catch {
    blockEndNewlines = /\n+(?!\n|$)/g;
}
function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
    const { blockQuote, commentString, lineWidth } = ctx.options;
    // 1. Block can't be used without blockQuote
    // 2. Strings consisting of only whitespace characters can't be blocks
    if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
        return quotedString(value, ctx);
    }
    const indent = ctx.indent ||
        (ctx.forceBlockIndent || containsDocumentMarker(value) ? '  ' : '');
    const literal = blockQuote === 'literal'
        ? true
        : blockQuote === 'folded' || type === Scalar.BLOCK_FOLDED
            ? false
            : type === Scalar.BLOCK_LITERAL
                ? true
                : !lineLengthOverLimit(value, lineWidth, indent.length);
    if (!value)
        return literal ? '|\n' : '>\n';
// determine chomping from whitespace at value end
let chomp;
let endStart;
for (endStart = value.length; endStart > 0; --endStart) {
const ch = value[endStart - 1];
if (ch !== '\n' && ch !== '\t' && ch !== ' ')
break;
}
let end = value.substring(endStart);
const endNlPos = end.indexOf('\n');
if (endNlPos === -1) {
chomp = '-'; // strip
}
else if (value === end || endNlPos !== end.length - 1) {
chomp = '+'; // keep
if (onChompKeep)
onChompKeep();
}
else {
chomp = ''; // clip
}
if (end) {
value = value.slice(0, -end.length);
if (end[end.length - 1] === '\n')
end = end.slice(0, -1);
end = end.replace(blockEndNewlines, `$&${indent}`);
}
// determine indent indicator from whitespace at value start
let startWithSpace = false;
let startEnd;
let startNlPos = -1;
for (startEnd = 0; startEnd < value.length; ++startEnd) {
const ch = value[startEnd];
if (ch === ' ')
startWithSpace = true;
else if (ch === '\n')
startNlPos = startEnd;
else
break;
}
let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);
if (start) {
value = value.substring(start.length);
start = start.replace(/\n+/g, `$&${indent}`);
}
const indentSize = indent ? '2' : '1'; // root is at -1
let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
if (comment) {
header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
if (onComment)
onComment();
}
if (literal) {
value = value.replace(/\n+/g, `$&${indent}`);
return `${header}\n${indent}${start}${value}${end}`;
}
value = value
.replace(/\n+/g, '\n$&')
.replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
// ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
.replace(/\n+/g, `$&${indent}`);
const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx, true));
return `${header}\n${indent}${body}`;
}
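// Example (illustrative sketch, hypothetical helper, never invoked; ctx is a
// minimal stand-in for the stringify context): an explicit BLOCK_LITERAL type
// yields a '|' block scalar with each line indented, and the single trailing
// newline is expressed by the default (clip) chomping.
function exampleBlockString() {
    const ctx = { indent: '  ', options: { blockQuote: true, lineWidth: 80 } };
    return blockString({ type: Scalar.BLOCK_LITERAL, value: 'line one\nline two\n' }, ctx);
    // -> '|\n  line one\n  line two'
}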
function plainString(item, ctx, onComment, onChompKeep) {
const { type, value } = item;
const { actualString, implicitKey, indent, indentStep, inFlow } = ctx;
if ((implicitKey && value.includes('\n')) ||
(inFlow && /[[\]{},]/.test(value))) {
return quotedString(value, ctx);
}
if (!value ||
/^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
// not allowed:
// - empty string, '-' or '?'
// - start with an indicator character (except [?:-]) or /[?-] /
// - '\n ', ': ' or ' \n' anywhere
// - '#' not preceded by a non-space char
// - end with ' ' or ':'
return implicitKey || inFlow || !value.includes('\n')
? quotedString(value, ctx)
: blockString(item, ctx, onComment, onChompKeep);
}
if (!implicitKey &&
!inFlow &&
type !== Scalar.PLAIN &&
value.includes('\n')) {
// Where allowed & type not set explicitly, prefer block style for multiline strings
return blockString(item, ctx, onComment, onChompKeep);
}
if (containsDocumentMarker(value)) {
if (indent === '') {
ctx.forceBlockIndent = true;
return blockString(item, ctx, onComment, onChompKeep);
}
else if (implicitKey && indent === indentStep) {
return quotedString(value, ctx);
}
}
const str = value.replace(/\n+/g, `$&\n${indent}`);
// Verify that output will be parsed as a string, as e.g. plain numbers and
// booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
// and others in v1.1.
if (actualString) {
const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);
const { compat, tags } = ctx.doc.schema;
if (tags.some(test) || compat?.some(test))
return quotedString(value, ctx);
}
return implicitKey
? str
: foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx, false));
}
function stringifyString(item, ctx, onComment, onChompKeep) {
const { implicitKey, inFlow } = ctx;
const ss = typeof item.value === 'string'
? item
: Object.assign({}, item, { value: String(item.value) });
let { type } = item;
if (type !== Scalar.QUOTE_DOUBLE) {
// force double quotes on control characters & unpaired surrogates
if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value))
type = Scalar.QUOTE_DOUBLE;
}
const _stringify = (_type) => {
switch (_type) {
case Scalar.BLOCK_FOLDED:
case Scalar.BLOCK_LITERAL:
return implicitKey || inFlow
? quotedString(ss.value, ctx) // blocks are not valid inside flow containers
: blockString(ss, ctx, onComment, onChompKeep);
case Scalar.QUOTE_DOUBLE:
return doubleQuotedString(ss.value, ctx);
case Scalar.QUOTE_SINGLE:
return singleQuotedString(ss.value, ctx);
case Scalar.PLAIN:
return plainString(ss, ctx, onComment, onChompKeep);
default:
return null;
}
};
let res = _stringify(type);
if (res === null) {
const { defaultKeyType, defaultStringType } = ctx.options;
const t = (implicitKey && defaultKeyType) || defaultStringType;
res = _stringify(t);
if (res === null)
throw new Error(`Unsupported default string type ${t}`);
}
return res;
}
function createStringifyContext(doc, options) {
const opt = Object.assign({
blockQuote: true,
commentString: stringifyComment,
defaultKeyType: null,
defaultStringType: 'PLAIN',
directives: null,
doubleQuotedAsJSON: false,
doubleQuotedMinMultiLineLength: 40,
falseStr: 'false',
flowCollectionPadding: true,
indentSeq: true,
lineWidth: 80,
minContentWidth: 20,
nullStr: 'null',
simpleKeys: false,
singleQuote: null,
trueStr: 'true',
verifyAliasOrder: true
}, doc.schema.toStringOptions, options);
let inFlow;
switch (opt.collectionStyle) {
case 'block':
inFlow = false;
break;
case 'flow':
inFlow = true;
break;
default:
inFlow = null;
}
return {
anchors: new Set(),
doc,
flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '',
indent: '',
indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',
inFlow,
options: opt
};
}
function getTagObject(tags, item) {
if (item.tag) {
const match = tags.filter(t => t.tag === item.tag);
if (match.length > 0)
return match.find(t => t.format === item.format) ?? match[0];
}
let tagObj = undefined;
let obj;
if (isScalar$1(item)) {
obj = item.value;
const match = tags.filter(t => t.identify?.(obj));
tagObj =
match.find(t => t.format === item.format) ?? match.find(t => !t.format);
}
else {
obj = item;
tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
}
if (!tagObj) {
const name = obj?.constructor?.name ?? typeof obj;
throw new Error(`Tag not resolved for ${name} value`);
}
return tagObj;
}
// needs to be called before value stringifier to allow for circular anchor refs
function stringifyProps(node, tagObj, { anchors, doc }) {
if (!doc.directives)
return '';
const props = [];
const anchor = (isScalar$1(node) || isCollection$1(node)) && node.anchor;
if (anchor && anchorIsValid(anchor)) {
anchors.add(anchor);
props.push(`&${anchor}`);
}
const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
if (tag)
props.push(doc.directives.tagString(tag));
return props.join(' ');
}
function stringify$2(item, ctx, onComment, onChompKeep) {
if (isPair(item))
return item.toString(ctx, onComment, onChompKeep);
if (isAlias(item)) {
if (ctx.doc.directives)
return item.toString(ctx);
if (ctx.resolvedAliases?.has(item)) {
throw new TypeError(`Cannot stringify circular structure without alias nodes`);
}
else {
if (ctx.resolvedAliases)
ctx.resolvedAliases.add(item);
else
ctx.resolvedAliases = new Set([item]);
item = item.resolve(ctx.doc);
}
}
let tagObj = undefined;
const node = isNode$1(item)
? item
: ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
if (!tagObj)
tagObj = getTagObject(ctx.doc.schema.tags, node);
const props = stringifyProps(node, tagObj, ctx);
if (props.length > 0)
ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;
const str = typeof tagObj.stringify === 'function'
? tagObj.stringify(node, ctx, onComment, onChompKeep)
: isScalar$1(node)
? stringifyString(node, ctx, onComment, onChompKeep)
: node.toString(ctx, onComment, onChompKeep);
if (!props)
return str;
return isScalar$1(node) || str[0] === '{' || str[0] === '['
? `${props} ${str}`
: `${props}\n${ctx.indent}${str}`;
}
function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;
let keyComment = (isNode$1(key) && key.comment) || null;
if (simpleKeys) {
if (keyComment) {
throw new Error('With simple keys, key nodes cannot have comments');
}
if (isCollection$1(key) || (!isNode$1(key) && typeof key === 'object')) {
const msg = 'With simple keys, collection cannot be used as a key value';
throw new Error(msg);
}
}
let explicitKey = !simpleKeys &&
(!key ||
(keyComment && value == null && !ctx.inFlow) ||
isCollection$1(key) ||
(isScalar$1(key)
? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL
: typeof key === 'object'));
ctx = Object.assign({}, ctx, {
allNullValues: false,
implicitKey: !explicitKey && (simpleKeys || !allNullValues),
indent: indent + indentStep
});
let keyCommentDone = false;
let chompKeep = false;
let str = stringify$2(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));
if (!explicitKey && !ctx.inFlow && str.length > 1024) {
if (simpleKeys)
throw new Error('With simple keys, single line scalar must not span more than 1024 characters');
explicitKey = true;
}
if (ctx.inFlow) {
if (allNullValues || value == null) {
if (keyCommentDone && onComment)
onComment();
return str === '' ? '?' : explicitKey ? `? ${str}` : str;
}
}
else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {
str = `? ${str}`;
if (keyComment && !keyCommentDone) {
str += lineComment(str, ctx.indent, commentString(keyComment));
}
else if (chompKeep && onChompKeep)
onChompKeep();
return str;
}
if (keyCommentDone)
keyComment = null;
if (explicitKey) {
if (keyComment)
str += lineComment(str, ctx.indent, commentString(keyComment));
str = `? ${str}\n${indent}:`;
}
else {
str = `${str}:`;
if (keyComment)
str += lineComment(str, ctx.indent, commentString(keyComment));
}
let vsb, vcb, valueComment;
if (isNode$1(value)) {
vsb = !!value.spaceBefore;
vcb = value.commentBefore;
valueComment = value.comment;
}
else {
vsb = false;
vcb = null;
valueComment = null;
if (value && typeof value === 'object')
value = doc.createNode(value);
}
ctx.implicitKey = false;
if (!explicitKey && !keyComment && isScalar$1(value))
ctx.indentAtStart = str.length + 1;
chompKeep = false;
if (!indentSeq &&
indentStep.length >= 2 &&
!ctx.inFlow &&
!explicitKey &&
isSeq(value) &&
!value.flow &&
!value.tag &&
!value.anchor) {
// If indentSeq === false, consider '- ' as part of indentation where possible
ctx.indent = ctx.indent.substring(2);
}
let valueCommentDone = false;
const valueStr = stringify$2(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));
let ws = ' ';
if (keyComment || vsb || vcb) {
ws = vsb ? '\n' : '';
if (vcb) {
const cs = commentString(vcb);
ws += `\n${indentComment(cs, ctx.indent)}`;
}
if (valueStr === '' && !ctx.inFlow) {
if (ws === '\n')
ws = '\n\n';
}
else {
ws += `\n${ctx.indent}`;
}
}
else if (!explicitKey && isCollection$1(value)) {
const vs0 = valueStr[0];
const nl0 = valueStr.indexOf('\n');
const hasNewline = nl0 !== -1;
const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0;
if (hasNewline || !flow) {
let hasPropsLine = false;
if (hasNewline && (vs0 === '&' || vs0 === '!')) {
let sp0 = valueStr.indexOf(' ');
if (vs0 === '&' &&
sp0 !== -1 &&
sp0 < nl0 &&
valueStr[sp0 + 1] === '!') {
sp0 = valueStr.indexOf(' ', sp0 + 1);
}
if (sp0 === -1 || nl0 < sp0)
hasPropsLine = true;
}
if (!hasPropsLine)
ws = `\n${ctx.indent}`;
}
}
else if (valueStr === '' || valueStr[0] === '\n') {
ws = '';
}
str += ws + valueStr;
if (ctx.inFlow) {
if (valueCommentDone && onComment)
onComment();
}
else if (valueComment && !valueCommentDone) {
str += lineComment(str, ctx.indent, commentString(valueComment));
}
else if (chompKeep && onChompKeep) {
onChompKeep();
}
return str;
}
function warn(logLevel, warning) {
if (logLevel === 'debug' || logLevel === 'warn') {
// https://github.com/typescript-eslint/typescript-eslint/issues/7478
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
if (typeof process !== 'undefined' && process.emitWarning)
process.emitWarning(warning);
else
console.warn(warning);
}
}
const MERGE_KEY = '<<';
function addPairToJSMap(ctx, map, { key, value }) {
if (ctx?.doc.schema.merge && isMergeKey(key)) {
value = isAlias(value) ? value.resolve(ctx.doc) : value;
if (isSeq(value))
for (const it of value.items)
mergeToJSMap(ctx, map, it);
else if (Array.isArray(value))
for (const it of value)
mergeToJSMap(ctx, map, it);
else
mergeToJSMap(ctx, map, value);
}
else {
const jsKey = toJS(key, '', ctx);
if (map instanceof Map) {
map.set(jsKey, toJS(value, jsKey, ctx));
}
else if (map instanceof Set) {
map.add(jsKey);
}
else {
const stringKey = stringifyKey(key, jsKey, ctx);
const jsValue = toJS(value, stringKey, ctx);
if (stringKey in map)
Object.defineProperty(map, stringKey, {
value: jsValue,
writable: true,
enumerable: true,
configurable: true
});
else
map[stringKey] = jsValue;
}
}
return map;
}
const isMergeKey = (key) => key === MERGE_KEY ||
(isScalar$1(key) &&
key.value === MERGE_KEY &&
(!key.type || key.type === Scalar.PLAIN));
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
function mergeToJSMap(ctx, map, value) {
const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
if (!isMap(source))
throw new Error('Merge sources must be maps or map aliases');
const srcMap = source.toJSON(null, ctx, Map);
for (const [key, value] of srcMap) {
if (map instanceof Map) {
if (!map.has(key))
map.set(key, value);
}
else if (map instanceof Set) {
map.add(key);
}
else if (!Object.prototype.hasOwnProperty.call(map, key)) {
Object.defineProperty(map, key, {
value,
writable: true,
enumerable: true,
configurable: true
});
}
}
return map;
}
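// Example (illustrative, shown as a YAML snippet in a comment): given
//   base: &base { a: 1, b: 2 }
//   child: { <<: *base, b: 3 }
// resolving `child` to JS yields { a: 1, b: 3 }: the merge source fills in only
// keys that are not already set, and the mapping's own entries take precedence.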
function stringifyKey(key, jsKey, ctx) {
if (jsKey === null)
return '';
if (typeof jsKey !== 'object')
return String(jsKey);
if (isNode$1(key) && ctx?.doc) {
const strCtx = createStringifyContext(ctx.doc, {});
strCtx.anchors = new Set();
for (const node of ctx.anchors.keys())
strCtx.anchors.add(node.anchor);
strCtx.inFlow = true;
strCtx.inStringifyKey = true;
const strKey = key.toString(strCtx);
if (!ctx.mapKeyWarned) {
let jsonStr = JSON.stringify(strKey);
if (jsonStr.length > 40)
jsonStr = jsonStr.substring(0, 36) + '..."';
warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);
ctx.mapKeyWarned = true;
}
return strKey;
}
return JSON.stringify(jsKey);
}
function createPair(key, value, ctx) {
const k = createNode(key, undefined, ctx);
const v = createNode(value, undefined, ctx);
return new Pair(k, v);
}
class Pair {
constructor(key, value = null) {
Object.defineProperty(this, NODE_TYPE, { value: PAIR });
this.key = key;
this.value = value;
}
clone(schema) {
let { key, value } = this;
if (isNode$1(key))
key = key.clone(schema);
if (isNode$1(value))
value = value.clone(schema);
return new Pair(key, value);
}
toJSON(_, ctx) {
const pair = ctx?.mapAsMap ? new Map() : {};
return addPairToJSMap(ctx, pair, this);
}
toString(ctx, onComment, onChompKeep) {
return ctx?.doc
? stringifyPair(this, ctx, onComment, onChompKeep)
: JSON.stringify(this);
}
}
function stringifyCollection(collection, ctx, options) {
const flow = ctx.inFlow ?? collection.flow;
const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;
return stringify(collection, ctx, options);
}
function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {
const { indent, options: { commentString } } = ctx;
const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });
let chompKeep = false; // flag for the preceding node's status
const lines = [];
for (let i = 0; i < items.length; ++i) {
const item = items[i];
let comment = null;
if (isNode$1(item)) {
if (!chompKeep && item.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, item.commentBefore, chompKeep);
if (item.comment)
comment = item.comment;
}
else if (isPair(item)) {
const ik = isNode$1(item.key) ? item.key : null;
if (ik) {
if (!chompKeep && ik.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);
}
}
chompKeep = false;
let str = stringify$2(item, itemCtx, () => (comment = null), () => (chompKeep = true));
if (comment)
str += lineComment(str, itemIndent, commentString(comment));
if (chompKeep && comment)
chompKeep = false;
lines.push(blockItemPrefix + str);
}
let str;
if (lines.length === 0) {
str = flowChars.start + flowChars.end;
}
else {
str = lines[0];
for (let i = 1; i < lines.length; ++i) {
const line = lines[i];
str += line ? `\n${indent}${line}` : '\n';
}
}
if (comment) {
str += '\n' + indentComment(commentString(comment), indent);
if (onComment)
onComment();
}
else if (chompKeep && onChompKeep)
onChompKeep();
return str;
}
function stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) {
const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx;
itemIndent += indentStep;
const itemCtx = Object.assign({}, ctx, {
indent: itemIndent,
inFlow: true,
type: null
});
let reqNewline = false;
let linesAtValue = 0;
const lines = [];
for (let i = 0; i < items.length; ++i) {
const item = items[i];
let comment = null;
if (isNode$1(item)) {
if (item.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, item.commentBefore, false);
if (item.comment)
comment = item.comment;
}
else if (isPair(item)) {
const ik = isNode$1(item.key) ? item.key : null;
if (ik) {
if (ik.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, ik.commentBefore, false);
if (ik.comment)
reqNewline = true;
}
const iv = isNode$1(item.value) ? item.value : null;
if (iv) {
if (iv.comment)
comment = iv.comment;
if (iv.commentBefore)
reqNewline = true;
}
else if (item.value == null && ik?.comment) {
comment = ik.comment;
}
}
if (comment)
reqNewline = true;
let str = stringify$2(item, itemCtx, () => (comment = null));
if (i < items.length - 1)
str += ',';
if (comment)
str += lineComment(str, itemIndent, commentString(comment));
if (!reqNewline && (lines.length > linesAtValue || str.includes('\n')))
reqNewline = true;
lines.push(str);
linesAtValue = lines.length;
}
const { start, end } = flowChars;
if (lines.length === 0) {
return start + end;
}
else {
if (!reqNewline) {
const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth;
}
if (reqNewline) {
let str = start;
for (const line of lines)
str += line ? `\n${indentStep}${indent}${line}` : '\n';
return `${str}\n${indent}${end}`;
}
else {
return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;
}
}
}
function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
if (comment && chompKeep)
comment = comment.replace(/^\n+/, '');
if (comment) {
const ic = indentComment(commentString(comment), indent);
lines.push(ic.trimStart()); // Avoid double indent on first line
}
}
function findPair(items, key) {
const k = isScalar$1(key) ? key.value : key;
for (const it of items) {
if (isPair(it)) {
if (it.key === key || it.key === k)
return it;
if (isScalar$1(it.key) && it.key.value === k)
return it;
}
}
return undefined;
}
class YAMLMap extends Collection {
static get tagName() {
return 'tag:yaml.org,2002:map';
}
constructor(schema) {
super(MAP, schema);
this.items = [];
}
/**
* A generic collection parsing method that can be extended
* to other node classes that inherit from YAMLMap
*/
static from(schema, obj, ctx) {
const { keepUndefined, replacer } = ctx;
const map = new this(schema);
const add = (key, value) => {
if (typeof replacer === 'function')
value = replacer.call(obj, key, value);
else if (Array.isArray(replacer) && !replacer.includes(key))
return;
if (value !== undefined || keepUndefined)
map.items.push(createPair(key, value, ctx));
};
if (obj instanceof Map) {
for (const [key, value] of obj)
add(key, value);
}
else if (obj && typeof obj === 'object') {
for (const key of Object.keys(obj))
add(key, obj[key]);
}
if (typeof schema.sortMapEntries === 'function') {
map.items.sort(schema.sortMapEntries);
}
return map;
}
/**
* Adds a value to the collection.
*
* @param overwrite - If not set `true`, using a key that is already in the
* collection will throw. Otherwise, overwrites the previous value.
*/
add(pair, overwrite) {
let _pair;
if (isPair(pair))
_pair = pair;
else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
// In TypeScript, this never happens.
_pair = new Pair(pair, pair?.value);
}
else
_pair = new Pair(pair.key, pair.value);
const prev = findPair(this.items, _pair.key);
const sortEntries = this.schema?.sortMapEntries;
if (prev) {
if (!overwrite)
throw new Error(`Key ${_pair.key} already set`);
// For scalars, keep the old node & its comments and anchors
if (isScalar$1(prev.value) && isScalarValue(_pair.value))
prev.value.value = _pair.value;
else
prev.value = _pair.value;
}
else if (sortEntries) {
const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
if (i === -1)
this.items.push(_pair);
else
this.items.splice(i, 0, _pair);
}
else {
this.items.push(_pair);
}
}
delete(key) {
const it = findPair(this.items, key);
if (!it)
return false;
const del = this.items.splice(this.items.indexOf(it), 1);
return del.length > 0;
}
get(key, keepScalar) {
const it = findPair(this.items, key);
const node = it?.value;
return (!keepScalar && isScalar$1(node) ? node.value : node) ?? undefined;
}
has(key) {
return !!findPair(this.items, key);
}
set(key, value) {
this.add(new Pair(key, value), true);
}
/**
* @param ctx - Conversion context, originally set in Document#toJS()
* @param {Class} Type - If set, forces the returned collection type
* @returns Instance of Type, Map, or Object
*/
toJSON(_, ctx, Type) {
const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};
if (ctx?.onCreate)
ctx.onCreate(map);
for (const item of this.items)
addPairToJSMap(ctx, map, item);
return map;
}
toString(ctx, onComment, onChompKeep) {
if (!ctx)
return JSON.stringify(this);
for (const item of this.items) {
if (!isPair(item))
throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
}
if (!ctx.allNullValues && this.hasAllNullValues(false))
ctx = Object.assign({}, ctx, { allNullValues: true });
return stringifyCollection(this, ctx, {
blockItemPrefix: '',
flowChars: { start: '{', end: '}' },
itemIndent: ctx.indent || '',
onChompKeep,
onComment
});
}
}
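// Example (illustrative sketch, hypothetical helper, never invoked): add()
// throws on a duplicate key unless overwrite is set; set() is the overwriting
// shorthand, and for scalar values the existing node is kept with only its
// value replaced.
function exampleYamlMapAdd() {
    const m = new YAMLMap();
    m.add(new Pair('a', new Scalar(1)));
    m.set('a', 2);     // overwrites: the existing Scalar node's value becomes 2
    return m.get('a'); // -> 2
}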
const map$1 = {
collection: 'map',
default: true,
nodeClass: YAMLMap,
tag: 'tag:yaml.org,2002:map',
resolve(map, onError) {
if (!isMap(map))
onError('Expected a mapping for this tag');
return map;
},
createNode: (schema, obj, ctx) => YAMLMap.from(schema, obj, ctx)
};
class YAMLSeq extends Collection {
static get tagName() {
return 'tag:yaml.org,2002:seq';
}
constructor(schema) {
super(SEQ, schema);
this.items = [];
}
add(value) {
this.items.push(value);
}
/**
* Removes a value from the collection.
*
* `key` must contain a representation of an integer for this to succeed.
* It may be wrapped in a `Scalar`.
*
* @returns `true` if the item was found and removed.
*/
delete(key) {
const idx = asItemIndex(key);
if (typeof idx !== 'number')
return false;
const del = this.items.splice(idx, 1);
return del.length > 0;
}
get(key, keepScalar) {
const idx = asItemIndex(key);
if (typeof idx !== 'number')
return undefined;
const it = this.items[idx];
return !keepScalar && isScalar$1(it) ? it.value : it;
}
/**
* Checks if the collection includes a value with the key `key`.
*
* `key` must contain a representation of an integer for this to succeed.
* It may be wrapped in a `Scalar`.
*/
has(key) {
const idx = asItemIndex(key);
return typeof idx === 'number' && idx < this.items.length;
}
/**
* Sets a value in this collection. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*
* If `key` does not contain a representation of an integer, this will throw.
* It may be wrapped in a `Scalar`.
*/
set(key, value) {
const idx = asItemIndex(key);
if (typeof idx !== 'number')
throw new Error(`Expected a valid index, not ${key}.`);
const prev = this.items[idx];
if (isScalar$1(prev) && isScalarValue(value))
prev.value = value;
else
this.items[idx] = value;
}
toJSON(_, ctx) {
const seq = [];
if (ctx?.onCreate)
ctx.onCreate(seq);
let i = 0;
for (const item of this.items)
seq.push(toJS(item, String(i++), ctx));
return seq;
}
toString(ctx, onComment, onChompKeep) {
if (!ctx)
return JSON.stringify(this);
return stringifyCollection(this, ctx, {
blockItemPrefix: '- ',
flowChars: { start: '[', end: ']' },
itemIndent: (ctx.indent || '') + ' ',
onChompKeep,
onComment
});
}
static from(schema, obj, ctx) {
const { replacer } = ctx;
const seq = new this(schema);
if (obj && Symbol.iterator in Object(obj)) {
let i = 0;
for (let it of obj) {
if (typeof replacer === 'function') {
const key = obj instanceof Set ? it : String(i++);
it = replacer.call(obj, key, it);
}
seq.items.push(createNode(it, undefined, ctx));
}
}
return seq;
}
}
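// Example (illustrative sketch, hypothetical helper, never invoked): sequence
// keys are integer indices and may be given as numbers, numeric strings, or
// Scalar wrappers; anything else is rejected by asItemIndex() below.
function exampleYamlSeqIndex() {
    const s = new YAMLSeq();
    s.add(new Scalar('first'));
    s.add(new Scalar('second'));
    return [s.get(1), s.get('1'), s.has(5)]; // -> ['second', 'second', false]
}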
function asItemIndex(key) {
let idx = isScalar$1(key) ? key.value : key;
if (idx && typeof idx === 'string')
idx = Number(idx);
return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0
? idx
: null;
}
const seq = {
collection: 'seq',
default: true,
nodeClass: YAMLSeq,
tag: 'tag:yaml.org,2002:seq',
resolve(seq, onError) {
if (!isSeq(seq))
onError('Expected a sequence for this tag');
return seq;
},
createNode: (schema, obj, ctx) => YAMLSeq.from(schema, obj, ctx)
};
const string = {
identify: value => typeof value === 'string',
default: true,
tag: 'tag:yaml.org,2002:str',
resolve: str => str,
stringify(item, ctx, onComment, onChompKeep) {
ctx = Object.assign({ actualString: true }, ctx);
return stringifyString(item, ctx, onComment, onChompKeep);
}
};
const nullTag = {
identify: value => value == null,
createNode: () => new Scalar(null),
default: true,
tag: 'tag:yaml.org,2002:null',
test: /^(?:~|[Nn]ull|NULL)?$/,
resolve: () => new Scalar(null),
stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)
? source
: ctx.options.nullStr
};
const boolTag = {
identify: value => typeof value === 'boolean',
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'),
stringify({ source, value }, ctx) {
if (source && boolTag.test.test(source)) {
const sv = source[0] === 't' || source[0] === 'T';
if (value === sv)
return source;
}
return value ? ctx.options.trueStr : ctx.options.falseStr;
}
};
function stringifyNumber({ format, minFractionDigits, tag, value }) {
if (typeof value === 'bigint')
return String(value);
const num = typeof value === 'number' ? value : Number(value);
if (!isFinite(num))
return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf';
let n = JSON.stringify(value);
if (!format &&
minFractionDigits &&
(!tag || tag === 'tag:yaml.org,2002:float') &&
/^\d/.test(n)) {
let i = n.indexOf('.');
if (i < 0) {
i = n.length;
n += '.';
}
let d = minFractionDigits - (n.length - i - 1);
while (d-- > 0)
n += '0';
}
return n;
}
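// Example (illustrative sketch, hypothetical helper, never invoked):
// stringifyNumber pads the fraction when minFractionDigits is set and maps
// non-finite numbers to the YAML '.inf' / '.nan' forms.
function exampleStringifyNumber() {
    return [
        stringifyNumber({ value: 3.5, minFractionDigits: 3 }), // -> '3.500'
        stringifyNumber({ value: Infinity }),                  // -> '.inf'
        stringifyNumber({ value: NaN })                        // -> '.nan'
    ];
}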
const floatNaN$1 = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/,
resolve: str => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'
? Number.NEGATIVE_INFINITY
: Number.POSITIVE_INFINITY,
stringify: stringifyNumber
};
const floatExp$1 = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
format: 'EXP',
test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
resolve: str => parseFloat(str),
stringify(node) {
const num = Number(node.value);
return isFinite(num) ? num.toExponential() : stringifyNumber(node);
}
};
const float$1 = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/,
resolve(str) {
const node = new Scalar(parseFloat(str));
const dot = str.indexOf('.');
if (dot !== -1 && str[str.length - 1] === '0')
node.minFractionDigits = str.length - dot - 1;
return node;
},
stringify: stringifyNumber
};
const intIdentify$2 = (value) => typeof value === 'bigint' || Number.isInteger(value);
const intResolve$1 = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix));
function intStringify$1(node, radix, prefix) {
const { value } = node;
if (intIdentify$2(value) && value >= 0)
return prefix + value.toString(radix);
return stringifyNumber(node);
}
const intOct$1 = {
identify: value => intIdentify$2(value) && value >= 0,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'OCT',
test: /^0o[0-7]+$/,
resolve: (str, _onError, opt) => intResolve$1(str, 2, 8, opt),
stringify: node => intStringify$1(node, 8, '0o')
};
const int$1 = {
identify: intIdentify$2,
default: true,
tag: 'tag:yaml.org,2002:int',
test: /^[-+]?[0-9]+$/,
resolve: (str, _onError, opt) => intResolve$1(str, 0, 10, opt),
stringify: stringifyNumber
};
const intHex$1 = {
identify: value => intIdentify$2(value) && value >= 0,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'HEX',
test: /^0x[0-9a-fA-F]+$/,
resolve: (str, _onError, opt) => intResolve$1(str, 2, 16, opt),
stringify: node => intStringify$1(node, 16, '0x')
};
const schema$2 = [
map$1,
seq,
string,
nullTag,
boolTag,
intOct$1,
int$1,
intHex$1,
floatNaN$1,
floatExp$1,
float$1
];
function intIdentify$1(value) {
return typeof value === 'bigint' || Number.isInteger(value);
}
const stringifyJSON = ({ value }) => JSON.stringify(value);
const jsonScalars = [
{
identify: value => typeof value === 'string',
default: true,
tag: 'tag:yaml.org,2002:str',
resolve: str => str,
stringify: stringifyJSON
},
{
identify: value => value == null,
createNode: () => new Scalar(null),
default: true,
tag: 'tag:yaml.org,2002:null',
test: /^null$/,
resolve: () => null,
stringify: stringifyJSON
},
{
identify: value => typeof value === 'boolean',
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^true|false$/,
resolve: str => str === 'true',
stringify: stringifyJSON
},
{
identify: intIdentify$1,
default: true,
tag: 'tag:yaml.org,2002:int',
test: /^-?(?:0|[1-9][0-9]*)$/,
resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),
stringify: ({ value }) => intIdentify$1(value) ? value.toString() : JSON.stringify(value)
},
{
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
resolve: str => parseFloat(str),
stringify: stringifyJSON
}
];
const jsonError = {
default: true,
tag: '',
test: /^/,
resolve(str, onError) {
onError(`Unresolved plain scalar ${JSON.stringify(str)}`);
return str;
}
};
const schema$1 = [map$1, seq].concat(jsonScalars, jsonError);
const binary = {
identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array
default: false,
tag: 'tag:yaml.org,2002:binary',
/**
* Returns a Buffer in node and an Uint8Array in browsers
*
* To use the resulting buffer as an image, you'll want to do something like:
*
* const blob = new Blob([buffer], { type: 'image/jpeg' })
* document.querySelector('#photo').src = URL.createObjectURL(blob)
*/
resolve(src, onError) {
if (typeof Buffer === 'function') {
return Buffer.from(src, 'base64');
}
else if (typeof atob === 'function') {
// On IE 11, atob() can't handle newlines
const str = atob(src.replace(/[\n\r]/g, ''));
const buffer = new Uint8Array(str.length);
for (let i = 0; i < str.length; ++i)
buffer[i] = str.charCodeAt(i);
return buffer;
}
else {
onError('This environment does not support reading binary tags; either Buffer or atob is required');
return src;
}
},
stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
const buf = value; // checked earlier by binary.identify()
let str;
if (typeof Buffer === 'function') {
str =
buf instanceof Buffer
? buf.toString('base64')
: Buffer.from(buf.buffer).toString('base64');
}
else if (typeof btoa === 'function') {
let s = '';
for (let i = 0; i < buf.length; ++i)
s += String.fromCharCode(buf[i]);
str = btoa(s);
}
else {
throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
}
if (!type)
type = Scalar.BLOCK_LITERAL;
if (type !== Scalar.QUOTE_DOUBLE) {
const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
const n = Math.ceil(str.length / lineWidth);
const lines = new Array(n);
for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
lines[i] = str.substr(o, lineWidth);
}
str = lines.join(type === Scalar.BLOCK_LITERAL ? '\n' : ' ');
}
return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);
}
};
function resolvePairs(seq, onError) {
if (isSeq(seq)) {
for (let i = 0; i < seq.items.length; ++i) {
let item = seq.items[i];
if (isPair(item))
continue;
else if (isMap(item)) {
if (item.items.length > 1)
onError('Each pair must have its own sequence indicator');
const pair = item.items[0] || new Pair(new Scalar(null));
if (item.commentBefore)
pair.key.commentBefore = pair.key.commentBefore
? `${item.commentBefore}\n${pair.key.commentBefore}`
: item.commentBefore;
if (item.comment) {
const cn = pair.value ?? pair.key;
cn.comment = cn.comment
? `${item.comment}\n${cn.comment}`
: item.comment;
}
item = pair;
}
seq.items[i] = isPair(item) ? item : new Pair(item);
}
}
else
onError('Expected a sequence for this tag');
return seq;
}
function createPairs(schema, iterable, ctx) {
const { replacer } = ctx;
const pairs = new YAMLSeq(schema);
pairs.tag = 'tag:yaml.org,2002:pairs';
let i = 0;
if (iterable && Symbol.iterator in Object(iterable))
for (let it of iterable) {
if (typeof replacer === 'function')
it = replacer.call(iterable, String(i++), it);
let key, value;
if (Array.isArray(it)) {
if (it.length === 2) {
key = it[0];
value = it[1];
}
else
throw new TypeError(`Expected [key, value] tuple: ${it}`);
}
else if (it && it instanceof Object) {
const keys = Object.keys(it);
if (keys.length === 1) {
key = keys[0];
value = it[key];
}
else {
throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`);
}
}
else {
key = it;
}
pairs.items.push(createPair(key, value, ctx));
}
return pairs;
}
const pairs = {
collection: 'seq',
default: false,
tag: 'tag:yaml.org,2002:pairs',
resolve: resolvePairs,
createNode: createPairs
};
class YAMLOMap extends YAMLSeq {
constructor() {
super();
this.add = YAMLMap.prototype.add.bind(this);
this.delete = YAMLMap.prototype.delete.bind(this);
this.get = YAMLMap.prototype.get.bind(this);
this.has = YAMLMap.prototype.has.bind(this);
this.set = YAMLMap.prototype.set.bind(this);
this.tag = YAMLOMap.tag;
}
/**
* If `ctx` is given, the return type is actually `Map`,
* but TypeScript won't allow widening the signature of a child method.
*/
toJSON(_, ctx) {
if (!ctx)
return super.toJSON(_);
const map = new Map();
if (ctx?.onCreate)
ctx.onCreate(map);
for (const pair of this.items) {
let key, value;
if (isPair(pair)) {
key = toJS(pair.key, '', ctx);
value = toJS(pair.value, key, ctx);
}
else {
key = toJS(pair, '', ctx);
}
if (map.has(key))
throw new Error('Ordered maps must not include duplicate keys');
map.set(key, value);
}
return map;
}
static from(schema, iterable, ctx) {
const pairs = createPairs(schema, iterable, ctx);
const omap = new this();
omap.items = pairs.items;
return omap;
}
}
YAMLOMap.tag = 'tag:yaml.org,2002:omap';
const omap = {
collection: 'seq',
identify: value => value instanceof Map,
nodeClass: YAMLOMap,
default: false,
tag: 'tag:yaml.org,2002:omap',
resolve(seq, onError) {
const pairs = resolvePairs(seq, onError);
const seenKeys = [];
for (const { key } of pairs.items) {
if (isScalar$1(key)) {
if (seenKeys.includes(key.value)) {
onError(`Ordered maps must not include duplicate keys: ${key.value}`);
}
else {
seenKeys.push(key.value);
}
}
}
return Object.assign(new YAMLOMap(), pairs);
},
createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx)
};
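/*
 * Illustrative sketch of the !!omap behaviour defined above (assuming the
 * yaml library's `parseDocument` is available from this bundle):
 *
 * ```ts
 * const doc = parseDocument('!!omap [ a: 1, b: 2 ]', { version: '1.1' })
 * doc.toJS()   // Map(2) { 'a' => 1, 'b' => 2 }, since YAMLOMap.toJSON returns a Map
 * // '!!omap [ a: 1, a: 2 ]' would instead report
 * //   "Ordered maps must not include duplicate keys: a"
 * ```
 */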
function boolStringify({ value, source }, ctx) {
const boolObj = value ? trueTag : falseTag;
if (source && boolObj.test.test(source))
return source;
return value ? ctx.options.trueStr : ctx.options.falseStr;
}
const trueTag = {
identify: value => value === true,
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
resolve: () => new Scalar(true),
stringify: boolStringify
};
const falseTag = {
identify: value => value === false,
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/,
resolve: () => new Scalar(false),
stringify: boolStringify
};
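/*
 * With the YAML 1.1 schema the two bool tags above accept the full boolean
 * vocabulary of their test regexps, e.g. (sketch, assuming the yaml library's
 * `parse` is exposed by this bundle):
 *
 * ```ts
 * parse('[y, Yes, TRUE, on]', { version: '1.1' })   // [true, true, true, true]
 * parse('[n, No, FALSE, off]', { version: '1.1' })  // [false, false, false, false]
 * ```
 */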
const floatNaN = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/,
resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'
? Number.NEGATIVE_INFINITY
: Number.POSITIVE_INFINITY,
stringify: stringifyNumber
};
const floatExp = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
format: 'EXP',
test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/,
resolve: (str) => parseFloat(str.replace(/_/g, '')),
stringify(node) {
const num = Number(node.value);
return isFinite(num) ? num.toExponential() : stringifyNumber(node);
}
};
const float = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/,
resolve(str) {
const node = new Scalar(parseFloat(str.replace(/_/g, '')));
const dot = str.indexOf('.');
if (dot !== -1) {
const f = str.substring(dot + 1).replace(/_/g, '');
if (f[f.length - 1] === '0')
node.minFractionDigits = f.length;
}
return node;
},
stringify: stringifyNumber
};
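/*
 * The minFractionDigits handling above preserves trailing zeros: parsing
 * '1.20' yields a Scalar with value 1.2 and minFractionDigits 2, so it can be
 * stringified back as '1.20' rather than '1.2'.
 */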
const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
function intResolve(str, offset, radix, { intAsBigInt }) {
const sign = str[0];
if (sign === '-' || sign === '+')
offset += 1;
str = str.substring(offset).replace(/_/g, '');
if (intAsBigInt) {
switch (radix) {
case 2:
str = `0b${str}`;
break;
case 8:
str = `0o${str}`;
break;
case 16:
str = `0x${str}`;
break;
}
const n = BigInt(str);
return sign === '-' ? BigInt(-1) * n : n;
}
const n = parseInt(str, radix);
return sign === '-' ? -1 * n : n;
}
function intStringify(node, radix, prefix) {
const { value } = node;
if (intIdentify(value)) {
const str = value.toString(radix);
return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;
}
return stringifyNumber(node);
}
const intBin = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'BIN',
test: /^[-+]?0b[0-1_]+$/,
resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),
stringify: node => intStringify(node, 2, '0b')
};
const intOct = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'OCT',
test: /^[-+]?0[0-7_]+$/,
resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),
stringify: node => intStringify(node, 8, '0')
};
const int = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
test: /^[-+]?[0-9][0-9_]*$/,
resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
stringify: stringifyNumber
};
const intHex = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'HEX',
test: /^[-+]?0x[0-9a-fA-F_]+$/,
resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
stringify: node => intStringify(node, 16, '0x')
};
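/*
 * The YAML 1.1 integer forms handled by the tags above, e.g. (sketch,
 * assuming the yaml library's `parse` is exposed by this bundle):
 *
 * ```ts
 * parse('[0b1010, 017, 255, 0xff]', { version: '1.1' })   // [10, 15, 255, 255]
 * parse('0x_ff', { version: '1.1', intAsBigInt: true })   // 255n
 * ```
 */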
class YAMLSet extends YAMLMap {
constructor(schema) {
super(schema);
this.tag = YAMLSet.tag;
}
add(key) {
let pair;
if (isPair(key))
pair = key;
else if (key &&
typeof key === 'object' &&
'key' in key &&
'value' in key &&
key.value === null)
pair = new Pair(key.key, null);
else
pair = new Pair(key, null);
const prev = findPair(this.items, pair.key);
if (!prev)
this.items.push(pair);
}
/**
* If `keepPair` is `true`, returns the Pair matching `key`.
* Otherwise, returns the value of that Pair's key.
*/
get(key, keepPair) {
const pair = findPair(this.items, key);
return !keepPair && isPair(pair)
? isScalar$1(pair.key)
? pair.key.value
: pair.key
: pair;
}
set(key, value) {
if (typeof value !== 'boolean')
throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
const prev = findPair(this.items, key);
if (prev && !value) {
this.items.splice(this.items.indexOf(prev), 1);
}
else if (!prev && value) {
this.items.push(new Pair(key));
}
}
toJSON(_, ctx) {
return super.toJSON(_, ctx, Set);
}
toString(ctx, onComment, onChompKeep) {
if (!ctx)
return JSON.stringify(this);
if (this.hasAllNullValues(true))
return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);
else
throw new Error('Set items must all have null values');
}
static from(schema, iterable, ctx) {
const { replacer } = ctx;
const set = new this(schema);
if (iterable && Symbol.iterator in Object(iterable))
for (let value of iterable) {
if (typeof replacer === 'function')
value = replacer.call(iterable, value, value);
set.items.push(createPair(value, null, ctx));
}
return set;
}
}
YAMLSet.tag = 'tag:yaml.org,2002:set';
const set = {
collection: 'map',
identify: value => value instanceof Set,
nodeClass: YAMLSet,
default: false,
tag: 'tag:yaml.org,2002:set',
createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx),
resolve(map, onError) {
if (isMap(map)) {
if (map.hasAllNullValues(true))
return Object.assign(new YAMLSet(), map);
else
onError('Set items must all have null values');
}
else
onError('Expected a mapping for this tag');
return map;
}
};
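/*
 * Sketch of the !!set behaviour defined above (assuming `parseDocument` from
 * the yaml library): every item is a key with a null value, and
 * YAMLSet.set(key, boolean) adds or removes membership.
 *
 * ```ts
 * const doc = parseDocument('!!set { a, b }', { version: '1.1' })
 * doc.toJS()                    // Set(2) { 'a', 'b' }
 * doc.contents.set('c', true)   // add 'c'
 * doc.contents.set('a', false)  // remove 'a'
 * ```
 */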
/** Internal types handle bigint as number, because TS can't figure it out. */
function parseSexagesimal(str, asBigInt) {
const sign = str[0];
const parts = sign === '-' || sign === '+' ? str.substring(1) : str;
const num = (n) => asBigInt ? BigInt(n) : Number(n);
const res = parts
.replace(/_/g, '')
.split(':')
.reduce((res, p) => res * num(60) + num(p), num(0));
return (sign === '-' ? num(-1) * res : res);
}
/**
* hhhh:mm:ss.sss
*
* Internal types handle bigint as number, because TS can't figure it out.
*/
function stringifySexagesimal(node) {
let { value } = node;
let num = (n) => n;
if (typeof value === 'bigint')
num = n => BigInt(n);
else if (isNaN(value) || !isFinite(value))
return stringifyNumber(node);
let sign = '';
if (value < 0) {
sign = '-';
value *= num(-1);
}
const _60 = num(60);
const parts = [value % _60]; // seconds, including ms
if (value < 60) {
parts.unshift(0); // at least one : is required
}
else {
value = (value - parts[0]) / _60;
parts.unshift(value % _60); // minutes
if (value >= 60) {
value = (value - parts[0]) / _60;
parts.unshift(value); // hours
}
}
return (sign +
parts
.map(n => String(n).padStart(2, '0'))
.join(':')
.replace(/000000\d*$/, '') // % 60 may introduce error
);
}
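/*
 * Sexagesimal round-trip example for the two helpers above:
 *
 * ```ts
 * parseSexagesimal('3:25:45', false)   // 3 * 3600 + 25 * 60 + 45 === 12345
 * // and for a node whose value is 12345, stringifySexagesimal() yields
 * // '03:25:45' (each part is zero-padded to two digits)
 * ```
 */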
const intTime = {
identify: value => typeof value === 'bigint' || Number.isInteger(value),
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'TIME',
test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,
resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),
stringify: stringifySexagesimal
};
const floatTime = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
format: 'TIME',
test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/,
resolve: str => parseSexagesimal(str, false),
stringify: stringifySexagesimal
};
const timestamp = {
identify: value => value instanceof Date,
default: true,
tag: 'tag:yaml.org,2002:timestamp',
// If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
// may be omitted altogether, resulting in a date format. In such a case, the time part is
// assumed to be 00:00:00Z (start of day, UTC).
test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
'(?:' + // time is optional
'(?:t|T|[ \\t]+)' + // t | T | whitespace
'([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
'(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
')?$'),
resolve(str) {
const match = str.match(timestamp.test);
if (!match)
throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');
const [, year, month, day, hour, minute, second] = match.map(Number);
const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;
let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);
const tz = match[8];
if (tz && tz !== 'Z') {
let d = parseSexagesimal(tz, false);
if (Math.abs(d) < 30)
d *= 60;
date -= 60000 * d;
}
return new Date(date);
},
stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
};
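/*
 * Timestamp resolution example, matching the regexp and UTC handling above:
 *
 * ```ts
 * timestamp.resolve('2001-12-14 21:59:43.10 -5')
 * // → Date for 2001-12-15T02:59:43.100Z: the fractional part is padded to
 * //   milliseconds and the -5 hour offset is removed to arrive at UTC
 * ```
 */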
const schema = [
map$1,
seq,
string,
nullTag,
trueTag,
falseTag,
intBin,
intOct,
int,
intHex,
floatNaN,
floatExp,
float,
binary,
omap,
pairs,
set,
intTime,
floatTime,
timestamp
];
const schemas = new Map([
['core', schema$2],
['failsafe', [map$1, seq, string]],
['json', schema$1],
['yaml11', schema],
['yaml-1.1', schema]
]);
const tagsByName = {
binary,
bool: boolTag,
float: float$1,
floatExp: floatExp$1,
floatNaN: floatNaN$1,
floatTime,
int: int$1,
intHex: intHex$1,
intOct: intOct$1,
intTime,
map: map$1,
null: nullTag,
omap,
pairs,
seq,
set,
timestamp
};
const coreKnownTags = {
'tag:yaml.org,2002:binary': binary,
'tag:yaml.org,2002:omap': omap,
'tag:yaml.org,2002:pairs': pairs,
'tag:yaml.org,2002:set': set,
'tag:yaml.org,2002:timestamp': timestamp
};
function getTags(customTags, schemaName) {
let tags = schemas.get(schemaName);
if (!tags) {
if (Array.isArray(customTags))
tags = [];
else {
const keys = Array.from(schemas.keys())
.filter(key => key !== 'yaml11')
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`);
}
}
if (Array.isArray(customTags)) {
for (const tag of customTags)
tags = tags.concat(tag);
}
else if (typeof customTags === 'function') {
tags = customTags(tags.slice());
}
return tags.map(tag => {
if (typeof tag !== 'string')
return tag;
const tagObj = tagsByName[tag];
if (tagObj)
return tagObj;
const keys = Object.keys(tagsByName)
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
});
}
const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
class Schema {
constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {
this.compat = Array.isArray(compat)
? getTags(compat, 'compat')
: compat
? getTags(null, compat)
: null;
this.merge = !!merge;
this.name = (typeof schema === 'string' && schema) || 'core';
this.knownTags = resolveKnownTags ? coreKnownTags : {};
this.tags = getTags(customTags, this.name);
this.toStringOptions = toStringDefaults ?? null;
Object.defineProperty(this, MAP, { value: map$1 });
Object.defineProperty(this, SCALAR$1, { value: string });
Object.defineProperty(this, SEQ, { value: seq });
// Used by createMap()
this.sortMapEntries =
typeof sortMapEntries === 'function'
? sortMapEntries
: sortMapEntries === true
? sortMapEntriesByKey
: null;
}
clone() {
const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));
copy.tags = this.tags.slice();
return copy;
}
}
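/*
 * Sketch of how custom tags are combined by getTags() via the Schema
 * constructor; `timestamp` and `binary` below are the tag objects defined in
 * this file:
 *
 * ```ts
 * // extend the core schema with extra YAML 1.1 tags, by name or by object:
 * new Schema({ schema: 'core', customTags: ['timestamp', 'binary'] })
 * new Schema({ schema: 'core', customTags: tags => tags.concat(timestamp, binary) })
 * // an unknown schema name throws:
 * //   Unknown schema "foo"; use one of "core", "failsafe", "json", "yaml-1.1"
 * //   or define customTags array
 * ```
 */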
function stringifyDocument(doc, options) {
const lines = [];
let hasDirectives = options.directives === true;
if (options.directives !== false && doc.directives) {
const dir = doc.directives.toString(doc);
if (dir) {
lines.push(dir);
hasDirectives = true;
}
else if (doc.directives.docStart)
hasDirectives = true;
}
if (hasDirectives)
lines.push('---');
const ctx = createStringifyContext(doc, options);
const { commentString } = ctx.options;
if (doc.commentBefore) {
if (lines.length !== 1)
lines.unshift('');
const cs = commentString(doc.commentBefore);
lines.unshift(indentComment(cs, ''));
}
let chompKeep = false;
let contentComment = null;
if (doc.contents) {
if (isNode$1(doc.contents)) {
if (doc.contents.spaceBefore && hasDirectives)
lines.push('');
if (doc.contents.commentBefore) {
const cs = commentString(doc.contents.commentBefore);
lines.push(indentComment(cs, ''));
}
// top-level block scalars need to be indented if followed by a comment
ctx.forceBlockIndent = !!doc.comment;
contentComment = doc.contents.comment;
}
const onChompKeep = contentComment ? undefined : () => (chompKeep = true);
let body = stringify$2(doc.contents, ctx, () => (contentComment = null), onChompKeep);
if (contentComment)
body += lineComment(body, '', commentString(contentComment));
if ((body[0] === '|' || body[0] === '>') &&
lines[lines.length - 1] === '---') {
// Top-level block scalars with a preceding doc marker ought to use the
// same line for their header.
lines[lines.length - 1] = `--- ${body}`;
}
else
lines.push(body);
}
else {
lines.push(stringify$2(doc.contents, ctx));
}
if (doc.directives?.docEnd) {
if (doc.comment) {
const cs = commentString(doc.comment);
if (cs.includes('\n')) {
lines.push('...');
lines.push(indentComment(cs, ''));
}
else {
lines.push(`... ${cs}`);
}
}
else {
lines.push('...');
}
}
else {
let dc = doc.comment;
if (dc && chompKeep)
dc = dc.replace(/^\n+/, '');
if (dc) {
if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')
lines.push('');
lines.push(indentComment(commentString(dc), ''));
}
}
return lines.join('\n') + '\n';
}
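/*
 * Sketch of the directives-end marker handling above, using the Document
 * class defined below:
 *
 * ```ts
 * stringifyDocument(new Document({ a: 1 }), {})                   // 'a: 1\n'
 * stringifyDocument(new Document({ a: 1 }), { directives: true }) // '---\na: 1\n'
 * ```
 */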
class Document {
constructor(value, replacer, options) {
/** A comment before this Document */
this.commentBefore = null;
/** A comment immediately after this Document */
this.comment = null;
/** Errors encountered during parsing. */
this.errors = [];
/** Warnings encountered during parsing. */
this.warnings = [];
Object.defineProperty(this, NODE_TYPE, { value: DOC });
let _replacer = null;
if (typeof replacer === 'function' || Array.isArray(replacer)) {
_replacer = replacer;
}
else if (options === undefined && replacer) {
options = replacer;
replacer = undefined;
}
const opt = Object.assign({
intAsBigInt: false,
keepSourceTokens: false,
logLevel: 'warn',
prettyErrors: true,
strict: true,
uniqueKeys: true,
version: '1.2'
}, options);
this.options = opt;
let { version } = opt;
if (options?._directives) {
this.directives = options._directives.atDocument();
if (this.directives.yaml.explicit)
version = this.directives.yaml.version;
}
else
this.directives = new Directives({ version });
this.setSchema(version, options);
// @ts-expect-error We can't really know that this matches Contents.
this.contents =
value === undefined ? null : this.createNode(value, _replacer, options);
}
/**
* Create a deep copy of this Document and its contents.
*
* Custom Node values that inherit from `Object` still refer to their original instances.
*/
clone() {
const copy = Object.create(Document.prototype, {
[NODE_TYPE]: { value: DOC }
});
copy.commentBefore = this.commentBefore;
copy.comment = this.comment;
copy.errors = this.errors.slice();
copy.warnings = this.warnings.slice();
copy.options = Object.assign({}, this.options);
if (this.directives)
copy.directives = this.directives.clone();
copy.schema = this.schema.clone();
// @ts-expect-error We can't really know that this matches Contents.
copy.contents = isNode$1(this.contents)
? this.contents.clone(copy.schema)
: this.contents;
if (this.range)
copy.range = this.range.slice();
return copy;
}
/** Adds a value to the document. */
add(value) {
if (assertCollection(this.contents))
this.contents.add(value);
}
/** Adds a value to the document. */
addIn(path, value) {
if (assertCollection(this.contents))
this.contents.addIn(path, value);
}
/**
* Create a new `Alias` node, ensuring that the target `node` has the required anchor.
*
* If `node` already has an anchor, `name` is ignored.
* Otherwise, the `node.anchor` value will be set to `name`,
* or if an anchor with that name is already present in the document,
* `name` will be used as a prefix for a new unique anchor.
* If `name` is undefined, the generated anchor will use 'a' as a prefix.
*/
createAlias(node, name) {
if (!node.anchor) {
const prev = anchorNames(this);
node.anchor =
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
!name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name;
}
return new Alias(node.anchor);
}
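/*
 * Sketch of createAlias() above, assuming `doc` is a Document instance:
 *
 * ```ts
 * const node = doc.createNode({ x: 1 })
 * doc.set('a', node)
 * doc.set('b', doc.createAlias(node, 'shared'))
 * // node now has anchor 'shared', and the document stringifies roughly as
 * //   a: &shared
 * //     x: 1
 * //   b: *shared
 * ```
 */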
createNode(value, replacer, options) {
let _replacer = undefined;
if (typeof replacer === 'function') {
value = replacer.call({ '': value }, '', value);
_replacer = replacer;
}
else if (Array.isArray(replacer)) {
const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
const asStr = replacer.filter(keyToStr).map(String);
if (asStr.length > 0)
replacer = replacer.concat(asStr);
_replacer = replacer;
}
else if (options === undefined && replacer) {
options = replacer;
replacer = undefined;
}
const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};
const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors(this,
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
anchorPrefix || 'a');
const ctx = {
aliasDuplicateObjects: aliasDuplicateObjects ?? true,
keepUndefined: keepUndefined ?? false,
onAnchor,
onTagObj,
replacer: _replacer,
schema: this.schema,
sourceObjects
};
const node = createNode(value, tag, ctx);
if (flow && isCollection$1(node))
node.flow = true;
setAnchors();
return node;
}
/**
* Convert a key and a value into a `Pair` using the current schema,
* recursively wrapping all values as `Scalar` or `Collection` nodes.
*/
createPair(key, value, options = {}) {
const k = this.createNode(key, null, options);
const v = this.createNode(value, null, options);
return new Pair(k, v);
}
/**
* Removes a value from the document.
* @returns `true` if the item was found and removed.
*/
delete(key) {
return assertCollection(this.contents) ? this.contents.delete(key) : false;
}
/**
* Removes a value from the document.
* @returns `true` if the item was found and removed.
*/
deleteIn(path) {
if (isEmptyPath(path)) {
if (this.contents == null)
return false;
// @ts-expect-error Presumed impossible if Strict extends false
this.contents = null;
return true;
}
return assertCollection(this.contents)
? this.contents.deleteIn(path)
: false;
}
/**
* Returns item at `key`, or `undefined` if not found. By default unwraps
* scalar values from their surrounding node; to disable set `keepScalar` to
* `true` (collections are always returned intact).
*/
get(key, keepScalar) {
return isCollection$1(this.contents)
? this.contents.get(key, keepScalar)
: undefined;
}
/**
* Returns item at `path`, or `undefined` if not found. By default unwraps
* scalar values from their surrounding node; to disable set `keepScalar` to
* `true` (collections are always returned intact).
*/
getIn(path, keepScalar) {
if (isEmptyPath(path))
return !keepScalar && isScalar$1(this.contents)
? this.contents.value
: this.contents;
return isCollection$1(this.contents)
? this.contents.getIn(path, keepScalar)
: undefined;
}
/**
* Checks if the document includes a value with the key `key`.
*/
has(key) {
return isCollection$1(this.contents) ? this.contents.has(key) : false;
}
/**
* Checks if the document includes a value at `path`.
*/
hasIn(path) {
if (isEmptyPath(path))
return this.contents !== undefined;
return isCollection$1(this.contents) ? this.contents.hasIn(path) : false;
}
/**
* Sets a value in this document. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*/
set(key, value) {
if (this.contents == null) {
// @ts-expect-error We can't really know that this matches Contents.
this.contents = collectionFromPath(this.schema, [key], value);
}
else if (assertCollection(this.contents)) {
this.contents.set(key, value);
}
}
/**
* Sets a value in this document. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*/
setIn(path, value) {
if (isEmptyPath(path)) {
// @ts-expect-error We can't really know that this matches Contents.
this.contents = value;
}
else if (this.contents == null) {
// @ts-expect-error We can't really know that this matches Contents.
this.contents = collectionFromPath(this.schema, Array.from(path), value);
}
else if (assertCollection(this.contents)) {
this.contents.setIn(path, value);
}
}
/**
* Change the YAML version and schema used by the document.
* A `null` version disables support for directives, explicit tags, anchors, and aliases.
* It also requires the `schema` option to be given as a `Schema` instance value.
*
* Overrides all previously set schema options.
*/
setSchema(version, options = {}) {
if (typeof version === 'number')
version = String(version);
let opt;
switch (version) {
case '1.1':
if (this.directives)
this.directives.yaml.version = '1.1';
else
this.directives = new Directives({ version: '1.1' });
opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
break;
case '1.2':
case 'next':
if (this.directives)
this.directives.yaml.version = version;
else
this.directives = new Directives({ version });
opt = { merge: false, resolveKnownTags: true, schema: 'core' };
break;
case null:
if (this.directives)
delete this.directives;
opt = null;
break;
default: {
const sv = JSON.stringify(version);
throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);
}
}
// Not using `instanceof Schema` to allow for duck typing
if (options.schema instanceof Object)
this.schema = options.schema;
else if (opt)
this.schema = new Schema(Object.assign(opt, options));
else
throw new Error(`With a null YAML version, the { schema: Schema } option is required`);
}
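/*
 * Sketch of setSchema() above, assuming `doc` is a Document instance:
 *
 * ```ts
 * doc.setSchema('1.1')
 * // switches to { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }
 * doc.setSchema('1.2', { customTags: ['timestamp'] })
 * // core schema plus the YAML 1.1 timestamp tag
 * ```
 */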
// json & jsonArg are only used from toJSON()
toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
const ctx = {
anchors: new Map(),
doc: this,
keep: !json,
mapAsMap: mapAsMap === true,
mapKeyWarned: false,
maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100
};
const res = toJS(this.contents, jsonArg ?? '', ctx);
if (typeof onAnchor === 'function')
for (const { count, res } of ctx.anchors.values())
onAnchor(res, count);
return typeof reviver === 'function'
? applyReviver(reviver, { '': res }, '', res)
: res;
}
/**
* A JSON representation of the document `contents`.
*
* @param jsonArg Used by `JSON.stringify` to indicate the array index or
* property name.
*/
toJSON(jsonArg, onAnchor) {
return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
}
/** A YAML representation of the document. */
toString(options = {}) {
if (this.errors.length > 0)
throw new Error('Document with errors cannot be stringified');
if ('indent' in options &&
(!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
const s = JSON.stringify(options.indent);
throw new Error(`"indent" option must be a positive integer, not ${s}`);
}
return stringifyDocument(this, options);
}
}
function assertCollection(contents) {
if (isCollection$1(contents))
return true;
throw new Error('Expected a YAML collection as document contents');
}
class YAMLError extends Error {
constructor(name, pos, code, message) {
super();
this.name = name;
this.code = code;
this.message = message;
this.pos = pos;
}
}
class YAMLParseError extends YAMLError {
constructor(pos, code, message) {
super('YAMLParseError', pos, code, message);
}
}
class YAMLWarning extends YAMLError {
constructor(pos, code, message) {
super('YAMLWarning', pos, code, message);
}
}
const prettifyError = (src, lc) => (error) => {
if (error.pos[0] === -1)
return;
error.linePos = error.pos.map(pos => lc.linePos(pos));
const { line, col } = error.linePos[0];
error.message += ` at line ${line}, column ${col}`;
let ci = col - 1;
let lineStr = src
.substring(lc.lineStarts[line - 1], lc.lineStarts[line])
.replace(/[\n\r]+$/, '');
// Trim to max 80 chars, keeping col position near the middle
if (ci >= 60 && lineStr.length > 80) {
const trimStart = Math.min(ci - 39, lineStr.length - 79);
lineStr = '…' + lineStr.substring(trimStart);
ci -= trimStart - 1;
}
if (lineStr.length > 80)
lineStr = lineStr.substring(0, 79) + '…';
// Include previous line in context if pointing at line start
if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {
// Regexp won't match if start is trimmed
let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
if (prev.length > 80)
prev = prev.substring(0, 79) + '…\n';
lineStr = prev + lineStr;
}
if (/[^ ]/.test(lineStr)) {
let count = 1;
const end = error.linePos[1];
if (end && end.line === line && end.col > col) {
count = Math.max(1, Math.min(end.col - col, 80 - ci));
}
const pointer = ' '.repeat(ci) + '^'.repeat(count);
error.message += `:\n\n${lineStr}\n${pointer}\n`;
}
};
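/*
 * Sketch of the shape produced by prettifyError above: " at line L, column C"
 * is appended to error.message, followed by the offending source line
 * (trimmed to at most 80 characters) and a caret pointer under the reported
 * position, e.g.
 *
 *   <message> at line 2, column 3:
 *
 *   <source line>
 *     ^
 */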
function resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) {
let spaceBefore = false;
let atNewline = startOnNewline;
let hasSpace = startOnNewline;
let comment = '';
let commentSep = '';
let hasNewline = false;
let reqSpace = false;
let tab = null;
let anchor = null;
let tag = null;
let newlineAfterProp = null;
let comma = null;
let found = null;
let start = null;
for (const token of tokens) {
if (reqSpace) {
if (token.type !== 'space' &&
token.type !== 'newline' &&
token.type !== 'comma')
onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
reqSpace = false;
}
if (tab) {
if (atNewline && token.type !== 'comment' && token.type !== 'newline') {
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
}
tab = null;
}
switch (token.type) {
case 'space':
// At the doc level, tabs at line start may be parsed
// as leading white space rather than indentation.
// In a flow collection, only the parser handles indent.
if (!flow &&
(indicator !== 'doc-start' || next?.type !== 'flow-collection') &&
token.source.includes('\t')) {
tab = token;
}
hasSpace = true;
break;
case 'comment': {
if (!hasSpace)
onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
const cb = token.source.substring(1) || ' ';
if (!comment)
comment = cb;
else
comment += commentSep + cb;
commentSep = '';
atNewline = false;
break;
}
case 'newline':
if (atNewline) {
if (comment)
comment += token.source;
else
spaceBefore = true;
}
else
commentSep += token.source;
atNewline = true;
hasNewline = true;
if (anchor || tag)
newlineAfterProp = token;
hasSpace = true;
break;
case 'anchor':
if (anchor)
onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');
if (token.source.endsWith(':'))
onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
anchor = token;
if (start === null)
start = token.offset;
atNewline = false;
hasSpace = false;
reqSpace = true;
break;
case 'tag': {
if (tag)
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
tag = token;
if (start === null)
start = token.offset;
atNewline = false;
hasSpace = false;
reqSpace = true;
break;
}
case indicator:
// Could here handle preceding comments differently
if (anchor || tag)
onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);
if (found)
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);
found = token;
atNewline =
indicator === 'seq-item-ind' || indicator === 'explicit-key-ind';
hasSpace = false;
break;
case 'comma':
if (flow) {
if (comma)
onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);
comma = token;
atNewline = false;
hasSpace = false;
break;
}
// else fallthrough
default:
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);
atNewline = false;
hasSpace = false;
}
}
const last = tokens[tokens.length - 1];
const end = last ? last.offset + last.source.length : offset;
if (reqSpace &&
next &&
next.type !== 'space' &&
next.type !== 'newline' &&
next.type !== 'comma' &&
(next.type !== 'scalar' || next.source !== '')) {
onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
}
if (tab &&
((atNewline && tab.indent <= parentIndent) ||
next?.type === 'block-map' ||
next?.type === 'block-seq'))
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
return {
comma,
found,
spaceBefore,
comment,
hasNewline,
anchor,
tag,
newlineAfterProp,
end,
start: start ?? end
};
}
function containsNewline(key) {
if (!key)
return null;
switch (key.type) {
case 'alias':
case 'scalar':
case 'double-quoted-scalar':
case 'single-quoted-scalar':
if (key.source.includes('\n'))
return true;
if (key.end)
for (const st of key.end)
if (st.type === 'newline')
return true;
return false;
case 'flow-collection':
for (const it of key.items) {
for (const st of it.start)
if (st.type === 'newline')
return true;
if (it.sep)
for (const st of it.sep)
if (st.type === 'newline')
return true;
if (containsNewline(it.key) || containsNewline(it.value))
return true;
}
return false;
default:
return true;
}
}
function flowIndentCheck(indent, fc, onError) {
if (fc?.type === 'flow-collection') {
const end = fc.end[0];
if (end.indent === indent &&
(end.source === ']' || end.source === '}') &&
containsNewline(fc)) {
const msg = 'Flow end indicator should be more indented than parent';
onError(end, 'BAD_INDENT', msg, true);
}
}
}
function mapIncludes(ctx, items, search) {
const { uniqueKeys } = ctx.options;
if (uniqueKeys === false)
return false;
const isEqual = typeof uniqueKeys === 'function'
? uniqueKeys
: (a, b) => a === b ||
(isScalar$1(a) &&
isScalar$1(b) &&
a.value === b.value &&
!(a.value === '<<' && ctx.schema.merge));
return items.some(pair => isEqual(pair.key, search));
}
const startColMsg = 'All mapping items must start at the same column';
function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) {
const NodeClass = tag?.nodeClass ?? YAMLMap;
const map = new NodeClass(ctx.schema);
if (ctx.atRoot)
ctx.atRoot = false;
let offset = bm.offset;
let commentEnd = null;
for (const collItem of bm.items) {
const { start, key, sep, value } = collItem;
// key properties
const keyProps = resolveProps(start, {
indicator: 'explicit-key-ind',
next: key ?? sep?.[0],
offset,
onError,
parentIndent: bm.indent,
startOnNewline: true
});
const implicitKey = !keyProps.found;
if (implicitKey) {
if (key) {
if (key.type === 'block-seq')
onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');
else if ('indent' in key && key.indent !== bm.indent)
onError(offset, 'BAD_INDENT', startColMsg);
}
if (!keyProps.anchor && !keyProps.tag && !sep) {
commentEnd = keyProps.end;
if (keyProps.comment) {
if (map.comment)
map.comment += '\n' + keyProps.comment;
else
map.comment = keyProps.comment;
}
continue;
}
if (keyProps.newlineAfterProp || containsNewline(key)) {
onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
}
}
else if (keyProps.found?.indent !== bm.indent) {
onError(offset, 'BAD_INDENT', startColMsg);
}
// key value
const keyStart = keyProps.end;
const keyNode = key
? composeNode(ctx, key, keyProps, onError)
: composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
if (ctx.schema.compat)
flowIndentCheck(bm.indent, key, onError);
if (mapIncludes(ctx, map.items, keyNode))
onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
// value properties
const valueProps = resolveProps(sep ?? [], {
indicator: 'map-value-ind',
next: value,
offset: keyNode.range[2],
onError,
parentIndent: bm.indent,
startOnNewline: !key || key.type === 'block-scalar'
});
offset = valueProps.end;
if (valueProps.found) {
if (implicitKey) {
if (value?.type === 'block-map' && !valueProps.hasNewline)
onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');
if (ctx.options.strict &&
keyProps.start < valueProps.found.offset - 1024)
onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
}
// value value
const valueNode = value
? composeNode(ctx, value, valueProps, onError)
: composeEmptyNode(ctx, offset, sep, null, valueProps, onError);
if (ctx.schema.compat)
flowIndentCheck(bm.indent, value, onError);
offset = valueNode.range[2];
const pair = new Pair(keyNode, valueNode);
if (ctx.options.keepSourceTokens)
pair.srcToken = collItem;
map.items.push(pair);
}
else {
// key with no value
if (implicitKey)
onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');
if (valueProps.comment) {
if (keyNode.comment)
keyNode.comment += '\n' + valueProps.comment;
else
keyNode.comment = valueProps.comment;
}
const pair = new Pair(keyNode);
if (ctx.options.keepSourceTokens)
pair.srcToken = collItem;
map.items.push(pair);
}
}
if (commentEnd && commentEnd < offset)
onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content');
map.range = [bm.offset, offset, commentEnd ?? offset];
return map;
}
function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) {
const NodeClass = tag?.nodeClass ?? YAMLSeq;
const seq = new NodeClass(ctx.schema);
if (ctx.atRoot)
ctx.atRoot = false;
let offset = bs.offset;
let commentEnd = null;
for (const { start, value } of bs.items) {
const props = resolveProps(start, {
indicator: 'seq-item-ind',
next: value,
offset,
onError,
parentIndent: bs.indent,
startOnNewline: true
});
if (!props.found) {
if (props.anchor || props.tag || value) {
if (value && value.type === 'block-seq')
onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column');
else
onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');
}
else {
commentEnd = props.end;
if (props.comment)
seq.comment = props.comment;
continue;
}
}
const node = value
? composeNode(ctx, value, props, onError)
: composeEmptyNode(ctx, props.end, start, null, props, onError);
if (ctx.schema.compat)
flowIndentCheck(bs.indent, value, onError);
offset = node.range[2];
seq.items.push(node);
}
seq.range = [bs.offset, offset, commentEnd ?? offset];
return seq;
}
function resolveEnd(end, offset, reqSpace, onError) {
let comment = '';
if (end) {
let hasSpace = false;
let sep = '';
for (const token of end) {
const { source, type } = token;
switch (type) {
case 'space':
hasSpace = true;
break;
case 'comment': {
if (reqSpace && !hasSpace)
onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
const cb = source.substring(1) || ' ';
if (!comment)
comment = cb;
else
comment += sep + cb;
sep = '';
break;
}
case 'newline':
if (comment)
sep += source;
hasSpace = true;
break;
default:
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);
}
offset += source.length;
}
}
return { comment, offset };
}
const blockMsg = 'Block collections are not allowed within flow collections';
const isBlock$1 = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');
function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) {
const isMap = fc.start.source === '{';
const fcName = isMap ? 'flow map' : 'flow sequence';
const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap : YAMLSeq));
const coll = new NodeClass(ctx.schema);
coll.flow = true;
const atRoot = ctx.atRoot;
if (atRoot)
ctx.atRoot = false;
let offset = fc.offset + fc.start.source.length;
for (let i = 0; i < fc.items.length; ++i) {
const collItem = fc.items[i];
const { start, key, sep, value } = collItem;
const props = resolveProps(start, {
flow: fcName,
indicator: 'explicit-key-ind',
next: key ?? sep?.[0],
offset,
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (!props.found) {
if (!props.anchor && !props.tag && !sep && !value) {
if (i === 0 && props.comma)
onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
else if (i < fc.items.length - 1)
onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);
if (props.comment) {
if (coll.comment)
coll.comment += '\n' + props.comment;
else
coll.comment = props.comment;
}
offset = props.end;
continue;
}
if (!isMap && ctx.options.strict && containsNewline(key))
onError(key, // checked by containsNewline()
'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
}
if (i === 0) {
if (props.comma)
onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
}
else {
if (!props.comma)
onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);
if (props.comment) {
let prevItemComment = '';
loop: for (const st of start) {
switch (st.type) {
case 'comma':
case 'space':
break;
case 'comment':
prevItemComment = st.source.substring(1);
break loop;
default:
break loop;
}
}
if (prevItemComment) {
let prev = coll.items[coll.items.length - 1];
if (isPair(prev))
prev = prev.value ?? prev.key;
if (prev.comment)
prev.comment += '\n' + prevItemComment;
else
prev.comment = prevItemComment;
props.comment = props.comment.substring(prevItemComment.length + 1);
}
}
}
if (!isMap && !sep && !props.found) {
// item is a value in a seq
// → key & sep are empty, start does not include ? or :
const valueNode = value
? composeNode(ctx, value, props, onError)
: composeEmptyNode(ctx, props.end, sep, null, props, onError);
coll.items.push(valueNode);
offset = valueNode.range[2];
if (isBlock$1(value))
onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
}
else {
// item is a key+value pair
// key value
const keyStart = props.end;
const keyNode = key
? composeNode(ctx, key, props, onError)
: composeEmptyNode(ctx, keyStart, start, null, props, onError);
if (isBlock$1(key))
onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
// value properties
const valueProps = resolveProps(sep ?? [], {
flow: fcName,
indicator: 'map-value-ind',
next: value,
offset: keyNode.range[2],
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (valueProps.found) {
if (!isMap && !props.found && ctx.options.strict) {
if (sep)
for (const st of sep) {
if (st === valueProps.found)
break;
if (st.type === 'newline') {
onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
break;
}
}
if (props.start < valueProps.found.offset - 1024)
onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
}
}
else if (value) {
if ('source' in value && value.source && value.source[0] === ':')
onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);
else
onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);
}
// value value
const valueNode = value
? composeNode(ctx, value, valueProps, onError)
: valueProps.found
? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)
: null;
if (valueNode) {
if (isBlock$1(value))
onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
}
else if (valueProps.comment) {
if (keyNode.comment)
keyNode.comment += '\n' + valueProps.comment;
else
keyNode.comment = valueProps.comment;
}
const pair = new Pair(keyNode, valueNode);
if (ctx.options.keepSourceTokens)
pair.srcToken = collItem;
if (isMap) {
const map = coll;
if (mapIncludes(ctx, map.items, keyNode))
onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
map.items.push(pair);
}
else {
const map = new YAMLMap(ctx.schema);
map.flow = true;
map.items.push(pair);
coll.items.push(map);
}
offset = valueNode ? valueNode.range[2] : valueProps.end;
}
}
const expectedEnd = isMap ? '}' : ']';
const [ce, ...ee] = fc.end;
let cePos = offset;
if (ce && ce.source === expectedEnd)
cePos = ce.offset + ce.source.length;
else {
const name = fcName[0].toUpperCase() + fcName.substring(1);
const msg = atRoot
? `${name} must end with a ${expectedEnd}`
: `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;
onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);
if (ce && ce.source.length !== 1)
ee.unshift(ce);
}
if (ee.length > 0) {
const end = resolveEnd(ee, cePos, ctx.options.strict, onError);
if (end.comment) {
if (coll.comment)
coll.comment += '\n' + end.comment;
else
coll.comment = end.comment;
}
coll.range = [fc.offset, cePos, end.offset];
}
else {
coll.range = [fc.offset, cePos, cePos];
}
return coll;
}
function resolveCollection(CN, ctx, token, onError, tagName, tag) {
const coll = token.type === 'block-map'
? resolveBlockMap(CN, ctx, token, onError, tag)
: token.type === 'block-seq'
? resolveBlockSeq(CN, ctx, token, onError, tag)
: resolveFlowCollection(CN, ctx, token, onError, tag);
const Coll = coll.constructor;
// If we got a tagName matching the class, or the tag name is '!',
// then use the tagName from the node class used to create it.
if (tagName === '!' || tagName === Coll.tagName) {
coll.tag = Coll.tagName;
return coll;
}
if (tagName)
coll.tag = tagName;
return coll;
}
function composeCollection(CN, ctx, token, props, onError) {
const tagToken = props.tag;
const tagName = !tagToken
? null
: ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
if (token.type === 'block-seq') {
const { anchor, newlineAfterProp: nl } = props;
const lastProp = anchor && tagToken
? anchor.offset > tagToken.offset
? anchor
: tagToken
: (anchor ?? tagToken);
if (lastProp && (!nl || nl.offset < lastProp.offset)) {
const message = 'Missing newline after block sequence props';
onError(lastProp, 'MISSING_CHAR', message);
}
}
const expType = token.type === 'block-map'
? 'map'
: token.type === 'block-seq'
? 'seq'
: token.start.source === '{'
? 'map'
: 'seq';
// shortcut: check if it's a generic YAMLMap or YAMLSeq
// before jumping into the custom tag logic.
if (!tagToken ||
!tagName ||
tagName === '!' ||
(tagName === YAMLMap.tagName && expType === 'map') ||
(tagName === YAMLSeq.tagName && expType === 'seq')) {
return resolveCollection(CN, ctx, token, onError, tagName);
}
let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType);
if (!tag) {
const kt = ctx.schema.knownTags[tagName];
if (kt && kt.collection === expType) {
ctx.schema.tags.push(Object.assign({}, kt, { default: false }));
tag = kt;
}
else {
if (kt?.collection) {
onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true);
}
else {
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
}
return resolveCollection(CN, ctx, token, onError, tagName);
}
}
const coll = resolveCollection(CN, ctx, token, onError, tagName, tag);
const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll;
const node = isNode$1(res)
? res
: new Scalar(res);
node.range = coll.range;
node.tag = tagName;
if (tag?.format)
node.format = tag.format;
return node;
}
function resolveBlockScalar(ctx, scalar, onError) {
const start = scalar.offset;
const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError);
if (!header)
return { value: '', type: null, comment: '', range: [start, start, start] };
const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL;
const lines = scalar.source ? splitLines(scalar.source) : [];
// determine the end of content & start of chomping
let chompStart = lines.length;
for (let i = lines.length - 1; i >= 0; --i) {
const content = lines[i][1];
if (content === '' || content === '\r')
chompStart = i;
else
break;
}
// shortcut for empty contents
if (chompStart === 0) {
const value = header.chomp === '+' && lines.length > 0
? '\n'.repeat(Math.max(1, lines.length - 1))
: '';
let end = start + header.length;
if (scalar.source)
end += scalar.source.length;
return { value, type, comment: header.comment, range: [start, end, end] };
}
// find the indentation level to trim from start
let trimIndent = scalar.indent + header.indent;
let offset = scalar.offset + header.length;
let contentStart = 0;
for (let i = 0; i < chompStart; ++i) {
const [indent, content] = lines[i];
if (content === '' || content === '\r') {
if (header.indent === 0 && indent.length > trimIndent)
trimIndent = indent.length;
}
else {
if (indent.length < trimIndent) {
const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
onError(offset + indent.length, 'MISSING_CHAR', message);
}
if (header.indent === 0)
trimIndent = indent.length;
contentStart = i;
if (trimIndent === 0 && !ctx.atRoot) {
const message = 'Block scalar values in collections must be indented';
onError(offset, 'BAD_INDENT', message);
}
break;
}
offset += indent.length + content.length + 1;
}
// include trailing more-indented empty lines in content
for (let i = lines.length - 1; i >= chompStart; --i) {
if (lines[i][0].length > trimIndent)
chompStart = i + 1;
}
let value = '';
let sep = '';
let prevMoreIndented = false;
// leading whitespace is kept intact
for (let i = 0; i < contentStart; ++i)
value += lines[i][0].slice(trimIndent) + '\n';
for (let i = contentStart; i < chompStart; ++i) {
let [indent, content] = lines[i];
offset += indent.length + content.length + 1;
const crlf = content[content.length - 1] === '\r';
if (crlf)
content = content.slice(0, -1);
/* istanbul ignore if already caught in lexer */
if (content && indent.length < trimIndent) {
const src = header.indent
? 'explicit indentation indicator'
: 'first line';
const message = `Block scalar lines must not be less indented than their ${src}`;
onError(offset - content.length - (crlf ? 2 : 1), 'BAD_INDENT', message);
indent = '';
}
if (type === Scalar.BLOCK_LITERAL) {
value += sep + indent.slice(trimIndent) + content;
sep = '\n';
}
else if (indent.length > trimIndent || content[0] === '\t') {
// more-indented content within a folded block
if (sep === ' ')
sep = '\n';
else if (!prevMoreIndented && sep === '\n')
sep = '\n\n';
value += sep + indent.slice(trimIndent) + content;
sep = '\n';
prevMoreIndented = true;
}
else if (content === '') {
// empty line
if (sep === '\n')
value += '\n';
else
sep = '\n';
}
else {
value += sep + content;
sep = ' ';
prevMoreIndented = false;
}
}
switch (header.chomp) {
case '-':
break;
case '+':
for (let i = chompStart; i < lines.length; ++i)
value += '\n' + lines[i][0].slice(trimIndent);
if (value[value.length - 1] !== '\n')
value += '\n';
break;
default:
value += '\n';
}
const end = start + header.length + scalar.source.length;
return { value, type, comment: header.comment, range: [start, end, end] };
}
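/*
 * Chomping examples for the switch above, using literal block scalars
 * (`parse` stands in for the yaml library's top-level parse function):
 *
 * ```ts
 * parse('|\n  a\n  b\n\n')   // 'a\nb\n'    clip: single trailing newline
 * parse('|-\n  a\n  b\n\n')  // 'a\nb'      strip: no trailing newline
 * parse('|+\n  a\n  b\n\n')  // 'a\nb\n\n'  keep: all trailing newlines
 * ```
 */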
function parseBlockScalarHeader({ offset, props }, strict, onError) {
/* istanbul ignore if should not happen */
if (props[0].type !== 'block-scalar-header') {
onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');
return null;
}
const { source } = props[0];
const mode = source[0];
let indent = 0;
let chomp = '';
let error = -1;
for (let i = 1; i < source.length; ++i) {
const ch = source[i];
if (!chomp && (ch === '-' || ch === '+'))
chomp = ch;
else {
const n = Number(ch);
if (!indent && n)
indent = n;
else if (error === -1)
error = offset + i;
}
}
if (error !== -1)
onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);
let hasSpace = false;
let comment = '';
let length = source.length;
for (let i = 1; i < props.length; ++i) {
const token = props[i];
switch (token.type) {
case 'space':
hasSpace = true;
// fallthrough
case 'newline':
length += token.source.length;
break;
case 'comment':
if (strict && !hasSpace) {
const message = 'Comments must be separated from other tokens by white space characters';
onError(token, 'MISSING_CHAR', message);
}
length += token.source.length;
comment = token.source.substring(1);
break;
case 'error':
onError(token, 'UNEXPECTED_TOKEN', token.message);
length += token.source.length;
break;
/* istanbul ignore next should not happen */
default: {
const message = `Unexpected token in block scalar header: ${token.type}`;
onError(token, 'UNEXPECTED_TOKEN', message);
const ts = token.source;
if (ts && typeof ts === 'string')
length += ts.length;
}
}
}
return { mode, indent, chomp, comment, length };
}
/** @returns Array of lines split up as `[indent, content]` */
function splitLines(source) {
const split = source.split(/\n( *)/);
const first = split[0];
const m = first.match(/^( *)/);
const line0 = m?.[1]
? [m[1], first.slice(m[1].length)]
: ['', first];
const lines = [line0];
for (let i = 1; i < split.length; i += 2)
lines.push([split[i], split[i + 1]]);
return lines;
}
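/*
 * For example, splitLines('  a\n   b\n') returns
 * [['  ', 'a'], ['   ', 'b'], ['', '']]; the final empty entry comes from the
 * trailing newline.
 */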
function resolveFlowScalar(scalar, strict, onError) {
const { offset, type, source, end } = scalar;
let _type;
let value;
const _onError = (rel, code, msg) => onError(offset + rel, code, msg);
switch (type) {
case 'scalar':
_type = Scalar.PLAIN;
value = plainValue(source, _onError);
break;
case 'single-quoted-scalar':
_type = Scalar.QUOTE_SINGLE;
value = singleQuotedValue(source, _onError);
break;
case 'double-quoted-scalar':
_type = Scalar.QUOTE_DOUBLE;
value = doubleQuotedValue(source, _onError);
break;
/* istanbul ignore next should not happen */
default:
onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);
return {
value: '',
type: null,
comment: '',
range: [offset, offset + source.length, offset + source.length]
};
}
const valueEnd = offset + source.length;
const re = resolveEnd(end, valueEnd, strict, onError);
return {
value,
type: _type,
comment: re.comment,
range: [offset, valueEnd, re.offset]
};
}
function plainValue(source, onError) {
let badChar = '';
switch (source[0]) {
/* istanbul ignore next should not happen */
case '\t':
badChar = 'a tab character';
break;
case ',':
badChar = 'flow indicator character ,';
break;
case '%':
badChar = 'directive indicator character %';
break;
case '|':
case '>': {
badChar = `block scalar indicator ${source[0]}`;
break;
}
case '@':
case '`': {
badChar = `reserved character ${source[0]}`;
break;
}
}
if (badChar)
onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);
return foldLines(source);
}
function singleQuotedValue(source, onError) {
if (source[source.length - 1] !== "'" || source.length === 1)
onError(source.length, 'MISSING_CHAR', "Missing closing 'quote");
return foldLines(source.slice(1, -1)).replace(/''/g, "'");
}
function foldLines(source) {
/**
* The negative lookbehind here and in the `re` RegExp is to
* prevent causing a polynomial search time in certain cases.
*
* The try-catch is for Safari, which doesn't support this yet:
* https://caniuse.com/js-regexp-lookbehind
*/
let first, line;
try {
first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
}
catch {
first = /(.*?)[ \t]*\r?\n/sy;
line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
}
let match = first.exec(source);
if (!match)
return source;
let res = match[1];
let sep = ' ';
let pos = first.lastIndex;
line.lastIndex = pos;
while ((match = line.exec(source))) {
if (match[1] === '') {
if (sep === '\n')
res += sep;
else
sep = '\n';
}
else {
res += sep + match[1];
sep = ' ';
}
pos = line.lastIndex;
}
const last = /[ \t]*(.*)/sy;
last.lastIndex = pos;
match = last.exec(source);
return res + sep + (match?.[1] ?? '');
}
function doubleQuotedValue(source, onError) {
let res = '';
for (let i = 1; i < source.length - 1; ++i) {
const ch = source[i];
if (ch === '\r' && source[i + 1] === '\n')
continue;
if (ch === '\n') {
const { fold, offset } = foldNewline(source, i);
res += fold;
i = offset;
}
else if (ch === '\\') {
let next = source[++i];
const cc = escapeCodes[next];
if (cc)
res += cc;
else if (next === '\n') {
// skip escaped newlines, but still trim the following line
next = source[i + 1];
while (next === ' ' || next === '\t')
next = source[++i + 1];
}
else if (next === '\r' && source[i + 1] === '\n') {
// skip escaped CRLF newlines, but still trim the following line
next = source[++i + 1];
while (next === ' ' || next === '\t')
next = source[++i + 1];
}
else if (next === 'x' || next === 'u' || next === 'U') {
const length = { x: 2, u: 4, U: 8 }[next];
res += parseCharCode(source, i + 1, length, onError);
i += length;
}
else {
const raw = source.substr(i - 1, 2);
onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
res += raw;
}
}
else if (ch === ' ' || ch === '\t') {
// trim trailing whitespace
const wsStart = i;
let next = source[i + 1];
while (next === ' ' || next === '\t')
next = source[++i + 1];
if (next !== '\n' && !(next === '\r' && source[i + 1] === '\n'))
res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
}
else {
res += ch;
}
}
if (source[source.length - 1] !== '"' || source.length === 1)
onError(source.length, 'MISSING_CHAR', 'Missing closing "quote');
return res;
}
/**
* Fold a single newline into a space, multiple newlines to N - 1 newlines.
* Presumes `source[offset] === '\n'`
*/
function foldNewline(source, offset) {
let fold = '';
let ch = source[offset + 1];
while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') {
if (ch === '\r' && source[offset + 2] !== '\n')
break;
if (ch === '\n')
fold += '\n';
offset += 1;
ch = source[offset + 1];
}
if (!fold)
fold = ' ';
return { fold, offset };
}
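/*
 * Examples of the folding above:
 *
 * ```ts
 * foldNewline('a\nb', 1)    // { fold: ' ', offset: 1 }  single newline becomes a space
 * foldNewline('a\n\nb', 1)  // { fold: '\n', offset: 2 } blank line kept as one '\n'
 * ```
 */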
const escapeCodes = {
'0': '\0', // null character
a: '\x07', // bell character
b: '\b', // backspace
e: '\x1b', // escape character
f: '\f', // form feed
n: '\n', // line feed
r: '\r', // carriage return
t: '\t', // horizontal tab
v: '\v', // vertical tab
N: '\u0085', // Unicode next line
_: '\u00a0', // Unicode non-breaking space
L: '\u2028', // Unicode line separator
P: '\u2029', // Unicode paragraph separator
' ': ' ',
'"': '"',
'/': '/',
'\\': '\\',
'\t': '\t'
};
function parseCharCode(source, offset, length, onError) {
const cc = source.substr(offset, length);
const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);
const code = ok ? parseInt(cc, 16) : NaN;
if (isNaN(code)) {
const raw = source.substr(offset - 2, length + 2);
onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
return raw;
}
return String.fromCodePoint(code);
}
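/*
 * Example: for the escaped input `\u00E9`, doubleQuotedValue() calls
 * parseCharCode('\\u00E9', 2, 4, onError), which parses '00E9' as 0xE9 and
 * returns 'é'.
 */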
function composeScalar(ctx, token, tagToken, onError) {
const { value, type, comment, range } = token.type === 'block-scalar'
? resolveBlockScalar(ctx, token, onError)
: resolveFlowScalar(token, ctx.options.strict, onError);
const tagName = tagToken
? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
: null;
const tag = tagToken && tagName
? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
: token.type === 'scalar'
? findScalarTagByTest(ctx, value, token, onError)
: ctx.schema[SCALAR$1];
let scalar;
try {
const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
scalar = isScalar$1(res) ? res : new Scalar(res);
}
catch (error) {
const msg = error instanceof Error ? error.message : String(error);
onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg);
scalar = new Scalar(value);
}
scalar.range = range;
scalar.source = value;
if (type)
scalar.type = type;
if (tagName)
scalar.tag = tagName;
if (tag.format)
scalar.format = tag.format;
if (comment)
scalar.comment = comment;
return scalar;
}
function findScalarTagByName(schema, value, tagName, tagToken, onError) {
if (tagName === '!')
return schema[SCALAR$1]; // non-specific tag
const matchWithTest = [];
for (const tag of schema.tags) {
if (!tag.collection && tag.tag === tagName) {
if (tag.default && tag.test)
matchWithTest.push(tag);
else
return tag;
}
}
for (const tag of matchWithTest)
if (tag.test?.test(value))
return tag;
const kt = schema.knownTags[tagName];
if (kt && !kt.collection) {
// Ensure that the known tag is available for stringifying,
// but does not get used by default.
schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
return kt;
}
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
return schema[SCALAR$1];
}
function findScalarTagByTest({ directives, schema }, value, token, onError) {
const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[SCALAR$1];
if (schema.compat) {
const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??
schema[SCALAR$1];
if (tag.tag !== compat.tag) {
const ts = directives.tagString(tag.tag);
const cs = directives.tagString(compat.tag);
const msg = `Value may be parsed as either ${ts} or ${cs}`;
onError(token, 'TAG_RESOLVE_FAILED', msg, true);
}
}
return tag;
}
function emptyScalarPosition(offset, before, pos) {
if (before) {
if (pos === null)
pos = before.length;
for (let i = pos - 1; i >= 0; --i) {
let st = before[i];
switch (st.type) {
case 'space':
case 'comment':
case 'newline':
offset -= st.source.length;
continue;
}
// Technically, an empty scalar is immediately after the last non-empty
// node, but it's more useful to place it after any whitespace.
st = before[++i];
while (st?.type === 'space') {
offset += st.source.length;
st = before[++i];
}
break;
}
}
return offset;
}
const CN = { composeNode, composeEmptyNode };
function composeNode(ctx, token, props, onError) {
const { spaceBefore, comment, anchor, tag } = props;
let node;
let isSrcToken = true;
switch (token.type) {
case 'alias':
node = composeAlias(ctx, token, onError);
if (anchor || tag)
onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');
break;
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
case 'block-scalar':
node = composeScalar(ctx, token, tag, onError);
if (anchor)
node.anchor = anchor.source.substring(1);
break;
case 'block-map':
case 'block-seq':
case 'flow-collection':
node = composeCollection(CN, ctx, token, props, onError);
if (anchor)
node.anchor = anchor.source.substring(1);
break;
default: {
const message = token.type === 'error'
? token.message
: `Unsupported token (type: ${token.type})`;
onError(token, 'UNEXPECTED_TOKEN', message);
node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);
isSrcToken = false;
}
}
if (anchor && node.anchor === '')
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
if (spaceBefore)
node.spaceBefore = true;
if (comment) {
if (token.type === 'scalar' && token.source === '')
node.comment = comment;
else
node.commentBefore = comment;
}
// @ts-expect-error Type checking misses meaning of isSrcToken
if (ctx.options.keepSourceTokens && isSrcToken)
node.srcToken = token;
return node;
}
function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) {
const token = {
type: 'scalar',
offset: emptyScalarPosition(offset, before, pos),
indent: -1,
source: ''
};
const node = composeScalar(ctx, token, tag, onError);
if (anchor) {
node.anchor = anchor.source.substring(1);
if (node.anchor === '')
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
}
if (spaceBefore)
node.spaceBefore = true;
if (comment) {
node.comment = comment;
node.range[2] = end;
}
return node;
}
function composeAlias({ options }, { offset, source, end }, onError) {
const alias = new Alias(source.substring(1));
if (alias.source === '')
onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');
if (alias.source.endsWith(':'))
onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);
const valueEnd = offset + source.length;
const re = resolveEnd(end, valueEnd, options.strict, onError);
alias.range = [offset, valueEnd, re.offset];
if (re.comment)
alias.comment = re.comment;
return alias;
}
function composeDoc(options, directives, { offset, start, value, end }, onError) {
const opts = Object.assign({ _directives: directives }, options);
const doc = new Document(undefined, opts);
const ctx = {
atRoot: true,
directives: doc.directives,
options: doc.options,
schema: doc.schema
};
const props = resolveProps(start, {
indicator: 'doc-start',
next: value ?? end?.[0],
offset,
onError,
parentIndent: 0,
startOnNewline: true
});
if (props.found) {
doc.directives.docStart = true;
if (value &&
(value.type === 'block-map' || value.type === 'block-seq') &&
!props.hasNewline)
onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');
}
// @ts-expect-error If Contents is set, let's trust the user
doc.contents = value
? composeNode(ctx, value, props, onError)
: composeEmptyNode(ctx, props.end, start, null, props, onError);
const contentEnd = doc.contents.range[2];
const re = resolveEnd(end, contentEnd, false, onError);
if (re.comment)
doc.comment = re.comment;
doc.range = [offset, contentEnd, re.offset];
return doc;
}
function getErrorPos(src) {
if (typeof src === 'number')
return [src, src + 1];
if (Array.isArray(src))
return src.length === 2 ? src : [src[0], src[1]];
const { offset, source } = src;
return [offset, offset + (typeof source === 'string' ? source.length : 1)];
}
function parsePrelude(prelude) {
let comment = '';
let atComment = false;
let afterEmptyLine = false;
for (let i = 0; i < prelude.length; ++i) {
const source = prelude[i];
switch (source[0]) {
case '#':
comment +=
(comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') +
(source.substring(1) || ' ');
atComment = true;
afterEmptyLine = false;
break;
case '%':
if (prelude[i + 1]?.[0] !== '#')
i += 1;
atComment = false;
break;
default:
// This may be wrong after doc-end, but in that case it doesn't matter
if (!atComment)
afterEmptyLine = true;
atComment = false;
}
}
return { comment, afterEmptyLine };
}
/**
* Compose a stream of CST nodes into a stream of YAML Documents.
*
* ```ts
* import { Composer, Parser } from 'yaml'
*
* const src: string = ...
* const tokens = new Parser().parse(src)
* const docs = new Composer().compose(tokens)
* ```
*/
class Composer {
constructor(options = {}) {
this.doc = null;
this.atDirectives = false;
this.prelude = [];
this.errors = [];
this.warnings = [];
this.onError = (source, code, message, warning) => {
const pos = getErrorPos(source);
if (warning)
this.warnings.push(new YAMLWarning(pos, code, message));
else
this.errors.push(new YAMLParseError(pos, code, message));
};
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
this.directives = new Directives({ version: options.version || '1.2' });
this.options = options;
}
decorate(doc, afterDoc) {
const { comment, afterEmptyLine } = parsePrelude(this.prelude);
//console.log({ dc: doc.comment, prelude, comment })
if (comment) {
const dc = doc.contents;
if (afterDoc) {
doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
}
else if (afterEmptyLine || doc.directives.docStart || !dc) {
doc.commentBefore = comment;
}
else if (isCollection$1(dc) && !dc.flow && dc.items.length > 0) {
let it = dc.items[0];
if (isPair(it))
it = it.key;
const cb = it.commentBefore;
it.commentBefore = cb ? `${comment}\n${cb}` : comment;
}
else {
const cb = dc.commentBefore;
dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
}
}
if (afterDoc) {
Array.prototype.push.apply(doc.errors, this.errors);
Array.prototype.push.apply(doc.warnings, this.warnings);
}
else {
doc.errors = this.errors;
doc.warnings = this.warnings;
}
this.prelude = [];
this.errors = [];
this.warnings = [];
}
/**
* Current stream status information.
*
* Mostly useful at the end of input for an empty stream.
*/
streamInfo() {
return {
comment: parsePrelude(this.prelude).comment,
directives: this.directives,
errors: this.errors,
warnings: this.warnings
};
}
/**
* Compose tokens into documents.
*
* @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
* @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
*/
*compose(tokens, forceDoc = false, endOffset = -1) {
for (const token of tokens)
yield* this.next(token);
yield* this.end(forceDoc, endOffset);
}
/** Advance the composer by one CST token. */
*next(token) {
switch (token.type) {
case 'directive':
this.directives.add(token.source, (offset, message, warning) => {
const pos = getErrorPos(token);
pos[0] += offset;
this.onError(pos, 'BAD_DIRECTIVE', message, warning);
});
this.prelude.push(token.source);
this.atDirectives = true;
break;
case 'document': {
const doc = composeDoc(this.options, this.directives, token, this.onError);
if (this.atDirectives && !doc.directives.docStart)
this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');
this.decorate(doc, false);
if (this.doc)
yield this.doc;
this.doc = doc;
this.atDirectives = false;
break;
}
case 'byte-order-mark':
case 'space':
break;
case 'comment':
case 'newline':
this.prelude.push(token.source);
break;
case 'error': {
const msg = token.source
? `${token.message}: ${JSON.stringify(token.source)}`
: token.message;
const error = new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);
if (this.atDirectives || !this.doc)
this.errors.push(error);
else
this.doc.errors.push(error);
break;
}
case 'doc-end': {
if (!this.doc) {
const msg = 'Unexpected doc-end without preceding document';
this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));
break;
}
this.doc.directives.docEnd = true;
const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
this.decorate(this.doc, true);
if (end.comment) {
const dc = this.doc.comment;
this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
}
this.doc.range[2] = end.offset;
break;
}
default:
this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));
}
}
/**
* Call at end of input to yield any remaining document.
*
* @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
* @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
*/
*end(forceDoc = false, endOffset = -1) {
if (this.doc) {
this.decorate(this.doc, true);
yield this.doc;
this.doc = null;
}
else if (forceDoc) {
const opts = Object.assign({ _directives: this.directives }, this.options);
const doc = new Document(undefined, opts);
if (this.atDirectives)
this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');
doc.range = [0, endOffset, endOffset];
this.decorate(doc, false);
yield doc;
}
}
}
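/*
* A minimal usage sketch for the streaming API above, assuming the `yaml`
* package's public `Parser` and `Composer` exports: documents are composed
* incrementally from CST tokens, and `end()` flushes the last one.
*
* ```ts
* import { Composer, Parser } from 'yaml'
*
* const docs = []
* const parser = new Parser()
* const composer = new Composer()
* for (const token of parser.parse('a: 1\n---\nb: 2\n'))
*   for (const doc of composer.next(token)) docs.push(doc)
* for (const doc of composer.end()) docs.push(doc)
* // docs.length === 2
* ```
*/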
function resolveAsScalar(token, strict = true, onError) {
if (token) {
const _onError = (pos, code, message) => {
const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;
if (onError)
onError(offset, code, message);
else
throw new YAMLParseError([offset, offset + 1], code, message);
};
switch (token.type) {
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
return resolveFlowScalar(token, strict, _onError);
case 'block-scalar':
return resolveBlockScalar({ options: { strict } }, token, _onError);
}
}
return null;
}
/**
* Create a new scalar token with `value`
*
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
* as this function does not support any schema operations and won't check for such conflicts.
*
* @param value The string representation of the value, which will have its content properly indented.
* @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
* @param context.indent The indent level of the token.
* @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
* @param context.offset The offset position of the token.
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
*/
function createScalarToken(value, context) {
const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;
const source = stringifyString({ type, value }, {
implicitKey,
indent: indent > 0 ? ' '.repeat(indent) : '',
inFlow,
options: { blockQuote: true, lineWidth: -1 }
});
const end = context.end ?? [
{ type: 'newline', offset: -1, indent, source: '\n' }
];
switch (source[0]) {
case '|':
case '>': {
const he = source.indexOf('\n');
const head = source.substring(0, he);
const body = source.substring(he + 1) + '\n';
const props = [
{ type: 'block-scalar-header', offset, indent, source: head }
];
if (!addEndtoBlockProps(props, end))
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
return { type: 'block-scalar', offset, indent, props, source: body };
}
case '"':
return { type: 'double-quoted-scalar', offset, indent, source, end };
case "'":
return { type: 'single-quoted-scalar', offset, indent, source, end };
default:
return { type: 'scalar', offset, indent, source, end };
}
}
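/*
* A minimal sketch of `createScalarToken`, assuming the `yaml` package's
* public `CST` namespace export; the `indent` option is the token's own
* indent level and is required by the signature above.
*
* ```ts
* import { CST } from 'yaml'
*
* const token = CST.createScalarToken('42', { indent: 2 })
* // token.type === 'scalar', token.source === '42'
* CST.stringify(token) // '42\n' – a trailing newline is added by default
* ```
*/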
/**
* Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
*
* Best efforts are made to retain any comments previously associated with the `token`,
* though all contents within a collection's `items` will be overwritten.
*
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
* as this function does not support any schema operations and won't check for such conflicts.
*
* @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
* @param value The string representation of the value, which will have its content properly indented.
* @param context.afterKey In most cases, values after a key should have an additional level of indentation.
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
* @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
*/
function setScalarValue(token, value, context = {}) {
let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
let indent = 'indent' in token ? token.indent : null;
if (afterKey && typeof indent === 'number')
indent += 2;
if (!type)
switch (token.type) {
case 'single-quoted-scalar':
type = 'QUOTE_SINGLE';
break;
case 'double-quoted-scalar':
type = 'QUOTE_DOUBLE';
break;
case 'block-scalar': {
const header = token.props[0];
if (header.type !== 'block-scalar-header')
throw new Error('Invalid block scalar header');
type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
break;
}
default:
type = 'PLAIN';
}
const source = stringifyString({ type, value }, {
implicitKey: implicitKey || indent === null,
indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
inFlow,
options: { blockQuote: true, lineWidth: -1 }
});
switch (source[0]) {
case '|':
case '>':
setBlockScalarValue(token, source);
break;
case '"':
setFlowScalarValue(token, source, 'double-quoted-scalar');
break;
case "'":
setFlowScalarValue(token, source, 'single-quoted-scalar');
break;
default:
setFlowScalarValue(token, source, 'scalar');
}
}
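/*
* A sketch of rewriting a scalar in place with `setScalarValue`, assuming the
* `yaml` package's public `CST` and `Parser` exports: the CST is parsed,
* visited, mutated, and stringified back without losing surrounding layout.
*
* ```ts
* import { CST, Parser } from 'yaml'
*
* const [doc] = new Parser().parse('key: old value\n')
* CST.visit(doc, (item) => {
*   if (item.value && CST.isScalar(item.value))
*     CST.setScalarValue(item.value, 'new value', { afterKey: true })
* })
* CST.stringify(doc) // 'key: new value\n'
* ```
*/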
function setBlockScalarValue(token, source) {
const he = source.indexOf('\n');
const head = source.substring(0, he);
const body = source.substring(he + 1) + '\n';
if (token.type === 'block-scalar') {
const header = token.props[0];
if (header.type !== 'block-scalar-header')
throw new Error('Invalid block scalar header');
header.source = head;
token.source = body;
}
else {
const { offset } = token;
const indent = 'indent' in token ? token.indent : -1;
const props = [
{ type: 'block-scalar-header', offset, indent, source: head }
];
if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
for (const key of Object.keys(token))
if (key !== 'type' && key !== 'offset')
delete token[key];
Object.assign(token, { type: 'block-scalar', indent, props, source: body });
}
}
/** @returns `true` if last token is a newline */
function addEndtoBlockProps(props, end) {
if (end)
for (const st of end)
switch (st.type) {
case 'space':
case 'comment':
props.push(st);
break;
case 'newline':
props.push(st);
return true;
}
return false;
}
function setFlowScalarValue(token, source, type) {
switch (token.type) {
case 'scalar':
case 'double-quoted-scalar':
case 'single-quoted-scalar':
token.type = type;
token.source = source;
break;
case 'block-scalar': {
const end = token.props.slice(1);
let oa = source.length;
if (token.props[0].type === 'block-scalar-header')
oa -= token.props[0].source.length;
for (const tok of end)
tok.offset += oa;
delete token.props;
Object.assign(token, { type, source, end });
break;
}
case 'block-map':
case 'block-seq': {
const offset = token.offset + source.length;
const nl = { type: 'newline', offset, indent: token.indent, source: '\n' };
delete token.items;
Object.assign(token, { type, source, end: [nl] });
break;
}
default: {
const indent = 'indent' in token ? token.indent : -1;
const end = 'end' in token && Array.isArray(token.end)
? token.end.filter(st => st.type === 'space' ||
st.type === 'comment' ||
st.type === 'newline')
: [];
for (const key of Object.keys(token))
if (key !== 'type' && key !== 'offset')
delete token[key];
Object.assign(token, { type, indent, source, end });
}
}
}
/**
* Stringify a CST document, token, or collection item
*
* Fair warning: This applies no validation whatsoever, and
* simply concatenates the sources in their logical order.
*/
const stringify$1 = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
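/*
* Because stringification just concatenates token sources, a parse →
* stringify round trip should reproduce the input exactly, comments and
* whitespace included. A sketch, assuming the `yaml` package's public
* `CST` and `Parser` exports:
*
* ```ts
* import { CST, Parser } from 'yaml'
*
* const src = '- a\n- b  # keep this comment\n'
* const out = Array.from(new Parser().parse(src), CST.stringify).join('')
* // out === src
* ```
*/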
function stringifyToken(token) {
switch (token.type) {
case 'block-scalar': {
let res = '';
for (const tok of token.props)
res += stringifyToken(tok);
return res + token.source;
}
case 'block-map':
case 'block-seq': {
let res = '';
for (const item of token.items)
res += stringifyItem(item);
return res;
}
case 'flow-collection': {
let res = token.start.source;
for (const item of token.items)
res += stringifyItem(item);
for (const st of token.end)
res += st.source;
return res;
}
case 'document': {
let res = stringifyItem(token);
if (token.end)
for (const st of token.end)
res += st.source;
return res;
}
default: {
let res = token.source;
if ('end' in token && token.end)
for (const st of token.end)
res += st.source;
return res;
}
}
}
function stringifyItem({ start, key, sep, value }) {
let res = '';
for (const st of start)
res += st.source;
if (key)
res += stringifyToken(key);
if (sep)
for (const st of sep)
res += st.source;
if (value)
res += stringifyToken(value);
return res;
}
const BREAK = Symbol('break visit');
const SKIP = Symbol('skip children');
const REMOVE = Symbol('remove item');
/**
* Apply a visitor to a CST document or item.
*
* Walks through the tree (depth-first) starting from the root, calling a
* `visitor` function with two arguments when entering each item:
* - `item`: The current item, which includes the following members:
* - `start: SourceToken[]` – Source tokens before the key or value,
* possibly including its anchor or tag.
* - `key?: Token | null` – Set for pair values. May then be `null`, if
* the key before the `:` separator is empty.
* - `sep?: SourceToken[]` – Source tokens between the key and the value,
* which should include the `:` map value indicator if `value` is set.
* - `value?: Token` – The value of a sequence item, or of a map pair.
* - `path`: The steps from the root to the current node, as an array of
* `['key' | 'value', number]` tuples.
*
* The return value of the visitor may be used to control the traversal:
* - `undefined` (default): Do nothing and continue
* - `visit.SKIP`: Do not visit the children of this token, continue with
* next sibling
* - `visit.BREAK`: Terminate traversal completely
* - `visit.REMOVE`: Remove the current item, then continue with the next one
* - `number`: Set the index of the next step. This is useful especially if
* the index of the current token has changed.
* - `function`: Define the next visitor for this item. After the original
* visitor is called on item entry, next visitors are called after handling
* a non-empty `key` and when exiting the item.
*/
function visit(cst, visitor) {
if ('type' in cst && cst.type === 'document')
cst = { start: cst.start, value: cst.value };
_visit(Object.freeze([]), cst, visitor);
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visit.BREAK = BREAK;
/** Do not visit the children of the current item */
visit.SKIP = SKIP;
/** Remove the current item */
visit.REMOVE = REMOVE;
/** Find the item at `path` from `cst` as the root */
visit.itemAtPath = (cst, path) => {
let item = cst;
for (const [field, index] of path) {
const tok = item?.[field];
if (tok && 'items' in tok) {
item = tok.items[index];
}
else
return undefined;
}
return item;
};
/**
* Get the immediate parent collection of the item at `path` from `cst` as the root.
*
* Throws an error if the collection is not found, which should never happen if the item itself exists.
*/
visit.parentCollection = (cst, path) => {
const parent = visit.itemAtPath(cst, path.slice(0, -1));
const field = path[path.length - 1][0];
const coll = parent?.[field];
if (coll && 'items' in coll)
return coll;
throw new Error('Parent collection not found');
};
function _visit(path, item, visitor) {
let ctrl = visitor(item, path);
if (typeof ctrl === 'symbol')
return ctrl;
for (const field of ['key', 'value']) {
const token = item[field];
if (token && 'items' in token) {
for (let i = 0; i < token.items.length; ++i) {
const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);
if (typeof ci === 'number')
i = ci - 1;
else if (ci === BREAK)
return BREAK;
else if (ci === REMOVE) {
token.items.splice(i, 1);
i -= 1;
}
}
if (typeof ctrl === 'function' && field === 'key')
ctrl = ctrl(item, path);
}
}
return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;
}
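/*
* A sketch of CST visiting with the control values documented above,
* assuming the `yaml` package's public `CST` and `Parser` exports:
* returning `CST.visit.REMOVE` drops the current item from its collection.
*
* ```ts
* import { CST, Parser } from 'yaml'
*
* const [doc] = new Parser().parse('a: 1\nb: 2\n')
* CST.visit(doc, (item) => {
*   if (item.key && CST.isScalar(item.key) && item.key.source === 'b')
*     return CST.visit.REMOVE
* })
* CST.stringify(doc) // 'a: 1\n'
* ```
*/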
/** The byte order mark */
const BOM = '\u{FEFF}';
/** Start of doc-mode */
const DOCUMENT = '\x02'; // C0: Start of Text
/** Unexpected end of flow-mode */
const FLOW_END = '\x18'; // C0: Cancel
/** Next token is a scalar value */
const SCALAR = '\x1f'; // C0: Unit Separator
/** @returns `true` if `token` is a flow or block collection */
const isCollection = (token) => !!token && 'items' in token;
/** @returns `true` if `token` is a flow or block scalar; not an alias */
const isScalar = (token) => !!token &&
(token.type === 'scalar' ||
token.type === 'single-quoted-scalar' ||
token.type === 'double-quoted-scalar' ||
token.type === 'block-scalar');
/* istanbul ignore next */
/** Get a printable representation of a lexer token */
function prettyToken(token) {
switch (token) {
case BOM:
return '<BOM>';
case DOCUMENT:
return '<DOC>';
case FLOW_END:
return '<FLOW_END>';
case SCALAR:
return '<SCALAR>';
default:
return JSON.stringify(token);
}
}
/** Identify the type of a lexer token. May return `null` for unknown tokens. */
function tokenType(source) {
switch (source) {
case BOM:
return 'byte-order-mark';
case DOCUMENT:
return 'doc-mode';
case FLOW_END:
return 'flow-error-end';
case SCALAR:
return 'scalar';
case '---':
return 'doc-start';
case '...':
return 'doc-end';
case '':
case '\n':
case '\r\n':
return 'newline';
case '-':
return 'seq-item-ind';
case '?':
return 'explicit-key-ind';
case ':':
return 'map-value-ind';
case '{':
return 'flow-map-start';
case '}':
return 'flow-map-end';
case '[':
return 'flow-seq-start';
case ']':
return 'flow-seq-end';
case ',':
return 'comma';
}
switch (source[0]) {
case ' ':
case '\t':
return 'space';
case '#':
return 'comment';
case '%':
return 'directive-line';
case '*':
return 'alias';
case '&':
return 'anchor';
case '!':
return 'tag';
case "'":
return 'single-quoted-scalar';
case '"':
return 'double-quoted-scalar';
case '|':
case '>':
return 'block-scalar-header';
}
return null;
}
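/*
* `tokenType` identifies a lexeme by its full content or its first character.
* A few illustrative cases, assuming the `yaml` package's public `CST`
* namespace export:
*
* ```ts
* import { CST } from 'yaml'
*
* CST.tokenType('---')   // 'doc-start'
* CST.tokenType('# hi')  // 'comment'
* CST.tokenType('"str"') // 'double-quoted-scalar'
* CST.tokenType('foo')   // null – plain scalars are only marked in context
* ```
*/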
var cst = {
__proto__: null,
BOM: BOM,
DOCUMENT: DOCUMENT,
FLOW_END: FLOW_END,
SCALAR: SCALAR,
createScalarToken: createScalarToken,
isCollection: isCollection,
isScalar: isScalar,
prettyToken: prettyToken,
resolveAsScalar: resolveAsScalar,
setScalarValue: setScalarValue,
stringify: stringify$1,
tokenType: tokenType,
visit: visit
};
/*
START -> stream
stream
directive -> line-end -> stream
indent + line-end -> stream
[else] -> line-start
line-end
comment -> line-end
newline -> .
input-end -> END
line-start
doc-start -> doc
doc-end -> stream
[else] -> indent -> block-start
block-start
seq-item-start -> block-start
explicit-key-start -> block-start
map-value-start -> block-start
[else] -> doc
doc
line-end -> line-start
spaces -> doc
anchor -> doc
tag -> doc
flow-start -> flow -> doc
flow-end -> error -> doc
seq-item-start -> error -> doc
explicit-key-start -> error -> doc
map-value-start -> doc
alias -> doc
quote-start -> quoted-scalar -> doc
block-scalar-header -> line-end -> block-scalar(min) -> line-start
[else] -> plain-scalar(false, min) -> doc
flow
line-end -> flow
spaces -> flow
anchor -> flow
tag -> flow
flow-start -> flow -> flow
flow-end -> .
seq-item-start -> error -> flow
explicit-key-start -> flow
map-value-start -> flow
alias -> flow
quote-start -> quoted-scalar -> flow
comma -> flow
[else] -> plain-scalar(true, 0) -> flow
quoted-scalar
quote-end -> .
[else] -> quoted-scalar
block-scalar(min)
newline + peek(indent < min) -> .
[else] -> block-scalar(min)
plain-scalar(is-flow, min)
scalar-end(is-flow) -> .
peek(newline + (indent < min)) -> .
[else] -> plain-scalar(min)
*/
function isEmpty(ch) {
switch (ch) {
case undefined:
case ' ':
case '\n':
case '\r':
case '\t':
return true;
default:
return false;
}
}
const hexDigits = new Set('0123456789ABCDEFabcdef');
const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()");
const flowIndicatorChars = new Set(',[]{}');
const invalidAnchorChars = new Set(' ,[]{}\n\r\t');
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch);
/**
* Splits an input string into lexical tokens, i.e. smaller strings that are
* easily identifiable by `tokens.tokenType()`.
*
* Lexing always starts in a "stream" context. Incomplete input may be buffered
* until a complete token can be emitted.
*
* In addition to slices of the original input, the following control characters
* may also be emitted:
*
* - `\x02` (Start of Text): A document starts with the next token
* - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
* - `\x1f` (Unit Separator): Next token is a scalar value
* - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
*/
class Lexer {
constructor() {
/**
* Flag indicating whether the end of the current buffer marks the end of
* all input
*/
this.atEnd = false;
/**
* Explicit indent set in block scalar header, as an offset from the current
* minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
* explicitly set.
*/
this.blockScalarIndent = -1;
/**
* Block scalars that include a + (keep) chomping indicator in their header
* include trailing empty lines, which are otherwise excluded from the
* scalar's contents.
*/
this.blockScalarKeep = false;
/** Current input */
this.buffer = '';
/**
* Flag noting whether the map value indicator : can immediately follow this
* node within a flow context.
*/
this.flowKey = false;
/** Count of surrounding flow collection levels. */
this.flowLevel = 0;
/**
* Minimum level of indentation required for next lines to be parsed as a
* part of the current scalar value.
*/
this.indentNext = 0;
/** Indentation level of the current line. */
this.indentValue = 0;
/** Position of the next \n character. */
this.lineEndPos = null;
/** Stores the state of the lexer when it reaches the end of incomplete input */
this.next = null;
/** A pointer to `buffer`; the current position of the lexer. */
this.pos = 0;
}
/**
* Generate YAML tokens from the `source` string. If `incomplete`,
* a part of the last line may be left as a buffer for the next call.
*
* @returns A generator of lexical tokens
*/
*lex(source, incomplete = false) {
if (source) {
if (typeof source !== 'string')
throw TypeError('source is not a string');
this.buffer = this.buffer ? this.buffer + source : source;
this.lineEndPos = null;
}
this.atEnd = !incomplete;
let next = this.next ?? 'stream';
while (next && (incomplete || this.hasChars(1)))
next = yield* this.parseNext(next);
}
atLineEnd() {
let i = this.pos;
let ch = this.buffer[i];
while (ch === ' ' || ch === '\t')
ch = this.buffer[++i];
if (!ch || ch === '#' || ch === '\n')
return true;
if (ch === '\r')
return this.buffer[i + 1] === '\n';
return false;
}
charAt(n) {
return this.buffer[this.pos + n];
}
continueScalar(offset) {
let ch = this.buffer[offset];
if (this.indentNext > 0) {
let indent = 0;
while (ch === ' ')
ch = this.buffer[++indent + offset];
if (ch === '\r') {
const next = this.buffer[indent + offset + 1];
if (next === '\n' || (!next && !this.atEnd))
return offset + indent + 1;
}
return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
? offset + indent
: -1;
}
if (ch === '-' || ch === '.') {
const dt = this.buffer.substr(offset, 3);
if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
return -1;
}
return offset;
}
getLine() {
let end = this.lineEndPos;
if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
end = this.buffer.indexOf('\n', this.pos);
this.lineEndPos = end;
}
if (end === -1)
return this.atEnd ? this.buffer.substring(this.pos) : null;
if (this.buffer[end - 1] === '\r')
end -= 1;
return this.buffer.substring(this.pos, end);
}
hasChars(n) {
return this.pos + n <= this.buffer.length;
}
setNext(state) {
this.buffer = this.buffer.substring(this.pos);
this.pos = 0;
this.lineEndPos = null;
this.next = state;
return null;
}
peek(n) {
return this.buffer.substr(this.pos, n);
}
*parseNext(next) {
switch (next) {
case 'stream':
return yield* this.parseStream();
case 'line-start':
return yield* this.parseLineStart();
case 'block-start':
return yield* this.parseBlockStart();
case 'doc':
return yield* this.parseDocument();
case 'flow':
return yield* this.parseFlowCollection();
case 'quoted-scalar':
return yield* this.parseQuotedScalar();
case 'block-scalar':
return yield* this.parseBlockScalar();
case 'plain-scalar':
return yield* this.parsePlainScalar();
}
}
*parseStream() {
let line = this.getLine();
if (line === null)
return this.setNext('stream');
if (line[0] === BOM) {
yield* this.pushCount(1);
line = line.substring(1);
}
if (line[0] === '%') {
let dirEnd = line.length;
let cs = line.indexOf('#');
while (cs !== -1) {
const ch = line[cs - 1];
if (ch === ' ' || ch === '\t') {
dirEnd = cs - 1;
break;
}
else {
cs = line.indexOf('#', cs + 1);
}
}
while (true) {
const ch = line[dirEnd - 1];
if (ch === ' ' || ch === '\t')
dirEnd -= 1;
else
break;
}
const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
yield* this.pushCount(line.length - n); // possible comment
this.pushNewline();
return 'stream';
}
if (this.atLineEnd()) {
const sp = yield* this.pushSpaces(true);
yield* this.pushCount(line.length - sp);
yield* this.pushNewline();
return 'stream';
}
yield DOCUMENT;
return yield* this.parseLineStart();
}
*parseLineStart() {
const ch = this.charAt(0);
if (!ch && !this.atEnd)
return this.setNext('line-start');
if (ch === '-' || ch === '.') {
if (!this.atEnd && !this.hasChars(4))
return this.setNext('line-start');
const s = this.peek(3);
if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) {
yield* this.pushCount(3);
this.indentValue = 0;
this.indentNext = 0;
return s === '---' ? 'doc' : 'stream';
}
}
this.indentValue = yield* this.pushSpaces(false);
if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
this.indentNext = this.indentValue;
return yield* this.parseBlockStart();
}
*parseBlockStart() {
const [ch0, ch1] = this.peek(2);
if (!ch1 && !this.atEnd)
return this.setNext('block-start');
if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
this.indentNext = this.indentValue + 1;
this.indentValue += n;
return yield* this.parseBlockStart();
}
return 'doc';
}
*parseDocument() {
yield* this.pushSpaces(true);
const line = this.getLine();
if (line === null)
return this.setNext('doc');
let n = yield* this.pushIndicators();
switch (line[n]) {
case '#':
yield* this.pushCount(line.length - n);
// fallthrough
case undefined:
yield* this.pushNewline();
return yield* this.parseLineStart();
case '{':
case '[':
yield* this.pushCount(1);
this.flowKey = false;
this.flowLevel = 1;
return 'flow';
case '}':
case ']':
// this is an error
yield* this.pushCount(1);
return 'doc';
case '*':
yield* this.pushUntil(isNotAnchorChar);
return 'doc';
case '"':
case "'":
return yield* this.parseQuotedScalar();
case '|':
case '>':
n += yield* this.parseBlockScalarHeader();
n += yield* this.pushSpaces(true);
yield* this.pushCount(line.length - n);
yield* this.pushNewline();
return yield* this.parseBlockScalar();
default:
return yield* this.parsePlainScalar();
}
}
*parseFlowCollection() {
let nl, sp;
let indent = -1;
do {
nl = yield* this.pushNewline();
if (nl > 0) {
sp = yield* this.pushSpaces(false);
this.indentValue = indent = sp;
}
else {
sp = 0;
}
sp += yield* this.pushSpaces(true);
} while (nl + sp > 0);
const line = this.getLine();
if (line === null)
return this.setNext('flow');
if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
(indent === 0 &&
(line.startsWith('---') || line.startsWith('...')) &&
isEmpty(line[3]))) {
// Allowing for the terminal ] or } at the same (rather than greater)
// indent level as the initial [ or { is technically invalid, but
// failing here would be surprising to users.
const atFlowEndMarker = indent === this.indentNext - 1 &&
this.flowLevel === 1 &&
(line[0] === ']' || line[0] === '}');
if (!atFlowEndMarker) {
// this is an error
this.flowLevel = 0;
yield FLOW_END;
return yield* this.parseLineStart();
}
}
let n = 0;
while (line[n] === ',') {
n += yield* this.pushCount(1);
n += yield* this.pushSpaces(true);
this.flowKey = false;
}
n += yield* this.pushIndicators();
switch (line[n]) {
case undefined:
return 'flow';
case '#':
yield* this.pushCount(line.length - n);
return 'flow';
case '{':
case '[':
yield* this.pushCount(1);
this.flowKey = false;
this.flowLevel += 1;
return 'flow';
case '}':
case ']':
yield* this.pushCount(1);
this.flowKey = true;
this.flowLevel -= 1;
return this.flowLevel ? 'flow' : 'doc';
case '*':
yield* this.pushUntil(isNotAnchorChar);
return 'flow';
case '"':
case "'":
this.flowKey = true;
return yield* this.parseQuotedScalar();
case ':': {
const next = this.charAt(1);
if (this.flowKey || isEmpty(next) || next === ',') {
this.flowKey = false;
yield* this.pushCount(1);
yield* this.pushSpaces(true);
return 'flow';
}
}
// fallthrough
default:
this.flowKey = false;
return yield* this.parsePlainScalar();
}
}
*parseQuotedScalar() {
const quote = this.charAt(0);
let end = this.buffer.indexOf(quote, this.pos + 1);
if (quote === "'") {
while (end !== -1 && this.buffer[end + 1] === "'")
end = this.buffer.indexOf("'", end + 2);
}
else {
// double-quote
while (end !== -1) {
let n = 0;
while (this.buffer[end - 1 - n] === '\\')
n += 1;
if (n % 2 === 0)
break;
end = this.buffer.indexOf('"', end + 1);
}
}
// Only looking for newlines within the quotes
const qb = this.buffer.substring(0, end);
let nl = qb.indexOf('\n', this.pos);
if (nl !== -1) {
while (nl !== -1) {
const cs = this.continueScalar(nl + 1);
if (cs === -1)
break;
nl = qb.indexOf('\n', cs);
}
if (nl !== -1) {
// this is an error caused by an unexpected unindent
end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
}
}
if (end === -1) {
if (!this.atEnd)
return this.setNext('quoted-scalar');
end = this.buffer.length;
}
yield* this.pushToIndex(end + 1, false);
return this.flowLevel ? 'flow' : 'doc';
}
*parseBlockScalarHeader() {
this.blockScalarIndent = -1;
this.blockScalarKeep = false;
let i = this.pos;
while (true) {
const ch = this.buffer[++i];
if (ch === '+')
this.blockScalarKeep = true;
else if (ch > '0' && ch <= '9')
this.blockScalarIndent = Number(ch) - 1;
else if (ch !== '-')
break;
}
return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
}
*parseBlockScalar() {
let nl = this.pos - 1; // may be -1 if this.pos === 0
let indent = 0;
let ch;
loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
switch (ch) {
case ' ':
indent += 1;
break;
case '\n':
nl = i;
indent = 0;
break;
case '\r': {
const next = this.buffer[i + 1];
if (!next && !this.atEnd)
return this.setNext('block-scalar');
if (next === '\n')
break;
} // fallthrough
default:
break loop;
}
}
if (!ch && !this.atEnd)
return this.setNext('block-scalar');
if (indent >= this.indentNext) {
if (this.blockScalarIndent === -1)
this.indentNext = indent;
else {
this.indentNext =
this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext);
}
do {
const cs = this.continueScalar(nl + 1);
if (cs === -1)
break;
nl = this.buffer.indexOf('\n', cs);
} while (nl !== -1);
if (nl === -1) {
if (!this.atEnd)
return this.setNext('block-scalar');
nl = this.buffer.length;
}
}
// Trailing insufficiently indented tabs are invalid.
// To catch that during parsing, we include them in the block scalar value.
let i = nl + 1;
ch = this.buffer[i];
while (ch === ' ')
ch = this.buffer[++i];
if (ch === '\t') {
while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n')
ch = this.buffer[++i];
nl = i - 1;
}
else if (!this.blockScalarKeep) {
do {
let i = nl - 1;
let ch = this.buffer[i];
if (ch === '\r')
ch = this.buffer[--i];
const lastChar = i; // Drop the line if last char not more indented
while (ch === ' ')
ch = this.buffer[--i];
if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
nl = i;
else
break;
} while (true);
}
yield SCALAR;
yield* this.pushToIndex(nl + 1, true);
return yield* this.parseLineStart();
}
*parsePlainScalar() {
const inFlow = this.flowLevel > 0;
let end = this.pos - 1;
let i = this.pos - 1;
let ch;
while ((ch = this.buffer[++i])) {
if (ch === ':') {
const next = this.buffer[i + 1];
if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next)))
break;
end = i;
}
else if (isEmpty(ch)) {
let next = this.buffer[i + 1];
if (ch === '\r') {
if (next === '\n') {
i += 1;
ch = '\n';
next = this.buffer[i + 1];
}
else
end = i;
}
if (next === '#' || (inFlow && flowIndicatorChars.has(next)))
break;
if (ch === '\n') {
const cs = this.continueScalar(i + 1);
if (cs === -1)
break;
i = Math.max(i, cs - 2); // to advance, but still account for ' #'
}
}
else {
if (inFlow && flowIndicatorChars.has(ch))
break;
end = i;
}
}
if (!ch && !this.atEnd)
return this.setNext('plain-scalar');
yield SCALAR;
yield* this.pushToIndex(end + 1, true);
return inFlow ? 'flow' : 'doc';
}
*pushCount(n) {
if (n > 0) {
yield this.buffer.substr(this.pos, n);
this.pos += n;
return n;
}
return 0;
}
*pushToIndex(i, allowEmpty) {
const s = this.buffer.slice(this.pos, i);
if (s) {
yield s;
this.pos += s.length;
return s.length;
}
else if (allowEmpty)
yield '';
return 0;
}
*pushIndicators() {
switch (this.charAt(0)) {
case '!':
return ((yield* this.pushTag()) +
(yield* this.pushSpaces(true)) +
(yield* this.pushIndicators()));
case '&':
return ((yield* this.pushUntil(isNotAnchorChar)) +
(yield* this.pushSpaces(true)) +
(yield* this.pushIndicators()));
case '-': // this is an error
case '?': // this is an error outside flow collections
case ':': {
const inFlow = this.flowLevel > 0;
const ch1 = this.charAt(1);
if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) {
if (!inFlow)
this.indentNext = this.indentValue + 1;
else if (this.flowKey)
this.flowKey = false;
return ((yield* this.pushCount(1)) +
(yield* this.pushSpaces(true)) +
(yield* this.pushIndicators()));
}
}
}
return 0;
}
*pushTag() {
if (this.charAt(1) === '<') {
let i = this.pos + 2;
let ch = this.buffer[i];
while (!isEmpty(ch) && ch !== '>')
ch = this.buffer[++i];
return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
}
else {
let i = this.pos + 1;
let ch = this.buffer[i];
while (ch) {
if (tagChars.has(ch))
ch = this.buffer[++i];
else if (ch === '%' &&
hexDigits.has(this.buffer[i + 1]) &&
hexDigits.has(this.buffer[i + 2])) {
ch = this.buffer[(i += 3)];
}
else
break;
}
return yield* this.pushToIndex(i, false);
}
}
*pushNewline() {
const ch = this.buffer[this.pos];
if (ch === '\n')
return yield* this.pushCount(1);
else if (ch === '\r' && this.charAt(1) === '\n')
return yield* this.pushCount(2);
else
return 0;
}
*pushSpaces(allowTabs) {
let i = this.pos - 1;
let ch;
do {
ch = this.buffer[++i];
} while (ch === ' ' || (allowTabs && ch === '\t'));
const n = i - this.pos;
if (n > 0) {
yield this.buffer.substr(this.pos, n);
this.pos = i;
}
return n;
}
*pushUntil(test) {
let i = this.pos;
let ch = this.buffer[i];
while (!test(ch))
ch = this.buffer[++i];
return yield* this.pushToIndex(i, false);
}
}
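/*
* A sketch of the lexer output for a simple document, assuming the `yaml`
* package's public `Lexer` export: `\x02` marks the start of doc-mode and
* `\x1f` marks that the next token is a scalar value.
*
* ```ts
* import { Lexer } from 'yaml'
*
* const tokens = Array.from(new Lexer().lex('key: value\n'))
* // [ '\x02', '\x1f', 'key', ':', ' ', '\x1f', 'value', '\n' ]
* ```
*/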
/**
* Tracks newlines during parsing in order to provide an efficient API for
* determining the one-indexed `{ line, col }` position for any offset
* within the input.
*/
class LineCounter {
constructor() {
this.lineStarts = [];
/**
* Should be called in ascending order. Otherwise, call
* `lineCounter.lineStarts.sort()` before calling `linePos()`.
*/
this.addNewLine = (offset) => this.lineStarts.push(offset);
/**
* Performs a binary search and returns the 1-indexed { line, col }
* position of `offset`. If `line === 0`, `addNewLine` has never been
* called or `offset` is before the first known newline.
*/
this.linePos = (offset) => {
let low = 0;
let high = this.lineStarts.length;
while (low < high) {
const mid = (low + high) >> 1; // Math.floor((low + high) / 2)
if (this.lineStarts[mid] < offset)
low = mid + 1;
else
high = mid;
}
if (this.lineStarts[low] === offset)
return { line: low + 1, col: 1 };
if (low === 0)
return { line: 0, col: offset };
const start = this.lineStarts[low - 1];
return { line: low, col: offset - start + 1 };
};
}
}
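/*
* A sketch of pairing LineCounter with the parser, assuming the `yaml`
* package's public `LineCounter` and `Parser` exports: `addNewLine` is passed
* as the parser's onNewLine callback, and `linePos` then maps offsets to
* one-indexed positions.
*
* ```ts
* import { LineCounter, Parser } from 'yaml'
*
* const lineCounter = new LineCounter()
* const parser = new Parser(lineCounter.addNewLine)
* Array.from(parser.parse('a: 1\nb: 2\n'))
* lineCounter.linePos(5) // { line: 2, col: 1 } – offset 5 is the 'b'
* ```
*/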
function includesToken(list, type) {
for (let i = 0; i < list.length; ++i)
if (list[i].type === type)
return true;
return false;
}
function findNonEmptyIndex(list) {
for (let i = 0; i < list.length; ++i) {
switch (list[i].type) {
case 'space':
case 'comment':
case 'newline':
break;
default:
return i;
}
}
return -1;
}
function isFlowToken(token) {
switch (token?.type) {
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
case 'flow-collection':
return true;
default:
return false;
}
}
function getPrevProps(parent) {
switch (parent.type) {
case 'document':
return parent.start;
case 'block-map': {
const it = parent.items[parent.items.length - 1];
return it.sep ?? it.start;
}
case 'block-seq':
return parent.items[parent.items.length - 1].start;
/* istanbul ignore next should not happen */
default:
return [];
}
}
/** Note: May modify input array */
function getFirstKeyStartProps(prev) {
if (prev.length === 0)
return [];
let i = prev.length;
loop: while (--i >= 0) {
switch (prev[i].type) {
case 'doc-start':
case 'explicit-key-ind':
case 'map-value-ind':
case 'seq-item-ind':
case 'newline':
break loop;
}
}
return prev.splice(i, prev.length);
}
function fixFlowSeqItems(fc) {
if (fc.start.type === 'flow-seq-start') {
for (const it of fc.items) {
if (it.sep &&
!it.value &&
!includesToken(it.start, 'explicit-key-ind') &&
!includesToken(it.sep, 'map-value-ind')) {
if (it.key)
it.value = it.key;
delete it.key;
if (isFlowToken(it.value)) {
if (it.value.end)
Array.prototype.push.apply(it.value.end, it.sep);
else
it.value.end = it.sep;
}
else
Array.prototype.push.apply(it.start, it.sep);
delete it.sep;
}
}
}
}
/**
* A YAML concrete syntax tree (CST) parser
*
* ```ts
* const src: string = ...
* for (const token of new Parser().parse(src)) {
* // token: Token
* }
* ```
*
* To use the parser with a user-provided lexer:
*
* ```ts
* function* parse(source: string, lexer: Lexer) {
* const parser = new Parser()
* for (const lexeme of lexer.lex(source))
* yield* parser.next(lexeme)
* yield* parser.end()
* }
*
* const src: string = ...
* const lexer = new Lexer()
* for (const token of parse(src, lexer)) {
* // token: Token
* }
* ```
*/
class Parser {
/**
* @param onNewLine - If defined, called separately with the start position of
* each new line (in `parse()`, including the start of input).
*/
constructor(onNewLine) {
/** If true, space and sequence indicators count as indentation */
this.atNewLine = true;
/** If true, next token is a scalar value */
this.atScalar = false;
/** Current indentation level */
this.indent = 0;
/** Current offset since the start of parsing */
this.offset = 0;
/** On the same line with a block map key */
this.onKeyLine = false;
/** Top indicates the node that's currently being built */
this.stack = [];
/** The source of the current token, set in parse() */
this.source = '';
/** The type of the current token, set in parse() */
this.type = '';
// Must be defined after `next()`
this.lexer = new Lexer();
this.onNewLine = onNewLine;
}
/**
* Parse `source` as a YAML stream.
* If `incomplete`, a part of the last line may be left as a buffer for the next call.
*
* Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
*
* @returns A generator of tokens representing each directive, document, and other structure.
*/
*parse(source, incomplete = false) {
if (this.onNewLine && this.offset === 0)
this.onNewLine(0);
for (const lexeme of this.lexer.lex(source, incomplete))
yield* this.next(lexeme);
if (!incomplete)
yield* this.end();
}
/**
* Advance the parser by the `source` of one lexical token.
*/
*next(source) {
this.source = source;
if (this.atScalar) {
this.atScalar = false;
yield* this.step();
this.offset += source.length;
return;
}
const type = tokenType(source);
if (!type) {
const message = `Not a YAML token: ${source}`;
yield* this.pop({ type: 'error', offset: this.offset, message, source });
this.offset += source.length;
}
else if (type === 'scalar') {
this.atNewLine = false;
this.atScalar = true;
this.type = 'scalar';
}
else {
this.type = type;
yield* this.step();
switch (type) {
case 'newline':
this.atNewLine = true;
this.indent = 0;
if (this.onNewLine)
this.onNewLine(this.offset + source.length);
break;
case 'space':
if (this.atNewLine && source[0] === ' ')
this.indent += source.length;
break;
case 'explicit-key-ind':
case 'map-value-ind':
case 'seq-item-ind':
if (this.atNewLine)
this.indent += source.length;
break;
case 'doc-mode':
case 'flow-error-end':
return;
default:
this.atNewLine = false;
}
this.offset += source.length;
}
}
/** Call at end of input to push out any remaining constructions */
*end() {
while (this.stack.length > 0)
yield* this.pop();
}
get sourceToken() {
const st = {
type: this.type,
offset: this.offset,
indent: this.indent,
source: this.source
};
return st;
}
*step() {
const top = this.peek(1);
if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
while (this.stack.length > 0)
yield* this.pop();
this.stack.push({
type: 'doc-end',
offset: this.offset,
source: this.source
});
return;
}
if (!top)
return yield* this.stream();
switch (top.type) {
case 'document':
return yield* this.document(top);
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
return yield* this.scalar(top);
case 'block-scalar':
return yield* this.blockScalar(top);
case 'block-map':
return yield* this.blockMap(top);
case 'block-seq':
return yield* this.blockSequence(top);
case 'flow-collection':
return yield* this.flowCollection(top);
case 'doc-end':
return yield* this.documentEnd(top);
}
/* istanbul ignore next should not happen */
yield* this.pop();
}
peek(n) {
return this.stack[this.stack.length - n];
}
*pop(error) {
const token = error ?? this.stack.pop();
/* istanbul ignore if should not happen */
if (!token) {
const message = 'Tried to pop an empty stack';
yield { type: 'error', offset: this.offset, source: '', message };
}
else if (this.stack.length === 0) {
yield token;
}
else {
const top = this.peek(1);
if (token.type === 'block-scalar') {
// Block scalars use their parent rather than header indent
token.indent = 'indent' in top ? top.indent : 0;
}
else if (token.type === 'flow-collection' && top.type === 'document') {
// Ignore all indent for top-level flow collections
token.indent = 0;
}
if (token.type === 'flow-collection')
fixFlowSeqItems(token);
switch (top.type) {
case 'document':
top.value = token;
break;
case 'block-scalar':
top.props.push(token); // error
break;
case 'block-map': {
const it = top.items[top.items.length - 1];
if (it.value) {
top.items.push({ start: [], key: token, sep: [] });
this.onKeyLine = true;
return;
}
else if (it.sep) {
it.value = token;
}
else {
Object.assign(it, { key: token, sep: [] });
this.onKeyLine = !it.explicitKey;
return;
}
break;
}
case 'block-seq': {
const it = top.items[top.items.length - 1];
if (it.value)
top.items.push({ start: [], value: token });
else
it.value = token;
break;
}
case 'flow-collection': {
const it = top.items[top.items.length - 1];
if (!it || it.value)
top.items.push({ start: [], key: token, sep: [] });
else if (it.sep)
it.value = token;
else
Object.assign(it, { key: token, sep: [] });
return;
}
/* istanbul ignore next should not happen */
default:
yield* this.pop();
yield* this.pop(token);
}
if ((top.type === 'document' ||
top.type === 'block-map' ||
top.type === 'block-seq') &&
(token.type === 'block-map' || token.type === 'block-seq')) {
const last = token.items[token.items.length - 1];
if (last &&
!last.sep &&
!last.value &&
last.start.length > 0 &&
findNonEmptyIndex(last.start) === -1 &&
(token.indent === 0 ||
last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
if (top.type === 'document')
top.end = last.start;
else
top.items.push({ start: last.start });
token.items.splice(-1, 1);
}
}
}
}
*stream() {
switch (this.type) {
case 'directive-line':
yield { type: 'directive', offset: this.offset, source: this.source };
return;
case 'byte-order-mark':
case 'space':
case 'comment':
case 'newline':
yield this.sourceToken;
return;
case 'doc-mode':
case 'doc-start': {
const doc = {
type: 'document',
offset: this.offset,
start: []
};
if (this.type === 'doc-start')
doc.start.push(this.sourceToken);
this.stack.push(doc);
return;
}
}
yield {
type: 'error',
offset: this.offset,
message: `Unexpected ${this.type} token in YAML stream`,
source: this.source
};
}
*document(doc) {
if (doc.value)
return yield* this.lineEnd(doc);
switch (this.type) {
case 'doc-start': {
if (findNonEmptyIndex(doc.start) !== -1) {
yield* this.pop();
yield* this.step();
}
else
doc.start.push(this.sourceToken);
return;
}
case 'anchor':
case 'tag':
case 'space':
case 'comment':
case 'newline':
doc.start.push(this.sourceToken);
return;
}
const bv = this.startBlockValue(doc);
if (bv)
this.stack.push(bv);
else {
yield {
type: 'error',
offset: this.offset,
message: `Unexpected ${this.type} token in YAML document`,
source: this.source
};
}
}
*scalar(scalar) {
if (this.type === 'map-value-ind') {
const prev = getPrevProps(this.peek(2));
const start = getFirstKeyStartProps(prev);
let sep;
if (scalar.end) {
sep = scalar.end;
sep.push(this.sourceToken);
delete scalar.end;
}
else
sep = [this.sourceToken];
const map = {
type: 'block-map',
offset: scalar.offset,
indent: scalar.indent,
items: [{ start, key: scalar, sep }]
};
this.onKeyLine = true;
this.stack[this.stack.length - 1] = map;
}
else
yield* this.lineEnd(scalar);
}
*blockScalar(scalar) {
switch (this.type) {
case 'space':
case 'comment':
case 'newline':
scalar.props.push(this.sourceToken);
return;
case 'scalar':
scalar.source = this.source;
// block-scalar source includes trailing newline
this.atNewLine = true;
this.indent = 0;
if (this.onNewLine) {
let nl = this.source.indexOf('\n') + 1;
while (nl !== 0) {
this.onNewLine(this.offset + nl);
nl = this.source.indexOf('\n', nl) + 1;
}
}
yield* this.pop();
break;
/* istanbul ignore next should not happen */
default:
yield* this.pop();
yield* this.step();
}
}
*blockMap(map) {
const it = map.items[map.items.length - 1];
// it.sep is truthy if the pair already has a key or a ':' separator
switch (this.type) {
case 'newline':
this.onKeyLine = false;
if (it.value) {
const end = 'end' in it.value ? it.value.end : undefined;
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
if (last?.type === 'comment')
end?.push(this.sourceToken);
else
map.items.push({ start: [this.sourceToken] });
}
else if (it.sep) {
it.sep.push(this.sourceToken);
}
else {
it.start.push(this.sourceToken);
}
return;
case 'space':
case 'comment':
if (it.value) {
map.items.push({ start: [this.sourceToken] });
}
else if (it.sep) {
it.sep.push(this.sourceToken);
}
else {
if (this.atIndentedComment(it.start, map.indent)) {
const prev = map.items[map.items.length - 2];
const end = prev?.value?.end;
if (Array.isArray(end)) {
Array.prototype.push.apply(end, it.start);
end.push(this.sourceToken);
map.items.pop();
return;
}
}
it.start.push(this.sourceToken);
}
return;
}
if (this.indent >= map.indent) {
const atMapIndent = !this.onKeyLine && this.indent === map.indent;
const atNextItem = atMapIndent &&
(it.sep || it.explicitKey) &&
this.type !== 'seq-item-ind';
// For empty nodes, assign newline-separated, non-indented empty tokens to the following node
let start = [];
if (atNextItem && it.sep && !it.value) {
const nl = [];
for (let i = 0; i < it.sep.length; ++i) {
const st = it.sep[i];
switch (st.type) {
case 'newline':
nl.push(i);
break;
case 'space':
break;
case 'comment':
if (st.indent > map.indent)
nl.length = 0;
break;
default:
nl.length = 0;
}
}
if (nl.length >= 2)
start = it.sep.splice(nl[1]);
}
switch (this.type) {
case 'anchor':
case 'tag':
if (atNextItem || it.value) {
start.push(this.sourceToken);
map.items.push({ start });
this.onKeyLine = true;
}
else if (it.sep) {
it.sep.push(this.sourceToken);
}
else {
it.start.push(this.sourceToken);
}
return;
case 'explicit-key-ind':
if (!it.sep && !it.explicitKey) {
it.start.push(this.sourceToken);
it.explicitKey = true;
}
else if (atNextItem || it.value) {
start.push(this.sourceToken);
map.items.push({ start, explicitKey: true });
}
else {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start: [this.sourceToken], explicitKey: true }]
});
}
this.onKeyLine = true;
return;
case 'map-value-ind':
if (it.explicitKey) {
if (!it.sep) {
if (includesToken(it.start, 'newline')) {
Object.assign(it, { key: null, sep: [this.sourceToken] });
}
else {
const start = getFirstKeyStartProps(it.start);
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key: null, sep: [this.sourceToken] }]
});
}
}
else if (it.value) {
map.items.push({ start: [], key: null, sep: [this.sourceToken] });
}
else if (includesToken(it.sep, 'map-value-ind')) {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key: null, sep: [this.sourceToken] }]
});
}
else if (isFlowToken(it.key) &&
!includesToken(it.sep, 'newline')) {
const start = getFirstKeyStartProps(it.start);
const key = it.key;
const sep = it.sep;
sep.push(this.sourceToken);
// @ts-expect-error type guard is wrong here
delete it.key, delete it.sep;
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key, sep }]
});
}
else if (start.length > 0) {
// Not actually at next item
it.sep = it.sep.concat(start, this.sourceToken);
}
else {
it.sep.push(this.sourceToken);
}
}
else {
if (!it.sep) {
Object.assign(it, { key: null, sep: [this.sourceToken] });
}
else if (it.value || atNextItem) {
map.items.push({ start, key: null, sep: [this.sourceToken] });
}
else if (includesToken(it.sep, 'map-value-ind')) {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start: [], key: null, sep: [this.sourceToken] }]
});
}
else {
it.sep.push(this.sourceToken);
}
}
this.onKeyLine = true;
return;
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar': {
const fs = this.flowScalar(this.type);
if (atNextItem || it.value) {
map.items.push({ start, key: fs, sep: [] });
this.onKeyLine = true;
}
else if (it.sep) {
this.stack.push(fs);
}
else {
Object.assign(it, { key: fs, sep: [] });
this.onKeyLine = true;
}
return;
}
default: {
const bv = this.startBlockValue(map);
if (bv) {
if (atMapIndent && bv.type !== 'block-seq') {
map.items.push({ start });
}
this.stack.push(bv);
return;
}
}
}
}
yield* this.pop();
yield* this.step();
}
*blockSequence(seq) {
const it = seq.items[seq.items.length - 1];
switch (this.type) {
case 'newline':
if (it.value) {
const end = 'end' in it.value ? it.value.end : undefined;
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
if (last?.type === 'comment')
end?.push(this.sourceToken);
else
seq.items.push({ start: [this.sourceToken] });
}
else
it.start.push(this.sourceToken);
return;
case 'space':
case 'comment':
if (it.value)
seq.items.push({ start: [this.sourceToken] });
else {
if (this.atIndentedComment(it.start, seq.indent)) {
const prev = seq.items[seq.items.length - 2];
const end = prev?.value?.end;
if (Array.isArray(end)) {
Array.prototype.push.apply(end, it.start);
end.push(this.sourceToken);
seq.items.pop();
return;
}
}
it.start.push(this.sourceToken);
}
return;
case 'anchor':
case 'tag':
if (it.value || this.indent <= seq.indent)
break;
it.start.push(this.sourceToken);
return;
case 'seq-item-ind':
if (this.indent !== seq.indent)
break;
if (it.value || includesToken(it.start, 'seq-item-ind'))
seq.items.push({ start: [this.sourceToken] });
else
it.start.push(this.sourceToken);
return;
}
if (this.indent > seq.indent) {
const bv = this.startBlockValue(seq);
if (bv) {
this.stack.push(bv);
return;
}
}
yield* this.pop();
yield* this.step();
}
*flowCollection(fc) {
const it = fc.items[fc.items.length - 1];
if (this.type === 'flow-error-end') {
let top;
do {
yield* this.pop();
top = this.peek(1);
} while (top && top.type === 'flow-collection');
}
else if (fc.end.length === 0) {
switch (this.type) {
case 'comma':
case 'explicit-key-ind':
if (!it || it.sep)
fc.items.push({ start: [this.sourceToken] });
else
it.start.push(this.sourceToken);
return;
case 'map-value-ind':
if (!it || it.value)
fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
else if (it.sep)
it.sep.push(this.sourceToken);
else
Object.assign(it, { key: null, sep: [this.sourceToken] });
return;
case 'space':
case 'comment':
case 'newline':
case 'anchor':
case 'tag':
if (!it || it.value)
fc.items.push({ start: [this.sourceToken] });
else if (it.sep)
it.sep.push(this.sourceToken);
else
it.start.push(this.sourceToken);
return;
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar': {
const fs = this.flowScalar(this.type);
if (!it || it.value)
fc.items.push({ start: [], key: fs, sep: [] });
else if (it.sep)
this.stack.push(fs);
else
Object.assign(it, { key: fs, sep: [] });
return;
}
case 'flow-map-end':
case 'flow-seq-end':
fc.end.push(this.sourceToken);
return;
}
const bv = this.startBlockValue(fc);
/* istanbul ignore else should not happen */
if (bv)
this.stack.push(bv);
else {
yield* this.pop();
yield* this.step();
}
}
else {
const parent = this.peek(2);
if (parent.type === 'block-map' &&
((this.type === 'map-value-ind' && parent.indent === fc.indent) ||
(this.type === 'newline' &&
!parent.items[parent.items.length - 1].sep))) {
yield* this.pop();
yield* this.step();
}
else if (this.type === 'map-value-ind' &&
parent.type !== 'flow-collection') {
const prev = getPrevProps(parent);
const start = getFirstKeyStartProps(prev);
fixFlowSeqItems(fc);
const sep = fc.end.splice(1, fc.end.length);
sep.push(this.sourceToken);
const map = {
type: 'block-map',
offset: fc.offset,
indent: fc.indent,
items: [{ start, key: fc, sep }]
};
this.onKeyLine = true;
this.stack[this.stack.length - 1] = map;
}
else {
yield* this.lineEnd(fc);
}
}
}
flowScalar(type) {
if (this.onNewLine) {
let nl = this.source.indexOf('\n') + 1;
while (nl !== 0) {
this.onNewLine(this.offset + nl);
nl = this.source.indexOf('\n', nl) + 1;
}
}
return {
type,
offset: this.offset,
indent: this.indent,
source: this.source
};
}
startBlockValue(parent) {
switch (this.type) {
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
return this.flowScalar(this.type);
case 'block-scalar-header':
return {
type: 'block-scalar',
offset: this.offset,
indent: this.indent,
props: [this.sourceToken],
source: ''
};
case 'flow-map-start':
case 'flow-seq-start':
return {
type: 'flow-collection',
offset: this.offset,
indent: this.indent,
start: this.sourceToken,
items: [],
end: []
};
case 'seq-item-ind':
return {
type: 'block-seq',
offset: this.offset,
indent: this.indent,
items: [{ start: [this.sourceToken] }]
};
case 'explicit-key-ind': {
this.onKeyLine = true;
const prev = getPrevProps(parent);
const start = getFirstKeyStartProps(prev);
start.push(this.sourceToken);
return {
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, explicitKey: true }]
};
}
case 'map-value-ind': {
this.onKeyLine = true;
const prev = getPrevProps(parent);
const start = getFirstKeyStartProps(prev);
return {
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key: null, sep: [this.sourceToken] }]
};
}
}
return null;
}
atIndentedComment(start, indent) {
if (this.type !== 'comment')
return false;
if (this.indent <= indent)
return false;
return start.every(st => st.type === 'newline' || st.type === 'space');
}
*documentEnd(docEnd) {
if (this.type !== 'doc-mode') {
if (docEnd.end)
docEnd.end.push(this.sourceToken);
else
docEnd.end = [this.sourceToken];
if (this.type === 'newline')
yield* this.pop();
}
}
*lineEnd(token) {
switch (this.type) {
case 'comma':
case 'doc-start':
case 'doc-end':
case 'flow-seq-end':
case 'flow-map-end':
case 'map-value-ind':
yield* this.pop();
yield* this.step();
break;
case 'newline':
this.onKeyLine = false;
// fallthrough
case 'space':
case 'comment':
default:
// all other values are errors
if (token.end)
token.end.push(this.sourceToken);
else
token.end = [this.sourceToken];
if (this.type === 'newline')
yield* this.pop();
}
}
}
function parseOptions(options) {
const prettyErrors = options.prettyErrors !== false;
const lineCounter = options.lineCounter || (prettyErrors && new LineCounter()) || null;
return { lineCounter, prettyErrors };
}
/**
* Parse the input as a stream of YAML documents.
*
* Documents should be separated from each other by `...` or `---` marker lines.
*
* @returns If an empty `docs` array is returned, it will be of type
* EmptyStream and contain additional stream information. In
* TypeScript, you should use `'empty' in docs` as a type guard for it.
*/
function parseAllDocuments(source, options = {}) {
const { lineCounter, prettyErrors } = parseOptions(options);
const parser = new Parser(lineCounter?.addNewLine);
const composer = new Composer(options);
const docs = Array.from(composer.compose(parser.parse(source)));
if (prettyErrors && lineCounter)
for (const doc of docs) {
doc.errors.forEach(prettifyError(source, lineCounter));
doc.warnings.forEach(prettifyError(source, lineCounter));
}
if (docs.length > 0)
return docs;
return Object.assign([], { empty: true }, composer.streamInfo());
}
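// Usage sketch (defined but never invoked): parsing a multi-document stream
// with parseAllDocuments; the inline YAML source is illustrative.
function _usageExampleParseAllDocuments() {
  const docs = parseAllDocuments('a: 1\n---\nb: 2\n');
  if ('empty' in docs) {
    // EmptyStream: no documents were found, only stream information.
    return [];
  }
  return docs.map((doc) => doc.toJS()); // => [{ a: 1 }, { b: 2 }]
}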
/** Parse an input string into a single YAML.Document */
function parseDocument(source, options = {}) {
const { lineCounter, prettyErrors } = parseOptions(options);
const parser = new Parser(lineCounter?.addNewLine);
const composer = new Composer(options);
// `doc` is always set by compose.end(true) at the very latest
let doc = null;
for (const _doc of composer.compose(parser.parse(source), true, source.length)) {
if (!doc)
doc = _doc;
else if (doc.options.logLevel !== 'silent') {
doc.errors.push(new YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));
break;
}
}
if (prettyErrors && lineCounter) {
doc.errors.forEach(prettifyError(source, lineCounter));
doc.warnings.forEach(prettifyError(source, lineCounter));
}
return doc;
}
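// Usage sketch (defined but never invoked): single-document parsing; errors
// and warnings are collected on the returned Document rather than thrown.
function _usageExampleParseDocument() {
  const doc = parseDocument('greeting: hello # inline comment\n');
  if (doc.errors.length > 0) throw doc.errors[0];
  return doc.toJS(); // => { greeting: 'hello' }
}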
function parse$a(src, reviver, options) {
let _reviver = undefined;
if (typeof reviver === 'function') {
_reviver = reviver;
}
else if (options === undefined && reviver && typeof reviver === 'object') {
options = reviver;
}
const doc = parseDocument(src, options);
if (!doc)
return null;
doc.warnings.forEach(warning => warn(doc.options.logLevel, warning));
if (doc.errors.length > 0) {
if (doc.options.logLevel !== 'silent')
throw doc.errors[0];
else
doc.errors = [];
}
return doc.toJS(Object.assign({ reviver: _reviver }, options));
}
function stringify(value, replacer, options) {
let _replacer = null;
if (typeof replacer === 'function' || Array.isArray(replacer)) {
_replacer = replacer;
}
else if (options === undefined && replacer) {
options = replacer;
}
if (typeof options === 'string')
options = options.length;
if (typeof options === 'number') {
const indent = Math.round(options);
options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };
}
if (value === undefined) {
const { keepUndefined } = options ?? replacer ?? {};
if (!keepUndefined)
return undefined;
}
return new Document(value, _replacer, options).toString(options);
}
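// Usage sketch (defined but never invoked): a numeric third argument is
// normalized into `{ indent }` (clamped to 1..8), mirroring JSON.stringify.
function _usageExampleStringify() {
  return stringify({ list: [1, 2] }, null, 2);
  // => 'list:\n  - 1\n  - 2\n'
}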
var YAML = {
__proto__: null,
Alias: Alias,
CST: cst,
Composer: Composer,
Document: Document,
Lexer: Lexer,
LineCounter: LineCounter,
Pair: Pair,
Parser: Parser,
Scalar: Scalar,
Schema: Schema,
YAMLError: YAMLError,
YAMLMap: YAMLMap,
YAMLParseError: YAMLParseError,
YAMLSeq: YAMLSeq,
YAMLWarning: YAMLWarning,
isAlias: isAlias,
isCollection: isCollection$1,
isDocument: isDocument,
isMap: isMap,
isNode: isNode$1,
isPair: isPair,
isScalar: isScalar$1,
isSeq: isSeq,
parse: parse$a,
parseAllDocuments: parseAllDocuments,
parseDocument: parseDocument,
stringify: stringify,
visit: visit$1,
visitAsync: visitAsync
};
// `export * as default from ...` fails on Webpack v4
// https://github.com/eemeli/yaml/issues/228
var browser$2 = {
__proto__: null,
Alias: Alias,
CST: cst,
Composer: Composer,
Document: Document,
Lexer: Lexer,
LineCounter: LineCounter,
Pair: Pair,
Parser: Parser,
Scalar: Scalar,
Schema: Schema,
YAMLError: YAMLError,
YAMLMap: YAMLMap,
YAMLParseError: YAMLParseError,
YAMLSeq: YAMLSeq,
YAMLWarning: YAMLWarning,
default: YAML,
isAlias: isAlias,
isCollection: isCollection$1,
isDocument: isDocument,
isMap: isMap,
isNode: isNode$1,
isPair: isPair,
isScalar: isScalar$1,
isSeq: isSeq,
parse: parse$a,
parseAllDocuments: parseAllDocuments,
parseDocument: parseDocument,
stringify: stringify,
visit: visit$1,
visitAsync: visitAsync
};
var require$$3 = /*@__PURE__*/getAugmentedNamespace(browser$2);
// eslint-disable-next-line n/no-deprecated-api
const { createRequire, createRequireFromPath } = require$$0$8;
function req$2 (name, rootFile) {
const create = createRequire || createRequireFromPath;
const require = create(rootFile);
return require(name)
}
var req_1 = req$2;
const req$1 = req_1;
/**
* Load Options
*
* @private
* @method options
*
* @param {Object} config PostCSS Config
*
* @return {Object} options PostCSS Options
*/
const options = (config, file) => {
if (config.parser && typeof config.parser === 'string') {
try {
config.parser = req$1(config.parser, file);
} catch (err) {
throw new Error(`Loading PostCSS Parser failed: ${err.message}\n\n(@${file})`)
}
}
if (config.syntax && typeof config.syntax === 'string') {
try {
config.syntax = req$1(config.syntax, file);
} catch (err) {
throw new Error(`Loading PostCSS Syntax failed: ${err.message}\n\n(@${file})`)
}
}
if (config.stringifier && typeof config.stringifier === 'string') {
try {
config.stringifier = req$1(config.stringifier, file);
} catch (err) {
throw new Error(`Loading PostCSS Stringifier failed: ${err.message}\n\n(@${file})`)
}
}
if (config.plugins) {
delete config.plugins;
}
return config
};
var options_1 = options;
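// Usage sketch (defined but never invoked): string-valued parser/syntax/
// stringifier fields are replaced by the loaded modules. The module name and
// config path below are illustrative and must be resolvable for this to run.
function _usageExampleLoadOptions() {
  const config = { parser: 'sugarss', map: { inline: false } };
  return options_1(config, '/path/to/postcss.config.js');
  // config.parser is now the loaded parser module (or an Error naming the
  // failing field was thrown).
}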
const req = req_1;
/**
* Plugin Loader
*
* @private
* @method load
*
* @param {String} plugin PostCSS Plugin Name
* @param {Object} options PostCSS Plugin Options
*
* @return {Function} PostCSS Plugin
*/
const load = (plugin, options, file) => {
try {
if (
options === null ||
options === undefined ||
Object.keys(options).length === 0
) {
return req(plugin, file)
} else {
return req(plugin, file)(options)
}
} catch (err) {
throw new Error(`Loading PostCSS Plugin failed: ${err.message}\n\n(@${file})`)
}
};
/**
* Load Plugins
*
* @private
* @method plugins
*
* @param {Object} config PostCSS Config Plugins
*
* @return {Array} plugins PostCSS Plugins
*/
const plugins = (config, file) => {
let plugins = [];
if (Array.isArray(config.plugins)) {
plugins = config.plugins.filter(Boolean);
} else {
plugins = Object.keys(config.plugins)
.filter((plugin) => {
return config.plugins[plugin] !== false ? plugin : ''
})
.map((plugin) => {
return load(plugin, config.plugins[plugin], file)
});
}
if (plugins.length > 0) {
plugins.forEach((plugin, i) => {
if (plugin.default) {
plugin = plugin.default;
}
if (plugin.postcss === true) {
plugin = plugin();
} else if (plugin.postcss) {
plugin = plugin.postcss;
}
if (
// eslint-disable-next-line
!(
(typeof plugin === 'object' && Array.isArray(plugin.plugins)) ||
(typeof plugin === 'object' && plugin.postcssPlugin) ||
(typeof plugin === 'function')
)
) {
throw new TypeError(`Invalid PostCSS Plugin found at: plugins[${i}]\n\n(@${file})`)
}
});
}
return plugins
};
var plugins_1 = plugins;
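// Usage sketch (defined but never invoked): object-form plugin config is
// normalized to an array of loaded plugins; entries set to `false` are
// dropped. The plugin names and config path are illustrative assumptions.
function _usageExamplePlugins() {
  const config = {
    plugins: {
      autoprefixer: { grid: 'autoplace' }, // loaded and invoked with options
      'postcss-nested': false // filtered out entirely
    }
  };
  return plugins_1(config, '/path/to/postcss.config.js');
}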
const resolve = require$$0$4.resolve;
const url$4 = require$$0$9;
const config$1 = src$2;
const yaml = require$$3;
const loadOptions = options_1;
const loadPlugins = plugins_1;
/* istanbul ignore next */
const interopRequireDefault = (obj) => obj && obj.__esModule ? obj : { default: obj };
/**
* Process the result from cosmiconfig
*
* @param {Object} ctx Config Context
* @param {Object} result Cosmiconfig result
*
* @return {Object} PostCSS Config
*/
const processResult = (ctx, result) => {
const file = result.filepath || '';
let config = interopRequireDefault(result.config).default || {};
if (typeof config === 'function') {
config = config(ctx);
} else {
config = Object.assign({}, config, ctx);
}
if (!config.plugins) {
config.plugins = [];
}
return {
plugins: loadPlugins(config, file),
options: loadOptions(config, file),
file
}
};
/**
* Builds the Config Context
*
* @param {Object} ctx Config Context
*
* @return {Object} Config Context
*/
const createContext = (ctx) => {
/**
* @type {Object}
*
* @prop {String} cwd=process.cwd() Config search start location
* @prop {String} env=process.env.NODE_ENV Config Environment; will be set to `development` by `postcss-load-config` if `process.env.NODE_ENV` is `undefined`
*/
ctx = Object.assign({
cwd: process.cwd(),
env: process.env.NODE_ENV
}, ctx);
if (!ctx.env) {
process.env.NODE_ENV = 'development';
}
return ctx
};
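// Usage sketch (defined but never invoked): cwd defaults to process.cwd() and
// env mirrors NODE_ENV; when neither is set, process.env.NODE_ENV is forced
// to 'development' as a side effect.
function _usageExampleCreateContext() {
  return createContext({ env: 'production' });
  // => { cwd: process.cwd(), env: 'production' }
}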
const importDefault = async filepath => {
const module = await import(url$4.pathToFileURL(filepath).href);
return module.default
};
const addTypeScriptLoader = (options = {}, loader) => {
const moduleName = 'postcss';
return {
...options,
searchPlaces: [
...(options.searchPlaces || []),
'package.json',
`.${moduleName}rc`,
`.${moduleName}rc.json`,
`.${moduleName}rc.yaml`,
`.${moduleName}rc.yml`,
`.${moduleName}rc.ts`,
`.${moduleName}rc.cts`,
`.${moduleName}rc.js`,
`.${moduleName}rc.cjs`,
`.${moduleName}rc.mjs`,
`${moduleName}.config.ts`,
`${moduleName}.config.cts`,
`${moduleName}.config.js`,
`${moduleName}.config.cjs`,
`${moduleName}.config.mjs`
],
loaders: {
...options.loaders,
'.yaml': (filepath, content) => yaml.parse(content),
'.yml': (filepath, content) => yaml.parse(content),
'.js': importDefault,
'.cjs': importDefault,
'.mjs': importDefault,
'.ts': loader,
'.cts': loader
}
}
};
const withTypeScriptLoader = (rcFunc) => {
return (ctx, path, options) => {
return rcFunc(ctx, path, addTypeScriptLoader(options, (configFile) => {
let registerer = { enabled () {} };
try {
// Register TypeScript compiler instance
registerer = __require('ts-node').register({
// transpile to cjs even if compilerOptions.module in tsconfig is not Node16/NodeNext.
moduleTypes: { '**/*.cts': 'cjs' }
});
return __require(configFile)
} catch (err) {
if (err.code === 'MODULE_NOT_FOUND') {
throw new Error(
`'ts-node' is required for the TypeScript configuration files. Make sure it is installed\nError: ${err.message}`
)
}
throw err
} finally {
registerer.enabled(false);
}
}))
}
};
/**
* Load Config
*
* @method rc
*
* @param {Object} ctx Config Context
* @param {String} path Config Path
* @param {Object} options Config Options
*
* @return {Promise} config PostCSS Config
*/
const rc = withTypeScriptLoader((ctx, path, options) => {
/**
* @type {Object} The full Config Context
*/
ctx = createContext(ctx);
/**
* @type {String} `process.cwd()`
*/
path = path ? resolve(path) : process.cwd();
return config$1.lilconfig('postcss', options)
.search(path)
.then((result) => {
if (!result) {
throw new Error(`No PostCSS Config found in: ${path}`)
}
return processResult(ctx, result)
})
});
/**
* Autoload Config for PostCSS
*
* @author Michael Ciniawsky @michael-ciniawsky
* @license MIT
*
* @module postcss-load-config
* @version 2.1.0
*
* @requires cosmiconfig
* @requires ./options
* @requires ./plugins
*/
var src$1 = rc;
var postcssrc = /*@__PURE__*/getDefaultExportFromCjs(src$1);
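// Usage sketch (defined but never invoked): resolving a PostCSS config from
// disk. The search starts at the given directory, which is assumed to contain
// one of the supported config files; otherwise the promise rejects.
function _usageExamplePostcssrc() {
  return postcssrc({ env: 'production' }, process.cwd()).then(
    ({ plugins, options, file }) => ({ plugins, options, file })
  );
}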
// Copyright 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Simon Lydell
// License: MIT.
var HashbangComment, Identifier, JSXIdentifier, JSXPunctuator, JSXString, JSXText, KeywordsWithExpressionAfter, KeywordsWithNoLineTerminatorAfter, LineTerminatorSequence, MultiLineComment, Newline, NumericLiteral, Punctuator, RegularExpressionLiteral, SingleLineComment, StringLiteral, Template, TokensNotPrecedingObjectLiteral, TokensPrecedingExpression, WhiteSpace;
RegularExpressionLiteral = /\/(?![*\/])(?:\[(?:[^\]\\\n\r\u2028\u2029]+|\\.)*\]|[^\/\\\n\r\u2028\u2029]+|\\.)*(\/[$_\u200C\u200D\p{ID_Continue}]*|\\)?/yu;
Punctuator = /--|\+\+|=>|\.{3}|\??\.(?!\d)|(?:&&|\|\||\?\?|[+\-%&|^]|\*{1,2}|<{1,2}|>{1,3}|!=?|={1,2}|\/(?![\/*]))=?|[?~,:;[\](){}]/y;
Identifier = /(\x23?)(?=[$_\p{ID_Start}\\])(?:[$_\u200C\u200D\p{ID_Continue}]+|\\u[\da-fA-F]{4}|\\u\{[\da-fA-F]+\})+/yu;
StringLiteral = /(['"])(?:[^'"\\\n\r]+|(?!\1)['"]|\\(?:\r\n|[^]))*(\1)?/y;
NumericLiteral = /(?:0[xX][\da-fA-F](?:_?[\da-fA-F])*|0[oO][0-7](?:_?[0-7])*|0[bB][01](?:_?[01])*)n?|0n|[1-9](?:_?\d)*n|(?:(?:0(?!\d)|0\d*[89]\d*|[1-9](?:_?\d)*)(?:\.(?:\d(?:_?\d)*)?)?|\.\d(?:_?\d)*)(?:[eE][+-]?\d(?:_?\d)*)?|0[0-7]+/y;
Template = /[`}](?:[^`\\$]+|\\[^]|\$(?!\{))*(`|\$\{)?/y;
WhiteSpace = /[\t\v\f\ufeff\p{Zs}]+/yu;
LineTerminatorSequence = /\r?\n|[\r\u2028\u2029]/y;
MultiLineComment = /\/\*(?:[^*]+|\*(?!\/))*(\*\/)?/y;
SingleLineComment = /\/\/.*/y;
HashbangComment = /^#!.*/;
JSXPunctuator = /[<>.:={}]|\/(?![\/*])/y;
JSXIdentifier = /[$_\p{ID_Start}][$_\u200C\u200D\p{ID_Continue}-]*/yu;
JSXString = /(['"])(?:[^'"]+|(?!\1)['"])*(\1)?/y;
JSXText = /[^<>{}]+/y;
TokensPrecedingExpression = /^(?:[\/+-]|\.{3}|\?(?:InterpolationIn(?:JSX|Template)|NoLineTerminatorHere|NonExpressionParenEnd|UnaryIncDec))?$|[{}([,;<>=*%&|^!~?:]$/;
TokensNotPrecedingObjectLiteral = /^(?:=>|[;\]){}]|else|\?(?:NoLineTerminatorHere|NonExpressionParenEnd))?$/;
KeywordsWithExpressionAfter = /^(?:await|case|default|delete|do|else|instanceof|new|return|throw|typeof|void|yield)$/;
KeywordsWithNoLineTerminatorAfter = /^(?:return|throw|yield)$/;
Newline = RegExp(LineTerminatorSequence.source);
var jsTokens_1 = function*(input, {jsx = false} = {}) {
var braces, firstCodePoint, isExpression, lastIndex, lastSignificantToken, length, match, mode, nextLastIndex, nextLastSignificantToken, parenNesting, postfixIncDec, punctuator, stack;
({length} = input);
lastIndex = 0;
lastSignificantToken = "";
stack = [
{tag: "JS"}
];
braces = [];
parenNesting = 0;
postfixIncDec = false;
if (match = HashbangComment.exec(input)) {
yield ({
type: "HashbangComment",
value: match[0]
});
lastIndex = match[0].length;
}
while (lastIndex < length) {
mode = stack[stack.length - 1];
switch (mode.tag) {
case "JS":
case "JSNonExpressionParen":
case "InterpolationInTemplate":
case "InterpolationInJSX":
if (input[lastIndex] === "/" && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken))) {
RegularExpressionLiteral.lastIndex = lastIndex;
if (match = RegularExpressionLiteral.exec(input)) {
lastIndex = RegularExpressionLiteral.lastIndex;
lastSignificantToken = match[0];
postfixIncDec = true;
yield ({
type: "RegularExpressionLiteral",
value: match[0],
closed: match[1] !== void 0 && match[1] !== "\\"
});
continue;
}
}
Punctuator.lastIndex = lastIndex;
if (match = Punctuator.exec(input)) {
punctuator = match[0];
nextLastIndex = Punctuator.lastIndex;
nextLastSignificantToken = punctuator;
switch (punctuator) {
case "(":
if (lastSignificantToken === "?NonExpressionParenKeyword") {
stack.push({
tag: "JSNonExpressionParen",
nesting: parenNesting
});
}
parenNesting++;
postfixIncDec = false;
break;
case ")":
parenNesting--;
postfixIncDec = true;
if (mode.tag === "JSNonExpressionParen" && parenNesting === mode.nesting) {
stack.pop();
nextLastSignificantToken = "?NonExpressionParenEnd";
postfixIncDec = false;
}
break;
case "{":
Punctuator.lastIndex = 0;
isExpression = !TokensNotPrecedingObjectLiteral.test(lastSignificantToken) && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken));
braces.push(isExpression);
postfixIncDec = false;
break;
case "}":
switch (mode.tag) {
case "InterpolationInTemplate":
if (braces.length === mode.nesting) {
Template.lastIndex = lastIndex;
match = Template.exec(input);
lastIndex = Template.lastIndex;
lastSignificantToken = match[0];
if (match[1] === "${") {
lastSignificantToken = "?InterpolationInTemplate";
postfixIncDec = false;
yield ({
type: "TemplateMiddle",
value: match[0]
});
} else {
stack.pop();
postfixIncDec = true;
yield ({
type: "TemplateTail",
value: match[0],
closed: match[1] === "`"
});
}
continue;
}
break;
case "InterpolationInJSX":
if (braces.length === mode.nesting) {
stack.pop();
lastIndex += 1;
lastSignificantToken = "}";
yield ({
type: "JSXPunctuator",
value: "}"
});
continue;
}
}
postfixIncDec = braces.pop();
nextLastSignificantToken = postfixIncDec ? "?ExpressionBraceEnd" : "}";
break;
case "]":
postfixIncDec = true;
break;
case "++":
case "--":
nextLastSignificantToken = postfixIncDec ? "?PostfixIncDec" : "?UnaryIncDec";
break;
case "<":
if (jsx && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken))) {
stack.push({tag: "JSXTag"});
lastIndex += 1;
lastSignificantToken = "<";
yield ({
type: "JSXPunctuator",
value: punctuator
});
continue;
}
postfixIncDec = false;
break;
default:
postfixIncDec = false;
}
lastIndex = nextLastIndex;
lastSignificantToken = nextLastSignificantToken;
yield ({
type: "Punctuator",
value: punctuator
});
continue;
}
Identifier.lastIndex = lastIndex;
if (match = Identifier.exec(input)) {
lastIndex = Identifier.lastIndex;
nextLastSignificantToken = match[0];
switch (match[0]) {
case "for":
case "if":
case "while":
case "with":
if (lastSignificantToken !== "." && lastSignificantToken !== "?.") {
nextLastSignificantToken = "?NonExpressionParenKeyword";
}
}
lastSignificantToken = nextLastSignificantToken;
postfixIncDec = !KeywordsWithExpressionAfter.test(match[0]);
yield ({
type: match[1] === "#" ? "PrivateIdentifier" : "IdentifierName",
value: match[0]
});
continue;
}
StringLiteral.lastIndex = lastIndex;
if (match = StringLiteral.exec(input)) {
lastIndex = StringLiteral.lastIndex;
lastSignificantToken = match[0];
postfixIncDec = true;
yield ({
type: "StringLiteral",
value: match[0],
closed: match[2] !== void 0
});
continue;
}
NumericLiteral.lastIndex = lastIndex;
if (match = NumericLiteral.exec(input)) {
lastIndex = NumericLiteral.lastIndex;
lastSignificantToken = match[0];
postfixIncDec = true;
yield ({
type: "NumericLiteral",
value: match[0]
});
continue;
}
Template.lastIndex = lastIndex;
if (match = Template.exec(input)) {
lastIndex = Template.lastIndex;
lastSignificantToken = match[0];
if (match[1] === "${") {
lastSignificantToken = "?InterpolationInTemplate";
stack.push({
tag: "InterpolationInTemplate",
nesting: braces.length
});
postfixIncDec = false;
yield ({
type: "TemplateHead",
value: match[0]
});
} else {
postfixIncDec = true;
yield ({
type: "NoSubstitutionTemplate",
value: match[0],
closed: match[1] === "`"
});
}
continue;
}
break;
case "JSXTag":
case "JSXTagEnd":
JSXPunctuator.lastIndex = lastIndex;
if (match = JSXPunctuator.exec(input)) {
lastIndex = JSXPunctuator.lastIndex;
nextLastSignificantToken = match[0];
switch (match[0]) {
case "<":
stack.push({tag: "JSXTag"});
break;
case ">":
stack.pop();
if (lastSignificantToken === "/" || mode.tag === "JSXTagEnd") {
nextLastSignificantToken = "?JSX";
postfixIncDec = true;
} else {
stack.push({tag: "JSXChildren"});
}
break;
case "{":
stack.push({
tag: "InterpolationInJSX",
nesting: braces.length
});
nextLastSignificantToken = "?InterpolationInJSX";
postfixIncDec = false;
break;
case "/":
if (lastSignificantToken === "<") {
stack.pop();
if (stack[stack.length - 1].tag === "JSXChildren") {
stack.pop();
}
stack.push({tag: "JSXTagEnd"});
}
}
lastSignificantToken = nextLastSignificantToken;
yield ({
type: "JSXPunctuator",
value: match[0]
});
continue;
}
JSXIdentifier.lastIndex = lastIndex;
if (match = JSXIdentifier.exec(input)) {
lastIndex = JSXIdentifier.lastIndex;
lastSignificantToken = match[0];
yield ({
type: "JSXIdentifier",
value: match[0]
});
continue;
}
JSXString.lastIndex = lastIndex;
if (match = JSXString.exec(input)) {
lastIndex = JSXString.lastIndex;
lastSignificantToken = match[0];
yield ({
type: "JSXString",
value: match[0],
closed: match[2] !== void 0
});
continue;
}
break;
case "JSXChildren":
JSXText.lastIndex = lastIndex;
if (match = JSXText.exec(input)) {
lastIndex = JSXText.lastIndex;
lastSignificantToken = match[0];
yield ({
type: "JSXText",
value: match[0]
});
continue;
}
switch (input[lastIndex]) {
case "<":
stack.push({tag: "JSXTag"});
lastIndex++;
lastSignificantToken = "<";
yield ({
type: "JSXPunctuator",
value: "<"
});
continue;
case "{":
stack.push({
tag: "InterpolationInJSX",
nesting: braces.length
});
lastIndex++;
lastSignificantToken = "?InterpolationInJSX";
postfixIncDec = false;
yield ({
type: "JSXPunctuator",
value: "{"
});
continue;
}
}
WhiteSpace.lastIndex = lastIndex;
if (match = WhiteSpace.exec(input)) {
lastIndex = WhiteSpace.lastIndex;
yield ({
type: "WhiteSpace",
value: match[0]
});
continue;
}
LineTerminatorSequence.lastIndex = lastIndex;
if (match = LineTerminatorSequence.exec(input)) {
lastIndex = LineTerminatorSequence.lastIndex;
postfixIncDec = false;
if (KeywordsWithNoLineTerminatorAfter.test(lastSignificantToken)) {
lastSignificantToken = "?NoLineTerminatorHere";
}
yield ({
type: "LineTerminatorSequence",
value: match[0]
});
continue;
}
MultiLineComment.lastIndex = lastIndex;
if (match = MultiLineComment.exec(input)) {
lastIndex = MultiLineComment.lastIndex;
if (Newline.test(match[0])) {
postfixIncDec = false;
if (KeywordsWithNoLineTerminatorAfter.test(lastSignificantToken)) {
lastSignificantToken = "?NoLineTerminatorHere";
}
}
yield ({
type: "MultiLineComment",
value: match[0],
closed: match[1] !== void 0
});
continue;
}
SingleLineComment.lastIndex = lastIndex;
if (match = SingleLineComment.exec(input)) {
lastIndex = SingleLineComment.lastIndex;
postfixIncDec = false;
yield ({
type: "SingleLineComment",
value: match[0]
});
continue;
}
firstCodePoint = String.fromCodePoint(input.codePointAt(lastIndex));
lastIndex += firstCodePoint.length;
lastSignificantToken = firstCodePoint;
postfixIncDec = false;
yield ({
type: mode.tag.startsWith("JSX") ? "JSXInvalid" : "Invalid",
value: firstCodePoint
});
}
return void 0;
};
var jsTokens = /*@__PURE__*/getDefaultExportFromCjs(jsTokens_1);
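// Usage sketch (defined but never invoked): tokenizing a small snippet; each
// token carries a `type` (IdentifierName, Punctuator, StringLiteral, ...) and
// the exact source text in `value`.
function _usageExampleJsTokens() {
  const tokens = Array.from(jsTokens("let x = 'hi' // note", { jsx: false }));
  return tokens.filter((t) => t.type !== "WhiteSpace").map((t) => t.value);
  // => ["let", "x", "=", "'hi'", "// note"]
}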
function stripLiteralJsTokens(code, options) {
const FILL = " ";
const FILL_COMMENT = " ";
let result = "";
const tokens = [];
for (const token of jsTokens(code, { jsx: false })) {
tokens.push(token);
if (token.type === "SingleLineComment") {
result += FILL_COMMENT.repeat(token.value.length);
continue;
}
if (token.type === "MultiLineComment") {
result += token.value.replace(/[^\n]/g, FILL_COMMENT);
continue;
}
if (token.type === "StringLiteral") {
if (!token.closed) {
result += token.value;
continue;
}
const body = token.value.slice(1, -1);
{
result += token.value[0] + FILL.repeat(body.length) + token.value[token.value.length - 1];
continue;
}
}
if (token.type === "NoSubstitutionTemplate") {
const body = token.value.slice(1, -1);
{
result += `\`${body.replace(/[^\n]/g, FILL)}\``;
continue;
}
}
if (token.type === "RegularExpressionLiteral") {
const body = token.value;
{
result += body.replace(/\/(.*)\/(\w?)$/g, (_, $1, $2) => `/${FILL.repeat($1.length)}/${$2}`);
continue;
}
}
if (token.type === "TemplateHead") {
const body = token.value.slice(1, -2);
{
result += `\`${body.replace(/[^\n]/g, FILL)}\${`;
continue;
}
}
if (token.type === "TemplateTail") {
const body = token.value.slice(0, -2);
{
result += `}${body.replace(/[^\n]/g, FILL)}\``;
continue;
}
}
if (token.type === "TemplateMiddle") {
const body = token.value.slice(1, -2);
{
result += `}${body.replace(/[^\n]/g, FILL)}\${`;
continue;
}
}
result += token.value;
}
return {
result,
tokens
};
}
function stripLiteral(code, options) {
return stripLiteralDetailed(code).result;
}
function stripLiteralDetailed(code, options) {
return stripLiteralJsTokens(code);
}
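// Usage sketch (defined but never invoked): string and template bodies are
// blanked with spaces of the same length, so offsets in the result still line
// up with the original source.
function _usageExampleStripLiteral() {
  return stripLiteral("find('needle') + `tpl`");
  // => "find('      ') + `   `"
}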
var main$1 = {exports: {}};
var name = "dotenv";
var version$1 = "16.4.5";
var description = "Loads environment variables from .env file";
var main = "lib/main.js";
var types = "lib/main.d.ts";
var exports = {
".": {
types: "./lib/main.d.ts",
require: "./lib/main.js",
"default": "./lib/main.js"
},
"./config": "./config.js",
"./config.js": "./config.js",
"./lib/env-options": "./lib/env-options.js",
"./lib/env-options.js": "./lib/env-options.js",
"./lib/cli-options": "./lib/cli-options.js",
"./lib/cli-options.js": "./lib/cli-options.js",
"./package.json": "./package.json"
};
var scripts = {
"dts-check": "tsc --project tests/types/tsconfig.json",
lint: "standard",
"lint-readme": "standard-markdown",
pretest: "npm run lint && npm run dts-check",
test: "tap tests/*.js --100 -Rspec",
"test:coverage": "tap --coverage-report=lcov",
prerelease: "npm test",
release: "standard-version"
};
var repository = {
type: "git",
url: "git://github.com/motdotla/dotenv.git"
};
var funding = "https://dotenvx.com";
var keywords = [
"dotenv",
"env",
".env",
"environment",
"variables",
"config",
"settings"
];
var readmeFilename = "README.md";
var license = "BSD-2-Clause";
var devDependencies = {
"@definitelytyped/dtslint": "^0.0.133",
"@types/node": "^18.11.3",
decache: "^4.6.1",
sinon: "^14.0.1",
standard: "^17.0.0",
"standard-markdown": "^7.1.0",
"standard-version": "^9.5.0",
tap: "^16.3.0",
tar: "^6.1.11",
typescript: "^4.8.4"
};
var engines = {
node: ">=12"
};
var browser$1 = {
fs: false
};
var require$$4 = {
name: name,
version: version$1,
description: description,
main: main,
types: types,
exports: exports,
scripts: scripts,
repository: repository,
funding: funding,
keywords: keywords,
readmeFilename: readmeFilename,
license: license,
devDependencies: devDependencies,
engines: engines,
browser: browser$1
};
const fs$9 = require$$0__default;
const path$9 = require$$0$4;
const os$2 = require$$2;
const crypto$1 = require$$3$1;
const packageJson = require$$4;
const version = packageJson.version;
const LINE = /(?:^|^)\s*(?:export\s+)?([\w.-]+)(?:\s*=\s*?|:\s+?)(\s*'(?:\\'|[^'])*'|\s*"(?:\\"|[^"])*"|\s*`(?:\\`|[^`])*`|[^#\r\n]+)?\s*(?:#.*)?(?:$|$)/mg;
// Parse src into an Object
function parse$9 (src) {
const obj = {};
// Convert buffer to string
let lines = src.toString();
// Convert line breaks to the same format
lines = lines.replace(/\r\n?/mg, '\n');
let match;
while ((match = LINE.exec(lines)) != null) {
const key = match[1];
// Default undefined or null to empty string
let value = (match[2] || '');
// Remove whitespace
value = value.trim();
// Check if double quoted
const maybeQuote = value[0];
// Remove surrounding quotes
value = value.replace(/^(['"`])([\s\S]*)\1$/mg, '$2');
// Expand newlines if double quoted
if (maybeQuote === '"') {
value = value.replace(/\\n/g, '\n');
value = value.replace(/\\r/g, '\r');
}
// Add to object
obj[key] = value;
}
return obj
}
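// Usage sketch (defined but never invoked): parsing a small .env-style string;
// quoted values keep inner whitespace and trailing `#` comments are ignored.
function _usageExampleDotenvParse() {
  const src = 'HOST=localhost\nGREETING="hello world" # inline comment\nEMPTY=\n';
  return parse$9(src);
  // => { HOST: 'localhost', GREETING: 'hello world', EMPTY: '' }
}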
function _parseVault (options) {
const vaultPath = _vaultPath(options);
// Parse .env.vault
const result = DotenvModule.configDotenv({ path: vaultPath });
if (!result.parsed) {
const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`);
err.code = 'MISSING_DATA';
throw err
}
// handle scenario for comma separated keys - for use with key rotation
// example: DOTENV_KEY="dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenvx.com/vault/.env.vault?environment=prod"
const keys = _dotenvKey(options).split(',');
const length = keys.length;
let decrypted;
for (let i = 0; i < length; i++) {
try {
// Get full key
const key = keys[i].trim();
// Get instructions for decrypt
const attrs = _instructions(result, key);
// Decrypt
decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key);
break
} catch (error) {
// last key
if (i + 1 >= length) {
throw error
}
// try next key
}
}
// Parse decrypted .env string
return DotenvModule.parse(decrypted)
}
function _log (message) {
console.log(`[dotenv@${version}][INFO] ${message}`);
}
function _warn (message) {
console.log(`[dotenv@${version}][WARN] ${message}`);
}
function _debug (message) {
console.log(`[dotenv@${version}][DEBUG] ${message}`);
}
function _dotenvKey (options) {
// prioritize developer directly setting options.DOTENV_KEY
if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {
return options.DOTENV_KEY
}
// secondary infra already contains a DOTENV_KEY environment variable
if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {
return process.env.DOTENV_KEY
}
// fallback to empty string
return ''
}
function _instructions (result, dotenvKey) {
// Parse DOTENV_KEY. Format is a URI
let uri;
try {
uri = new URL(dotenvKey);
} catch (error) {
if (error.code === 'ERR_INVALID_URL') {
const err = new Error('INVALID_DOTENV_KEY: Wrong format. Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development');
err.code = 'INVALID_DOTENV_KEY';
throw err
}
throw error
}
// Get decrypt key
const key = uri.password;
if (!key) {
const err = new Error('INVALID_DOTENV_KEY: Missing key part');
err.code = 'INVALID_DOTENV_KEY';
throw err
}
// Get environment
const environment = uri.searchParams.get('environment');
if (!environment) {
const err = new Error('INVALID_DOTENV_KEY: Missing environment part');
err.code = 'INVALID_DOTENV_KEY';
throw err
}
// Get ciphertext payload
const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`;
const ciphertext = result.parsed[environmentKey]; // DOTENV_VAULT_PRODUCTION
if (!ciphertext) {
const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`);
err.code = 'NOT_FOUND_DOTENV_ENVIRONMENT';
throw err
}
return { ciphertext, key }
}
function _vaultPath (options) {
let possibleVaultPath = null;
if (options && options.path && options.path.length > 0) {
if (Array.isArray(options.path)) {
for (const filepath of options.path) {
if (fs$9.existsSync(filepath)) {
possibleVaultPath = filepath.endsWith('.vault') ? filepath : `${filepath}.vault`;
}
}
} else {
possibleVaultPath = options.path.endsWith('.vault') ? options.path : `${options.path}.vault`;
}
} else {
possibleVaultPath = path$9.resolve(process.cwd(), '.env.vault');
}
if (fs$9.existsSync(possibleVaultPath)) {
return possibleVaultPath
}
return null
}
function _resolveHome (envPath) {
return envPath[0] === '~' ? path$9.join(os$2.homedir(), envPath.slice(1)) : envPath
}
function _configVault (options) {
_log('Loading env from encrypted .env.vault');
const parsed = DotenvModule._parseVault(options);
let processEnv = process.env;
if (options && options.processEnv != null) {
processEnv = options.processEnv;
}
DotenvModule.populate(processEnv, parsed, options);
return { parsed }
}
function configDotenv (options) {
const dotenvPath = path$9.resolve(process.cwd(), '.env');
let encoding = 'utf8';
const debug = Boolean(options && options.debug);
if (options && options.encoding) {
encoding = options.encoding;
} else {
if (debug) {
_debug('No encoding is specified. UTF-8 is used by default');
}
}
let optionPaths = [dotenvPath]; // default, look for .env
if (options && options.path) {
if (!Array.isArray(options.path)) {
optionPaths = [_resolveHome(options.path)];
} else {
optionPaths = []; // reset default
for (const filepath of options.path) {
optionPaths.push(_resolveHome(filepath));
}
}
}
// Build the parsed data in a temporary object (because we need to return it). Once we have the final
// parsed data, we will combine it with process.env (or options.processEnv if provided).
let lastError;
const parsedAll = {};
for (const path of optionPaths) {
try {
// Specifying an encoding returns a string instead of a buffer
const parsed = DotenvModule.parse(fs$9.readFileSync(path, { encoding }));
DotenvModule.populate(parsedAll, parsed, options);
} catch (e) {
if (debug) {
_debug(`Failed to load ${path} ${e.message}`);
}
lastError = e;
}
}
let processEnv = process.env;
if (options && options.processEnv != null) {
processEnv = options.processEnv;
}
DotenvModule.populate(processEnv, parsedAll, options);
if (lastError) {
return { parsed: parsedAll, error: lastError }
} else {
return { parsed: parsedAll }
}
}
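// Usage sketch (defined but never invoked): loading a specific file into an
// isolated object instead of process.env; the file path is illustrative, and
// a read failure is reported via `result.error` rather than thrown.
function _usageExampleConfigDotenv() {
  const target = {};
  const result = configDotenv({ path: '.env.example', processEnv: target });
  return { target, result };
}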
// Populates process.env from .env file
function config (options) {
// fallback to original dotenv if DOTENV_KEY is not set
if (_dotenvKey(options).length === 0) {
return DotenvModule.configDotenv(options)
}
const vaultPath = _vaultPath(options);
// dotenvKey exists but .env.vault file does not exist
if (!vaultPath) {
_warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. Did you forget to build it?`);
return DotenvModule.configDotenv(options)
}
return DotenvModule._configVault(options)
}
function decrypt (encrypted, keyStr) {
const key = Buffer.from(keyStr.slice(-64), 'hex');
let ciphertext = Buffer.from(encrypted, 'base64');
const nonce = ciphertext.subarray(0, 12);
const authTag = ciphertext.subarray(-16);
ciphertext = ciphertext.subarray(12, -16);
try {
const aesgcm = crypto$1.createDecipheriv('aes-256-gcm', key, nonce);
aesgcm.setAuthTag(authTag);
return `${aesgcm.update(ciphertext)}${aesgcm.final()}`
} catch (error) {
const isRange = error instanceof RangeError;
const invalidKeyLength = error.message === 'Invalid key length';
const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data';
if (isRange || invalidKeyLength) {
const err = new Error('INVALID_DOTENV_KEY: It must be 64 characters long (or more)');
err.code = 'INVALID_DOTENV_KEY';
throw err
} else if (decryptionFailed) {
const err = new Error('DECRYPTION_FAILED: Please check your DOTENV_KEY');
err.code = 'DECRYPTION_FAILED';
throw err
} else {
throw error
}
}
}
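// Round-trip sketch for decrypt() (defined but never invoked): the base64
// payload is 12-byte nonce + ciphertext + 16-byte GCM auth tag, and the key is
// taken from the last 64 hex characters of the DOTENV_KEY password.
function _usageExampleDecrypt() {
  const keyHex = crypto$1.randomBytes(32).toString('hex');
  const nonce = crypto$1.randomBytes(12);
  const cipher = crypto$1.createCipheriv('aes-256-gcm', Buffer.from(keyHex, 'hex'), nonce);
  const ciphertext = Buffer.concat([cipher.update('HELLO="world"', 'utf8'), cipher.final()]);
  const payload = Buffer.concat([nonce, ciphertext, cipher.getAuthTag()]).toString('base64');
  return decrypt(payload, keyHex); // => 'HELLO="world"'
}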
// Populate process.env with parsed values
function populate (processEnv, parsed, options = {}) {
const debug = Boolean(options && options.debug);
const override = Boolean(options && options.override);
if (typeof parsed !== 'object') {
const err = new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate');
err.code = 'OBJECT_REQUIRED';
throw err
}
// Set process.env
for (const key of Object.keys(parsed)) {
if (Object.prototype.hasOwnProperty.call(processEnv, key)) {
if (override === true) {
processEnv[key] = parsed[key];
}
if (debug) {
if (override === true) {
_debug(`"${key}" is already defined and WAS overwritten`);
} else {
_debug(`"${key}" is already defined and was NOT overwritten`);
}
}
} else {
processEnv[key] = parsed[key];
}
}
}
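// Usage sketch (defined but never invoked): keys already present on the target
// are preserved unless `{ override: true }` is passed.
function _usageExamplePopulate() {
  const target = { HOST: 'original' };
  populate(target, { HOST: 'ignored', PORT: '3000' });
  return target; // => { HOST: 'original', PORT: '3000' }
}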
const DotenvModule = {
configDotenv,
_configVault,
_parseVault,
config,
decrypt,
parse: parse$9,
populate
};
main$1.exports.configDotenv = DotenvModule.configDotenv;
main$1.exports._configVault = DotenvModule._configVault;
main$1.exports._parseVault = DotenvModule._parseVault;
main$1.exports.config = DotenvModule.config;
main$1.exports.decrypt = DotenvModule.decrypt;
var parse_1$1 = main$1.exports.parse = DotenvModule.parse;
main$1.exports.populate = DotenvModule.populate;
main$1.exports = DotenvModule;
// * /
// * (\\)? # is it escaped with a backslash?
// * (\$) # literal $
// * (?!\() # shouldn't be followed by parenthesis
// * (\{?) # first brace wrap opening
// * ([\w.]+) # key
// * (?::-((?:\$\{(?:\$\{(?:\$\{[^}]*\}|[^}])*}|[^}])*}|[^}])+))? # optional default nested 3 times
// * (\}?) # last brace wrap closing
// * /xi
const DOTENV_SUBSTITUTION_REGEX = /(\\)?(\$)(?!\()(\{?)([\w.]+)(?::?-((?:\$\{(?:\$\{(?:\$\{[^}]*\}|[^}])*}|[^}])*}|[^}])+))?(\}?)/gi;
function _resolveEscapeSequences (value) {
return value.replace(/\\\$/g, '$')
}
function interpolate (value, processEnv, parsed) {
return value.replace(DOTENV_SUBSTITUTION_REGEX, (match, escaped, dollarSign, openBrace, key, defaultValue, closeBrace) => {
if (escaped === '\\') {
return match.slice(1)
} else {
if (processEnv[key]) {
if (processEnv[key] === parsed[key]) {
return processEnv[key]
} else {
// scenario: PASSWORD_EXPAND_NESTED=${PASSWORD_EXPAND}
return interpolate(processEnv[key], processEnv, parsed)
}
}
if (parsed[key]) {
// avoid recursion from EXPAND_SELF=$EXPAND_SELF
if (parsed[key] === value) {
return parsed[key]
} else {
return interpolate(parsed[key], processEnv, parsed)
}
}
if (defaultValue) {
if (defaultValue.startsWith('$')) {
return interpolate(defaultValue, processEnv, parsed)
} else {
return defaultValue
}
}
return ''
}
})
}
function expand (options) {
let processEnv = process.env;
if (options && options.processEnv != null) {
processEnv = options.processEnv;
}
for (const key in options.parsed) {
let value = options.parsed[key];
const inProcessEnv = Object.prototype.hasOwnProperty.call(processEnv, key);
if (inProcessEnv) {
if (processEnv[key] === options.parsed[key]) {
// the values match, so assume processEnv was populated from this .env file and interpolate
value = interpolate(value, processEnv, options.parsed);
} else {
// do not interpolate - assume processEnv had the intended value even if containing a $.
value = processEnv[key];
}
} else {
// not inProcessEnv so assume interpolation for this .env key
value = interpolate(value, processEnv, options.parsed);
}
options.parsed[key] = _resolveEscapeSequences(value);
}
for (const processKey in options.parsed) {
processEnv[processKey] = options.parsed[processKey];
}
return options
}
var expand_1 = expand;
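// Usage sketch (defined but never invoked): ${VAR} references in parsed values
// are expanded and the results are copied onto the given processEnv object.
function _usageExampleExpand() {
  const processEnv = {};
  const parsed = { BASE_URL: 'https://example.com', API_URL: '${BASE_URL}/api' };
  expand_1({ parsed, processEnv });
  return processEnv.API_URL; // => 'https://example.com/api'
}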
function getEnvFilesForMode(mode, envDir) {
return [
/** default file */
`.env`,
/** local file */
`.env.local`,
/** mode file */
`.env.${mode}`,
/** mode local file */
`.env.${mode}.local`
].map((file) => normalizePath$3(path$n.join(envDir, file)));
}
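// Usage sketch (defined but never invoked): the four candidate env files for a
// mode; when loaded, values from later files override earlier ones.
function _usageExampleGetEnvFilesForMode() {
  return getEnvFilesForMode("production", "/srv/app");
  // => ["/srv/app/.env", "/srv/app/.env.local",
  //     "/srv/app/.env.production", "/srv/app/.env.production.local"]
}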
function loadEnv(mode, envDir, prefixes = "VITE_") {
if (mode === "local") {
throw new Error(
`"local" cannot be used as a mode name because it conflicts with the .local postfix for .env files.`
);
}
prefixes = arraify(prefixes);
const env = {};
const envFiles = getEnvFilesForMode(mode, envDir);
const parsed = Object.fromEntries(
envFiles.flatMap((filePath) => {
if (!tryStatSync(filePath)?.isFile()) return [];
return Object.entries(parse_1$1(fs__default.readFileSync(filePath)));
})
);
if (parsed.NODE_ENV && process.env.VITE_USER_NODE_ENV === void 0) {
process.env.VITE_USER_NODE_ENV = parsed.NODE_ENV;
}
if (parsed.BROWSER && process.env.BROWSER === void 0) {
process.env.BROWSER = parsed.BROWSER;
}
if (parsed.BROWSER_ARGS && process.env.BROWSER_ARGS === void 0) {
process.env.BROWSER_ARGS = parsed.BROWSER_ARGS;
}
const processEnv = { ...process.env };
expand_1({ parsed, processEnv });
for (const [key, value] of Object.entries(parsed)) {
if (prefixes.some((prefix) => key.startsWith(prefix))) {
env[key] = value;
}
}
for (const key in process.env) {
if (prefixes.some((prefix) => key.startsWith(prefix))) {
env[key] = process.env[key];
}
}
return env;
}
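// Usage sketch (defined but never invoked): only variables whose names start
// with one of the given prefixes are returned; the directory and variable
// names here are illustrative.
function _usageExampleLoadEnv() {
  const env = loadEnv("production", process.cwd(), ["VITE_", "PUBLIC_"]);
  return env; // e.g. { VITE_API_URL: "...", PUBLIC_FLAG: "..." }
}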
function resolveEnvPrefix({
envPrefix = "VITE_"
}) {
envPrefix = arraify(envPrefix);
if (envPrefix.includes("")) {
throw new Error(
`envPrefix option contains value '', which could lead to unexpected exposure of sensitive information.`
);
}
return envPrefix;
}
const modulePreloadPolyfillId = "vite/modulepreload-polyfill";
const resolvedModulePreloadPolyfillId = "\0" + modulePreloadPolyfillId + ".js";
function modulePreloadPolyfillPlugin(config) {
const skip = config.command !== "build" || config.build.ssr;
let polyfillString;
return {
name: "vite:modulepreload-polyfill",
resolveId(id) {
if (id === modulePreloadPolyfillId) {
return resolvedModulePreloadPolyfillId;
}
},
load(id) {
if (id === resolvedModulePreloadPolyfillId) {
if (skip) {
return "";
}
if (!polyfillString) {
polyfillString = `${isModernFlag}&&(${polyfill.toString()}());`;
}
return { code: polyfillString, moduleSideEffects: true };
}
}
};
}
function polyfill() {
const relList = document.createElement("link").relList;
if (relList && relList.supports && relList.supports("modulepreload")) {
return;
}
for (const link of document.querySelectorAll('link[rel="modulepreload"]')) {
processPreload(link);
}
new MutationObserver((mutations) => {
for (const mutation of mutations) {
if (mutation.type !== "childList") {
continue;
}
for (const node of mutation.addedNodes) {
if (node.tagName === "LINK" && node.rel === "modulepreload")
processPreload(node);
}
}
}).observe(document, { childList: true, subtree: true });
function getFetchOpts(link) {
const fetchOpts = {};
if (link.integrity) fetchOpts.integrity = link.integrity;
if (link.referrerPolicy) fetchOpts.referrerPolicy = link.referrerPolicy;
if (link.crossOrigin === "use-credentials")
fetchOpts.credentials = "include";
else if (link.crossOrigin === "anonymous") fetchOpts.credentials = "omit";
else fetchOpts.credentials = "same-origin";
return fetchOpts;
}
function processPreload(link) {
if (link.ep)
return;
link.ep = true;
const fetchOpts = getFetchOpts(link);
fetch(link.href, fetchOpts);
}
}
const htmlProxyRE$1 = /\?html-proxy=?(?:&inline-css)?(?:&style-attr)?&index=(\d+)\.(?:js|css)$/;
const isHtmlProxyRE = /\?html-proxy\b/;
const inlineCSSRE$1 = /__VITE_INLINE_CSS__([a-z\d]{8}_\d+)__/g;
const inlineImportRE = /(?<!(?<!\.\.)\.)\bimport\s*\(("(?:[^"]|(?<=\\)")*"|'(?:[^']|(?<=\\)')*')\)/dg;
const importMapRE = /[ \t]*<script[^>]*type\s*=\s*(?:"importmap"|'importmap'|importmap)[^>]*>.*?<\/script>/is;
const moduleScriptRE = /[ \t]*