import fs$l from 'node:fs';
import fsp from 'node:fs/promises';
import path$o, { dirname as dirname$2, join as join$2, posix as posix$1, isAbsolute as isAbsolute$2, relative as relative$2, basename as basename$2, extname as extname$1 } from 'node:path';
import { fileURLToPath, URL as URL$3, URLSearchParams, parse as parse$i, pathToFileURL } from 'node:url';
import { promisify as promisify$4, format as format$2, inspect } from 'node:util';
import { performance } from 'node:perf_hooks';
import { createRequire as createRequire$1, builtinModules } from 'node:module';
import require$$0$3 from 'tty';
import esbuild, { transform as transform$1, formatMessages, build as build$3 } from 'esbuild';
import require$$0$4, { win32, posix, isAbsolute as isAbsolute$1, resolve as resolve$3, relative as relative$1, basename as basename$1, extname, dirname as dirname$1, join as join$1, sep as sep$1, normalize } from 'path';
import * as require$$0$2 from 'fs';
import require$$0__default, { existsSync, readFileSync, statSync as statSync$1, promises as promises$1, readdir as readdir$4, readdirSync } from 'fs';
import require$$0$5 from 'events';
import require$$5 from 'assert';
import require$$0$6 from 'util';
import require$$3$2 from 'net';
import require$$0$9 from 'url';
import require$$1$1 from 'http';
import require$$0$7 from 'stream';
import require$$2 from 'os';
import require$$2$1 from 'child_process';
import os$4 from 'node:os';
import { exec } from 'node:child_process';
import { createHash as createHash$2 } from 'node:crypto';
import { promises } from 'node:dns';
import { CLIENT_ENTRY, OPTIMIZABLE_ENTRY_RE, wildcardHosts, loopbackHosts, VALID_ID_PREFIX, NULL_BYTE_PLACEHOLDER, FS_PREFIX, CLIENT_PUBLIC_PATH, ENV_PUBLIC_PATH, ENV_ENTRY, DEP_VERSION_RE, DEFAULT_MAIN_FIELDS, DEFAULT_EXTENSIONS as DEFAULT_EXTENSIONS$1, SPECIAL_QUERY_RE, CSS_LANGS_RE, ESBUILD_MODULES_TARGET, KNOWN_ASSET_TYPES, CLIENT_DIR, JS_TYPES_RE, VERSION as VERSION$1, VITE_PACKAGE_DIR, DEFAULT_DEV_PORT, DEFAULT_PREVIEW_PORT, DEFAULT_ASSETS_RE, DEFAULT_CONFIG_FILES } from '../constants.js';
import require$$3$1 from 'crypto';
import { Buffer as Buffer$1 } from 'node:buffer';
import require$$0$8, { createRequire as createRequire$2 } from 'module';
import assert$1 from 'node:assert';
import process$1 from 'node:process';
import v8 from 'node:v8';
import { VERSION } from 'rollup';
import require$$1 from 'worker_threads';
import { createServer as createServer$3, STATUS_CODES } from 'node:http';
import { createServer as createServer$2 } from 'node:https';
import require$$0$a from 'zlib';
import require$$0$b from 'buffer';
import require$$1$2 from 'https';
import require$$4$1 from 'tls';
import * as qs from 'querystring';
import readline from 'node:readline';
import zlib$1, { gzip } from 'node:zlib';
import { fileURLToPath as __cjs_fileURLToPath } from 'node:url';
import { dirname as __cjs_dirname } from 'node:path';
import { createRequire as __cjs_createRequire } from 'node:module';
const __filename = __cjs_fileURLToPath(import.meta.url);
const __dirname = __cjs_dirname(__filename);
const require = __cjs_createRequire(import.meta.url);
const __require = require;
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
function getDefaultExportFromCjs (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
function getAugmentedNamespace(n) {
if (n.__esModule) return n;
var f = n.default;
if (typeof f == "function") {
var a = function a () {
if (this instanceof a) {
return Reflect.construct(f, arguments, this.constructor);
}
return f.apply(this, arguments);
};
a.prototype = f.prototype;
} else a = {};
Object.defineProperty(a, '__esModule', {value: true});
Object.keys(n).forEach(function (k) {
var d = Object.getOwnPropertyDescriptor(n, k);
Object.defineProperty(a, k, d.get ? d : {
enumerable: true,
get: function () {
return n[k];
}
});
});
return a;
}
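/**
 * A minimal usage sketch of the two CommonJS interop helpers above. The object
 * literals are hypothetical stand-ins for real module namespaces.
 *
 * ```js
 * const esmLike = { __esModule: true, default: { hello: 'world' } };
 * getDefaultExportFromCjs(esmLike); //=> { hello: 'world' } (unwraps the interop default)
 *
 * const plain = { foo: 1, bar: 2 };
 * const augmented = getAugmentedNamespace(plain);
 * augmented.__esModule; //=> true
 * augmented.foo;        //=> 1 (re-exposed through a getter)
 * ```
 */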
var picocolors = {exports: {}};
let tty = require$$0$3;
let isColorSupported =
!("NO_COLOR" in process.env || process.argv.includes("--no-color")) &&
("FORCE_COLOR" in process.env ||
process.argv.includes("--color") ||
process.platform === "win32" ||
(tty.isatty(1) && process.env.TERM !== "dumb") ||
"CI" in process.env);
let formatter =
(open, close, replace = open) =>
input => {
let string = "" + input;
let index = string.indexOf(close, open.length);
return ~index
? open + replaceClose(string, close, replace, index) + close
: open + string + close
};
let replaceClose = (string, close, replace, index) => {
let start = string.substring(0, index) + replace;
let end = string.substring(index + close.length);
let nextIndex = end.indexOf(close);
return ~nextIndex ? start + replaceClose(end, close, replace, nextIndex) : start + end
};
let createColors = (enabled = isColorSupported) => ({
isColorSupported: enabled,
reset: enabled ? s => `\x1b[0m${s}\x1b[0m` : String,
bold: enabled ? formatter("\x1b[1m", "\x1b[22m", "\x1b[22m\x1b[1m") : String,
dim: enabled ? formatter("\x1b[2m", "\x1b[22m", "\x1b[22m\x1b[2m") : String,
italic: enabled ? formatter("\x1b[3m", "\x1b[23m") : String,
underline: enabled ? formatter("\x1b[4m", "\x1b[24m") : String,
inverse: enabled ? formatter("\x1b[7m", "\x1b[27m") : String,
hidden: enabled ? formatter("\x1b[8m", "\x1b[28m") : String,
strikethrough: enabled ? formatter("\x1b[9m", "\x1b[29m") : String,
black: enabled ? formatter("\x1b[30m", "\x1b[39m") : String,
red: enabled ? formatter("\x1b[31m", "\x1b[39m") : String,
green: enabled ? formatter("\x1b[32m", "\x1b[39m") : String,
yellow: enabled ? formatter("\x1b[33m", "\x1b[39m") : String,
blue: enabled ? formatter("\x1b[34m", "\x1b[39m") : String,
magenta: enabled ? formatter("\x1b[35m", "\x1b[39m") : String,
cyan: enabled ? formatter("\x1b[36m", "\x1b[39m") : String,
white: enabled ? formatter("\x1b[37m", "\x1b[39m") : String,
gray: enabled ? formatter("\x1b[90m", "\x1b[39m") : String,
bgBlack: enabled ? formatter("\x1b[40m", "\x1b[49m") : String,
bgRed: enabled ? formatter("\x1b[41m", "\x1b[49m") : String,
bgGreen: enabled ? formatter("\x1b[42m", "\x1b[49m") : String,
bgYellow: enabled ? formatter("\x1b[43m", "\x1b[49m") : String,
bgBlue: enabled ? formatter("\x1b[44m", "\x1b[49m") : String,
bgMagenta: enabled ? formatter("\x1b[45m", "\x1b[49m") : String,
bgCyan: enabled ? formatter("\x1b[46m", "\x1b[49m") : String,
bgWhite: enabled ? formatter("\x1b[47m", "\x1b[49m") : String,
});
picocolors.exports = createColors();
picocolors.exports.createColors = createColors;
var picocolorsExports = picocolors.exports;
var colors$1 = /*@__PURE__*/getDefaultExportFromCjs(picocolorsExports);
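/**
 * A brief usage sketch of the bundled picocolors API above. `colors$1` is the
 * shared instance; `createColors` can force styling on or off, e.g. when output
 * is piped to a file (the log strings here are arbitrary examples).
 *
 * ```js
 * console.log(colors$1.green('ok'), colors$1.bold(colors$1.red('failed')));
 *
 * const plain = picocolorsExports.createColors(false);
 * plain.red('no ANSI codes here'); //=> 'no ANSI codes here'
 * ```
 */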
function matches$1(pattern, importee) {
if (pattern instanceof RegExp) {
return pattern.test(importee);
}
if (importee.length < pattern.length) {
return false;
}
if (importee === pattern) {
return true;
}
// eslint-disable-next-line prefer-template
return importee.startsWith(pattern + '/');
}
function getEntries({ entries, customResolver }) {
if (!entries) {
return [];
}
const resolverFunctionFromOptions = resolveCustomResolver(customResolver);
if (Array.isArray(entries)) {
return entries.map((entry) => {
return {
find: entry.find,
replacement: entry.replacement,
resolverFunction: resolveCustomResolver(entry.customResolver) || resolverFunctionFromOptions
};
});
}
return Object.entries(entries).map(([key, value]) => {
return { find: key, replacement: value, resolverFunction: resolverFunctionFromOptions };
});
}
function getHookFunction(hook) {
if (typeof hook === 'function') {
return hook;
}
if (hook && 'handler' in hook && typeof hook.handler === 'function') {
return hook.handler;
}
return null;
}
function resolveCustomResolver(customResolver) {
if (typeof customResolver === 'function') {
return customResolver;
}
if (customResolver) {
return getHookFunction(customResolver.resolveId);
}
return null;
}
function alias$1(options = {}) {
const entries = getEntries(options);
if (entries.length === 0) {
return {
name: 'alias',
resolveId: () => null
};
}
return {
name: 'alias',
async buildStart(inputOptions) {
await Promise.all([...(Array.isArray(options.entries) ? options.entries : []), options].map(({ customResolver }) => { var _a; return customResolver && ((_a = getHookFunction(customResolver.buildStart)) === null || _a === void 0 ? void 0 : _a.call(this, inputOptions)); }));
},
resolveId(importee, importer, resolveOptions) {
if (!importer) {
return null;
}
// First match is supposed to be the correct one
const matchedEntry = entries.find((entry) => matches$1(entry.find, importee));
if (!matchedEntry) {
return null;
}
const updatedId = importee.replace(matchedEntry.find, matchedEntry.replacement);
if (matchedEntry.resolverFunction) {
return matchedEntry.resolverFunction.call(this, updatedId, importer, resolveOptions);
}
return this.resolve(updatedId, importer, Object.assign({ skipSelf: true }, resolveOptions)).then((resolved) => resolved || { id: updatedId });
}
};
}
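/**
 * A minimal configuration sketch for the `alias` plugin above. Entries may be
 * an array of `{ find, replacement }` pairs (`find` can be a string prefix or a
 * RegExp) or a plain object map; the paths shown are hypothetical.
 *
 * ```js
 * const aliasPlugin = alias$1({
 *   entries: [
 *     { find: 'utils', replacement: '/src/utils' },
 *     { find: /^lodash$/, replacement: 'lodash-es' }
 *   ]
 * });
 * // Equivalent object form:
 * const aliasPlugin2 = alias$1({ entries: { utils: '/src/utils' } });
 * ```
 */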
// @ts-check
/** @typedef { import('estree').BaseNode} BaseNode */
/** @typedef {{
skip: () => void;
remove: () => void;
replace: (node: BaseNode) => void;
}} WalkerContext */
let WalkerBase$1 = class WalkerBase {
constructor() {
/** @type {boolean} */
this.should_skip = false;
/** @type {boolean} */
this.should_remove = false;
/** @type {BaseNode | null} */
this.replacement = null;
/** @type {WalkerContext} */
this.context = {
skip: () => (this.should_skip = true),
remove: () => (this.should_remove = true),
replace: (node) => (this.replacement = node)
};
}
/**
*
* @param {any} parent
* @param {string} prop
* @param {number} index
* @param {BaseNode} node
*/
replace(parent, prop, index, node) {
if (parent) {
if (index !== null) {
parent[prop][index] = node;
} else {
parent[prop] = node;
}
}
}
/**
*
* @param {any} parent
* @param {string} prop
* @param {number} index
*/
remove(parent, prop, index) {
if (parent) {
if (index !== null) {
parent[prop].splice(index, 1);
} else {
delete parent[prop];
}
}
}
};
// @ts-check
/** @typedef { import('estree').BaseNode} BaseNode */
/** @typedef { import('./walker.js').WalkerContext} WalkerContext */
/** @typedef {(
* this: WalkerContext,
* node: BaseNode,
* parent: BaseNode,
* key: string,
* index: number
* ) => void} SyncHandler */
let SyncWalker$1 = class SyncWalker extends WalkerBase$1 {
/**
*
* @param {SyncHandler} enter
* @param {SyncHandler} leave
*/
constructor(enter, leave) {
super();
/** @type {SyncHandler} */
this.enter = enter;
/** @type {SyncHandler} */
this.leave = leave;
}
/**
*
* @param {BaseNode} node
* @param {BaseNode} parent
* @param {string} [prop]
* @param {number} [index]
* @returns {BaseNode}
*/
visit(node, parent, prop, index) {
if (node) {
if (this.enter) {
const _should_skip = this.should_skip;
const _should_remove = this.should_remove;
const _replacement = this.replacement;
this.should_skip = false;
this.should_remove = false;
this.replacement = null;
this.enter.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const skipped = this.should_skip;
const removed = this.should_remove;
this.should_skip = _should_skip;
this.should_remove = _should_remove;
this.replacement = _replacement;
if (skipped) return node;
if (removed) return null;
}
for (const key in node) {
const value = node[key];
if (typeof value !== "object") {
continue;
} else if (Array.isArray(value)) {
for (let i = 0; i < value.length; i += 1) {
if (value[i] !== null && typeof value[i].type === 'string') {
if (!this.visit(value[i], node, key, i)) {
// removed
i--;
}
}
}
} else if (value !== null && typeof value.type === "string") {
this.visit(value, node, key, null);
}
}
if (this.leave) {
const _replacement = this.replacement;
const _should_remove = this.should_remove;
this.replacement = null;
this.should_remove = false;
this.leave.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const removed = this.should_remove;
this.replacement = _replacement;
this.should_remove = _should_remove;
if (removed) return null;
}
}
return node;
}
};
// @ts-check
/** @typedef { import('estree').BaseNode} BaseNode */
/** @typedef { import('./sync.js').SyncHandler} SyncHandler */
/** @typedef { import('./async.js').AsyncHandler} AsyncHandler */
/**
*
* @param {BaseNode} ast
* @param {{
* enter?: SyncHandler
* leave?: SyncHandler
* }} walker
* @returns {BaseNode}
*/
function walk$4(ast, { enter, leave }) {
const instance = new SyncWalker$1(enter, leave);
return instance.visit(ast, null);
}
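/**
 * A small usage sketch of the `walk$4` AST walker above. The tiny AST literal
 * is hand-written for illustration rather than produced by a real parser;
 * inside `enter`/`leave`, `this.skip()`, `this.remove()` and `this.replace(node)`
 * are available.
 *
 * ```js
 * const ast = {
 *   type: 'Program',
 *   body: [{ type: 'ExpressionStatement', expression: { type: 'Identifier', name: 'foo' } }]
 * };
 * walk$4(ast, {
 *   enter(node) {
 *     if (node.type === 'Identifier') console.log('found', node.name);
 *   }
 * });
 * ```
 */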
var utils$k = {};
const path$n = require$$0$4;
const WIN_SLASH = '\\\\/';
const WIN_NO_SLASH = `[^${WIN_SLASH}]`;
/**
* Posix glob regex
*/
const DOT_LITERAL = '\\.';
const PLUS_LITERAL = '\\+';
const QMARK_LITERAL = '\\?';
const SLASH_LITERAL = '\\/';
const ONE_CHAR = '(?=.)';
const QMARK = '[^/]';
const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
const NO_DOT = `(?!${DOT_LITERAL})`;
const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
const STAR$1 = `${QMARK}*?`;
const POSIX_CHARS = {
DOT_LITERAL,
PLUS_LITERAL,
QMARK_LITERAL,
SLASH_LITERAL,
ONE_CHAR,
QMARK,
END_ANCHOR,
DOTS_SLASH,
NO_DOT,
NO_DOTS,
NO_DOT_SLASH,
NO_DOTS_SLASH,
QMARK_NO_DOT,
STAR: STAR$1,
START_ANCHOR
};
/**
* Windows glob regex
*/
const WINDOWS_CHARS = {
...POSIX_CHARS,
SLASH_LITERAL: `[${WIN_SLASH}]`,
QMARK: WIN_NO_SLASH,
STAR: `${WIN_NO_SLASH}*?`,
DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
NO_DOT: `(?!${DOT_LITERAL})`,
NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
END_ANCHOR: `(?:[${WIN_SLASH}]|$)`
};
/**
* POSIX Bracket Regex
*/
const POSIX_REGEX_SOURCE$1 = {
alnum: 'a-zA-Z0-9',
alpha: 'a-zA-Z',
ascii: '\\x00-\\x7F',
blank: ' \\t',
cntrl: '\\x00-\\x1F\\x7F',
digit: '0-9',
graph: '\\x21-\\x7E',
lower: 'a-z',
print: '\\x20-\\x7E ',
punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
space: ' \\t\\r\\n\\v\\f',
upper: 'A-Z',
word: 'A-Za-z0-9_',
xdigit: 'A-Fa-f0-9'
};
var constants$6 = {
MAX_LENGTH: 1024 * 64,
POSIX_REGEX_SOURCE: POSIX_REGEX_SOURCE$1,
// regular expressions
REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,
// Replace globs with equivalent patterns to reduce parsing time.
REPLACEMENTS: {
'***': '*',
'**/**': '**',
'**/**/**': '**'
},
// Digits
CHAR_0: 48, /* 0 */
CHAR_9: 57, /* 9 */
// Alphabet chars.
CHAR_UPPERCASE_A: 65, /* A */
CHAR_LOWERCASE_A: 97, /* a */
CHAR_UPPERCASE_Z: 90, /* Z */
CHAR_LOWERCASE_Z: 122, /* z */
CHAR_LEFT_PARENTHESES: 40, /* ( */
CHAR_RIGHT_PARENTHESES: 41, /* ) */
CHAR_ASTERISK: 42, /* * */
// Non-alphabetic chars.
CHAR_AMPERSAND: 38, /* & */
CHAR_AT: 64, /* @ */
CHAR_BACKWARD_SLASH: 92, /* \ */
CHAR_CARRIAGE_RETURN: 13, /* \r */
CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
CHAR_COLON: 58, /* : */
CHAR_COMMA: 44, /* , */
CHAR_DOT: 46, /* . */
CHAR_DOUBLE_QUOTE: 34, /* " */
CHAR_EQUAL: 61, /* = */
CHAR_EXCLAMATION_MARK: 33, /* ! */
CHAR_FORM_FEED: 12, /* \f */
CHAR_FORWARD_SLASH: 47, /* / */
CHAR_GRAVE_ACCENT: 96, /* ` */
CHAR_HASH: 35, /* # */
CHAR_HYPHEN_MINUS: 45, /* - */
CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
CHAR_LEFT_CURLY_BRACE: 123, /* { */
CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
CHAR_LINE_FEED: 10, /* \n */
CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
CHAR_PERCENT: 37, /* % */
CHAR_PLUS: 43, /* + */
CHAR_QUESTION_MARK: 63, /* ? */
CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
CHAR_RIGHT_CURLY_BRACE: 125, /* } */
CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
CHAR_SEMICOLON: 59, /* ; */
CHAR_SINGLE_QUOTE: 39, /* ' */
CHAR_SPACE: 32, /* ' ' */
CHAR_TAB: 9, /* \t */
CHAR_UNDERSCORE: 95, /* _ */
CHAR_VERTICAL_LINE: 124, /* | */
CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */
SEP: path$n.sep,
/**
* Create EXTGLOB_CHARS
*/
extglobChars(chars) {
return {
'!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
'?': { type: 'qmark', open: '(?:', close: ')?' },
'+': { type: 'plus', open: '(?:', close: ')+' },
'*': { type: 'star', open: '(?:', close: ')*' },
'@': { type: 'at', open: '(?:', close: ')' }
};
},
/**
* Create GLOB_CHARS
*/
globChars(win32) {
return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
}
};
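/**
 * A short sketch of how the helpers above are used. `globChars` picks the
 * Windows or POSIX character tables, and `extglobChars` maps the extglob
 * prefixes ('!', '?', '+', '*', '@') to their open/close regex fragments.
 *
 * ```js
 * const chars = constants$6.globChars(process.platform === 'win32');
 * const extglob = constants$6.extglobChars(chars);
 * extglob['!'].type;   //=> 'negate'
 * chars.SLASH_LITERAL; //=> '\\/' on POSIX, '[\\\\/]' on Windows (as source literals)
 * ```
 */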
(function (exports) {
const path = require$$0$4;
const win32 = process.platform === 'win32';
const {
REGEX_BACKSLASH,
REGEX_REMOVE_BACKSLASH,
REGEX_SPECIAL_CHARS,
REGEX_SPECIAL_CHARS_GLOBAL
} = constants$6;
exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');
exports.removeBackslashes = str => {
return str.replace(REGEX_REMOVE_BACKSLASH, match => {
return match === '\\' ? '' : match;
});
};
exports.supportsLookbehinds = () => {
const segs = process.version.slice(1).split('.').map(Number);
if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {
return true;
}
return false;
};
exports.isWindows = options => {
if (options && typeof options.windows === 'boolean') {
return options.windows;
}
return win32 === true || path.sep === '\\';
};
exports.escapeLast = (input, char, lastIdx) => {
const idx = input.lastIndexOf(char, lastIdx);
if (idx === -1) return input;
if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1);
return `${input.slice(0, idx)}\\${input.slice(idx)}`;
};
exports.removePrefix = (input, state = {}) => {
let output = input;
if (output.startsWith('./')) {
output = output.slice(2);
state.prefix = './';
}
return output;
};
exports.wrapOutput = (input, state = {}, options = {}) => {
const prepend = options.contains ? '' : '^';
const append = options.contains ? '' : '$';
let output = `${prepend}(?:${input})${append}`;
if (state.negated === true) {
output = `(?:^(?!${output}).*$)`;
}
return output;
};
} (utils$k));
const utils$j = utils$k;
const {
CHAR_ASTERISK, /* * */
CHAR_AT, /* @ */
CHAR_BACKWARD_SLASH, /* \ */
CHAR_COMMA: CHAR_COMMA$1, /* , */
CHAR_DOT: CHAR_DOT$1, /* . */
CHAR_EXCLAMATION_MARK, /* ! */
CHAR_FORWARD_SLASH, /* / */
CHAR_LEFT_CURLY_BRACE: CHAR_LEFT_CURLY_BRACE$1, /* { */
CHAR_LEFT_PARENTHESES: CHAR_LEFT_PARENTHESES$1, /* ( */
CHAR_LEFT_SQUARE_BRACKET: CHAR_LEFT_SQUARE_BRACKET$1, /* [ */
CHAR_PLUS, /* + */
CHAR_QUESTION_MARK, /* ? */
CHAR_RIGHT_CURLY_BRACE: CHAR_RIGHT_CURLY_BRACE$1, /* } */
CHAR_RIGHT_PARENTHESES: CHAR_RIGHT_PARENTHESES$1, /* ) */
CHAR_RIGHT_SQUARE_BRACKET: CHAR_RIGHT_SQUARE_BRACKET$1 /* ] */
} = constants$6;
const isPathSeparator = code => {
return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
};
const depth = token => {
if (token.isPrefix !== true) {
token.depth = token.isGlobstar ? Infinity : 1;
}
};
/**
* Quickly scans a glob pattern and returns an object with a handful of
 * useful properties, like `isGlob`, `base` (the leading non-glob, if it exists),
* `glob` (the actual pattern), `negated` (true if the path starts with `!` but not
* with `!(`) and `negatedExtglob` (true if the path starts with `!(`).
*
* ```js
* const pm = require('picomatch');
* console.log(pm.scan('foo/bar/*.js'));
* { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }
* ```
* @param {String} `str`
* @param {Object} `options`
* @return {Object} Returns an object with tokens and regex source string.
* @api public
*/
const scan$2 = (input, options) => {
const opts = options || {};
const length = input.length - 1;
const scanToEnd = opts.parts === true || opts.scanToEnd === true;
const slashes = [];
const tokens = [];
const parts = [];
let str = input;
let index = -1;
let start = 0;
let lastIndex = 0;
let isBrace = false;
let isBracket = false;
let isGlob = false;
let isExtglob = false;
let isGlobstar = false;
let braceEscaped = false;
let backslashes = false;
let negated = false;
let negatedExtglob = false;
let finished = false;
let braces = 0;
let prev;
let code;
let token = { value: '', depth: 0, isGlob: false };
const eos = () => index >= length;
const peek = () => str.charCodeAt(index + 1);
const advance = () => {
prev = code;
return str.charCodeAt(++index);
};
while (index < length) {
code = advance();
let next;
if (code === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
code = advance();
if (code === CHAR_LEFT_CURLY_BRACE$1) {
braceEscaped = true;
}
continue;
}
if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE$1) {
braces++;
while (eos() !== true && (code = advance())) {
if (code === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
advance();
continue;
}
if (code === CHAR_LEFT_CURLY_BRACE$1) {
braces++;
continue;
}
if (braceEscaped !== true && code === CHAR_DOT$1 && (code = advance()) === CHAR_DOT$1) {
isBrace = token.isBrace = true;
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (braceEscaped !== true && code === CHAR_COMMA$1) {
isBrace = token.isBrace = true;
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_RIGHT_CURLY_BRACE$1) {
braces--;
if (braces === 0) {
braceEscaped = false;
isBrace = token.isBrace = true;
finished = true;
break;
}
}
}
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_FORWARD_SLASH) {
slashes.push(index);
tokens.push(token);
token = { value: '', depth: 0, isGlob: false };
if (finished === true) continue;
if (prev === CHAR_DOT$1 && index === (start + 1)) {
start += 2;
continue;
}
lastIndex = index + 1;
continue;
}
if (opts.noext !== true) {
const isExtglobChar = code === CHAR_PLUS
|| code === CHAR_AT
|| code === CHAR_ASTERISK
|| code === CHAR_QUESTION_MARK
|| code === CHAR_EXCLAMATION_MARK;
if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES$1) {
isGlob = token.isGlob = true;
isExtglob = token.isExtglob = true;
finished = true;
if (code === CHAR_EXCLAMATION_MARK && index === start) {
negatedExtglob = true;
}
if (scanToEnd === true) {
while (eos() !== true && (code = advance())) {
if (code === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
code = advance();
continue;
}
if (code === CHAR_RIGHT_PARENTHESES$1) {
isGlob = token.isGlob = true;
finished = true;
break;
}
}
continue;
}
break;
}
}
if (code === CHAR_ASTERISK) {
if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_QUESTION_MARK) {
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_LEFT_SQUARE_BRACKET$1) {
while (eos() !== true && (next = advance())) {
if (next === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
advance();
continue;
}
if (next === CHAR_RIGHT_SQUARE_BRACKET$1) {
isBracket = token.isBracket = true;
isGlob = token.isGlob = true;
finished = true;
break;
}
}
if (scanToEnd === true) {
continue;
}
break;
}
if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
negated = token.negated = true;
start++;
continue;
}
if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES$1) {
isGlob = token.isGlob = true;
if (scanToEnd === true) {
while (eos() !== true && (code = advance())) {
if (code === CHAR_LEFT_PARENTHESES$1) {
backslashes = token.backslashes = true;
code = advance();
continue;
}
if (code === CHAR_RIGHT_PARENTHESES$1) {
finished = true;
break;
}
}
continue;
}
break;
}
if (isGlob === true) {
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
}
if (opts.noext === true) {
isExtglob = false;
isGlob = false;
}
let base = str;
let prefix = '';
let glob = '';
if (start > 0) {
prefix = str.slice(0, start);
str = str.slice(start);
lastIndex -= start;
}
if (base && isGlob === true && lastIndex > 0) {
base = str.slice(0, lastIndex);
glob = str.slice(lastIndex);
} else if (isGlob === true) {
base = '';
glob = str;
} else {
base = str;
}
if (base && base !== '' && base !== '/' && base !== str) {
if (isPathSeparator(base.charCodeAt(base.length - 1))) {
base = base.slice(0, -1);
}
}
if (opts.unescape === true) {
if (glob) glob = utils$j.removeBackslashes(glob);
if (base && backslashes === true) {
base = utils$j.removeBackslashes(base);
}
}
const state = {
prefix,
input,
start,
base,
glob,
isBrace,
isBracket,
isGlob,
isExtglob,
isGlobstar,
negated,
negatedExtglob
};
if (opts.tokens === true) {
state.maxDepth = 0;
if (!isPathSeparator(code)) {
tokens.push(token);
}
state.tokens = tokens;
}
if (opts.parts === true || opts.tokens === true) {
let prevIndex;
for (let idx = 0; idx < slashes.length; idx++) {
const n = prevIndex ? prevIndex + 1 : start;
const i = slashes[idx];
const value = input.slice(n, i);
if (opts.tokens) {
if (idx === 0 && start !== 0) {
tokens[idx].isPrefix = true;
tokens[idx].value = prefix;
} else {
tokens[idx].value = value;
}
depth(tokens[idx]);
state.maxDepth += tokens[idx].depth;
}
if (idx !== 0 || value !== '') {
parts.push(value);
}
prevIndex = i;
}
if (prevIndex && prevIndex + 1 < input.length) {
const value = input.slice(prevIndex + 1);
parts.push(value);
if (opts.tokens) {
tokens[tokens.length - 1].value = value;
depth(tokens[tokens.length - 1]);
state.maxDepth += tokens[tokens.length - 1].depth;
}
}
state.slashes = slashes;
state.parts = parts;
}
return state;
};
var scan_1 = scan$2;
const constants$5 = constants$6;
const utils$i = utils$k;
/**
* Constants
*/
const {
MAX_LENGTH: MAX_LENGTH$1,
POSIX_REGEX_SOURCE,
REGEX_NON_SPECIAL_CHARS,
REGEX_SPECIAL_CHARS_BACKREF,
REPLACEMENTS
} = constants$5;
/**
* Helpers
*/
const expandRange = (args, options) => {
if (typeof options.expandRange === 'function') {
return options.expandRange(...args, options);
}
args.sort();
const value = `[${args.join('-')}]`;
return value;
};
/**
* Create the message for a syntax error
*/
const syntaxError = (type, char) => {
return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`;
};
/**
* Parse the given input string.
* @param {String} input
* @param {Object} options
* @return {Object}
*/
const parse$h = (input, options) => {
if (typeof input !== 'string') {
throw new TypeError('Expected a string');
}
input = REPLACEMENTS[input] || input;
const opts = { ...options };
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH$1, opts.maxLength) : MAX_LENGTH$1;
let len = input.length;
if (len > max) {
throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
}
const bos = { type: 'bos', value: '', output: opts.prepend || '' };
const tokens = [bos];
const capture = opts.capture ? '' : '?:';
const win32 = utils$i.isWindows(options);
// create constants based on platform, for windows or posix
const PLATFORM_CHARS = constants$5.globChars(win32);
const EXTGLOB_CHARS = constants$5.extglobChars(PLATFORM_CHARS);
const {
DOT_LITERAL,
PLUS_LITERAL,
SLASH_LITERAL,
ONE_CHAR,
DOTS_SLASH,
NO_DOT,
NO_DOT_SLASH,
NO_DOTS_SLASH,
QMARK,
QMARK_NO_DOT,
STAR,
START_ANCHOR
} = PLATFORM_CHARS;
const globstar = opts => {
return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
};
const nodot = opts.dot ? '' : NO_DOT;
const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;
let star = opts.bash === true ? globstar(opts) : STAR;
if (opts.capture) {
star = `(${star})`;
}
// minimatch options support
if (typeof opts.noext === 'boolean') {
opts.noextglob = opts.noext;
}
const state = {
input,
index: -1,
start: 0,
dot: opts.dot === true,
consumed: '',
output: '',
prefix: '',
backtrack: false,
negated: false,
brackets: 0,
braces: 0,
parens: 0,
quotes: 0,
globstar: false,
tokens
};
input = utils$i.removePrefix(input, state);
len = input.length;
const extglobs = [];
const braces = [];
const stack = [];
let prev = bos;
let value;
/**
* Tokenizing helpers
*/
const eos = () => state.index === len - 1;
const peek = state.peek = (n = 1) => input[state.index + n];
const advance = state.advance = () => input[++state.index] || '';
const remaining = () => input.slice(state.index + 1);
const consume = (value = '', num = 0) => {
state.consumed += value;
state.index += num;
};
const append = token => {
state.output += token.output != null ? token.output : token.value;
consume(token.value);
};
const negate = () => {
let count = 1;
while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {
advance();
state.start++;
count++;
}
if (count % 2 === 0) {
return false;
}
state.negated = true;
state.start++;
return true;
};
const increment = type => {
state[type]++;
stack.push(type);
};
const decrement = type => {
state[type]--;
stack.pop();
};
/**
* Push tokens onto the tokens array. This helper speeds up
* tokenizing by 1) helping us avoid backtracking as much as possible,
* and 2) helping us avoid creating extra tokens when consecutive
* characters are plain text. This improves performance and simplifies
* lookbehinds.
*/
const push = tok => {
if (prev.type === 'globstar') {
const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');
const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));
if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {
state.output = state.output.slice(0, -prev.output.length);
prev.type = 'star';
prev.value = '*';
prev.output = star;
state.output += prev.output;
}
}
if (extglobs.length && tok.type !== 'paren') {
extglobs[extglobs.length - 1].inner += tok.value;
}
if (tok.value || tok.output) append(tok);
if (prev && prev.type === 'text' && tok.type === 'text') {
prev.value += tok.value;
prev.output = (prev.output || '') + tok.value;
return;
}
tok.prev = prev;
tokens.push(tok);
prev = tok;
};
const extglobOpen = (type, value) => {
const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };
token.prev = prev;
token.parens = state.parens;
token.output = state.output;
const output = (opts.capture ? '(' : '') + token.open;
increment('parens');
push({ type, value, output: state.output ? '' : ONE_CHAR });
push({ type: 'paren', extglob: true, value: advance(), output });
extglobs.push(token);
};
const extglobClose = token => {
let output = token.close + (opts.capture ? ')' : '');
let rest;
if (token.type === 'negate') {
let extglobStar = star;
if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {
extglobStar = globstar(opts);
}
if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) {
output = token.close = `)$))${extglobStar}`;
}
if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) {
// Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis.
// In this case, we need to parse the string and use it in the output of the original pattern.
// Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`.
//
// Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`.
const expression = parse$h(rest, { ...options, fastpaths: false }).output;
output = token.close = `)${expression})${extglobStar})`;
}
if (token.prev.type === 'bos') {
state.negatedExtglob = true;
}
}
push({ type: 'paren', extglob: true, value, output });
decrement('parens');
};
/**
* Fast paths
*/
if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) {
let backslashes = false;
let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {
if (first === '\\') {
backslashes = true;
return m;
}
if (first === '?') {
if (esc) {
return esc + first + (rest ? QMARK.repeat(rest.length) : '');
}
if (index === 0) {
return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');
}
return QMARK.repeat(chars.length);
}
if (first === '.') {
return DOT_LITERAL.repeat(chars.length);
}
if (first === '*') {
if (esc) {
return esc + first + (rest ? star : '');
}
return star;
}
return esc ? m : `\\${m}`;
});
if (backslashes === true) {
if (opts.unescape === true) {
output = output.replace(/\\/g, '');
} else {
output = output.replace(/\\+/g, m => {
return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : '');
});
}
}
if (output === input && opts.contains === true) {
state.output = input;
return state;
}
state.output = utils$i.wrapOutput(output, state, options);
return state;
}
/**
* Tokenize input until we reach end-of-string
*/
while (!eos()) {
value = advance();
if (value === '\u0000') {
continue;
}
/**
* Escaped characters
*/
if (value === '\\') {
const next = peek();
if (next === '/' && opts.bash !== true) {
continue;
}
if (next === '.' || next === ';') {
continue;
}
if (!next) {
value += '\\';
push({ type: 'text', value });
continue;
}
// collapse slashes to reduce potential for exploits
const match = /^\\+/.exec(remaining());
let slashes = 0;
if (match && match[0].length > 2) {
slashes = match[0].length;
state.index += slashes;
if (slashes % 2 !== 0) {
value += '\\';
}
}
if (opts.unescape === true) {
value = advance();
} else {
value += advance();
}
if (state.brackets === 0) {
push({ type: 'text', value });
continue;
}
}
/**
* If we're inside a regex character class, continue
* until we reach the closing bracket.
*/
if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {
if (opts.posix !== false && value === ':') {
const inner = prev.value.slice(1);
if (inner.includes('[')) {
prev.posix = true;
if (inner.includes(':')) {
const idx = prev.value.lastIndexOf('[');
const pre = prev.value.slice(0, idx);
const rest = prev.value.slice(idx + 2);
const posix = POSIX_REGEX_SOURCE[rest];
if (posix) {
prev.value = pre + posix;
state.backtrack = true;
advance();
if (!bos.output && tokens.indexOf(prev) === 1) {
bos.output = ONE_CHAR;
}
continue;
}
}
}
}
if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {
value = `\\${value}`;
}
if (value === ']' && (prev.value === '[' || prev.value === '[^')) {
value = `\\${value}`;
}
if (opts.posix === true && value === '!' && prev.value === '[') {
value = '^';
}
prev.value += value;
append({ value });
continue;
}
/**
* If we're inside a quoted string, continue
* until we reach the closing double quote.
*/
if (state.quotes === 1 && value !== '"') {
value = utils$i.escapeRegex(value);
prev.value += value;
append({ value });
continue;
}
/**
* Double quotes
*/
if (value === '"') {
state.quotes = state.quotes === 1 ? 0 : 1;
if (opts.keepQuotes === true) {
push({ type: 'text', value });
}
continue;
}
/**
* Parentheses
*/
if (value === '(') {
increment('parens');
push({ type: 'paren', value });
continue;
}
if (value === ')') {
if (state.parens === 0 && opts.strictBrackets === true) {
throw new SyntaxError(syntaxError('opening', '('));
}
const extglob = extglobs[extglobs.length - 1];
if (extglob && state.parens === extglob.parens + 1) {
extglobClose(extglobs.pop());
continue;
}
push({ type: 'paren', value, output: state.parens ? ')' : '\\)' });
decrement('parens');
continue;
}
/**
* Square brackets
*/
if (value === '[') {
if (opts.nobracket === true || !remaining().includes(']')) {
if (opts.nobracket !== true && opts.strictBrackets === true) {
throw new SyntaxError(syntaxError('closing', ']'));
}
value = `\\${value}`;
} else {
increment('brackets');
}
push({ type: 'bracket', value });
continue;
}
if (value === ']') {
if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {
push({ type: 'text', value, output: `\\${value}` });
continue;
}
if (state.brackets === 0) {
if (opts.strictBrackets === true) {
throw new SyntaxError(syntaxError('opening', '['));
}
push({ type: 'text', value, output: `\\${value}` });
continue;
}
decrement('brackets');
const prevValue = prev.value.slice(1);
if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {
value = `/${value}`;
}
prev.value += value;
append({ value });
// when literal brackets are explicitly disabled
// assume we should match with a regex character class
if (opts.literalBrackets === false || utils$i.hasRegexChars(prevValue)) {
continue;
}
const escaped = utils$i.escapeRegex(prev.value);
state.output = state.output.slice(0, -prev.value.length);
// when literal brackets are explicitly enabled
// assume we should escape the brackets to match literal characters
if (opts.literalBrackets === true) {
state.output += escaped;
prev.value = escaped;
continue;
}
// when the user specifies nothing, try to match both
prev.value = `(${capture}${escaped}|${prev.value})`;
state.output += prev.value;
continue;
}
/**
* Braces
*/
if (value === '{' && opts.nobrace !== true) {
increment('braces');
const open = {
type: 'brace',
value,
output: '(',
outputIndex: state.output.length,
tokensIndex: state.tokens.length
};
braces.push(open);
push(open);
continue;
}
if (value === '}') {
const brace = braces[braces.length - 1];
if (opts.nobrace === true || !brace) {
push({ type: 'text', value, output: value });
continue;
}
let output = ')';
if (brace.dots === true) {
const arr = tokens.slice();
const range = [];
for (let i = arr.length - 1; i >= 0; i--) {
tokens.pop();
if (arr[i].type === 'brace') {
break;
}
if (arr[i].type !== 'dots') {
range.unshift(arr[i].value);
}
}
output = expandRange(range, opts);
state.backtrack = true;
}
if (brace.comma !== true && brace.dots !== true) {
const out = state.output.slice(0, brace.outputIndex);
const toks = state.tokens.slice(brace.tokensIndex);
brace.value = brace.output = '\\{';
value = output = '\\}';
state.output = out;
for (const t of toks) {
state.output += (t.output || t.value);
}
}
push({ type: 'brace', value, output });
decrement('braces');
braces.pop();
continue;
}
/**
* Pipes
*/
if (value === '|') {
if (extglobs.length > 0) {
extglobs[extglobs.length - 1].conditions++;
}
push({ type: 'text', value });
continue;
}
/**
* Commas
*/
if (value === ',') {
let output = value;
const brace = braces[braces.length - 1];
if (brace && stack[stack.length - 1] === 'braces') {
brace.comma = true;
output = '|';
}
push({ type: 'comma', value, output });
continue;
}
/**
* Slashes
*/
if (value === '/') {
// if the beginning of the glob is "./", advance the start
// to the current index, and don't add the "./" characters
// to the state. This greatly simplifies lookbehinds when
// checking for BOS characters like "!" and "." (not "./")
if (prev.type === 'dot' && state.index === state.start + 1) {
state.start = state.index + 1;
state.consumed = '';
state.output = '';
tokens.pop();
prev = bos; // reset "prev" to the first token
continue;
}
push({ type: 'slash', value, output: SLASH_LITERAL });
continue;
}
/**
* Dots
*/
if (value === '.') {
if (state.braces > 0 && prev.type === 'dot') {
if (prev.value === '.') prev.output = DOT_LITERAL;
const brace = braces[braces.length - 1];
prev.type = 'dots';
prev.output += value;
prev.value += value;
brace.dots = true;
continue;
}
if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {
push({ type: 'text', value, output: DOT_LITERAL });
continue;
}
push({ type: 'dot', value, output: DOT_LITERAL });
continue;
}
/**
* Question marks
*/
if (value === '?') {
const isGroup = prev && prev.value === '(';
if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
extglobOpen('qmark', value);
continue;
}
if (prev && prev.type === 'paren') {
const next = peek();
let output = value;
if (next === '<' && !utils$i.supportsLookbehinds()) {
throw new Error('Node.js v10 or higher is required for regex lookbehinds');
}
if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) {
output = `\\${value}`;
}
push({ type: 'text', value, output });
continue;
}
if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {
push({ type: 'qmark', value, output: QMARK_NO_DOT });
continue;
}
push({ type: 'qmark', value, output: QMARK });
continue;
}
/**
* Exclamation
*/
if (value === '!') {
if (opts.noextglob !== true && peek() === '(') {
if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) {
extglobOpen('negate', value);
continue;
}
}
if (opts.nonegate !== true && state.index === 0) {
negate();
continue;
}
}
/**
* Plus
*/
if (value === '+') {
if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
extglobOpen('plus', value);
continue;
}
if ((prev && prev.value === '(') || opts.regex === false) {
push({ type: 'plus', value, output: PLUS_LITERAL });
continue;
}
if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {
push({ type: 'plus', value });
continue;
}
push({ type: 'plus', value: PLUS_LITERAL });
continue;
}
/**
* Plain text
*/
if (value === '@') {
if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
push({ type: 'at', extglob: true, value, output: '' });
continue;
}
push({ type: 'text', value });
continue;
}
/**
* Plain text
*/
if (value !== '*') {
if (value === '$' || value === '^') {
value = `\\${value}`;
}
const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());
if (match) {
value += match[0];
state.index += match[0].length;
}
push({ type: 'text', value });
continue;
}
/**
* Stars
*/
if (prev && (prev.type === 'globstar' || prev.star === true)) {
prev.type = 'star';
prev.star = true;
prev.value += value;
prev.output = star;
state.backtrack = true;
state.globstar = true;
consume(value);
continue;
}
let rest = remaining();
if (opts.noextglob !== true && /^\([^?]/.test(rest)) {
extglobOpen('star', value);
continue;
}
if (prev.type === 'star') {
if (opts.noglobstar === true) {
consume(value);
continue;
}
const prior = prev.prev;
const before = prior.prev;
const isStart = prior.type === 'slash' || prior.type === 'bos';
const afterStar = before && (before.type === 'star' || before.type === 'globstar');
if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {
push({ type: 'star', value, output: '' });
continue;
}
const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');
const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');
if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {
push({ type: 'star', value, output: '' });
continue;
}
// strip consecutive `/**/`
while (rest.slice(0, 3) === '/**') {
const after = input[state.index + 4];
if (after && after !== '/') {
break;
}
rest = rest.slice(3);
consume('/**', 3);
}
if (prior.type === 'bos' && eos()) {
prev.type = 'globstar';
prev.value += value;
prev.output = globstar(opts);
state.output = prev.output;
state.globstar = true;
consume(value);
continue;
}
if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {
state.output = state.output.slice(0, -(prior.output + prev.output).length);
prior.output = `(?:${prior.output}`;
prev.type = 'globstar';
prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');
prev.value += value;
state.globstar = true;
state.output += prior.output + prev.output;
consume(value);
continue;
}
if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {
const end = rest[1] !== void 0 ? '|$' : '';
state.output = state.output.slice(0, -(prior.output + prev.output).length);
prior.output = `(?:${prior.output}`;
prev.type = 'globstar';
prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;
prev.value += value;
state.output += prior.output + prev.output;
state.globstar = true;
consume(value + advance());
push({ type: 'slash', value: '/', output: '' });
continue;
}
if (prior.type === 'bos' && rest[0] === '/') {
prev.type = 'globstar';
prev.value += value;
prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;
state.output = prev.output;
state.globstar = true;
consume(value + advance());
push({ type: 'slash', value: '/', output: '' });
continue;
}
// remove single star from output
state.output = state.output.slice(0, -prev.output.length);
// reset previous token to globstar
prev.type = 'globstar';
prev.output = globstar(opts);
prev.value += value;
// reset output with globstar
state.output += prev.output;
state.globstar = true;
consume(value);
continue;
}
const token = { type: 'star', value, output: star };
if (opts.bash === true) {
token.output = '.*?';
if (prev.type === 'bos' || prev.type === 'slash') {
token.output = nodot + token.output;
}
push(token);
continue;
}
if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {
token.output = value;
push(token);
continue;
}
if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {
if (prev.type === 'dot') {
state.output += NO_DOT_SLASH;
prev.output += NO_DOT_SLASH;
} else if (opts.dot === true) {
state.output += NO_DOTS_SLASH;
prev.output += NO_DOTS_SLASH;
} else {
state.output += nodot;
prev.output += nodot;
}
if (peek() !== '*') {
state.output += ONE_CHAR;
prev.output += ONE_CHAR;
}
}
push(token);
}
while (state.brackets > 0) {
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));
state.output = utils$i.escapeLast(state.output, '[');
decrement('brackets');
}
while (state.parens > 0) {
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));
state.output = utils$i.escapeLast(state.output, '(');
decrement('parens');
}
while (state.braces > 0) {
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));
state.output = utils$i.escapeLast(state.output, '{');
decrement('braces');
}
if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {
push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });
}
// rebuild the output if we had to backtrack at any point
if (state.backtrack === true) {
state.output = '';
for (const token of state.tokens) {
state.output += token.output != null ? token.output : token.value;
if (token.suffix) {
state.output += token.suffix;
}
}
}
return state;
};
/**
* Fast paths for creating regular expressions for common glob patterns.
* This can significantly speed up processing and has very little downside
* impact when none of the fast paths match.
*/
parse$h.fastpaths = (input, options) => {
const opts = { ...options };
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH$1, opts.maxLength) : MAX_LENGTH$1;
const len = input.length;
if (len > max) {
throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
}
input = REPLACEMENTS[input] || input;
const win32 = utils$i.isWindows(options);
// create constants based on platform, for windows or posix
const {
DOT_LITERAL,
SLASH_LITERAL,
ONE_CHAR,
DOTS_SLASH,
NO_DOT,
NO_DOTS,
NO_DOTS_SLASH,
STAR,
START_ANCHOR
} = constants$5.globChars(win32);
const nodot = opts.dot ? NO_DOTS : NO_DOT;
const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;
const capture = opts.capture ? '' : '?:';
const state = { negated: false, prefix: '' };
let star = opts.bash === true ? '.*?' : STAR;
if (opts.capture) {
star = `(${star})`;
}
const globstar = opts => {
if (opts.noglobstar === true) return star;
return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
};
const create = str => {
switch (str) {
case '*':
return `${nodot}${ONE_CHAR}${star}`;
case '.*':
return `${DOT_LITERAL}${ONE_CHAR}${star}`;
case '*.*':
return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
case '*/*':
return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;
case '**':
return nodot + globstar(opts);
case '**/*':
return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;
case '**/*.*':
return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
case '**/.*':
return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;
default: {
const match = /^(.*?)\.(\w+)$/.exec(str);
if (!match) return;
const source = create(match[1]);
if (!source) return;
return source + DOT_LITERAL + match[2];
}
}
};
const output = utils$i.removePrefix(input, state);
let source = create(output);
if (source && opts.strictSlashes !== true) {
source += `${SLASH_LITERAL}?`;
}
return source;
};
var parse_1$3 = parse$h;
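/**
 * A brief sketch of the fast-path parser exported above; the patterns are
 * arbitrary examples. For a handful of very common globs, `parse$h.fastpaths`
 * returns a regex source string directly and skips full tokenization; anything
 * else yields undefined and goes through `parse$h` instead.
 *
 * ```js
 * parse$h.fastpaths('*.js', {});         //=> regex source string (fast path hit)
 * parse$h.fastpaths('src/index.js', {}); //=> undefined (no fast path available)
 * parse$h('*.js', {}).output;            //=> regex source built from the tokenized pattern
 * ```
 */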
const path$m = require$$0$4;
const scan$1 = scan_1;
const parse$g = parse_1$3;
const utils$h = utils$k;
const constants$4 = constants$6;
const isObject$4 = val => val && typeof val === 'object' && !Array.isArray(val);
/**
* Creates a matcher function from one or more glob patterns. The
* returned function takes a string to match as its first argument,
* and returns true if the string is a match. The returned matcher
* function also takes a boolean as the second argument that, when true,
* returns an object with additional information.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch(glob[, options]);
*
* const isMatch = picomatch('*.!(*a)');
* console.log(isMatch('a.a')); //=> false
* console.log(isMatch('a.b')); //=> true
* ```
* @name picomatch
* @param {String|Array} `globs` One or more glob patterns.
* @param {Object=} `options`
* @return {Function=} Returns a matcher function.
* @api public
*/
const picomatch$5 = (glob, options, returnState = false) => {
if (Array.isArray(glob)) {
const fns = glob.map(input => picomatch$5(input, options, returnState));
const arrayMatcher = str => {
for (const isMatch of fns) {
const state = isMatch(str);
if (state) return state;
}
return false;
};
return arrayMatcher;
}
const isState = isObject$4(glob) && glob.tokens && glob.input;
if (glob === '' || (typeof glob !== 'string' && !isState)) {
throw new TypeError('Expected pattern to be a non-empty string');
}
const opts = options || {};
const posix = utils$h.isWindows(options);
const regex = isState
? picomatch$5.compileRe(glob, options)
: picomatch$5.makeRe(glob, options, false, true);
const state = regex.state;
delete regex.state;
let isIgnored = () => false;
if (opts.ignore) {
const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
isIgnored = picomatch$5(opts.ignore, ignoreOpts, returnState);
}
const matcher = (input, returnObject = false) => {
const { isMatch, match, output } = picomatch$5.test(input, regex, options, { glob, posix });
const result = { glob, state, regex, posix, input, output, match, isMatch };
if (typeof opts.onResult === 'function') {
opts.onResult(result);
}
if (isMatch === false) {
result.isMatch = false;
return returnObject ? result : false;
}
if (isIgnored(input)) {
if (typeof opts.onIgnore === 'function') {
opts.onIgnore(result);
}
result.isMatch = false;
return returnObject ? result : false;
}
if (typeof opts.onMatch === 'function') {
opts.onMatch(result);
}
return returnObject ? result : true;
};
if (returnState) {
matcher.state = state;
}
return matcher;
};
/**
* Test `input` with the given `regex`. This is used by the main
* `picomatch()` function to test the input string.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.test(input, regex[, options]);
*
* console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
* // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }
* ```
* @param {String} `input` String to test.
* @param {RegExp} `regex`
* @return {Object} Returns an object with matching info.
* @api public
*/
picomatch$5.test = (input, regex, options, { glob, posix } = {}) => {
if (typeof input !== 'string') {
throw new TypeError('Expected input to be a string');
}
if (input === '') {
return { isMatch: false, output: '' };
}
const opts = options || {};
const format = opts.format || (posix ? utils$h.toPosixSlashes : null);
let match = input === glob;
let output = (match && format) ? format(input) : input;
if (match === false) {
output = format ? format(input) : input;
match = output === glob;
}
if (match === false || opts.capture === true) {
if (opts.matchBase === true || opts.basename === true) {
match = picomatch$5.matchBase(input, regex, options, posix);
} else {
match = regex.exec(output);
}
}
return { isMatch: Boolean(match), match, output };
};
/**
* Match the basename of a filepath.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.matchBase(input, glob[, options]);
 * console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true
* ```
* @param {String} `input` String to test.
* @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).
* @return {Boolean}
* @api public
*/
picomatch$5.matchBase = (input, glob, options, posix = utils$h.isWindows(options)) => {
const regex = glob instanceof RegExp ? glob : picomatch$5.makeRe(glob, options);
return regex.test(path$m.basename(input));
};
/**
* Returns true if **any** of the given glob `patterns` match the specified `string`.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.isMatch(string, patterns[, options]);
*
* console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
* console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
* ```
* @param {String|Array} str The string to test.
* @param {String|Array} patterns One or more glob patterns to use for matching.
* @param {Object} [options] See available [options](#options).
* @return {Boolean} Returns true if any patterns match `str`
* @api public
*/
picomatch$5.isMatch = (str, patterns, options) => picomatch$5(patterns, options)(str);
/**
* Parse a glob pattern to create the source string for a regular
* expression.
*
* ```js
* const picomatch = require('picomatch');
* const result = picomatch.parse(pattern[, options]);
* ```
* @param {String} `pattern`
* @param {Object} `options`
* @return {Object} Returns an object with useful properties and output to be used as a regex source string.
* @api public
*/
picomatch$5.parse = (pattern, options) => {
if (Array.isArray(pattern)) return pattern.map(p => picomatch$5.parse(p, options));
return parse$g(pattern, { ...options, fastpaths: false });
};
/**
* Scan a glob pattern to separate the pattern into segments.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.scan(input[, options]);
*
* const result = picomatch.scan('!./foo/*.js');
* console.log(result);
* { prefix: '!./',
* input: '!./foo/*.js',
* start: 3,
* base: 'foo',
* glob: '*.js',
* isBrace: false,
* isBracket: false,
* isGlob: true,
* isExtglob: false,
* isGlobstar: false,
* negated: true }
* ```
* @param {String} `input` Glob pattern to scan.
* @param {Object} `options`
* @return {Object} Returns an object with
* @api public
*/
picomatch$5.scan = (input, options) => scan$1(input, options);
/**
* Compile a regular expression from the `state` object returned by the
* [parse()](#parse) method.
*
* @param {Object} `state`
* @param {Object} `options`
* @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser.
* @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging.
* @return {RegExp}
* @api public
*/
picomatch$5.compileRe = (state, options, returnOutput = false, returnState = false) => {
if (returnOutput === true) {
return state.output;
}
const opts = options || {};
const prepend = opts.contains ? '' : '^';
const append = opts.contains ? '' : '$';
let source = `${prepend}(?:${state.output})${append}`;
if (state && state.negated === true) {
source = `^(?!${source}).*$`;
}
const regex = picomatch$5.toRegex(source, options);
if (returnState === true) {
regex.state = state;
}
return regex;
};
/**
 * Create a regular expression from a glob pattern.
 *
 * ```js
 * const picomatch = require('picomatch');
 * // picomatch.makeRe(input[, options]);
 *
 * console.log(picomatch.makeRe('*.js'));
 * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
 * ```
 * @param {String} `input` A glob pattern to convert to a regex.
 * @param {Object} `options`
 * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result.
 * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression.
 * @return {RegExp} Returns a regex created from the given pattern.
 * @api public
 */
picomatch$5.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
if (!input || typeof input !== 'string') {
throw new TypeError('Expected a non-empty string');
}
let parsed = { negated: false, fastpaths: true };
if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {
parsed.output = parse$g.fastpaths(input, options);
}
if (!parsed.output) {
parsed = parse$g(input, options);
}
return picomatch$5.compileRe(parsed, options, returnOutput, returnState);
};
/**
* Create a regular expression from the given regex source string.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.toRegex(source[, options]);
*
* const { output } = picomatch.parse('*.js');
* console.log(picomatch.toRegex(output));
* //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
* ```
* @param {String} `source` Regular expression source string.
* @param {Object} `options`
* @return {RegExp}
* @api public
*/
picomatch$5.toRegex = (source, options) => {
try {
const opts = options || {};
return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));
} catch (err) {
if (options && options.debug === true) throw err;
return /$^/;
}
};
/**
* Picomatch constants.
* @return {Object}
*/
picomatch$5.constants = constants$4;
/**
* Expose "picomatch"
*/
var picomatch_1 = picomatch$5;
var picomatch$3 = picomatch_1;
var picomatch$4 = /*@__PURE__*/getDefaultExportFromCjs(picomatch$3);
const extractors = {
ArrayPattern(names, param) {
for (const element of param.elements) {
if (element)
extractors[element.type](names, element);
}
},
AssignmentPattern(names, param) {
extractors[param.left.type](names, param.left);
},
Identifier(names, param) {
names.push(param.name);
},
MemberExpression() { },
ObjectPattern(names, param) {
for (const prop of param.properties) {
// @ts-ignore Typescript reports that this is not a valid type
if (prop.type === 'RestElement') {
extractors.RestElement(names, prop);
}
else {
extractors[prop.value.type](names, prop.value);
}
}
},
RestElement(names, param) {
extractors[param.argument.type](names, param.argument);
}
};
const extractAssignedNames = function extractAssignedNames(param) {
const names = [];
extractors[param.type](names, param);
return names;
};
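/*
 * For illustration (a sketch assuming an ESTree-style pattern node): for a
 * destructuring pattern such as `{ a, b: [c] }`, extractAssignedNames collects
 * every identifier the pattern binds.
 *
 * ```js
 * const names = extractAssignedNames({
 *   type: 'ObjectPattern',
 *   properties: [
 *     { type: 'Property', value: { type: 'Identifier', name: 'a' } },
 *     {
 *       type: 'Property',
 *       value: { type: 'ArrayPattern', elements: [{ type: 'Identifier', name: 'c' }] }
 *     }
 *   ]
 * });
 * console.log(names); //=> ['a', 'c']
 * ```
 */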
const blockDeclarations = {
const: true,
let: true
};
let Scope$1 = class Scope {
constructor(options = {}) {
this.parent = options.parent;
this.isBlockScope = !!options.block;
this.declarations = Object.create(null);
if (options.params) {
options.params.forEach((param) => {
extractAssignedNames(param).forEach((name) => {
this.declarations[name] = true;
});
});
}
}
addDeclaration(node, isBlockDeclaration, isVar) {
if (!isBlockDeclaration && this.isBlockScope) {
// it's a `var` or function node, and this
// is a block scope, so we need to go up
this.parent.addDeclaration(node, isBlockDeclaration, isVar);
}
else if (node.id) {
extractAssignedNames(node.id).forEach((name) => {
this.declarations[name] = true;
});
}
}
contains(name) {
return this.declarations[name] || (this.parent ? this.parent.contains(name) : false);
}
};
const attachScopes = function attachScopes(ast, propertyName = 'scope') {
let scope = new Scope$1();
walk$4(ast, {
enter(n, parent) {
const node = n;
// function foo () {...}
// class Foo {...}
if (/(Function|Class)Declaration/.test(node.type)) {
scope.addDeclaration(node, false, false);
}
// var foo = 1
if (node.type === 'VariableDeclaration') {
const { kind } = node;
const isBlockDeclaration = blockDeclarations[kind];
node.declarations.forEach((declaration) => {
scope.addDeclaration(declaration, isBlockDeclaration, true);
});
}
let newScope;
// create new function scope
if (/Function/.test(node.type)) {
const func = node;
newScope = new Scope$1({
parent: scope,
block: false,
params: func.params
});
// named function expressions - the name is considered
// part of the function's scope
if (func.type === 'FunctionExpression' && func.id) {
newScope.addDeclaration(func, false, false);
}
}
// create new for scope
if (/For(In|Of)?Statement/.test(node.type)) {
newScope = new Scope$1({
parent: scope,
block: true
});
}
// create new block scope
if (node.type === 'BlockStatement' && !/Function/.test(parent.type)) {
newScope = new Scope$1({
parent: scope,
block: true
});
}
// catch clause has its own block scope
if (node.type === 'CatchClause') {
newScope = new Scope$1({
parent: scope,
params: node.param ? [node.param] : [],
block: true
});
}
if (newScope) {
Object.defineProperty(node, propertyName, {
value: newScope,
configurable: true
});
scope = newScope;
}
},
leave(n) {
const node = n;
if (node[propertyName])
scope = scope.parent;
}
});
return scope;
};
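/*
 * For illustration (a sketch; `ast` is assumed to be an ESTree program, e.g.
 * the result of `this.parse(code)` inside a Rollup plugin): attachScopes hangs
 * a scope object off every scope-creating node and returns the root scope, so
 * callers can check whether a name is declared in the module.
 *
 * ```js
 * const rootScope = attachScopes(ast, 'scope');
 * if (!rootScope.contains('myHelper')) {
 *   // safe to inject a top-level `myHelper` declaration (hypothetical name)
 * }
 * ```
 */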
// Helper since Typescript can't detect readonly arrays with Array.isArray
function isArray$2(arg) {
return Array.isArray(arg);
}
function ensureArray(thing) {
if (isArray$2(thing))
return thing;
if (thing == null)
return [];
return [thing];
}
const normalizePath$5 = function normalizePath(filename) {
return filename.split(win32.sep).join(posix.sep);
};
function getMatcherString(id, resolutionBase) {
if (resolutionBase === false || isAbsolute$1(id) || id.startsWith('*')) {
return normalizePath$5(id);
}
// resolve('') is valid and will default to process.cwd()
const basePath = normalizePath$5(resolve$3(resolutionBase || ''))
// escape all possible (posix + win) path characters that might interfere with regex
.replace(/[-^$*+?.()|[\]{}]/g, '\\$&');
// Note that we use posix.join because:
// 1. the basePath has been normalized to use /
// 2. the incoming glob (id) matcher, also uses /
// otherwise Node will force backslash (\) on windows
return posix.join(basePath, normalizePath$5(id));
}
const createFilter$1 = function createFilter(include, exclude, options) {
const resolutionBase = options && options.resolve;
const getMatcher = (id) => id instanceof RegExp
? id
: {
test: (what) => {
// this refactor is a tad overly verbose but makes for easy debugging
const pattern = getMatcherString(id, resolutionBase);
const fn = picomatch$4(pattern, { dot: true });
const result = fn(what);
return result;
}
};
const includeMatchers = ensureArray(include).map(getMatcher);
const excludeMatchers = ensureArray(exclude).map(getMatcher);
return function result(id) {
if (typeof id !== 'string')
return false;
if (/\0/.test(id))
return false;
const pathId = normalizePath$5(id);
for (let i = 0; i < excludeMatchers.length; ++i) {
const matcher = excludeMatchers[i];
if (matcher.test(pathId))
return false;
}
for (let i = 0; i < includeMatchers.length; ++i) {
const matcher = includeMatchers[i];
if (matcher.test(pathId))
return true;
}
return !includeMatchers.length;
};
};
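/*
 * For illustration (a sketch with hypothetical ids; `resolve: false` keeps the
 * patterns relative instead of resolving them against the cwd): exclude
 * patterns win over include patterns, and ids containing NUL bytes never match.
 *
 * ```js
 * const filter = createFilter$1(['src/**'], ['src/vendor/**'], { resolve: false });
 * filter('src/index.js');      //=> true
 * filter('src/vendor/lib.js'); //=> false
 * filter('README.md');         //=> false
 * ```
 */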
const reservedWords$1 = 'break case class catch const continue debugger default delete do else export extends finally for function if import in instanceof let new return super switch this throw try typeof var void while with yield enum await implements package protected static interface private public';
const builtins$1 = 'arguments Infinity NaN undefined null true false eval uneval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Symbol Error EvalError InternalError RangeError ReferenceError SyntaxError TypeError URIError Number Math Date String RegExp Array Int8Array Uint8Array Uint8ClampedArray Int16Array Uint16Array Int32Array Uint32Array Float32Array Float64Array Map Set WeakMap WeakSet SIMD ArrayBuffer DataView JSON Promise Generator GeneratorFunction Reflect Proxy Intl';
const forbiddenIdentifiers = new Set(`${reservedWords$1} ${builtins$1}`.split(' '));
forbiddenIdentifiers.add('');
const makeLegalIdentifier = function makeLegalIdentifier(str) {
let identifier = str
.replace(/-(\w)/g, (_, letter) => letter.toUpperCase())
.replace(/[^$_a-zA-Z0-9]/g, '_');
if (/\d/.test(identifier[0]) || forbiddenIdentifiers.has(identifier)) {
identifier = `_${identifier}`;
}
return identifier || '_';
};
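/*
 * For illustration: dashes are camel-cased, other illegal characters become
 * underscores, and reserved words or leading digits get a `_` prefix.
 *
 * ```js
 * makeLegalIdentifier('foo-bar');    //=> 'fooBar'
 * makeLegalIdentifier('default');    //=> '_default'
 * makeLegalIdentifier('123abc');     //=> '_123abc'
 * makeLegalIdentifier('with space'); //=> 'with_space'
 * ```
 */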
function stringify$8(obj) {
return (JSON.stringify(obj) || 'undefined').replace(/[\u2028\u2029]/g, (char) => `\\u${`000${char.charCodeAt(0).toString(16)}`.slice(-4)}`);
}
function serializeArray(arr, indent, baseIndent) {
let output = '[';
const separator = indent ? `\n${baseIndent}${indent}` : '';
for (let i = 0; i < arr.length; i++) {
const key = arr[i];
output += `${i > 0 ? ',' : ''}${separator}${serialize(key, indent, baseIndent + indent)}`;
}
return `${output}${indent ? `\n${baseIndent}` : ''}]`;
}
function serializeObject(obj, indent, baseIndent) {
let output = '{';
const separator = indent ? `\n${baseIndent}${indent}` : '';
const entries = Object.entries(obj);
for (let i = 0; i < entries.length; i++) {
const [key, value] = entries[i];
const stringKey = makeLegalIdentifier(key) === key ? key : stringify$8(key);
output += `${i > 0 ? ',' : ''}${separator}${stringKey}:${indent ? ' ' : ''}${serialize(value, indent, baseIndent + indent)}`;
}
return `${output}${indent ? `\n${baseIndent}` : ''}}`;
}
function serialize(obj, indent, baseIndent) {
if (typeof obj === 'object' && obj !== null) {
if (Array.isArray(obj))
return serializeArray(obj, indent, baseIndent);
if (obj instanceof Date)
return `new Date(${obj.getTime()})`;
if (obj instanceof RegExp)
return obj.toString();
return serializeObject(obj, indent, baseIndent);
}
if (typeof obj === 'number') {
if (obj === Infinity)
return 'Infinity';
if (obj === -Infinity)
return '-Infinity';
if (obj === 0)
return 1 / obj === Infinity ? '0' : '-0';
if (obj !== obj)
return 'NaN'; // eslint-disable-line no-self-compare
}
if (typeof obj === 'symbol') {
const key = Symbol.keyFor(obj);
// eslint-disable-next-line no-undefined
if (key !== undefined)
return `Symbol.for(${stringify$8(key)})`;
}
if (typeof obj === 'bigint')
return `${obj}n`;
return stringify$8(obj);
}
const dataToEsm = function dataToEsm(data, options = {}) {
const t = options.compact ? '' : 'indent' in options ? options.indent : '\t';
const _ = options.compact ? '' : ' ';
const n = options.compact ? '' : '\n';
const declarationType = options.preferConst ? 'const' : 'var';
if (options.namedExports === false ||
typeof data !== 'object' ||
Array.isArray(data) ||
data instanceof Date ||
data instanceof RegExp ||
data === null) {
const code = serialize(data, options.compact ? null : t, '');
const magic = _ || (/^[{[\-\/]/.test(code) ? '' : ' '); // eslint-disable-line no-useless-escape
return `export default${magic}${code};`;
}
let namedExportCode = '';
const defaultExportRows = [];
for (const [key, value] of Object.entries(data)) {
if (key === makeLegalIdentifier(key)) {
if (options.objectShorthand)
defaultExportRows.push(key);
else
defaultExportRows.push(`${key}:${_}${key}`);
namedExportCode += `export ${declarationType} ${key}${_}=${_}${serialize(value, options.compact ? null : t, '')};${n}`;
}
else {
defaultExportRows.push(`${stringify$8(key)}:${_}${serialize(value, options.compact ? null : t, '')}`);
}
}
return `${namedExportCode}export default${_}{${n}${t}${defaultExportRows.join(`,${n}${t}`)}${n}};${n}`;
};
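/*
 * For illustration (a sketch): keys that are legal identifiers become named
 * exports, and every key is also collected onto the default export. With the
 * default `\t` indent the output is roughly:
 *
 * ```js
 * dataToEsm({ answer: 42, 'not-valid': true });
 * //=> 'export var answer = 42;\n' +
 * //   'export default {\n\tanswer: answer,\n\t"not-valid": true\n};\n'
 * ```
 */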
var path$l = require$$0$4;
var commondir = function (basedir, relfiles) {
if (relfiles) {
var files = relfiles.map(function (r) {
return path$l.resolve(basedir, r);
});
}
else {
var files = basedir;
}
var res = files.slice(1).reduce(function (ps, file) {
if (!file.match(/^([A-Za-z]:)?\/|\\/)) {
throw new Error('relative path without a basedir');
}
var xs = file.split(/\/+|\\+/);
for (
var i = 0;
ps[i] === xs[i] && i < Math.min(ps.length, xs.length);
i++
);
return ps.slice(0, i);
}, files[0].split(/\/+|\\+/));
// Windows correctly handles paths with forward-slashes
return res.length > 1 ? res.join('/') : '/'
};
var getCommonDir = /*@__PURE__*/getDefaultExportFromCjs(commondir);
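/*
 * For illustration: given absolute paths (or relative paths plus a basedir),
 * commondir returns their longest shared directory prefix.
 *
 * ```js
 * getCommonDir(['/x/y/z', '/x/y/w/q']); //=> '/x/y'
 * getCommonDir('/x', ['y/z', 'y/w']);   //=> '/x/y'
 * ```
 */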
var old$1 = {};
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var pathModule = require$$0$4;
var isWindows$6 = process.platform === 'win32';
var fs$k = require$$0__default;
// JavaScript implementation of realpath, ported from node pre-v6
var DEBUG$1 = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
function rethrow() {
// Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
// is fairly slow to generate.
var callback;
if (DEBUG$1) {
var backtrace = new Error;
callback = debugCallback;
} else
callback = missingCallback;
return callback;
function debugCallback(err) {
if (err) {
backtrace.message = err.message;
err = backtrace;
missingCallback(err);
}
}
function missingCallback(err) {
if (err) {
if (process.throwDeprecation)
throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
else if (!process.noDeprecation) {
var msg = 'fs: missing callback ' + (err.stack || err.message);
if (process.traceDeprecation)
console.trace(msg);
else
console.error(msg);
}
}
}
}
function maybeCallback(cb) {
return typeof cb === 'function' ? cb : rethrow();
}
// Regexp that finds the next portion of a (partial) path
// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
if (isWindows$6) {
var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
} else {
var nextPartRe = /(.*?)(?:[\/]+|$)/g;
}
// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
if (isWindows$6) {
var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
} else {
var splitRootRe = /^[\/]*/;
}
old$1.realpathSync = function realpathSync(p, cache) {
// make sure p is absolute
p = pathModule.resolve(p);
if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
return cache[p];
}
var original = p,
seenLinks = {},
knownHard = {};
// current character position in p
var pos;
// the partial path so far, including a trailing slash if any
var current;
// the partial path without a trailing slash (except when pointing at a root)
var base;
// the partial path scanned in the previous round, with slash
var previous;
start();
function start() {
// Skip over roots
var m = splitRootRe.exec(p);
pos = m[0].length;
current = m[0];
base = m[0];
previous = '';
// On windows, check that the root exists. On unix there is no need.
if (isWindows$6 && !knownHard[base]) {
fs$k.lstatSync(base);
knownHard[base] = true;
}
}
// walk down the path, swapping out linked pathparts for their real
// values
// NB: p.length changes.
while (pos < p.length) {
// find the next part
nextPartRe.lastIndex = pos;
var result = nextPartRe.exec(p);
previous = current;
current += result[0];
base = previous + result[1];
pos = nextPartRe.lastIndex;
// continue if not a symlink
if (knownHard[base] || (cache && cache[base] === base)) {
continue;
}
var resolvedLink;
if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
// some known symbolic link. no need to stat again.
resolvedLink = cache[base];
} else {
var stat = fs$k.lstatSync(base);
if (!stat.isSymbolicLink()) {
knownHard[base] = true;
if (cache) cache[base] = base;
continue;
}
// read the link if it wasn't read before
// dev/ino always return 0 on windows, so skip the check.
var linkTarget = null;
if (!isWindows$6) {
var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
if (seenLinks.hasOwnProperty(id)) {
linkTarget = seenLinks[id];
}
}
if (linkTarget === null) {
fs$k.statSync(base);
linkTarget = fs$k.readlinkSync(base);
}
resolvedLink = pathModule.resolve(previous, linkTarget);
// track this, if given a cache.
if (cache) cache[base] = resolvedLink;
if (!isWindows$6) seenLinks[id] = linkTarget;
}
// resolve the link, then start over
p = pathModule.resolve(resolvedLink, p.slice(pos));
start();
}
if (cache) cache[original] = p;
return p;
};
old$1.realpath = function realpath(p, cache, cb) {
if (typeof cb !== 'function') {
cb = maybeCallback(cache);
cache = null;
}
// make sure p is absolute
p = pathModule.resolve(p);
if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
return process.nextTick(cb.bind(null, null, cache[p]));
}
var original = p,
seenLinks = {},
knownHard = {};
// current character position in p
var pos;
// the partial path so far, including a trailing slash if any
var current;
// the partial path without a trailing slash (except when pointing at a root)
var base;
// the partial path scanned in the previous round, with slash
var previous;
start();
function start() {
// Skip over roots
var m = splitRootRe.exec(p);
pos = m[0].length;
current = m[0];
base = m[0];
previous = '';
// On windows, check that the root exists. On unix there is no need.
if (isWindows$6 && !knownHard[base]) {
fs$k.lstat(base, function(err) {
if (err) return cb(err);
knownHard[base] = true;
LOOP();
});
} else {
process.nextTick(LOOP);
}
}
// walk down the path, swapping out linked pathparts for their real
// values
function LOOP() {
// stop if scanned past end of path
if (pos >= p.length) {
if (cache) cache[original] = p;
return cb(null, p);
}
// find the next part
nextPartRe.lastIndex = pos;
var result = nextPartRe.exec(p);
previous = current;
current += result[0];
base = previous + result[1];
pos = nextPartRe.lastIndex;
// continue if not a symlink
if (knownHard[base] || (cache && cache[base] === base)) {
return process.nextTick(LOOP);
}
if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
// known symbolic link. no need to stat again.
return gotResolvedLink(cache[base]);
}
return fs$k.lstat(base, gotStat);
}
function gotStat(err, stat) {
if (err) return cb(err);
// if not a symlink, skip to the next path part
if (!stat.isSymbolicLink()) {
knownHard[base] = true;
if (cache) cache[base] = base;
return process.nextTick(LOOP);
}
// stat & read the link if not read before
// call gotTarget as soon as the link target is known
// dev/ino always return 0 on windows, so skip the check.
if (!isWindows$6) {
var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
if (seenLinks.hasOwnProperty(id)) {
return gotTarget(null, seenLinks[id], base);
}
}
fs$k.stat(base, function(err) {
if (err) return cb(err);
fs$k.readlink(base, function(err, target) {
if (!isWindows$6) seenLinks[id] = target;
gotTarget(err, target);
});
});
}
function gotTarget(err, target, base) {
if (err) return cb(err);
var resolvedLink = pathModule.resolve(previous, target);
if (cache) cache[base] = resolvedLink;
gotResolvedLink(resolvedLink);
}
function gotResolvedLink(resolvedLink) {
// resolve the link, then start over
p = pathModule.resolve(resolvedLink, p.slice(pos));
start();
}
};
var fs_realpath = realpath$2;
realpath$2.realpath = realpath$2;
realpath$2.sync = realpathSync;
realpath$2.realpathSync = realpathSync;
realpath$2.monkeypatch = monkeypatch;
realpath$2.unmonkeypatch = unmonkeypatch;
var fs$j = require$$0__default;
var origRealpath = fs$j.realpath;
var origRealpathSync = fs$j.realpathSync;
var version$4 = process.version;
var ok = /^v[0-5]\./.test(version$4);
var old = old$1;
function newError (er) {
return er && er.syscall === 'realpath' && (
er.code === 'ELOOP' ||
er.code === 'ENOMEM' ||
er.code === 'ENAMETOOLONG'
)
}
function realpath$2 (p, cache, cb) {
if (ok) {
return origRealpath(p, cache, cb)
}
if (typeof cache === 'function') {
cb = cache;
cache = null;
}
origRealpath(p, cache, function (er, result) {
if (newError(er)) {
old.realpath(p, cache, cb);
} else {
cb(er, result);
}
});
}
function realpathSync (p, cache) {
if (ok) {
return origRealpathSync(p, cache)
}
try {
return origRealpathSync(p, cache)
} catch (er) {
if (newError(er)) {
return old.realpathSync(p, cache)
} else {
throw er
}
}
}
function monkeypatch () {
fs$j.realpath = realpath$2;
fs$j.realpathSync = realpathSync;
}
function unmonkeypatch () {
fs$j.realpath = origRealpath;
fs$j.realpathSync = origRealpathSync;
}
const isWindows$5 = typeof process === 'object' &&
process &&
process.platform === 'win32';
var path$k = isWindows$5 ? { sep: '\\' } : { sep: '/' };
var balancedMatch = balanced$1;
function balanced$1(a, b, str) {
if (a instanceof RegExp) a = maybeMatch(a, str);
if (b instanceof RegExp) b = maybeMatch(b, str);
var r = range$1(a, b, str);
return r && {
start: r[0],
end: r[1],
pre: str.slice(0, r[0]),
body: str.slice(r[0] + a.length, r[1]),
post: str.slice(r[1] + b.length)
};
}
function maybeMatch(reg, str) {
var m = str.match(reg);
return m ? m[0] : null;
}
balanced$1.range = range$1;
function range$1(a, b, str) {
var begs, beg, left, right, result;
var ai = str.indexOf(a);
var bi = str.indexOf(b, ai + 1);
var i = ai;
if (ai >= 0 && bi > 0) {
if(a===b) {
return [ai, bi];
}
begs = [];
left = str.length;
while (i >= 0 && !result) {
if (i == ai) {
begs.push(i);
ai = str.indexOf(a, i + 1);
} else if (begs.length == 1) {
result = [ begs.pop(), bi ];
} else {
beg = begs.pop();
if (beg < left) {
left = beg;
right = bi;
}
bi = str.indexOf(b, i + 1);
}
i = ai < bi && ai >= 0 ? ai : bi;
}
if (begs.length) {
result = [ left, right ];
}
}
return result;
}
var balanced = balancedMatch;
var braceExpansion = expandTop;
var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';
function numeric(str) {
return parseInt(str, 10) == str
? parseInt(str, 10)
: str.charCodeAt(0);
}
function escapeBraces(str) {
return str.split('\\\\').join(escSlash)
.split('\\{').join(escOpen)
.split('\\}').join(escClose)
.split('\\,').join(escComma)
.split('\\.').join(escPeriod);
}
function unescapeBraces(str) {
return str.split(escSlash).join('\\')
.split(escOpen).join('{')
.split(escClose).join('}')
.split(escComma).join(',')
.split(escPeriod).join('.');
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts(str) {
if (!str)
return [''];
var parts = [];
var m = balanced('{', '}', str);
if (!m)
return str.split(',');
var pre = m.pre;
var body = m.body;
var post = m.post;
var p = pre.split(',');
p[p.length-1] += '{' + body + '}';
var postParts = parseCommaParts(post);
if (post.length) {
p[p.length-1] += postParts.shift();
p.push.apply(p, postParts);
}
parts.push.apply(parts, p);
return parts;
}
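/*
 * For illustration: a nested brace section stays intact as a single member.
 *
 * ```js
 * parseCommaParts('a,{b,c},d'); //=> ['a', '{b,c}', 'd']
 * ```
 */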
function expandTop(str) {
if (!str)
return [];
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if (str.substr(0, 2) === '{}') {
str = '\\{\\}' + str.substr(2);
}
return expand$4(escapeBraces(str), true).map(unescapeBraces);
}
function embrace(str) {
return '{' + str + '}';
}
function isPadded(el) {
return /^-?0\d/.test(el);
}
function lte(i, y) {
return i <= y;
}
function gte(i, y) {
return i >= y;
}
function expand$4(str, isTop) {
var expansions = [];
var m = balanced('{', '}', str);
if (!m) return [str];
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m.pre;
var post = m.post.length
? expand$4(m.post, false)
: [''];
if (/\$$/.test(m.pre)) {
for (var k = 0; k < post.length; k++) {
var expansion = pre+ '{' + m.body + '}' + post[k];
expansions.push(expansion);
}
} else {
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
var isSequence = isNumericSequence || isAlphaSequence;
var isOptions = m.body.indexOf(',') >= 0;
if (!isSequence && !isOptions) {
// {a},b}
if (m.post.match(/,.*\}/)) {
str = m.pre + '{' + m.body + escClose + m.post;
return expand$4(str);
}
return [str];
}
var n;
if (isSequence) {
n = m.body.split(/\.\./);
} else {
n = parseCommaParts(m.body);
if (n.length === 1) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand$4(n[0], false).map(embrace);
if (n.length === 1) {
return post.map(function(p) {
return m.pre + n[0] + p;
});
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
var N;
if (isSequence) {
var x = numeric(n[0]);
var y = numeric(n[1]);
var width = Math.max(n[0].length, n[1].length);
var incr = n.length == 3
? Math.abs(numeric(n[2]))
: 1;
var test = lte;
var reverse = y < x;
if (reverse) {
incr *= -1;
test = gte;
}
var pad = n.some(isPadded);
N = [];
for (var i = x; test(i, y); i += incr) {
var c;
if (isAlphaSequence) {
c = String.fromCharCode(i);
if (c === '\\')
c = '';
} else {
c = String(i);
if (pad) {
var need = width - c.length;
if (need > 0) {
var z = new Array(need + 1).join('0');
if (i < 0)
c = '-' + z + c.slice(1);
else
c = z + c;
}
}
}
N.push(c);
}
} else {
N = [];
for (var j = 0; j < n.length; j++) {
N.push.apply(N, expand$4(n[j], false));
}
}
for (var j = 0; j < N.length; j++) {
for (var k = 0; k < post.length; k++) {
var expansion = pre + N[j] + post[k];
if (!isTop || isSequence || expansion)
expansions.push(expansion);
}
}
}
return expansions;
}
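/*
 * For illustration, a few expansions through the exported entry point
 * (expandTop):
 *
 * ```js
 * expandTop('a{b,c}d');   //=> ['abd', 'acd']
 * expandTop('a{1..3}b');  //=> ['a1b', 'a2b', 'a3b']
 * expandTop('x{{a,b}}y'); //=> ['x{a}y', 'x{b}y']
 * ```
 */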
const minimatch$1 = minimatch_1 = (p, pattern, options = {}) => {
assertValidPattern(pattern);
// shortcut: comments match nothing.
if (!options.nocomment && pattern.charAt(0) === '#') {
return false
}
return new Minimatch$1(pattern, options).match(p)
};
var minimatch_1 = minimatch$1;
const path$j = path$k;
minimatch$1.sep = path$j.sep;
const GLOBSTAR$2 = Symbol('globstar **');
minimatch$1.GLOBSTAR = GLOBSTAR$2;
const expand$3 = braceExpansion;
const plTypes = {
'!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
'?': { open: '(?:', close: ')?' },
'+': { open: '(?:', close: ')+' },
'*': { open: '(?:', close: ')*' },
'@': { open: '(?:', close: ')' }
};
// any single thing other than /
// don't need to escape / when using new RegExp()
const qmark = '[^/]';
// * => any number of characters
const star = qmark + '*?';
// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?';
// not a ^ or / followed by a dot,
// followed by anything, any number of times.
const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?';
// "abc" -> { a:true, b:true, c:true }
const charSet = s => s.split('').reduce((set, c) => {
set[c] = true;
return set
}, {});
// characters that need to be escaped in RegExp.
const reSpecials = charSet('().*{}+?[]^$\\!');
// characters that indicate we have to add the pattern start
const addPatternStartSet = charSet('[.(');
// normalizes slashes.
const slashSplit = /\/+/;
minimatch$1.filter = (pattern, options = {}) =>
(p, i, list) => minimatch$1(p, pattern, options);
const ext = (a, b = {}) => {
const t = {};
Object.keys(a).forEach(k => t[k] = a[k]);
Object.keys(b).forEach(k => t[k] = b[k]);
return t
};
minimatch$1.defaults = def => {
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
return minimatch$1
}
const orig = minimatch$1;
const m = (p, pattern, options) => orig(p, pattern, ext(def, options));
m.Minimatch = class Minimatch extends orig.Minimatch {
constructor (pattern, options) {
super(pattern, ext(def, options));
}
};
m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch;
m.filter = (pattern, options) => orig.filter(pattern, ext(def, options));
m.defaults = options => orig.defaults(ext(def, options));
m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options));
m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options));
m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options));
return m
};
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch$1.braceExpand = (pattern, options) => braceExpand(pattern, options);
const braceExpand = (pattern, options = {}) => {
assertValidPattern(pattern);
// Thanks to Yeting Li <https://github.com/yetingli> for
// improving this regexp to avoid a ReDOS vulnerability.
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
// shortcut. no need to expand.
return [pattern]
}
return expand$3(pattern)
};
const MAX_PATTERN_LENGTH = 1024 * 64;
const assertValidPattern = pattern => {
if (typeof pattern !== 'string') {
throw new TypeError('invalid pattern')
}
if (pattern.length > MAX_PATTERN_LENGTH) {
throw new TypeError('pattern is too long')
}
};
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
const SUBPARSE = Symbol('subparse');
minimatch$1.makeRe = (pattern, options) =>
new Minimatch$1(pattern, options || {}).makeRe();
minimatch$1.match = (list, pattern, options = {}) => {
const mm = new Minimatch$1(pattern, options);
list = list.filter(f => mm.match(f));
if (mm.options.nonull && !list.length) {
list.push(pattern);
}
return list
};
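/*
 * For illustration: minimatch.match filters a list of paths against a single
 * glob pattern.
 *
 * ```js
 * minimatch$1.match(['a.js', 'b.css', 'c.js'], '*.js'); //=> ['a.js', 'c.js']
 * ```
 */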
// replace stuff like \* with *
const globUnescape = s => s.replace(/\\(.)/g, '$1');
const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
let Minimatch$1 = class Minimatch {
constructor (pattern, options) {
assertValidPattern(pattern);
if (!options) options = {};
this.options = options;
this.set = [];
this.pattern = pattern;
this.windowsPathsNoEscape = !!options.windowsPathsNoEscape ||
options.allowWindowsEscape === false;
if (this.windowsPathsNoEscape) {
this.pattern = this.pattern.replace(/\\/g, '/');
}
this.regexp = null;
this.negate = false;
this.comment = false;
this.empty = false;
this.partial = !!options.partial;
// make the set of regexps etc.
this.make();
}
debug () {}
make () {
const pattern = this.pattern;
const options = this.options;
// empty patterns and comments match nothing.
if (!options.nocomment && pattern.charAt(0) === '#') {
this.comment = true;
return
}
if (!pattern) {
this.empty = true;
return
}
// step 1: figure out negation, etc.
this.parseNegate();
// step 2: expand braces
let set = this.globSet = this.braceExpand();
if (options.debug) this.debug = (...args) => console.error(...args);
this.debug(this.pattern, set);
// step 3: now we have a set, so turn each one into a series of path-portion
// matching patterns.
// These will be regexps, except in the case of "**", which is
// set to the GLOBSTAR object for globstar behavior,
// and will not contain any / characters
set = this.globParts = set.map(s => s.split(slashSplit));
this.debug(this.pattern, set);
// glob --> regexps
set = set.map((s, si, set) => s.map(this.parse, this));
this.debug(this.pattern, set);
// filter out everything that didn't compile properly.
set = set.filter(s => s.indexOf(false) === -1);
this.debug(this.pattern, set);
this.set = set;
}
parseNegate () {
if (this.options.nonegate) return
const pattern = this.pattern;
let negate = false;
let negateOffset = 0;
for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
negate = !negate;
negateOffset++;
}
if (negateOffset) this.pattern = pattern.substr(negateOffset);
this.negate = negate;
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
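// For illustration (a sketch): with `partial: true`, a path that runs out
// before the pattern still counts as a match, as long as everything so far
// matched.
//
//   new minimatch$1.Minimatch('/*/b/*/d', { partial: true }).match('/a/b') //=> true
//   new minimatch$1.Minimatch('/*/b/*/d').match('/a/b')                    //=> false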
matchOne (file, pattern, partial) {
var options = this.options;
this.debug('matchOne',
{ 'this': this, file: file, pattern: pattern });
this.debug('matchOne', file.length, pattern.length);
for (var fi = 0,
pi = 0,
fl = file.length,
pl = pattern.length
; (fi < fl) && (pi < pl)
; fi++, pi++) {
this.debug('matchOne loop');
var p = pattern[pi];
var f = file[fi];
this.debug(pattern, p, f);
// should be impossible.
// some invalid regexp stuff in the set.
/* istanbul ignore if */
if (p === false) return false
if (p === GLOBSTAR$2) {
this.debug('GLOBSTAR', [pattern, p, f]);
// "**"
// a/**/b/**/c would match the following:
// a/b/x/y/z/c
// a/x/y/z/b/c
// a/b/x/b/x/c
// a/b/c
// To do this, take the rest of the pattern after
// the **, and see if it would match the file remainder.
// If so, return success.
// If not, the ** "swallows" a segment, and try again.
// This is recursively awful.
//
// a/**/b/**/c matching a/b/x/y/z/c
// - a matches a
// - doublestar
// - matchOne(b/x/y/z/c, b/**/c)
// - b matches b
// - doublestar
// - matchOne(x/y/z/c, c) -> no
// - matchOne(y/z/c, c) -> no
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi;
var pr = pi + 1;
if (pr === pl) {
this.debug('** at the end');
// a ** at the end will just swallow the rest.
// We have found a match.
// however, it will not swallow /.x, unless
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
for (; fi < fl; fi++) {
if (file[fi] === '.' || file[fi] === '..' ||
(!options.dot && file[fi].charAt(0) === '.')) return false
}
return true
}
// ok, let's see if we can swallow whatever we can.
while (fr < fl) {
var swallowee = file[fr];
this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
// XXX remove this slice. Just pass the start index.
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
this.debug('globstar found match!', fr, fl, swallowee);
// found a match.
return true
} else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
if (swallowee === '.' || swallowee === '..' ||
(!options.dot && swallowee.charAt(0) === '.')) {
this.debug('dot detected!', file, fr, pattern, pr);
break
}
// ** swallows a segment, and continue.
this.debug('globstar swallow a segment, and continue');
fr++;
}
}
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then
/* istanbul ignore if */
if (partial) {
// ran out of file
this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
if (fr === fl) return true
}
return false
}
// something other than **
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
var hit;
if (typeof p === 'string') {
hit = f === p;
this.debug('string match', p, f, hit);
} else {
hit = f.match(p);
this.debug('pattern match', p, f, hit);
}
if (!hit) return false
}
// Note: ending in / means that we'll get a final ""
// at the end of the pattern. This can only match a
// corresponding "" at the end of the file.
// If the file ends in /, then it can only match
// a pattern that ends in /, unless the pattern just
// doesn't have any more for it. But, a/b/ should *not*
// match "a/b/*", even though "" matches against the
// [^/]*? pattern, except in partial mode, where it might
// simply not be reached yet.
// However, a/b/ should still satisfy a/*
// now either we fell off the end of the pattern, or we're done.
if (fi === fl && pi === pl) {
// ran out of pattern and filename at the same time.
// an exact hit!
return true
} else if (fi === fl) {
// ran out of file, but still had pattern left.
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial
} else /* istanbul ignore else */ if (pi === pl) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
return (fi === fl - 1) && (file[fi] === '')
}
// should be unreachable.
/* istanbul ignore next */
throw new Error('wtf?')
}
braceExpand () {
return braceExpand(this.pattern, this.options)
}
parse (pattern, isSub) {
assertValidPattern(pattern);
const options = this.options;
// shortcuts
if (pattern === '**') {
if (!options.noglobstar)
return GLOBSTAR$2
else
pattern = '*';
}
if (pattern === '') return ''
let re = '';
let hasMagic = !!options.nocase;
let escaping = false;
// ? => one single character
const patternListStack = [];
const negativeLists = [];
let stateChar;
let inClass = false;
let reClassStart = -1;
let classStart = -1;
let cs;
let pl;
let sp;
// . and .. never match anything that doesn't start with .,
// even when options.dot is set.
const patternStart = pattern.charAt(0) === '.' ? '' // anything
// not (start or / followed by . or .. followed by / or end)
: options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
: '(?!\\.)';
const clearStateChar = () => {
if (stateChar) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch (stateChar) {
case '*':
re += star;
hasMagic = true;
break
case '?':
re += qmark;
hasMagic = true;
break
default:
re += '\\' + stateChar;
break
}
this.debug('clearStateChar %j %j', stateChar, re);
stateChar = false;
}
};
for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) {
this.debug('%s\t%s %s %j', pattern, i, re, c);
// skip over any that are escaped.
if (escaping) {
/* istanbul ignore next - completely not allowed, even escaped. */
if (c === '/') {
return false
}
if (reSpecials[c]) {
re += '\\';
}
re += c;
escaping = false;
continue
}
switch (c) {
/* istanbul ignore next */
case '/': {
// Should already be path-split by now.
return false
}
case '\\':
clearStateChar();
escaping = true;
continue
// the various stateChar values
// for the "extglob" stuff.
case '?':
case '*':
case '+':
case '@':
case '!':
this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c);
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if (inClass) {
this.debug(' in class');
if (c === '!' && i === classStart + 1) c = '^';
re += c;
continue
}
// if we already have a stateChar, then it means
// that there was something like ** or +? in there.
// Handle the stateChar, then proceed with this one.
this.debug('call clearStateChar %j', stateChar);
clearStateChar();
stateChar = c;
// if extglob is disabled, then +(asdf|foo) isn't a thing.
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if (options.noext) clearStateChar();
continue
case '(':
if (inClass) {
re += '(';
continue
}
if (!stateChar) {
re += '\\(';
continue
}
patternListStack.push({
type: stateChar,
start: i - 1,
reStart: re.length,
open: plTypes[stateChar].open,
close: plTypes[stateChar].close
});
// negation is (?:(?!js)[^/]*)
re += stateChar === '!' ? '(?:(?!(?:' : '(?:';
this.debug('plType %j %j', stateChar, re);
stateChar = false;
continue
case ')':
if (inClass || !patternListStack.length) {
re += '\\)';
continue
}
clearStateChar();
hasMagic = true;
pl = patternListStack.pop();
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
re += pl.close;
if (pl.type === '!') {
negativeLists.push(pl);
}
pl.reEnd = re.length;
continue
case '|':
if (inClass || !patternListStack.length) {
re += '\\|';
continue
}
clearStateChar();
re += '|';
continue
// these are mostly the same in regexp and glob
case '[':
// swallow any state-tracking char before the [
clearStateChar();
if (inClass) {
re += '\\' + c;
continue
}
inClass = true;
classStart = i;
reClassStart = re.length;
re += c;
continue
case ']':
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if (i === classStart + 1 || !inClass) {
re += '\\' + c;
continue
}
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
cs = pattern.substring(classStart + 1, i);
// finish up the class.
hasMagic = true;
inClass = false;
re += c;
continue
default:
// swallow any state char that wasn't consumed
clearStateChar();
if (reSpecials[c] && !(c === '^' && inClass)) {
re += '\\';
}
re += c;
break
} // switch
} // for
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if (inClass) {
// split where the last [ was, and escape it
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
cs = pattern.substr(classStart + 1);
sp = this.parse(cs, SUBPARSE);
re = re.substr(0, reClassStart) + '\\[' + sp[0];
hasMagic = hasMagic || sp[1];
}
// handle the case where we had a +( thing at the *end*
// of the pattern.
// each pattern list stack adds 3 chars, and we need to go through
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
let tail;
tail = re.slice(pl.reStart + pl.open.length);
this.debug('setting tail', re, pl);
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => {
/* istanbul ignore else - should already be done */
if (!$2) {
// the | isn't already escaped, so escape it.
$2 = '\\';
}
// need to escape all those slashes *again*, without escaping the
// one that we need for escaping the | character. As it works out,
// escaping an even number of slashes can be done by simply repeating
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
return $1 + $1 + $2 + '|'
});
this.debug('tail=%j\n %s', tail, tail, pl, re);
const t = pl.type === '*' ? star
: pl.type === '?' ? qmark
: '\\' + pl.type;
hasMagic = true;
re = re.slice(0, pl.reStart) + t + '\\(' + tail;
}
// handle trailing things that only matter at the very end.
clearStateChar();
if (escaping) {
// trailing \\
re += '\\\\';
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
const addPatternStart = addPatternStartSet[re.charAt(0)];
// Hack to work around lack of negative lookbehind in JS
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
// like 'a.xyz.yz' doesn't match. So, the first negative
// lookahead, has to look ALL the way ahead, to the end of
// the pattern.
for (let n = negativeLists.length - 1; n > -1; n--) {
const nl = negativeLists[n];
const nlBefore = re.slice(0, nl.reStart);
const nlFirst = re.slice(nl.reStart, nl.reEnd - 8);
let nlAfter = re.slice(nl.reEnd);
const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter;
// Handle nested stuff like *(*.js|!(*.json)), where open parens
// mean that we should *not* include the ) in the bit that is considered
// "after" the negated section.
const openParensBefore = nlBefore.split('(').length - 1;
let cleanAfter = nlAfter;
for (let i = 0; i < openParensBefore; i++) {
cleanAfter = cleanAfter.replace(/\)[+*?]?/, '');
}
nlAfter = cleanAfter;
const dollar = nlAfter === '' && isSub !== SUBPARSE ? '$' : '';
re = nlBefore + nlFirst + nlAfter + dollar + nlLast;
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
if (re !== '' && hasMagic) {
re = '(?=.)' + re;
}
if (addPatternStart) {
re = patternStart + re;
}
// parsing just a piece of a larger pattern.
if (isSub === SUBPARSE) {
return [re, hasMagic]
}
// skip the regexp for non-magical patterns
// unescape anything in it, though, so that it'll be
// an exact match against a file etc.
if (!hasMagic) {
return globUnescape(pattern)
}
const flags = options.nocase ? 'i' : '';
try {
return Object.assign(new RegExp('^' + re + '$', flags), {
_glob: pattern,
_src: re,
})
} catch (er) /* istanbul ignore next - should be impossible */ {
// If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line
// mode, but it's not a /m regex.
return new RegExp('$.')
}
}
makeRe () {
if (this.regexp || this.regexp === false) return this.regexp
// at this point, this.set is a 2d array of partial
// pattern strings, or "**".
//
// It's better to use .match(). This function shouldn't
// be used, really, but it's pretty convenient sometimes,
// when you just want to work with a regex.
const set = this.set;
if (!set.length) {
this.regexp = false;
return this.regexp
}
const options = this.options;
const twoStar = options.noglobstar ? star
: options.dot ? twoStarDot
: twoStarNoDot;
const flags = options.nocase ? 'i' : '';
// coalesce globstars and regexpify non-globstar patterns
// if it's the only item, then we just do one twoStar
// if it's the first, and there are more, prepend (\/|twoStar\/)? to next
// if it's the last, append (\/twoStar|) to previous
// if it's in the middle, append (\/|\/twoStar\/) to previous
// then filter out GLOBSTAR symbols
let re = set.map(pattern => {
pattern = pattern.map(p =>
typeof p === 'string' ? regExpEscape(p)
: p === GLOBSTAR$2 ? GLOBSTAR$2
: p._src
).reduce((set, p) => {
if (!(set[set.length - 1] === GLOBSTAR$2 && p === GLOBSTAR$2)) {
set.push(p);
}
return set
}, []);
pattern.forEach((p, i) => {
if (p !== GLOBSTAR$2 || pattern[i-1] === GLOBSTAR$2) {
return
}
if (i === 0) {
if (pattern.length > 1) {
pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' + pattern[i+1];
} else {
pattern[i] = twoStar;
}
} else if (i === pattern.length - 1) {
pattern[i-1] += '(?:\\\/|' + twoStar + ')?';
} else {
pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1];
pattern[i+1] = GLOBSTAR$2;
}
});
return pattern.filter(p => p !== GLOBSTAR$2).join('/')
}).join('|');
// must match entire pattern
// ending in a * or ** will make it less strict.
re = '^(?:' + re + ')$';
// can match anything, as long as it's not this.
if (this.negate) re = '^(?!' + re + ').*$';
try {
this.regexp = new RegExp(re, flags);
} catch (ex) /* istanbul ignore next - should be impossible */ {
this.regexp = false;
}
return this.regexp
}
match (f, partial = this.partial) {
this.debug('match', f, this.pattern);
// short-circuit in the case of busted things.
// comments, etc.
if (this.comment) return false
if (this.empty) return f === ''
if (f === '/' && partial) return true
const options = this.options;
// windows: need to use /, not \
if (path$j.sep !== '/') {
f = f.split(path$j.sep).join('/');
}
// treat the test path as a set of pathparts.
f = f.split(slashSplit);
this.debug(this.pattern, 'split', f);
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
// match means that we have failed.
// Either way, return on the first hit.
const set = this.set;
this.debug(this.pattern, 'set', set);
// Find the basename of the path by looking for the last non-empty segment
let filename;
for (let i = f.length - 1; i >= 0; i--) {
filename = f[i];
if (filename) break
}
for (let i = 0; i < set.length; i++) {
const pattern = set[i];
let file = f;
if (options.matchBase && pattern.length === 1) {
file = [filename];
}
const hit = this.matchOne(file, pattern, partial);
if (hit) {
if (options.flipNegate) return true
return !this.negate
}
}
// didn't get any hits. this is success if it's a negative
// pattern, failure otherwise.
if (options.flipNegate) return false
return this.negate
}
static defaults (def) {
return minimatch$1.defaults(def).Minimatch
}
};
minimatch$1.Minimatch = Minimatch$1;
var inherits = {exports: {}};
var inherits_browser = {exports: {}};
var hasRequiredInherits_browser;
function requireInherits_browser () {
if (hasRequiredInherits_browser) return inherits_browser.exports;
hasRequiredInherits_browser = 1;
if (typeof Object.create === 'function') {
// implementation from standard node.js 'util' module
inherits_browser.exports = function inherits(ctor, superCtor) {
if (superCtor) {
ctor.super_ = superCtor;
ctor.prototype = Object.create(superCtor.prototype, {
constructor: {
value: ctor,
enumerable: false,
writable: true,
configurable: true
}
});
}
};
} else {
// old school shim for old browsers
inherits_browser.exports = function inherits(ctor, superCtor) {
if (superCtor) {
ctor.super_ = superCtor;
var TempCtor = function () {};
TempCtor.prototype = superCtor.prototype;
ctor.prototype = new TempCtor();
ctor.prototype.constructor = ctor;
}
};
}
return inherits_browser.exports;
}
try {
var util$2 = require('util');
/* istanbul ignore next */
if (typeof util$2.inherits !== 'function') throw '';
inherits.exports = util$2.inherits;
} catch (e) {
/* istanbul ignore next */
inherits.exports = requireInherits_browser();
}
var inheritsExports = inherits.exports;
var common$c = {};
common$c.setopts = setopts;
common$c.ownProp = ownProp;
common$c.makeAbs = makeAbs;
common$c.finish = finish;
common$c.mark = mark;
common$c.isIgnored = isIgnored;
common$c.childrenIgnored = childrenIgnored;
function ownProp (obj, field) {
return Object.prototype.hasOwnProperty.call(obj, field)
}
var fs$i = require$$0__default;
var path$i = require$$0$4;
var minimatch = minimatch_1;
var isAbsolute = require$$0$4.isAbsolute;
var Minimatch = minimatch.Minimatch;
function alphasort (a, b) {
return a.localeCompare(b, 'en')
}
function setupIgnores (self, options) {
self.ignore = options.ignore || [];
if (!Array.isArray(self.ignore))
self.ignore = [self.ignore];
if (self.ignore.length) {
self.ignore = self.ignore.map(ignoreMap);
}
}
// ignore patterns are always in dot:true mode.
function ignoreMap (pattern) {
var gmatcher = null;
if (pattern.slice(-3) === '/**') {
var gpattern = pattern.replace(/(\/\*\*)+$/, '');
gmatcher = new Minimatch(gpattern, { dot: true });
}
return {
matcher: new Minimatch(pattern, { dot: true }),
gmatcher: gmatcher
}
}
function setopts (self, pattern, options) {
if (!options)
options = {};
// base-matching: just use globstar for that.
if (options.matchBase && -1 === pattern.indexOf("/")) {
if (options.noglobstar) {
throw new Error("base matching requires globstar")
}
pattern = "**/" + pattern;
}
self.silent = !!options.silent;
self.pattern = pattern;
self.strict = options.strict !== false;
self.realpath = !!options.realpath;
self.realpathCache = options.realpathCache || Object.create(null);
self.follow = !!options.follow;
self.dot = !!options.dot;
self.mark = !!options.mark;
self.nodir = !!options.nodir;
if (self.nodir)
self.mark = true;
self.sync = !!options.sync;
self.nounique = !!options.nounique;
self.nonull = !!options.nonull;
self.nosort = !!options.nosort;
self.nocase = !!options.nocase;
self.stat = !!options.stat;
self.noprocess = !!options.noprocess;
self.absolute = !!options.absolute;
self.fs = options.fs || fs$i;
self.maxLength = options.maxLength || Infinity;
self.cache = options.cache || Object.create(null);
self.statCache = options.statCache || Object.create(null);
self.symlinks = options.symlinks || Object.create(null);
setupIgnores(self, options);
self.changedCwd = false;
var cwd = process.cwd();
if (!ownProp(options, "cwd"))
self.cwd = path$i.resolve(cwd);
else {
self.cwd = path$i.resolve(options.cwd);
self.changedCwd = self.cwd !== cwd;
}
self.root = options.root || path$i.resolve(self.cwd, "/");
self.root = path$i.resolve(self.root);
// TODO: is an absolute `cwd` supposed to be resolved against `root`?
// e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd);
self.nomount = !!options.nomount;
if (process.platform === "win32") {
self.root = self.root.replace(/\\/g, "/");
self.cwd = self.cwd.replace(/\\/g, "/");
self.cwdAbs = self.cwdAbs.replace(/\\/g, "/");
}
// disable comments and negation in Minimatch.
// Note that they are not supported in Glob itself anyway.
options.nonegate = true;
options.nocomment = true;
// always treat \ in patterns as escapes, not path separators
options.allowWindowsEscape = true;
self.minimatch = new Minimatch(pattern, options);
self.options = self.minimatch.options;
}
function finish (self) {
var nou = self.nounique;
var all = nou ? [] : Object.create(null);
for (var i = 0, l = self.matches.length; i < l; i ++) {
var matches = self.matches[i];
if (!matches || Object.keys(matches).length === 0) {
if (self.nonull) {
// do like the shell, and spit out the literal glob
var literal = self.minimatch.globSet[i];
if (nou)
all.push(literal);
else
all[literal] = true;
}
} else {
// had matches
var m = Object.keys(matches);
if (nou)
all.push.apply(all, m);
else
m.forEach(function (m) {
all[m] = true;
});
}
}
if (!nou)
all = Object.keys(all);
if (!self.nosort)
all = all.sort(alphasort);
// at *some* point we statted all of these
if (self.mark) {
for (var i = 0; i < all.length; i++) {
all[i] = self._mark(all[i]);
}
if (self.nodir) {
all = all.filter(function (e) {
var notDir = !(/\/$/.test(e));
var c = self.cache[e] || self.cache[makeAbs(self, e)];
if (notDir && c)
notDir = c !== 'DIR' && !Array.isArray(c);
return notDir
});
}
}
if (self.ignore.length)
all = all.filter(function(m) {
return !isIgnored(self, m)
});
self.found = all;
}
function mark (self, p) {
var abs = makeAbs(self, p);
var c = self.cache[abs];
var m = p;
if (c) {
var isDir = c === 'DIR' || Array.isArray(c);
var slash = p.slice(-1) === '/';
if (isDir && !slash)
m += '/';
else if (!isDir && slash)
m = m.slice(0, -1);
if (m !== p) {
var mabs = makeAbs(self, m);
self.statCache[mabs] = self.statCache[abs];
self.cache[mabs] = self.cache[abs];
}
}
return m
}
// lotta situps...
function makeAbs (self, f) {
var abs = f;
if (f.charAt(0) === '/') {
abs = path$i.join(self.root, f);
} else if (isAbsolute(f) || f === '') {
abs = f;
} else if (self.changedCwd) {
abs = path$i.resolve(self.cwd, f);
} else {
abs = path$i.resolve(f);
}
if (process.platform === 'win32')
abs = abs.replace(/\\/g, '/');
return abs
}
// Returns true if the path is ignored. When an ignore pattern ends with the globstar '/**',
// the accompanying parent directory is ignored as well.
// E.g. if 'node_modules/**' is the pattern, 'node_modules' is added to the ignore list along with its contents.
function isIgnored (self, path) {
if (!self.ignore.length)
return false
return self.ignore.some(function(item) {
return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
})
}
function childrenIgnored (self, path) {
if (!self.ignore.length)
return false
return self.ignore.some(function(item) {
return !!(item.gmatcher && item.gmatcher.match(path))
})
}
var sync$9;
var hasRequiredSync;
function requireSync () {
if (hasRequiredSync) return sync$9;
hasRequiredSync = 1;
sync$9 = globSync;
globSync.GlobSync = GlobSync;
var rp = fs_realpath;
var minimatch = minimatch_1;
requireGlob().Glob;
var path = require$$0$4;
var assert = require$$5;
var isAbsolute = require$$0$4.isAbsolute;
var common = common$c;
var setopts = common.setopts;
var ownProp = common.ownProp;
var childrenIgnored = common.childrenIgnored;
var isIgnored = common.isIgnored;
function globSync (pattern, options) {
if (typeof options === 'function' || arguments.length === 3)
throw new TypeError('callback provided to sync glob\n'+
'See: https://github.com/isaacs/node-glob/issues/167')
return new GlobSync(pattern, options).found
}
function GlobSync (pattern, options) {
if (!pattern)
throw new Error('must provide pattern')
if (typeof options === 'function' || arguments.length === 3)
throw new TypeError('callback provided to sync glob\n'+
'See: https://github.com/isaacs/node-glob/issues/167')
if (!(this instanceof GlobSync))
return new GlobSync(pattern, options)
setopts(this, pattern, options);
if (this.noprocess)
return this
var n = this.minimatch.set.length;
this.matches = new Array(n);
for (var i = 0; i < n; i ++) {
this._process(this.minimatch.set[i], i, false);
}
this._finish();
}
GlobSync.prototype._finish = function () {
assert.ok(this instanceof GlobSync);
if (this.realpath) {
var self = this;
this.matches.forEach(function (matchset, index) {
var set = self.matches[index] = Object.create(null);
for (var p in matchset) {
try {
p = self._makeAbs(p);
var real = rp.realpathSync(p, self.realpathCache);
set[real] = true;
} catch (er) {
if (er.syscall === 'stat')
set[self._makeAbs(p)] = true;
else
throw er
}
}
});
}
common.finish(this);
};
GlobSync.prototype._process = function (pattern, index, inGlobStar) {
assert.ok(this instanceof GlobSync);
// Get the first [n] parts of pattern that are all strings.
var n = 0;
while (typeof pattern[n] === 'string') {
n ++;
}
// now n is the index of the first one that is *not* a string.
// See if there's anything else
var prefix;
switch (n) {
// if not, then this is rather simple
case pattern.length:
this._processSimple(pattern.join('/'), index);
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null;
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern.slice(0, n).join('/');
break
}
var remain = pattern.slice(n);
// get the list of entries.
var read;
if (prefix === null)
read = '.';
else if (isAbsolute(prefix) ||
isAbsolute(pattern.map(function (p) {
return typeof p === 'string' ? p : '[*]'
}).join('/'))) {
if (!prefix || !isAbsolute(prefix))
prefix = '/' + prefix;
read = prefix;
} else
read = prefix;
var abs = this._makeAbs(read);
//if ignored, skip processing
if (childrenIgnored(this, read))
return
var isGlobStar = remain[0] === minimatch.GLOBSTAR;
if (isGlobStar)
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar);
else
this._processReaddir(prefix, read, abs, remain, index, inGlobStar);
};
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
var entries = this._readdir(abs, inGlobStar);
// if the abs isn't a dir, then nothing can match!
if (!entries)
return
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain[0];
var negate = !!this.minimatch.negate;
var rawGlob = pn._glob;
var dotOk = this.dot || rawGlob.charAt(0) === '.';
var matchedEntries = [];
for (var i = 0; i < entries.length; i++) {
var e = entries[i];
if (e.charAt(0) !== '.' || dotOk) {
var m;
if (negate && !prefix) {
m = !e.match(pn);
} else {
m = e.match(pn);
}
if (m)
matchedEntries.push(e);
}
}
var len = matchedEntries.length;
// If there are no matched entries, then nothing matches.
if (len === 0)
return
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if (remain.length === 1 && !this.mark && !this.stat) {
if (!this.matches[index])
this.matches[index] = Object.create(null);
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i];
if (prefix) {
if (prefix.slice(-1) !== '/')
e = prefix + '/' + e;
else
e = prefix + e;
}
if (e.charAt(0) === '/' && !this.nomount) {
e = path.join(this.root, e);
}
this._emitMatch(index, e);
}
// This was the last one, and no stats were needed
return
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain.shift();
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i];
var newPattern;
if (prefix)
newPattern = [prefix, e];
else
newPattern = [e];
this._process(newPattern.concat(remain), index, inGlobStar);
}
};
GlobSync.prototype._emitMatch = function (index, e) {
if (isIgnored(this, e))
return
var abs = this._makeAbs(e);
if (this.mark)
e = this._mark(e);
if (this.absolute) {
e = abs;
}
if (this.matches[index][e])
return
if (this.nodir) {
var c = this.cache[abs];
if (c === 'DIR' || Array.isArray(c))
return
}
this.matches[index][e] = true;
if (this.stat)
this._stat(e);
};
GlobSync.prototype._readdirInGlobStar = function (abs) {
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if (this.follow)
return this._readdir(abs, false)
var entries;
var lstat;
try {
lstat = this.fs.lstatSync(abs);
} catch (er) {
if (er.code === 'ENOENT') {
// lstat failed, doesn't exist
return null
}
}
var isSym = lstat && lstat.isSymbolicLink();
this.symlinks[abs] = isSym;
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if (!isSym && lstat && !lstat.isDirectory())
this.cache[abs] = 'FILE';
else
entries = this._readdir(abs, false);
return entries
};
GlobSync.prototype._readdir = function (abs, inGlobStar) {
if (inGlobStar && !ownProp(this.symlinks, abs))
return this._readdirInGlobStar(abs)
if (ownProp(this.cache, abs)) {
var c = this.cache[abs];
if (!c || c === 'FILE')
return null
if (Array.isArray(c))
return c
}
try {
return this._readdirEntries(abs, this.fs.readdirSync(abs))
} catch (er) {
this._readdirError(abs, er);
return null
}
};
GlobSync.prototype._readdirEntries = function (abs, entries) {
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if (!this.mark && !this.stat) {
for (var i = 0; i < entries.length; i ++) {
var e = entries[i];
if (abs === '/')
e = abs + e;
else
e = abs + '/' + e;
this.cache[e] = true;
}
}
this.cache[abs] = entries;
// mark and cache dir-ness
return entries
};
GlobSync.prototype._readdirError = function (f, er) {
// handle errors, and cache the information
switch (er.code) {
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR': // totally normal. means it *does* exist.
var abs = this._makeAbs(f);
this.cache[abs] = 'FILE';
if (abs === this.cwdAbs) {
var error = new Error(er.code + ' invalid cwd ' + this.cwd);
error.path = this.cwd;
error.code = er.code;
throw error
}
break
case 'ENOENT': // not terribly unusual
case 'ELOOP':
case 'ENAMETOOLONG':
case 'UNKNOWN':
this.cache[this._makeAbs(f)] = false;
break
default: // some unusual error. Treat as failure.
this.cache[this._makeAbs(f)] = false;
if (this.strict)
throw er
if (!this.silent)
console.error('glob error', er);
break
}
};
GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
var entries = this._readdir(abs, inGlobStar);
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if (!entries)
return
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain.slice(1);
var gspref = prefix ? [ prefix ] : [];
var noGlobStar = gspref.concat(remainWithoutGlobStar);
// the noGlobStar pattern exits the inGlobStar state
this._process(noGlobStar, index, false);
var len = entries.length;
var isSym = this.symlinks[abs];
// If it's a symlink, and we're in a globstar, then stop
if (isSym && inGlobStar)
return
for (var i = 0; i < len; i++) {
var e = entries[i];
if (e.charAt(0) === '.' && !this.dot)
continue
// these two cases enter the inGlobStar state
var instead = gspref.concat(entries[i], remainWithoutGlobStar);
this._process(instead, index, true);
var below = gspref.concat(entries[i], remain);
this._process(below, index, true);
}
};
GlobSync.prototype._processSimple = function (prefix, index) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var exists = this._stat(prefix);
if (!this.matches[index])
this.matches[index] = Object.create(null);
// If it doesn't exist, then just mark the lack of results
if (!exists)
return
if (prefix && isAbsolute(prefix) && !this.nomount) {
var trail = /[\/\\]$/.test(prefix);
if (prefix.charAt(0) === '/') {
prefix = path.join(this.root, prefix);
} else {
prefix = path.resolve(this.root, prefix);
if (trail)
prefix += '/';
}
}
if (process.platform === 'win32')
prefix = prefix.replace(/\\/g, '/');
// Mark this as a match
this._emitMatch(index, prefix);
};
// Returns either 'DIR', 'FILE', or false
GlobSync.prototype._stat = function (f) {
var abs = this._makeAbs(f);
var needDir = f.slice(-1) === '/';
if (f.length > this.maxLength)
return false
if (!this.stat && ownProp(this.cache, abs)) {
var c = this.cache[abs];
if (Array.isArray(c))
c = 'DIR';
// It exists, but maybe not how we need it
if (!needDir || c === 'DIR')
return c
if (needDir && c === 'FILE')
return false
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var stat = this.statCache[abs];
if (!stat) {
var lstat;
try {
lstat = this.fs.lstatSync(abs);
} catch (er) {
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
this.statCache[abs] = false;
return false
}
}
if (lstat && lstat.isSymbolicLink()) {
try {
stat = this.fs.statSync(abs);
} catch (er) {
stat = lstat;
}
} else {
stat = lstat;
}
}
this.statCache[abs] = stat;
var c = true;
if (stat)
c = stat.isDirectory() ? 'DIR' : 'FILE';
this.cache[abs] = this.cache[abs] || c;
if (needDir && c === 'FILE')
return false
return c
};
GlobSync.prototype._mark = function (p) {
return common.mark(this, p)
};
GlobSync.prototype._makeAbs = function (f) {
return common.makeAbs(this, f)
};
return sync$9;
}
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
var wrappy_1 = wrappy$2;
function wrappy$2 (fn, cb) {
if (fn && cb) return wrappy$2(fn)(cb)
if (typeof fn !== 'function')
throw new TypeError('need wrapper function')
Object.keys(fn).forEach(function (k) {
wrapper[k] = fn[k];
});
return wrapper
function wrapper() {
var args = new Array(arguments.length);
for (var i = 0; i < args.length; i++) {
args[i] = arguments[i];
}
var ret = fn.apply(this, args);
var cb = args[args.length-1];
if (typeof ret === 'function' && ret !== cb) {
Object.keys(cb).forEach(function (k) {
ret[k] = cb[k];
});
}
return ret
}
}
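// Illustrative use of wrappy (this is how `once` below consumes it): own enumerable
// properties of the callback passed in survive onto the function the wrapper returns.
//   function task () {}
//   task.retries = 3
//   var wrapped = wrappy_1(function (cb) { return function () { return cb() } })(task)
//   // wrapped.retries === 3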
var once$2 = {exports: {}};
var wrappy$1 = wrappy_1;
once$2.exports = wrappy$1(once$1);
once$2.exports.strict = wrappy$1(onceStrict);
once$1.proto = once$1(function () {
Object.defineProperty(Function.prototype, 'once', {
value: function () {
return once$1(this)
},
configurable: true
});
Object.defineProperty(Function.prototype, 'onceStrict', {
value: function () {
return onceStrict(this)
},
configurable: true
});
});
function once$1 (fn) {
var f = function () {
if (f.called) return f.value
f.called = true;
return f.value = fn.apply(this, arguments)
};
f.called = false;
return f
}
function onceStrict (fn) {
var f = function () {
if (f.called)
throw new Error(f.onceError)
f.called = true;
return f.value = fn.apply(this, arguments)
};
var name = fn.name || 'Function wrapped with `once`';
f.onceError = name + " shouldn't be called more than once";
f.called = false;
return f
}
var onceExports = once$2.exports;
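// Illustrative use of the once/onceStrict exports:
//   var boot = onceExports(function () { return Date.now() })
//   boot() === boot()              // second call returns the cached first result
//   var strict = onceExports.strict(function () {})
//   strict(); strict()             // second call throws "... shouldn't be called more than once"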
var wrappy = wrappy_1;
var reqs = Object.create(null);
var once = onceExports;
var inflight_1 = wrappy(inflight);
function inflight (key, cb) {
if (reqs[key]) {
reqs[key].push(cb);
return null
} else {
reqs[key] = [cb];
return makeres(key)
}
}
function makeres (key) {
return once(function RES () {
var cbs = reqs[key];
var len = cbs.length;
var args = slice$1(arguments);
// XXX It's somewhat ambiguous whether a new callback added in this
// pass should be queued for later execution if something in the
// list of callbacks throws, or if it should just be discarded.
// However, it's such an edge case that it hardly matters, and either
// choice is likely as surprising as the other.
// As it happens, we do go ahead and schedule it for later execution.
try {
for (var i = 0; i < len; i++) {
cbs[i].apply(null, args);
}
} finally {
if (cbs.length > len) {
// added more in the interim.
// de-zalgo, just in case, but don't call again.
cbs.splice(0, len);
process.nextTick(function () {
RES.apply(null, args);
});
} else {
delete reqs[key];
}
}
})
}
function slice$1 (args) {
var length = args.length;
var array = [];
for (var i = 0; i < length; i++) array[i] = args[i];
return array
}
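// Illustrative use of inflight: concurrent requests for the same key share one result;
// only the first caller gets a resolver back, later callers are queued on the key.
//   var res = inflight_1('readdir:/tmp', cbA)   // -> function (first caller starts the work)
//   inflight_1('readdir:/tmp', cbB)             // -> null (cbB queued on the same key)
//   res(null, ['a', 'b'])                       // both cbA and cbB receive (null, ['a', 'b'])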
var glob_1;
var hasRequiredGlob;
function requireGlob () {
if (hasRequiredGlob) return glob_1;
hasRequiredGlob = 1;
// Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
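// Rough illustration of the shapes referred to above (the exact regexes come from minimatch):
//   pattern 'src/**/*.js' -> a single set item roughly like ['src', GLOBSTAR, <RegExp for '*.js'>]
//   so in _process: prefix = 'src', remain = [GLOBSTAR, <RegExp for '*.js'>]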
glob_1 = glob;
var rp = fs_realpath;
var minimatch = minimatch_1;
var inherits = inheritsExports;
var EE = require$$0$5.EventEmitter;
var path = require$$0$4;
var assert = require$$5;
var isAbsolute = require$$0$4.isAbsolute;
var globSync = requireSync();
var common = common$c;
var setopts = common.setopts;
var ownProp = common.ownProp;
var inflight = inflight_1;
var childrenIgnored = common.childrenIgnored;
var isIgnored = common.isIgnored;
var once = onceExports;
function glob (pattern, options, cb) {
if (typeof options === 'function') cb = options, options = {};
if (!options) options = {};
if (options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return globSync(pattern, options)
}
return new Glob(pattern, options, cb)
}
glob.sync = globSync;
var GlobSync = glob.GlobSync = globSync.GlobSync;
// old api surface
glob.glob = glob;
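// Typical use of this API surface (illustrative):
//   glob('src/**/*.js', { nodir: true }, function (er, files) { /* files: string[] */ })
//   var files = glob.sync('src/**/*.js')   // same matching, synchronously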
function extend (origin, add) {
if (add === null || typeof add !== 'object') {
return origin
}
var keys = Object.keys(add);
var i = keys.length;
while (i--) {
origin[keys[i]] = add[keys[i]];
}
return origin
}
glob.hasMagic = function (pattern, options_) {
var options = extend({}, options_);
options.noprocess = true;
var g = new Glob(pattern, options);
var set = g.minimatch.set;
if (!pattern)
return false
if (set.length > 1)
return true
for (var j = 0; j < set[0].length; j++) {
if (typeof set[0][j] !== 'string')
return true
}
return false
};
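// Examples of the hasMagic checks above (illustrative):
//   glob.hasMagic('a/b/c')    // false - a single set of literal strings
//   glob.hasMagic('a/*.js')   // true  - '*' compiles to a non-string set entry
//   glob.hasMagic('a/{b,c}')  // true  - brace expansion yields more than one set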
glob.Glob = Glob;
inherits(Glob, EE);
function Glob (pattern, options, cb) {
if (typeof options === 'function') {
cb = options;
options = null;
}
if (options && options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return new GlobSync(pattern, options)
}
if (!(this instanceof Glob))
return new Glob(pattern, options, cb)
setopts(this, pattern, options);
this._didRealPath = false;
// process each pattern in the minimatch set
var n = this.minimatch.set.length;
// The matches are stored as {<filename>: true,...} so that
// duplicates are automagically pruned.
// Later, we do an Object.keys() on these.
// Keep them as a list so we can fill in when nonull is set.
this.matches = new Array(n);
if (typeof cb === 'function') {
cb = once(cb);
this.on('error', cb);
this.on('end', function (matches) {
cb(null, matches);
});
}
var self = this;
this._processing = 0;
this._emitQueue = [];
this._processQueue = [];
this.paused = false;
if (this.noprocess)
return this
if (n === 0)
return done()
var sync = true;
for (var i = 0; i < n; i ++) {
this._process(this.minimatch.set[i], i, false, done);
}
sync = false;
function done () {
--self._processing;
if (self._processing <= 0) {
if (sync) {
process.nextTick(function () {
self._finish();
});
} else {
self._finish();
}
}
}
}
Glob.prototype._finish = function () {
assert(this instanceof Glob);
if (this.aborted)
return
if (this.realpath && !this._didRealpath)
return this._realpath()
common.finish(this);
this.emit('end', this.found);
};
Glob.prototype._realpath = function () {
if (this._didRealpath)
return
this._didRealpath = true;
var n = this.matches.length;
if (n === 0)
return this._finish()
var self = this;
for (var i = 0; i < this.matches.length; i++)
this._realpathSet(i, next);
function next () {
if (--n === 0)
self._finish();
}
};
Glob.prototype._realpathSet = function (index, cb) {
var matchset = this.matches[index];
if (!matchset)
return cb()
var found = Object.keys(matchset);
var self = this;
var n = found.length;
if (n === 0)
return cb()
var set = this.matches[index] = Object.create(null);
found.forEach(function (p, i) {
// If there's a problem with the stat, then it means that
// one or more of the links in the realpath couldn't be
// resolved. just return the abs value in that case.
p = self._makeAbs(p);
rp.realpath(p, self.realpathCache, function (er, real) {
if (!er)
set[real] = true;
else if (er.syscall === 'stat')
set[p] = true;
else
self.emit('error', er); // srsly wtf right here
if (--n === 0) {
self.matches[index] = set;
cb();
}
});
});
};
Glob.prototype._mark = function (p) {
return common.mark(this, p)
};
Glob.prototype._makeAbs = function (f) {
return common.makeAbs(this, f)
};
Glob.prototype.abort = function () {
this.aborted = true;
this.emit('abort');
};
Glob.prototype.pause = function () {
if (!this.paused) {
this.paused = true;
this.emit('pause');
}
};
Glob.prototype.resume = function () {
if (this.paused) {
this.emit('resume');
this.paused = false;
if (this._emitQueue.length) {
var eq = this._emitQueue.slice(0);
this._emitQueue.length = 0;
for (var i = 0; i < eq.length; i ++) {
var e = eq[i];
this._emitMatch(e[0], e[1]);
}
}
if (this._processQueue.length) {
var pq = this._processQueue.slice(0);
this._processQueue.length = 0;
for (var i = 0; i < pq.length; i ++) {
var p = pq[i];
this._processing--;
this._process(p[0], p[1], p[2], p[3]);
}
}
}
};
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
assert(this instanceof Glob);
assert(typeof cb === 'function');
if (this.aborted)
return
this._processing++;
if (this.paused) {
this._processQueue.push([pattern, index, inGlobStar, cb]);
return
}
//console.error('PROCESS %d', this._processing, pattern)
// Get the first [n] parts of pattern that are all strings.
var n = 0;
while (typeof pattern[n] === 'string') {
n ++;
}
// now n is the index of the first one that is *not* a string.
// see if there's anything else
var prefix;
switch (n) {
// if not, then this is rather simple
case pattern.length:
this._processSimple(pattern.join('/'), index, cb);
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null;
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern.slice(0, n).join('/');
break
}
var remain = pattern.slice(n);
// get the list of entries.
var read;
if (prefix === null)
read = '.';
else if (isAbsolute(prefix) ||
isAbsolute(pattern.map(function (p) {
return typeof p === 'string' ? p : '[*]'
}).join('/'))) {
if (!prefix || !isAbsolute(prefix))
prefix = '/' + prefix;
read = prefix;
} else
read = prefix;
var abs = this._makeAbs(read);
//if ignored, skip _processing
if (childrenIgnored(this, read))
return cb()
var isGlobStar = remain[0] === minimatch.GLOBSTAR;
if (isGlobStar)
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb);
else
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb);
};
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this;
this._readdir(abs, inGlobStar, function (er, entries) {
return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
});
};
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
// if the abs isn't a dir, then nothing can match!
if (!entries)
return cb()
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain[0];
var negate = !!this.minimatch.negate;
var rawGlob = pn._glob;
var dotOk = this.dot || rawGlob.charAt(0) === '.';
var matchedEntries = [];
for (var i = 0; i < entries.length; i++) {
var e = entries[i];
if (e.charAt(0) !== '.' || dotOk) {
var m;
if (negate && !prefix) {
m = !e.match(pn);
} else {
m = e.match(pn);
}
if (m)
matchedEntries.push(e);
}
}
//console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
var len = matchedEntries.length;
// If there are no matched entries, then nothing matches.
if (len === 0)
return cb()
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if (remain.length === 1 && !this.mark && !this.stat) {
if (!this.matches[index])
this.matches[index] = Object.create(null);
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i];
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e;
else
e = prefix + e;
}
if (e.charAt(0) === '/' && !this.nomount) {
e = path.join(this.root, e);
}
this._emitMatch(index, e);
}
// This was the last one, and no stats were needed
return cb()
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain.shift();
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i];
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e;
else
e = prefix + e;
}
this._process([e].concat(remain), index, inGlobStar, cb);
}
cb();
};
Glob.prototype._emitMatch = function (index, e) {
if (this.aborted)
return
if (isIgnored(this, e))
return
if (this.paused) {
this._emitQueue.push([index, e]);
return
}
var abs = isAbsolute(e) ? e : this._makeAbs(e);
if (this.mark)
e = this._mark(e);
if (this.absolute)
e = abs;
if (this.matches[index][e])
return
if (this.nodir) {
var c = this.cache[abs];
if (c === 'DIR' || Array.isArray(c))
return
}
this.matches[index][e] = true;
var st = this.statCache[abs];
if (st)
this.emit('stat', e, st);
this.emit('match', e);
};
Glob.prototype._readdirInGlobStar = function (abs, cb) {
if (this.aborted)
return
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if (this.follow)
return this._readdir(abs, false, cb)
var lstatkey = 'lstat\0' + abs;
var self = this;
var lstatcb = inflight(lstatkey, lstatcb_);
if (lstatcb)
self.fs.lstat(abs, lstatcb);
function lstatcb_ (er, lstat) {
if (er && er.code === 'ENOENT')
return cb()
var isSym = lstat && lstat.isSymbolicLink();
self.symlinks[abs] = isSym;
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if (!isSym && lstat && !lstat.isDirectory()) {
self.cache[abs] = 'FILE';
cb();
} else
self._readdir(abs, false, cb);
}
};
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
if (this.aborted)
return
cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb);
if (!cb)
return
//console.error('RD %j %j', +inGlobStar, abs)
if (inGlobStar && !ownProp(this.symlinks, abs))
return this._readdirInGlobStar(abs, cb)
if (ownProp(this.cache, abs)) {
var c = this.cache[abs];
if (!c || c === 'FILE')
return cb()
if (Array.isArray(c))
return cb(null, c)
}
var self = this;
self.fs.readdir(abs, readdirCb(this, abs, cb));
};
function readdirCb (self, abs, cb) {
return function (er, entries) {
if (er)
self._readdirError(abs, er, cb);
else
self._readdirEntries(abs, entries, cb);
}
}
Glob.prototype._readdirEntries = function (abs, entries, cb) {
if (this.aborted)
return
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if (!this.mark && !this.stat) {
for (var i = 0; i < entries.length; i ++) {
var e = entries[i];
if (abs === '/')
e = abs + e;
else
e = abs + '/' + e;
this.cache[e] = true;
}
}
this.cache[abs] = entries;
return cb(null, entries)
};
Glob.prototype._readdirError = function (f, er, cb) {
if (this.aborted)
return
// handle errors, and cache the information
switch (er.code) {
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR': // totally normal. means it *does* exist.
var abs = this._makeAbs(f);
this.cache[abs] = 'FILE';
if (abs === this.cwdAbs) {
var error = new Error(er.code + ' invalid cwd ' + this.cwd);
error.path = this.cwd;
error.code = er.code;
this.emit('error', error);
this.abort();
}
break
case 'ENOENT': // not terribly unusual
case 'ELOOP':
case 'ENAMETOOLONG':
case 'UNKNOWN':
this.cache[this._makeAbs(f)] = false;
break
default: // some unusual error. Treat as failure.
this.cache[this._makeAbs(f)] = false;
if (this.strict) {
this.emit('error', er);
// If the error is handled, then we abort
// if not, we threw out of here
this.abort();
}
if (!this.silent)
console.error('glob error', er);
break
}
return cb()
};
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this;
this._readdir(abs, inGlobStar, function (er, entries) {
self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb);
});
};
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
//console.error('pgs2', prefix, remain[0], entries)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if (!entries)
return cb()
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain.slice(1);
var gspref = prefix ? [ prefix ] : [];
var noGlobStar = gspref.concat(remainWithoutGlobStar);
// the noGlobStar pattern exits the inGlobStar state
this._process(noGlobStar, index, false, cb);
var isSym = this.symlinks[abs];
var len = entries.length;
// If it's a symlink, and we're in a globstar, then stop
if (isSym && inGlobStar)
return cb()
for (var i = 0; i < len; i++) {
var e = entries[i];
if (e.charAt(0) === '.' && !this.dot)
continue
// these two cases enter the inGlobStar state
var instead = gspref.concat(entries[i], remainWithoutGlobStar);
this._process(instead, index, true, cb);
var below = gspref.concat(entries[i], remain);
this._process(below, index, true, cb);
}
cb();
};
Glob.prototype._processSimple = function (prefix, index, cb) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var self = this;
this._stat(prefix, function (er, exists) {
self._processSimple2(prefix, index, er, exists, cb);
});
};
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
//console.error('ps2', prefix, exists)
if (!this.matches[index])
this.matches[index] = Object.create(null);
// If it doesn't exist, then just mark the lack of results
if (!exists)
return cb()
if (prefix && isAbsolute(prefix) && !this.nomount) {
var trail = /[\/\\]$/.test(prefix);
if (prefix.charAt(0) === '/') {
prefix = path.join(this.root, prefix);
} else {
prefix = path.resolve(this.root, prefix);
if (trail)
prefix += '/';
}
}
if (process.platform === 'win32')
prefix = prefix.replace(/\\/g, '/');
// Mark this as a match
this._emitMatch(index, prefix);
cb();
};
// Returns either 'DIR', 'FILE', or false
Glob.prototype._stat = function (f, cb) {
var abs = this._makeAbs(f);
var needDir = f.slice(-1) === '/';
if (f.length > this.maxLength)
return cb()
if (!this.stat && ownProp(this.cache, abs)) {
var c = this.cache[abs];
if (Array.isArray(c))
c = 'DIR';
// It exists, but maybe not how we need it
if (!needDir || c === 'DIR')
return cb(null, c)
if (needDir && c === 'FILE')
return cb()
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var stat = this.statCache[abs];
if (stat !== undefined) {
if (stat === false)
return cb(null, stat)
else {
var type = stat.isDirectory() ? 'DIR' : 'FILE';
if (needDir && type === 'FILE')
return cb()
else
return cb(null, type, stat)
}
}
var self = this;
var statcb = inflight('stat\0' + abs, lstatcb_);
if (statcb)
self.fs.lstat(abs, statcb);
function lstatcb_ (er, lstat) {
if (lstat && lstat.isSymbolicLink()) {
// If it's a symlink, then treat it as the target, unless
// the target does not exist, then treat it as a file.
return self.fs.stat(abs, function (er, stat) {
if (er)
self._stat2(f, abs, null, lstat, cb);
else
self._stat2(f, abs, er, stat, cb);
})
} else {
self._stat2(f, abs, er, lstat, cb);
}
}
};
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
this.statCache[abs] = false;
return cb()
}
var needDir = f.slice(-1) === '/';
this.statCache[abs] = stat;
if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
return cb(null, false, stat)
var c = true;
if (stat)
c = stat.isDirectory() ? 'DIR' : 'FILE';
this.cache[abs] = this.cache[abs] || c;
if (needDir && c === 'FILE')
return cb()
return cb(null, c, stat)
};
return glob_1;
}
var globExports = requireGlob();
var glob$1 = /*@__PURE__*/getDefaultExportFromCjs(globExports);
const comma$1 = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars$2 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar$1 = new Uint8Array(64); // 64 possible chars.
const charToInt$1 = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars$2.length; i++) {
const c = chars$2.charCodeAt(i);
intToChar$1[i] = c;
charToInt$1[c] = i;
}
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
function encode$1(decoded) {
const state = new Int32Array(5);
const bufLength = 1024 * 16;
const subLength = bufLength - 36;
const buf = new Uint8Array(bufLength);
const sub = buf.subarray(0, subLength);
let pos = 0;
let out = '';
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) {
if (pos === bufLength) {
out += td.decode(buf);
pos = 0;
}
buf[pos++] = semicolon;
}
if (line.length === 0)
continue;
state[0] = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
// We can push up to 5 ints, each int can take at most 7 chars, and we
// may push a comma.
if (pos > subLength) {
out += td.decode(sub);
buf.copyWithin(0, subLength, pos);
pos -= subLength;
}
if (j > 0)
buf[pos++] = comma$1;
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
if (segment.length === 1)
continue;
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
if (segment.length === 4)
continue;
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
}
}
return out + td.decode(buf.subarray(0, pos));
}
function encodeInteger(buf, pos, state, segment, j) {
const next = segment[j];
let num = next - state[j];
state[j] = next;
num = num < 0 ? (-num << 1) | 1 : num << 1;
do {
let clamped = num & 0b011111;
num >>>= 5;
if (num > 0)
clamped |= 0b100000;
buf[pos++] = intToChar$1[clamped];
} while (num > 0);
return pos;
}
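// Worked example of the VLQ step above: each delta is zig-zag shifted, then emitted in
// 5-bit chunks (bit 6 = continuation) as base64 characters:
//   delta  1 ->  2            -> 'C'
//   delta -1 ->  3            -> 'D'
//   delta 16 -> 32 -> [0|cont, 1] -> 'gB'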
let BitSet$1 = class BitSet {
constructor(arg) {
this.bits = arg instanceof BitSet ? arg.bits.slice() : [];
}
add(n) {
this.bits[n >> 5] |= 1 << (n & 31);
}
has(n) {
return !!(this.bits[n >> 5] & (1 << (n & 31)));
}
};
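// e.g. add(37): 37 >> 5 = 1 and 37 & 31 = 5, so bit 5 of bits[1] is set; has(37) reads it back.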
let Chunk$1 = class Chunk {
constructor(start, end, content) {
this.start = start;
this.end = end;
this.original = content;
this.intro = '';
this.outro = '';
this.content = content;
this.storeName = false;
this.edited = false;
{
this.previous = null;
this.next = null;
}
}
appendLeft(content) {
this.outro += content;
}
appendRight(content) {
this.intro = this.intro + content;
}
clone() {
const chunk = new Chunk(this.start, this.end, this.original);
chunk.intro = this.intro;
chunk.outro = this.outro;
chunk.content = this.content;
chunk.storeName = this.storeName;
chunk.edited = this.edited;
return chunk;
}
contains(index) {
return this.start < index && index < this.end;
}
eachNext(fn) {
let chunk = this;
while (chunk) {
fn(chunk);
chunk = chunk.next;
}
}
eachPrevious(fn) {
let chunk = this;
while (chunk) {
fn(chunk);
chunk = chunk.previous;
}
}
edit(content, storeName, contentOnly) {
this.content = content;
if (!contentOnly) {
this.intro = '';
this.outro = '';
}
this.storeName = storeName;
this.edited = true;
return this;
}
prependLeft(content) {
this.outro = content + this.outro;
}
prependRight(content) {
this.intro = content + this.intro;
}
split(index) {
const sliceIndex = index - this.start;
const originalBefore = this.original.slice(0, sliceIndex);
const originalAfter = this.original.slice(sliceIndex);
this.original = originalBefore;
const newChunk = new Chunk(index, this.end, originalAfter);
newChunk.outro = this.outro;
this.outro = '';
this.end = index;
if (this.edited) {
// TODO is this block necessary?...
newChunk.edit('', false);
this.content = '';
} else {
this.content = originalBefore;
}
newChunk.next = this.next;
if (newChunk.next) newChunk.next.previous = newChunk;
newChunk.previous = this;
this.next = newChunk;
return newChunk;
}
toString() {
return this.intro + this.content + this.outro;
}
trimEnd(rx) {
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
const trimmed = this.content.replace(rx, '');
if (trimmed.length) {
if (trimmed !== this.content) {
this.split(this.start + trimmed.length).edit('', undefined, true);
}
return true;
} else {
this.edit('', undefined, true);
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
}
}
trimStart(rx) {
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
const trimmed = this.content.replace(rx, '');
if (trimmed.length) {
if (trimmed !== this.content) {
this.split(this.end - trimmed.length);
this.edit('', undefined, true);
}
return true;
} else {
this.edit('', undefined, true);
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
}
}
};
function getBtoa$1 () {
if (typeof window !== 'undefined' && typeof window.btoa === 'function') {
return (str) => window.btoa(unescape(encodeURIComponent(str)));
} else if (typeof Buffer === 'function') {
return (str) => Buffer.from(str, 'utf-8').toString('base64');
} else {
return () => {
throw new Error('Unsupported environment: `window.btoa` or `Buffer` should be supported.');
};
}
}
const btoa$2 = /*#__PURE__*/ getBtoa$1();
let SourceMap$2 = class SourceMap {
constructor(properties) {
this.version = 3;
this.file = properties.file;
this.sources = properties.sources;
this.sourcesContent = properties.sourcesContent;
this.names = properties.names;
this.mappings = encode$1(properties.mappings);
}
toString() {
return JSON.stringify(this);
}
toUrl() {
return 'data:application/json;charset=utf-8;base64,' + btoa$2(this.toString());
}
};
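// Illustrative use: the generated map can be inlined as a data URI, e.g.
//   code += '\n//# sourceMappingURL=' + map.toUrl()
// where `map` is the SourceMap produced by generateMap() further below.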
function guessIndent$1(code) {
const lines = code.split('\n');
const tabbed = lines.filter((line) => /^\t+/.test(line));
const spaced = lines.filter((line) => /^ {2,}/.test(line));
if (tabbed.length === 0 && spaced.length === 0) {
return null;
}
// More lines tabbed than spaced? Assume tabs, and
// default to tabs in the case of a tie (or nothing
// to go on)
if (tabbed.length >= spaced.length) {
return '\t';
}
// Otherwise, we need to guess the multiple
const min = spaced.reduce((previous, current) => {
const numSpaces = /^ +/.exec(current)[0].length;
return Math.min(numSpaces, previous);
}, Infinity);
return new Array(min + 1).join(' ');
}
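// Examples of the guess above (illustrative):
//   guessIndent$1('\tfoo\n\tbar')   -> '\t'   (tabs win ties and majorities)
//   guessIndent$1('  foo\n    bar') -> '  '   (smallest run of 2+ leading spaces)
//   guessIndent$1('foo\nbar')       -> null   (nothing to go on)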
function getRelativePath$1(from, to) {
const fromParts = from.split(/[/\\]/);
const toParts = to.split(/[/\\]/);
fromParts.pop(); // get dirname
while (fromParts[0] === toParts[0]) {
fromParts.shift();
toParts.shift();
}
if (fromParts.length) {
let i = fromParts.length;
while (i--) fromParts[i] = '..';
}
return fromParts.concat(toParts).join('/');
}
const toString$3 = Object.prototype.toString;
function isObject$3(thing) {
return toString$3.call(thing) === '[object Object]';
}
function getLocator$1(source) {
const originalLines = source.split('\n');
const lineOffsets = [];
for (let i = 0, pos = 0; i < originalLines.length; i++) {
lineOffsets.push(pos);
pos += originalLines[i].length + 1;
}
return function locate(index) {
let i = 0;
let j = lineOffsets.length;
while (i < j) {
const m = (i + j) >> 1;
if (index < lineOffsets[m]) {
j = m;
} else {
i = m + 1;
}
}
const line = i - 1;
const column = index - lineOffsets[line];
return { line, column };
};
}
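// Worked example: for the source 'ab\ncd', lineOffsets is [0, 3], so
//   locate(4) -> { line: 1, column: 1 }   // the 'd' on the second line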
let Mappings$1 = class Mappings {
constructor(hires) {
this.hires = hires;
this.generatedCodeLine = 0;
this.generatedCodeColumn = 0;
this.raw = [];
this.rawSegments = this.raw[this.generatedCodeLine] = [];
this.pending = null;
}
addEdit(sourceIndex, content, loc, nameIndex) {
if (content.length) {
const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column];
if (nameIndex >= 0) {
segment.push(nameIndex);
}
this.rawSegments.push(segment);
} else if (this.pending) {
this.rawSegments.push(this.pending);
}
this.advance(content);
this.pending = null;
}
addUneditedChunk(sourceIndex, chunk, original, loc, sourcemapLocations) {
let originalCharIndex = chunk.start;
let first = true;
while (originalCharIndex < chunk.end) {
if (this.hires || first || sourcemapLocations.has(originalCharIndex)) {
this.rawSegments.push([this.generatedCodeColumn, sourceIndex, loc.line, loc.column]);
}
if (original[originalCharIndex] === '\n') {
loc.line += 1;
loc.column = 0;
this.generatedCodeLine += 1;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
this.generatedCodeColumn = 0;
first = true;
} else {
loc.column += 1;
this.generatedCodeColumn += 1;
first = false;
}
originalCharIndex += 1;
}
this.pending = null;
}
advance(str) {
if (!str) return;
const lines = str.split('\n');
if (lines.length > 1) {
for (let i = 0; i < lines.length - 1; i++) {
this.generatedCodeLine++;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
}
this.generatedCodeColumn = 0;
}
this.generatedCodeColumn += lines[lines.length - 1].length;
}
};
const n$2 = '\n';
const warned$1 = {
insertLeft: false,
insertRight: false,
storeName: false,
};
let MagicString$1 = class MagicString {
constructor(string, options = {}) {
const chunk = new Chunk$1(0, string.length, string);
Object.defineProperties(this, {
original: { writable: true, value: string },
outro: { writable: true, value: '' },
intro: { writable: true, value: '' },
firstChunk: { writable: true, value: chunk },
lastChunk: { writable: true, value: chunk },
lastSearchedChunk: { writable: true, value: chunk },
byStart: { writable: true, value: {} },
byEnd: { writable: true, value: {} },
filename: { writable: true, value: options.filename },
indentExclusionRanges: { writable: true, value: options.indentExclusionRanges },
sourcemapLocations: { writable: true, value: new BitSet$1() },
storedNames: { writable: true, value: {} },
indentStr: { writable: true, value: undefined },
});
this.byStart[0] = chunk;
this.byEnd[string.length] = chunk;
}
addSourcemapLocation(char) {
this.sourcemapLocations.add(char);
}
append(content) {
if (typeof content !== 'string') throw new TypeError('outro content must be a string');
this.outro += content;
return this;
}
appendLeft(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byEnd[index];
if (chunk) {
chunk.appendLeft(content);
} else {
this.intro += content;
}
return this;
}
appendRight(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byStart[index];
if (chunk) {
chunk.appendRight(content);
} else {
this.outro += content;
}
return this;
}
clone() {
const cloned = new MagicString(this.original, { filename: this.filename });
let originalChunk = this.firstChunk;
let clonedChunk = (cloned.firstChunk = cloned.lastSearchedChunk = originalChunk.clone());
while (originalChunk) {
cloned.byStart[clonedChunk.start] = clonedChunk;
cloned.byEnd[clonedChunk.end] = clonedChunk;
const nextOriginalChunk = originalChunk.next;
const nextClonedChunk = nextOriginalChunk && nextOriginalChunk.clone();
if (nextClonedChunk) {
clonedChunk.next = nextClonedChunk;
nextClonedChunk.previous = clonedChunk;
clonedChunk = nextClonedChunk;
}
originalChunk = nextOriginalChunk;
}
cloned.lastChunk = clonedChunk;
if (this.indentExclusionRanges) {
cloned.indentExclusionRanges = this.indentExclusionRanges.slice();
}
cloned.sourcemapLocations = new BitSet$1(this.sourcemapLocations);
cloned.intro = this.intro;
cloned.outro = this.outro;
return cloned;
}
generateDecodedMap(options) {
options = options || {};
const sourceIndex = 0;
const names = Object.keys(this.storedNames);
const mappings = new Mappings$1(options.hires);
const locate = getLocator$1(this.original);
if (this.intro) {
mappings.advance(this.intro);
}
this.firstChunk.eachNext((chunk) => {
const loc = locate(chunk.start);
if (chunk.intro.length) mappings.advance(chunk.intro);
if (chunk.edited) {
mappings.addEdit(
sourceIndex,
chunk.content,
loc,
chunk.storeName ? names.indexOf(chunk.original) : -1
);
} else {
mappings.addUneditedChunk(sourceIndex, chunk, this.original, loc, this.sourcemapLocations);
}
if (chunk.outro.length) mappings.advance(chunk.outro);
});
return {
file: options.file ? options.file.split(/[/\\]/).pop() : null,
sources: [options.source ? getRelativePath$1(options.file || '', options.source) : null],
sourcesContent: options.includeContent ? [this.original] : [null],
names,
mappings: mappings.raw,
};
}
generateMap(options) {
return new SourceMap$2(this.generateDecodedMap(options));
}
_ensureindentStr() {
if (this.indentStr === undefined) {
this.indentStr = guessIndent$1(this.original);
}
}
_getRawIndentString() {
this._ensureindentStr();
return this.indentStr;
}
getIndentString() {
this._ensureindentStr();
return this.indentStr === null ? '\t' : this.indentStr;
}
indent(indentStr, options) {
const pattern = /^[^\r\n]/gm;
if (isObject$3(indentStr)) {
options = indentStr;
indentStr = undefined;
}
if (indentStr === undefined) {
this._ensureindentStr();
indentStr = this.indentStr || '\t';
}
if (indentStr === '') return this; // noop
options = options || {};
// Process exclusion ranges
const isExcluded = {};
if (options.exclude) {
const exclusions =
typeof options.exclude[0] === 'number' ? [options.exclude] : options.exclude;
exclusions.forEach((exclusion) => {
for (let i = exclusion[0]; i < exclusion[1]; i += 1) {
isExcluded[i] = true;
}
});
}
let shouldIndentNextCharacter = options.indentStart !== false;
const replacer = (match) => {
if (shouldIndentNextCharacter) return `${indentStr}${match}`;
shouldIndentNextCharacter = true;
return match;
};
this.intro = this.intro.replace(pattern, replacer);
let charIndex = 0;
let chunk = this.firstChunk;
while (chunk) {
const end = chunk.end;
if (chunk.edited) {
if (!isExcluded[charIndex]) {
chunk.content = chunk.content.replace(pattern, replacer);
if (chunk.content.length) {
shouldIndentNextCharacter = chunk.content[chunk.content.length - 1] === '\n';
}
}
} else {
charIndex = chunk.start;
while (charIndex < end) {
if (!isExcluded[charIndex]) {
const char = this.original[charIndex];
if (char === '\n') {
shouldIndentNextCharacter = true;
} else if (char !== '\r' && shouldIndentNextCharacter) {
shouldIndentNextCharacter = false;
if (charIndex === chunk.start) {
chunk.prependRight(indentStr);
} else {
this._splitChunk(chunk, charIndex);
chunk = chunk.next;
chunk.prependRight(indentStr);
}
}
}
charIndex += 1;
}
}
charIndex = chunk.end;
chunk = chunk.next;
}
this.outro = this.outro.replace(pattern, replacer);
return this;
}
insert() {
throw new Error(
'magicString.insert(...) is deprecated. Use prependRight(...) or appendLeft(...)'
);
}
insertLeft(index, content) {
if (!warned$1.insertLeft) {
console.warn(
'magicString.insertLeft(...) is deprecated. Use magicString.appendLeft(...) instead'
); // eslint-disable-line no-console
warned$1.insertLeft = true;
}
return this.appendLeft(index, content);
}
insertRight(index, content) {
if (!warned$1.insertRight) {
console.warn(
'magicString.insertRight(...) is deprecated. Use magicString.prependRight(...) instead'
); // eslint-disable-line no-console
warned$1.insertRight = true;
}
return this.prependRight(index, content);
}
move(start, end, index) {
if (index >= start && index <= end) throw new Error('Cannot move a selection inside itself');
this._split(start);
this._split(end);
this._split(index);
const first = this.byStart[start];
const last = this.byEnd[end];
const oldLeft = first.previous;
const oldRight = last.next;
const newRight = this.byStart[index];
if (!newRight && last === this.lastChunk) return this;
const newLeft = newRight ? newRight.previous : this.lastChunk;
if (oldLeft) oldLeft.next = oldRight;
if (oldRight) oldRight.previous = oldLeft;
if (newLeft) newLeft.next = first;
if (newRight) newRight.previous = last;
if (!first.previous) this.firstChunk = last.next;
if (!last.next) {
this.lastChunk = first.previous;
this.lastChunk.next = null;
}
first.previous = newLeft;
last.next = newRight || null;
if (!newLeft) this.firstChunk = first;
if (!newRight) this.lastChunk = last;
return this;
}
overwrite(start, end, content, options) {
options = options || {};
return this.update(start, end, content, { ...options, overwrite: !options.contentOnly });
}
update(start, end, content, options) {
if (typeof content !== 'string') throw new TypeError('replacement content must be a string');
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
if (end > this.original.length) throw new Error('end is out of bounds');
if (start === end)
throw new Error(
'Cannot overwrite a zero-length range - use appendLeft or prependRight instead'
);
this._split(start);
this._split(end);
if (options === true) {
if (!warned$1.storeName) {
console.warn(
'The final argument to magicString.overwrite(...) should be an options object. See https://github.com/rich-harris/magic-string'
); // eslint-disable-line no-console
warned$1.storeName = true;
}
options = { storeName: true };
}
const storeName = options !== undefined ? options.storeName : false;
const overwrite = options !== undefined ? options.overwrite : false;
if (storeName) {
const original = this.original.slice(start, end);
Object.defineProperty(this.storedNames, original, {
writable: true,
value: true,
enumerable: true,
});
}
const first = this.byStart[start];
const last = this.byEnd[end];
if (first) {
let chunk = first;
while (chunk !== last) {
if (chunk.next !== this.byStart[chunk.end]) {
throw new Error('Cannot overwrite across a split point');
}
chunk = chunk.next;
chunk.edit('', false);
}
first.edit(content, storeName, !overwrite);
} else {
// must be inserting at the end
const newChunk = new Chunk$1(start, end, '').edit(content, storeName);
// TODO last chunk in the array may not be the last chunk, if it's moved...
last.next = newChunk;
newChunk.previous = last;
}
return this;
}
prepend(content) {
if (typeof content !== 'string') throw new TypeError('outro content must be a string');
this.intro = content + this.intro;
return this;
}
prependLeft(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byEnd[index];
if (chunk) {
chunk.prependLeft(content);
} else {
this.intro = content + this.intro;
}
return this;
}
prependRight(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byStart[index];
if (chunk) {
chunk.prependRight(content);
} else {
this.outro = content + this.outro;
}
return this;
}
remove(start, end) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
if (start === end) return this;
if (start < 0 || end > this.original.length) throw new Error('Character is out of bounds');
if (start > end) throw new Error('end must be greater than start');
this._split(start);
this._split(end);
let chunk = this.byStart[start];
while (chunk) {
chunk.intro = '';
chunk.outro = '';
chunk.edit('');
chunk = end > chunk.end ? this.byStart[chunk.end] : null;
}
return this;
}
lastChar() {
if (this.outro.length) return this.outro[this.outro.length - 1];
let chunk = this.lastChunk;
do {
if (chunk.outro.length) return chunk.outro[chunk.outro.length - 1];
if (chunk.content.length) return chunk.content[chunk.content.length - 1];
if (chunk.intro.length) return chunk.intro[chunk.intro.length - 1];
} while ((chunk = chunk.previous));
if (this.intro.length) return this.intro[this.intro.length - 1];
return '';
}
lastLine() {
let lineIndex = this.outro.lastIndexOf(n$2);
if (lineIndex !== -1) return this.outro.substr(lineIndex + 1);
let lineStr = this.outro;
let chunk = this.lastChunk;
do {
if (chunk.outro.length > 0) {
lineIndex = chunk.outro.lastIndexOf(n$2);
if (lineIndex !== -1) return chunk.outro.substr(lineIndex + 1) + lineStr;
lineStr = chunk.outro + lineStr;
}
if (chunk.content.length > 0) {
lineIndex = chunk.content.lastIndexOf(n$2);
if (lineIndex !== -1) return chunk.content.substr(lineIndex + 1) + lineStr;
lineStr = chunk.content + lineStr;
}
if (chunk.intro.length > 0) {
lineIndex = chunk.intro.lastIndexOf(n$2);
if (lineIndex !== -1) return chunk.intro.substr(lineIndex + 1) + lineStr;
lineStr = chunk.intro + lineStr;
}
} while ((chunk = chunk.previous));
lineIndex = this.intro.lastIndexOf(n$2);
if (lineIndex !== -1) return this.intro.substr(lineIndex + 1) + lineStr;
return this.intro + lineStr;
}
slice(start = 0, end = this.original.length) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
let result = '';
// find start chunk
let chunk = this.firstChunk;
while (chunk && (chunk.start > start || chunk.end <= start)) {
// found end chunk before start
if (chunk.start < end && chunk.end >= end) {
return result;
}
chunk = chunk.next;
}
if (chunk && chunk.edited && chunk.start !== start)
throw new Error(`Cannot use replaced character ${start} as slice start anchor.`);
const startChunk = chunk;
while (chunk) {
if (chunk.intro && (startChunk !== chunk || chunk.start === start)) {
result += chunk.intro;
}
const containsEnd = chunk.start < end && chunk.end >= end;
if (containsEnd && chunk.edited && chunk.end !== end)
throw new Error(`Cannot use replaced character ${end} as slice end anchor.`);
const sliceStart = startChunk === chunk ? start - chunk.start : 0;
const sliceEnd = containsEnd ? chunk.content.length + end - chunk.end : chunk.content.length;
result += chunk.content.slice(sliceStart, sliceEnd);
if (chunk.outro && (!containsEnd || chunk.end === end)) {
result += chunk.outro;
}
if (containsEnd) {
break;
}
chunk = chunk.next;
}
return result;
}
// TODO deprecate this? not really very useful
snip(start, end) {
const clone = this.clone();
clone.remove(0, start);
clone.remove(end, clone.original.length);
return clone;
}
_split(index) {
if (this.byStart[index] || this.byEnd[index]) return;
let chunk = this.lastSearchedChunk;
const searchForward = index > chunk.end;
while (chunk) {
if (chunk.contains(index)) return this._splitChunk(chunk, index);
chunk = searchForward ? this.byStart[chunk.end] : this.byEnd[chunk.start];
}
}
_splitChunk(chunk, index) {
if (chunk.edited && chunk.content.length) {
// zero-length edited chunks are a special case (overlapping replacements)
const loc = getLocator$1(this.original)(index);
throw new Error(
`Cannot split a chunk that has already been edited (${loc.line}:${loc.column} "${chunk.original}")`
);
}
const newChunk = chunk.split(index);
this.byEnd[index] = chunk;
this.byStart[index] = newChunk;
this.byEnd[newChunk.end] = newChunk;
if (chunk === this.lastChunk) this.lastChunk = newChunk;
this.lastSearchedChunk = chunk;
return true;
}
toString() {
let str = this.intro;
let chunk = this.firstChunk;
while (chunk) {
str += chunk.toString();
chunk = chunk.next;
}
return str + this.outro;
}
isEmpty() {
let chunk = this.firstChunk;
do {
if (
(chunk.intro.length && chunk.intro.trim()) ||
(chunk.content.length && chunk.content.trim()) ||
(chunk.outro.length && chunk.outro.trim())
)
return false;
} while ((chunk = chunk.next));
return true;
}
length() {
let chunk = this.firstChunk;
let length = 0;
do {
length += chunk.intro.length + chunk.content.length + chunk.outro.length;
} while ((chunk = chunk.next));
return length;
}
trimLines() {
return this.trim('[\\r\\n]');
}
trim(charType) {
return this.trimStart(charType).trimEnd(charType);
}
trimEndAborted(charType) {
const rx = new RegExp((charType || '\\s') + '+$');
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
let chunk = this.lastChunk;
do {
const end = chunk.end;
const aborted = chunk.trimEnd(rx);
// if chunk was trimmed, we have a new lastChunk
if (chunk.end !== end) {
if (this.lastChunk === chunk) {
this.lastChunk = chunk.next;
}
this.byEnd[chunk.end] = chunk;
this.byStart[chunk.next.start] = chunk.next;
this.byEnd[chunk.next.end] = chunk.next;
}
if (aborted) return true;
chunk = chunk.previous;
} while (chunk);
return false;
}
trimEnd(charType) {
this.trimEndAborted(charType);
return this;
}
trimStartAborted(charType) {
const rx = new RegExp('^' + (charType || '\\s') + '+');
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
let chunk = this.firstChunk;
do {
const end = chunk.end;
const aborted = chunk.trimStart(rx);
if (chunk.end !== end) {
// special case...
if (chunk === this.lastChunk) this.lastChunk = chunk.next;
this.byEnd[chunk.end] = chunk;
this.byStart[chunk.next.start] = chunk.next;
this.byEnd[chunk.next.end] = chunk.next;
}
if (aborted) return true;
chunk = chunk.next;
} while (chunk);
return false;
}
trimStart(charType) {
this.trimStartAborted(charType);
return this;
}
hasChanged() {
return this.original !== this.toString();
}
_replaceRegexp(searchValue, replacement) {
function getReplacement(match, str) {
if (typeof replacement === 'string') {
return replacement.replace(/\$(\$|&|\d+)/g, (_, i) => {
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace#specifying_a_string_as_a_parameter
if (i === '$') return '$';
if (i === '&') return match[0];
const num = +i;
if (num < match.length) return match[+i];
return `$${i}`;
});
} else {
return replacement(...match, match.index, str, match.groups);
}
}
function matchAll(re, str) {
let match;
const matches = [];
while ((match = re.exec(str))) {
matches.push(match);
}
return matches;
}
if (searchValue.global) {
const matches = matchAll(searchValue, this.original);
matches.forEach((match) => {
if (match.index != null)
this.overwrite(
match.index,
match.index + match[0].length,
getReplacement(match, this.original)
);
});
} else {
const match = this.original.match(searchValue);
if (match && match.index != null)
this.overwrite(
match.index,
match.index + match[0].length,
getReplacement(match, this.original)
);
}
return this;
}
_replaceString(string, replacement) {
const { original } = this;
const index = original.indexOf(string);
if (index !== -1) {
this.overwrite(index, index + string.length, replacement);
}
return this;
}
replace(searchValue, replacement) {
if (typeof searchValue === 'string') {
return this._replaceString(searchValue, replacement);
}
return this._replaceRegexp(searchValue, replacement);
}
_replaceAllString(string, replacement) {
const { original } = this;
const stringLength = string.length;
for (
let index = original.indexOf(string);
index !== -1;
index = original.indexOf(string, index + stringLength)
) {
this.overwrite(index, index + stringLength, replacement);
}
return this;
}
replaceAll(searchValue, replacement) {
if (typeof searchValue === 'string') {
return this._replaceAllString(searchValue, replacement);
}
if (!searchValue.global) {
throw new TypeError(
'MagicString.prototype.replaceAll called with a non-global RegExp argument'
);
}
return this._replaceRegexp(searchValue, replacement);
}
};
function isReference(node, parent) {
if (node.type === 'MemberExpression') {
return !node.computed && isReference(node.object, node);
}
if (node.type === 'Identifier') {
if (!parent)
return true;
switch (parent.type) {
// disregard `bar` in `foo.bar`
case 'MemberExpression': return parent.computed || node === parent.object;
// disregard the `foo` in `class {foo(){}}` but keep it in `class {[foo](){}}`
case 'MethodDefinition': return parent.computed;
// disregard the `foo` in `class {foo=bar}` but keep it in `class {[foo]=bar}` and `class {bar=foo}`
case 'FieldDefinition': return parent.computed || node === parent.value;
// disregard the `bar` in `{ bar: foo }`, but keep it in `{ [bar]: foo }`
case 'Property': return parent.computed || node === parent.value;
// disregard the `bar` in `export { foo as bar }` or
// the foo in `import { foo as bar }`
case 'ExportSpecifier':
case 'ImportSpecifier': return node === parent.local;
// disregard the `foo` in `foo: while (...) { ... break foo; ... continue foo;}`
case 'LabeledStatement':
case 'BreakStatement':
case 'ContinueStatement': return false;
default: return true;
}
}
return false;
}
var version$3 = "25.0.3";
var peerDependencies = {
rollup: "^2.68.0||^3.0.0"
};
function tryParse(parse, code, id) {
try {
return parse(code, { allowReturnOutsideFunction: true });
} catch (err) {
err.message += ` in ${id}`;
throw err;
}
}
const firstpassGlobal = /\b(?:require|module|exports|global)\b/;
const firstpassNoGlobal = /\b(?:require|module|exports)\b/;
function hasCjsKeywords(code, ignoreGlobal) {
const firstpass = ignoreGlobal ? firstpassNoGlobal : firstpassGlobal;
return firstpass.test(code);
}
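// Illustrative note: this is only a cheap regex pre-filter run before parsing, e.g.
// hasCjsKeywords('module.exports = 1', false) === true, while code that never mentions
// require/module/exports (or global) is skipped without a full parse.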
/* eslint-disable no-underscore-dangle */
function analyzeTopLevelStatements(parse, code, id) {
const ast = tryParse(parse, code, id);
let isEsModule = false;
let hasDefaultExport = false;
let hasNamedExports = false;
for (const node of ast.body) {
switch (node.type) {
case 'ExportDefaultDeclaration':
isEsModule = true;
hasDefaultExport = true;
break;
case 'ExportNamedDeclaration':
isEsModule = true;
if (node.declaration) {
hasNamedExports = true;
} else {
for (const specifier of node.specifiers) {
if (specifier.exported.name === 'default') {
hasDefaultExport = true;
} else {
hasNamedExports = true;
}
}
}
break;
case 'ExportAllDeclaration':
isEsModule = true;
if (node.exported && node.exported.name === 'default') {
hasDefaultExport = true;
} else {
hasNamedExports = true;
}
break;
case 'ImportDeclaration':
isEsModule = true;
break;
}
}
return { isEsModule, hasDefaultExport, hasNamedExports, ast };
}
/* eslint-disable import/prefer-default-export */
function deconflict(scopes, globals, identifier) {
let i = 1;
let deconflicted = makeLegalIdentifier(identifier);
const hasConflicts = () =>
scopes.some((scope) => scope.contains(deconflicted)) || globals.has(deconflicted);
while (hasConflicts()) {
deconflicted = makeLegalIdentifier(`${identifier}_${i}`);
i += 1;
}
for (const scope of scopes) {
scope.declarations[deconflicted] = true;
}
return deconflicted;
}
function getName(id) {
const name = makeLegalIdentifier(basename$1(id, extname(id)));
if (name !== 'index') {
return name;
}
return makeLegalIdentifier(basename$1(dirname$1(id)));
}
function normalizePathSlashes(path) {
return path.replace(/\\/g, '/');
}
const getVirtualPathForDynamicRequirePath = (path, commonDir) =>
`/${normalizePathSlashes(relative$1(commonDir, path))}`;
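// Illustrative example (POSIX paths assumed):
// getVirtualPathForDynamicRequirePath('/a/b/pkg/index.js', '/a/b') === '/pkg/index.js'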
function capitalize(name) {
return name[0].toUpperCase() + name.slice(1);
}
function getStrictRequiresFilter({ strictRequires }) {
switch (strictRequires) {
case true:
return { strictRequiresFilter: () => true, detectCyclesAndConditional: false };
// eslint-disable-next-line no-undefined
case undefined:
case 'auto':
case 'debug':
case null:
return { strictRequiresFilter: () => false, detectCyclesAndConditional: true };
case false:
return { strictRequiresFilter: () => false, detectCyclesAndConditional: false };
default:
if (typeof strictRequires === 'string' || Array.isArray(strictRequires)) {
return {
strictRequiresFilter: createFilter$1(strictRequires),
detectCyclesAndConditional: false
};
}
throw new Error('Unexpected value for "strictRequires" option.');
}
}
function getPackageEntryPoint(dirPath) {
let entryPoint = 'index.js';
try {
if (existsSync(join$1(dirPath, 'package.json'))) {
entryPoint =
JSON.parse(readFileSync(join$1(dirPath, 'package.json'), { encoding: 'utf8' })).main ||
entryPoint;
}
} catch (ignored) {
// ignored
}
return entryPoint;
}
function isDirectory(path) {
try {
if (statSync$1(path).isDirectory()) return true;
} catch (ignored) {
// Nothing to do here
}
return false;
}
function getDynamicRequireModules(patterns, dynamicRequireRoot) {
const dynamicRequireModules = new Map();
const dirNames = new Set();
for (const pattern of !patterns || Array.isArray(patterns) ? patterns || [] : [patterns]) {
const isNegated = pattern.startsWith('!');
const modifyMap = (targetPath, resolvedPath) =>
isNegated
? dynamicRequireModules.delete(targetPath)
: dynamicRequireModules.set(targetPath, resolvedPath);
for (const path of glob$1.sync(isNegated ? pattern.substr(1) : pattern)) {
const resolvedPath = resolve$3(path);
const requirePath = normalizePathSlashes(resolvedPath);
if (isDirectory(resolvedPath)) {
dirNames.add(resolvedPath);
const modulePath = resolve$3(join$1(resolvedPath, getPackageEntryPoint(path)));
modifyMap(requirePath, modulePath);
modifyMap(normalizePathSlashes(modulePath), modulePath);
} else {
dirNames.add(dirname$1(resolvedPath));
modifyMap(requirePath, resolvedPath);
}
}
}
return {
commonDir: dirNames.size ? getCommonDir([...dirNames, dynamicRequireRoot]) : null,
dynamicRequireModules
};
}
const FAILED_REQUIRE_ERROR = `throw new Error('Could not dynamically require "' + path + '". Please configure the dynamicRequireTargets and/or ignoreDynamicRequires option of @rollup/plugin-commonjs appropriately for this require call to work.');`;
const COMMONJS_REQUIRE_EXPORT = 'commonjsRequire';
const CREATE_COMMONJS_REQUIRE_EXPORT = 'createCommonjsRequire';
function getDynamicModuleRegistry(
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
ignoreDynamicRequires
) {
if (!isDynamicRequireModulesEnabled) {
return `export function ${COMMONJS_REQUIRE_EXPORT}(path) {
${FAILED_REQUIRE_ERROR}
}`;
}
const dynamicModuleImports = [...dynamicRequireModules.values()]
.map(
(id, index) =>
`import ${
id.endsWith('.json') ? `json${index}` : `{ __require as require${index} }`
} from ${JSON.stringify(id)};`
)
.join('\n');
const dynamicModuleProps = [...dynamicRequireModules.keys()]
.map(
(id, index) =>
`\t\t${JSON.stringify(getVirtualPathForDynamicRequirePath(id, commonDir))}: ${
id.endsWith('.json') ? `function () { return json${index}; }` : `require${index}`
}`
)
.join(',\n');
return `${dynamicModuleImports}
var dynamicModules;
function getDynamicModules() {
return dynamicModules || (dynamicModules = {
${dynamicModuleProps}
});
}
export function ${CREATE_COMMONJS_REQUIRE_EXPORT}(originalModuleDir) {
function handleRequire(path) {
var resolvedPath = commonjsResolve(path, originalModuleDir);
if (resolvedPath !== null) {
return getDynamicModules()[resolvedPath]();
}
${ignoreDynamicRequires ? 'return require(path);' : FAILED_REQUIRE_ERROR}
}
handleRequire.resolve = function (path) {
var resolvedPath = commonjsResolve(path, originalModuleDir);
if (resolvedPath !== null) {
return resolvedPath;
}
return require.resolve(path);
}
return handleRequire;
}
function commonjsResolve (path, originalModuleDir) {
var shouldTryNodeModules = isPossibleNodeModulesPath(path);
path = normalize(path);
var relPath;
if (path[0] === '/') {
originalModuleDir = '';
}
var modules = getDynamicModules();
var checkedExtensions = ['', '.js', '.json'];
while (true) {
if (!shouldTryNodeModules) {
relPath = normalize(originalModuleDir + '/' + path);
} else {
relPath = normalize(originalModuleDir + '/node_modules/' + path);
}
if (relPath.endsWith('/..')) {
break; // Travelled too far up, avoid infinite loop
}
for (var extensionIndex = 0; extensionIndex < checkedExtensions.length; extensionIndex++) {
var resolvedPath = relPath + checkedExtensions[extensionIndex];
if (modules[resolvedPath]) {
return resolvedPath;
}
}
if (!shouldTryNodeModules) break;
var nextDir = normalize(originalModuleDir + '/..');
if (nextDir === originalModuleDir) break;
originalModuleDir = nextDir;
}
return null;
}
function isPossibleNodeModulesPath (modulePath) {
var c0 = modulePath[0];
if (c0 === '/' || c0 === '\\\\') return false;
var c1 = modulePath[1], c2 = modulePath[2];
if ((c0 === '.' && (!c1 || c1 === '/' || c1 === '\\\\')) ||
(c0 === '.' && c1 === '.' && (!c2 || c2 === '/' || c2 === '\\\\'))) return false;
if (c1 === ':' && (c2 === '/' || c2 === '\\\\')) return false;
return true;
}
function normalize (path) {
path = path.replace(/\\\\/g, '/');
var parts = path.split('/');
var slashed = parts[0] === '';
for (var i = 1; i < parts.length; i++) {
if (parts[i] === '.' || parts[i] === '') {
parts.splice(i--, 1);
}
}
for (var i = 1; i < parts.length; i++) {
if (parts[i] !== '..') continue;
if (i > 0 && parts[i - 1] !== '..' && parts[i - 1] !== '.') {
parts.splice(--i, 2);
i--;
}
}
path = parts.join('/');
if (slashed && path[0] !== '/') path = '/' + path;
else if (path.length === 0) path = '.';
return path;
}`;
}
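// Note on the generated runtime above (illustrative): commonjsResolve() resolves relative
// paths against originalModuleDir and walks parent node_modules directories for bare ids,
// trying the '', '.js' and '.json' extensions; the inlined normalize() collapses '.' and
// '..' segments, e.g. normalize('a/./b/../c') === 'a/c'.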
const isWrappedId = (id, suffix) => id.endsWith(suffix);
const wrapId$1 = (id, suffix) => `\0${id}${suffix}`;
const unwrapId$1 = (wrappedId, suffix) => wrappedId.slice(1, -suffix.length);
const PROXY_SUFFIX = '?commonjs-proxy';
const WRAPPED_SUFFIX = '?commonjs-wrapped';
const EXTERNAL_SUFFIX = '?commonjs-external';
const EXPORTS_SUFFIX = '?commonjs-exports';
const MODULE_SUFFIX = '?commonjs-module';
const ENTRY_SUFFIX = '?commonjs-entry';
const ES_IMPORT_SUFFIX = '?commonjs-es-import';
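// Illustrative example: wrapId$1('/src/dep.js', PROXY_SUFFIX) === '\0/src/dep.js?commonjs-proxy',
// and unwrapId$1 of that value with the same suffix returns '/src/dep.js'.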
const DYNAMIC_MODULES_ID = '\0commonjs-dynamic-modules';
const HELPERS_ID = '\0commonjsHelpers.js';
const IS_WRAPPED_COMMONJS = 'withRequireFunction';
// `x['default']` is used instead of `x.default` for backward compatibility with ES3 browsers.
// Minifiers like uglify will usually transpile it back if compatibility with ES3 is not enabled.
// This could be improved by inspecting Rollup's "generatedCode" option
const HELPERS = `
export var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
export function getDefaultExportFromCjs (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
export function getDefaultExportFromNamespaceIfPresent (n) {
return n && Object.prototype.hasOwnProperty.call(n, 'default') ? n['default'] : n;
}
export function getDefaultExportFromNamespaceIfNotNamed (n) {
return n && Object.prototype.hasOwnProperty.call(n, 'default') && Object.keys(n).length === 1 ? n['default'] : n;
}
export function getAugmentedNamespace(n) {
if (n.__esModule) return n;
var f = n.default;
if (typeof f == "function") {
var a = function a () {
if (this instanceof a) {
return Reflect.construct(f, arguments, this.constructor);
}
return f.apply(this, arguments);
};
a.prototype = f.prototype;
} else a = {};
Object.defineProperty(a, '__esModule', {value: true});
Object.keys(n).forEach(function (k) {
var d = Object.getOwnPropertyDescriptor(n, k);
Object.defineProperty(a, k, d.get ? d : {
enumerable: true,
get: function () {
return n[k];
}
});
});
return a;
}
`;
function getHelpersModule() {
return HELPERS;
}
function getUnknownRequireProxy(id, requireReturnsDefault) {
if (requireReturnsDefault === true || id.endsWith('.json')) {
return `export { default } from ${JSON.stringify(id)};`;
}
const name = getName(id);
const exported =
requireReturnsDefault === 'auto'
? `import { getDefaultExportFromNamespaceIfNotNamed } from "${HELPERS_ID}"; export default /*@__PURE__*/getDefaultExportFromNamespaceIfNotNamed(${name});`
: requireReturnsDefault === 'preferred'
? `import { getDefaultExportFromNamespaceIfPresent } from "${HELPERS_ID}"; export default /*@__PURE__*/getDefaultExportFromNamespaceIfPresent(${name});`
: !requireReturnsDefault
? `import { getAugmentedNamespace } from "${HELPERS_ID}"; export default /*@__PURE__*/getAugmentedNamespace(${name});`
: `export default ${name};`;
return `import * as ${name} from ${JSON.stringify(id)}; ${exported}`;
}
async function getStaticRequireProxy(id, requireReturnsDefault, loadModule) {
const name = getName(id);
const {
meta: { commonjs: commonjsMeta }
} = await loadModule({ id });
if (!commonjsMeta) {
return getUnknownRequireProxy(id, requireReturnsDefault);
}
if (commonjsMeta.isCommonJS) {
return `export { __moduleExports as default } from ${JSON.stringify(id)};`;
}
if (!requireReturnsDefault) {
return `import { getAugmentedNamespace } from "${HELPERS_ID}"; import * as ${name} from ${JSON.stringify(
id
)}; export default /*@__PURE__*/getAugmentedNamespace(${name});`;
}
if (
requireReturnsDefault !== true &&
(requireReturnsDefault === 'namespace' ||
!commonjsMeta.hasDefaultExport ||
(requireReturnsDefault === 'auto' && commonjsMeta.hasNamedExports))
) {
return `import * as ${name} from ${JSON.stringify(id)}; export default ${name};`;
}
return `export { default } from ${JSON.stringify(id)};`;
}
function getEntryProxy(id, defaultIsModuleExports, getModuleInfo) {
const {
meta: { commonjs: commonjsMeta },
hasDefaultExport
} = getModuleInfo(id);
if (!commonjsMeta || commonjsMeta.isCommonJS !== IS_WRAPPED_COMMONJS) {
const stringifiedId = JSON.stringify(id);
let code = `export * from ${stringifiedId};`;
if (hasDefaultExport) {
code += `export { default } from ${stringifiedId};`;
}
return code;
}
return getEsImportProxy(id, defaultIsModuleExports);
}
function getEsImportProxy(id, defaultIsModuleExports) {
const name = getName(id);
const exportsName = `${name}Exports`;
const requireModule = `require${capitalize(name)}`;
let code =
`import { getDefaultExportFromCjs } from "${HELPERS_ID}";\n` +
`import { __require as ${requireModule} } from ${JSON.stringify(id)};\n` +
`var ${exportsName} = ${requireModule}();\n` +
`export { ${exportsName} as __moduleExports };`;
if (defaultIsModuleExports === true) {
code += `\nexport { ${exportsName} as default };`;
} else {
code += `export default /*@__PURE__*/getDefaultExportFromCjs(${exportsName});`;
}
return {
code,
syntheticNamedExports: '__moduleExports'
};
}
/* eslint-disable no-param-reassign, no-undefined */
function getCandidatesForExtension(resolved, extension) {
return [resolved + extension, `${resolved}${sep$1}index${extension}`];
}
function getCandidates(resolved, extensions) {
return extensions.reduce(
(paths, extension) => paths.concat(getCandidatesForExtension(resolved, extension)),
[resolved]
);
}
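// Illustrative example (with '/' as the platform separator):
// getCandidates('/a/dep', ['.js', '.json']) yields
// ['/a/dep', '/a/dep.js', '/a/dep/index.js', '/a/dep.json', '/a/dep/index.json']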
function resolveExtensions(importee, importer, extensions) {
// not our problem
if (importee[0] !== '.' || !importer) return undefined;
const resolved = resolve$3(dirname$1(importer), importee);
const candidates = getCandidates(resolved, extensions);
for (let i = 0; i < candidates.length; i += 1) {
try {
const stats = statSync$1(candidates[i]);
if (stats.isFile()) return { id: candidates[i] };
} catch (err) {
/* noop */
}
}
return undefined;
}
function getResolveId(extensions, isPossibleCjsId) {
const currentlyResolving = new Map();
return {
/**
* This is a Map of importers to Sets of require sources being resolved at
* the moment by resolveRequireSourcesAndUpdateMeta
*/
currentlyResolving,
async resolveId(importee, importer, resolveOptions) {
const customOptions = resolveOptions.custom;
// All logic below is specific to ES imports.
// Also, if we do not skip this logic for requires that are resolved while
// transforming a commonjs file, it can easily lead to deadlocks.
if (
customOptions &&
customOptions['node-resolve'] &&
customOptions['node-resolve'].isRequire
) {
return null;
}
const currentlyResolvingForParent = currentlyResolving.get(importer);
if (currentlyResolvingForParent && currentlyResolvingForParent.has(importee)) {
this.warn({
code: 'THIS_RESOLVE_WITHOUT_OPTIONS',
message:
'It appears a plugin has implemented a "resolveId" hook that uses "this.resolve" without forwarding the third "options" parameter of "resolveId". This is problematic as it can lead to wrong module resolutions especially for the node-resolve plugin and in certain cases cause early exit errors for the commonjs plugin.\nIn rare cases, this warning can appear if the same file is both imported and required from the same mixed ES/CommonJS module, in which case it can be ignored.',
url: 'https://rollupjs.org/guide/en/#resolveid'
});
return null;
}
if (isWrappedId(importee, WRAPPED_SUFFIX)) {
return unwrapId$1(importee, WRAPPED_SUFFIX);
}
if (
importee.endsWith(ENTRY_SUFFIX) ||
isWrappedId(importee, MODULE_SUFFIX) ||
isWrappedId(importee, EXPORTS_SUFFIX) ||
isWrappedId(importee, PROXY_SUFFIX) ||
isWrappedId(importee, ES_IMPORT_SUFFIX) ||
isWrappedId(importee, EXTERNAL_SUFFIX) ||
importee.startsWith(HELPERS_ID) ||
importee === DYNAMIC_MODULES_ID
) {
return importee;
}
if (importer) {
if (
importer === DYNAMIC_MODULES_ID ||
// Proxies are only importing resolved ids, no need to resolve again
isWrappedId(importer, PROXY_SUFFIX) ||
isWrappedId(importer, ES_IMPORT_SUFFIX) ||
importer.endsWith(ENTRY_SUFFIX)
) {
return importee;
}
if (isWrappedId(importer, EXTERNAL_SUFFIX)) {
// We need to return null for unresolved imports so that the proper warning is shown
if (
!(await this.resolve(
importee,
importer,
Object.assign({ skipSelf: true }, resolveOptions)
))
) {
return null;
}
// For other external imports, we need to make sure they are handled as external
return { id: importee, external: true };
}
}
if (importee.startsWith('\0')) {
return null;
}
// If this is an entry point or ESM import, we need to figure out if the importee is wrapped and
// if that is the case, we need to add a proxy.
const resolved =
(await this.resolve(
importee,
importer,
Object.assign({ skipSelf: true }, resolveOptions)
)) || resolveExtensions(importee, importer, extensions);
// Make sure that even if other plugins resolve again, we ignore our own proxies
if (
!resolved ||
resolved.external ||
resolved.id.endsWith(ENTRY_SUFFIX) ||
isWrappedId(resolved.id, ES_IMPORT_SUFFIX) ||
!isPossibleCjsId(resolved.id)
) {
return resolved;
}
const moduleInfo = await this.load(resolved);
const {
meta: { commonjs: commonjsMeta }
} = moduleInfo;
if (commonjsMeta) {
const { isCommonJS } = commonjsMeta;
if (isCommonJS) {
if (resolveOptions.isEntry) {
moduleInfo.moduleSideEffects = true;
// We must not precede entry proxies with a `\0` as that will mess up relative external resolution
return resolved.id + ENTRY_SUFFIX;
}
if (isCommonJS === IS_WRAPPED_COMMONJS) {
return { id: wrapId$1(resolved.id, ES_IMPORT_SUFFIX), meta: { commonjs: { resolved } } };
}
}
}
return resolved;
}
};
}
function getRequireResolver(extensions, detectCyclesAndConditional, currentlyResolving) {
const knownCjsModuleTypes = Object.create(null);
const requiredIds = Object.create(null);
const unconditionallyRequiredIds = Object.create(null);
const dependencies = Object.create(null);
const getDependencies = (id) => dependencies[id] || (dependencies[id] = new Set());
const isCyclic = (id) => {
const dependenciesToCheck = new Set(getDependencies(id));
for (const dependency of dependenciesToCheck) {
if (dependency === id) {
return true;
}
for (const childDependency of getDependencies(dependency)) {
dependenciesToCheck.add(childDependency);
}
}
return false;
};
// Once a module is listed here, its type (wrapped or not) is fixed and may
// not change for the rest of the current build, to not break already
// transformed modules.
const fullyAnalyzedModules = Object.create(null);
const getTypeForFullyAnalyzedModule = (id) => {
const knownType = knownCjsModuleTypes[id];
if (knownType !== true || !detectCyclesAndConditional || fullyAnalyzedModules[id]) {
return knownType;
}
if (isCyclic(id)) {
return (knownCjsModuleTypes[id] = IS_WRAPPED_COMMONJS);
}
return knownType;
};
const setInitialParentType = (id, initialCommonJSType) => {
// Fully analyzed modules may never change type
if (fullyAnalyzedModules[id]) {
return;
}
knownCjsModuleTypes[id] = initialCommonJSType;
if (
detectCyclesAndConditional &&
knownCjsModuleTypes[id] === true &&
requiredIds[id] &&
!unconditionallyRequiredIds[id]
) {
knownCjsModuleTypes[id] = IS_WRAPPED_COMMONJS;
}
};
const analyzeRequiredModule = async (parentId, resolved, isConditional, loadModule) => {
const childId = resolved.id;
requiredIds[childId] = true;
if (!(isConditional || knownCjsModuleTypes[parentId] === IS_WRAPPED_COMMONJS)) {
unconditionallyRequiredIds[childId] = true;
}
getDependencies(parentId).add(childId);
if (!isCyclic(childId)) {
// This makes sure the current transform handler waits for all direct
// dependencies to be loaded and transformed and therefore for all
// transitive CommonJS dependencies to be loaded as well so that all
// cycles have been found and knownCjsModuleTypes is reliable.
await loadModule(resolved);
}
};
const getTypeForImportedModule = async (resolved, loadModule) => {
if (resolved.id in knownCjsModuleTypes) {
// This handles cyclic ES dependencies
return knownCjsModuleTypes[resolved.id];
}
const {
meta: { commonjs }
} = await loadModule(resolved);
return (commonjs && commonjs.isCommonJS) || false;
};
return {
getWrappedIds: () =>
Object.keys(knownCjsModuleTypes).filter(
(id) => knownCjsModuleTypes[id] === IS_WRAPPED_COMMONJS
),
isRequiredId: (id) => requiredIds[id],
async shouldTransformCachedModule({
id: parentId,
resolvedSources,
meta: { commonjs: parentMeta }
}) {
// We explicitly track ES modules to handle circular imports
if (!(parentMeta && parentMeta.isCommonJS)) knownCjsModuleTypes[parentId] = false;
if (isWrappedId(parentId, ES_IMPORT_SUFFIX)) return false;
const parentRequires = parentMeta && parentMeta.requires;
if (parentRequires) {
setInitialParentType(parentId, parentMeta.initialCommonJSType);
await Promise.all(
parentRequires.map(({ resolved, isConditional }) =>
analyzeRequiredModule(parentId, resolved, isConditional, this.load)
)
);
if (getTypeForFullyAnalyzedModule(parentId) !== parentMeta.isCommonJS) {
return true;
}
for (const {
resolved: { id }
} of parentRequires) {
if (getTypeForFullyAnalyzedModule(id) !== parentMeta.isRequiredCommonJS[id]) {
return true;
}
}
// Now that we decided to go with the cached copy, neither the parent
// module nor any of its children may change types anymore
fullyAnalyzedModules[parentId] = true;
for (const {
resolved: { id }
} of parentRequires) {
fullyAnalyzedModules[id] = true;
}
}
const parentRequireSet = new Set((parentRequires || []).map(({ resolved: { id } }) => id));
return (
await Promise.all(
Object.keys(resolvedSources)
.map((source) => resolvedSources[source])
.filter(({ id, external }) => !(external || parentRequireSet.has(id)))
.map(async (resolved) => {
if (isWrappedId(resolved.id, ES_IMPORT_SUFFIX)) {
return (
(await getTypeForImportedModule(
(
await this.load({ id: resolved.id })
).meta.commonjs.resolved,
this.load
)) !== IS_WRAPPED_COMMONJS
);
}
return (await getTypeForImportedModule(resolved, this.load)) === IS_WRAPPED_COMMONJS;
})
)
).some((shouldTransform) => shouldTransform);
},
/* eslint-disable no-param-reassign */
resolveRequireSourcesAndUpdateMeta:
(rollupContext) => async (parentId, isParentCommonJS, parentMeta, sources) => {
parentMeta.initialCommonJSType = isParentCommonJS;
parentMeta.requires = [];
parentMeta.isRequiredCommonJS = Object.create(null);
setInitialParentType(parentId, isParentCommonJS);
const currentlyResolvingForParent = currentlyResolving.get(parentId) || new Set();
currentlyResolving.set(parentId, currentlyResolvingForParent);
const requireTargets = await Promise.all(
sources.map(async ({ source, isConditional }) => {
// Never analyze or proxy internal modules
if (source.startsWith('\0')) {
return { id: source, allowProxy: false };
}
currentlyResolvingForParent.add(source);
const resolved =
(await rollupContext.resolve(source, parentId, {
custom: { 'node-resolve': { isRequire: true } }
})) || resolveExtensions(source, parentId, extensions);
currentlyResolvingForParent.delete(source);
if (!resolved) {
return { id: wrapId$1(source, EXTERNAL_SUFFIX), allowProxy: false };
}
const childId = resolved.id;
if (resolved.external) {
return { id: wrapId$1(childId, EXTERNAL_SUFFIX), allowProxy: false };
}
parentMeta.requires.push({ resolved, isConditional });
await analyzeRequiredModule(parentId, resolved, isConditional, rollupContext.load);
return { id: childId, allowProxy: true };
})
);
parentMeta.isCommonJS = getTypeForFullyAnalyzedModule(parentId);
fullyAnalyzedModules[parentId] = true;
return requireTargets.map(({ id: dependencyId, allowProxy }, index) => {
// eslint-disable-next-line no-multi-assign
const isCommonJS = (parentMeta.isRequiredCommonJS[dependencyId] =
getTypeForFullyAnalyzedModule(dependencyId));
fullyAnalyzedModules[dependencyId] = true;
return {
source: sources[index].source,
id: allowProxy
? isCommonJS === IS_WRAPPED_COMMONJS
? wrapId$1(dependencyId, WRAPPED_SUFFIX)
: wrapId$1(dependencyId, PROXY_SUFFIX)
: dependencyId,
isCommonJS
};
});
},
isCurrentlyResolving(source, parentId) {
const currentlyResolvingForParent = currentlyResolving.get(parentId);
return currentlyResolvingForParent && currentlyResolvingForParent.has(source);
}
};
}
function validateVersion(actualVersion, peerDependencyVersion, name) {
const versionRegexp = /\^(\d+\.\d+\.\d+)/g;
let minMajor = Infinity;
let minMinor = Infinity;
let minPatch = Infinity;
let foundVersion;
// eslint-disable-next-line no-cond-assign
while ((foundVersion = versionRegexp.exec(peerDependencyVersion))) {
const [foundMajor, foundMinor, foundPatch] = foundVersion[1].split('.').map(Number);
if (foundMajor < minMajor) {
minMajor = foundMajor;
minMinor = foundMinor;
minPatch = foundPatch;
}
}
if (!actualVersion) {
throw new Error(
`Insufficient ${name} version: "@rollup/plugin-commonjs" requires at least ${name}@${minMajor}.${minMinor}.${minPatch}.`
);
}
const [major, minor, patch] = actualVersion.split('.').map(Number);
if (
major < minMajor ||
(major === minMajor && (minor < minMinor || (minor === minMinor && patch < minPatch)))
) {
throw new Error(
`Insufficient ${name} version: "@rollup/plugin-commonjs" requires at least ${name}@${minMajor}.${minMinor}.${minPatch} but found ${name}@${actualVersion}.`
);
}
}
const operators = {
'==': (x) => equals(x.left, x.right, false),
'!=': (x) => not(operators['=='](x)),
'===': (x) => equals(x.left, x.right, true),
'!==': (x) => not(operators['==='](x)),
'!': (x) => isFalsy(x.argument),
'&&': (x) => isTruthy(x.left) && isTruthy(x.right),
'||': (x) => isTruthy(x.left) || isTruthy(x.right)
};
function not(value) {
return value === null ? value : !value;
}
function equals(a, b, strict) {
if (a.type !== b.type) return null;
// eslint-disable-next-line eqeqeq
if (a.type === 'Literal') return strict ? a.value === b.value : a.value == b.value;
return null;
}
function isTruthy(node) {
if (!node) return false;
if (node.type === 'Literal') return !!node.value;
if (node.type === 'ParenthesizedExpression') return isTruthy(node.expression);
if (node.operator in operators) return operators[node.operator](node);
return null;
}
function isFalsy(node) {
return not(isTruthy(node));
}
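// Illustrative note: these helpers only fold literals. The test of `if (false && x)` is
// recognized as falsy so its branch can be skipped, while any non-literal operand makes
// the result null ("unknown").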
function getKeypath(node) {
const parts = [];
while (node.type === 'MemberExpression') {
if (node.computed) return null;
parts.unshift(node.property.name);
// eslint-disable-next-line no-param-reassign
node = node.object;
}
if (node.type !== 'Identifier') return null;
const { name } = node;
parts.unshift(name);
return { name, keypath: parts.join('.') };
}
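// Illustrative example: for the expression `module.exports.foo`, getKeypath returns
// { name: 'module', keypath: 'module.exports.foo' }; computed access such as
// `exports[foo]` returns null.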
const KEY_COMPILED_ESM = '__esModule';
function getDefineCompiledEsmType(node) {
const definedPropertyWithExports = getDefinePropertyCallName(node, 'exports');
const definedProperty =
definedPropertyWithExports || getDefinePropertyCallName(node, 'module.exports');
if (definedProperty && definedProperty.key === KEY_COMPILED_ESM) {
return isTruthy(definedProperty.value)
? definedPropertyWithExports
? 'exports'
: 'module'
: false;
}
return false;
}
function getDefinePropertyCallName(node, targetName) {
const {
callee: { object, property }
} = node;
if (!object || object.type !== 'Identifier' || object.name !== 'Object') return;
if (!property || property.type !== 'Identifier' || property.name !== 'defineProperty') return;
if (node.arguments.length !== 3) return;
const targetNames = targetName.split('.');
const [target, key, value] = node.arguments;
if (targetNames.length === 1) {
if (target.type !== 'Identifier' || target.name !== targetNames[0]) {
return;
}
}
if (targetNames.length === 2) {
if (
target.type !== 'MemberExpression' ||
target.object.name !== targetNames[0] ||
target.property.name !== targetNames[1]
) {
return;
}
}
if (value.type !== 'ObjectExpression' || !value.properties) return;
const valueProperty = value.properties.find((p) => p.key && p.key.name === 'value');
if (!valueProperty || !valueProperty.value) return;
// eslint-disable-next-line consistent-return
return { key: key.value, value: valueProperty.value };
}
function isShorthandProperty(parent) {
return parent && parent.type === 'Property' && parent.shorthand;
}
function wrapCode(magicString, uses, moduleName, exportsName, indentExclusionRanges) {
const args = [];
const passedArgs = [];
if (uses.module) {
args.push('module');
passedArgs.push(moduleName);
}
if (uses.exports) {
args.push('exports');
passedArgs.push(uses.module ? `${moduleName}.exports` : exportsName);
}
magicString
.trim()
.indent('\t', { exclude: indentExclusionRanges })
.prepend(`(function (${args.join(', ')}) {\n`)
// For some reason, this line is only indented correctly when using a
// require-wrapper if we have this leading space
.append(` \n} (${passedArgs.join(', ')}));`);
}
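// Illustrative shape of the wrapper emitted above when both module and exports are used
// (the argument names below are placeholders):
// (function (module, exports) {
//     ...original module code, indented...
// } (fooModule, fooModule.exports));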
function rewriteExportsAndGetExportsBlock(
magicString,
moduleName,
exportsName,
exportedExportsName,
wrapped,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsAssignmentsByName,
topLevelAssignments,
defineCompiledEsmExpressions,
deconflictedExportNames,
code,
HELPERS_NAME,
exportMode,
defaultIsModuleExports,
usesRequireWrapper,
requireName
) {
const exports = [];
const exportDeclarations = [];
if (usesRequireWrapper) {
getExportsWhenUsingRequireWrapper(
magicString,
wrapped,
exportMode,
exports,
moduleExportsAssignments,
exportsAssignmentsByName,
moduleName,
exportsName,
requireName,
defineCompiledEsmExpressions
);
} else if (exportMode === 'replace') {
getExportsForReplacedModuleExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsName,
defaultIsModuleExports,
HELPERS_NAME
);
} else {
if (exportMode === 'module') {
exportDeclarations.push(`var ${exportedExportsName} = ${moduleName}.exports`);
exports.push(`${exportedExportsName} as __moduleExports`);
} else {
exports.push(`${exportsName} as __moduleExports`);
}
if (wrapped) {
exportDeclarations.push(
getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME)
);
} else {
getExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
exportsAssignmentsByName,
deconflictedExportNames,
topLevelAssignments,
moduleName,
exportsName,
exportedExportsName,
defineCompiledEsmExpressions,
HELPERS_NAME,
defaultIsModuleExports,
exportMode
);
}
}
if (exports.length) {
exportDeclarations.push(`export { ${exports.join(', ')} }`);
}
return `\n\n${exportDeclarations.join(';\n')};`;
}
function getExportsWhenUsingRequireWrapper(
magicString,
wrapped,
exportMode,
exports,
moduleExportsAssignments,
exportsAssignmentsByName,
moduleName,
exportsName,
requireName,
defineCompiledEsmExpressions
) {
exports.push(`${requireName} as __require`);
if (wrapped) return;
if (exportMode === 'replace') {
rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, exportsName);
} else {
rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, `${moduleName}.exports`);
// Collect and rewrite named exports
for (const [exportName, { nodes }] of exportsAssignmentsByName) {
for (const { node, type } of nodes) {
magicString.overwrite(
node.start,
node.left.end,
`${
exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName
}.${exportName}`
);
}
}
replaceDefineCompiledEsmExpressionsAndGetIfRestorable(
defineCompiledEsmExpressions,
magicString,
exportMode,
moduleName,
exportsName
);
}
}
function getExportsForReplacedModuleExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsName,
defaultIsModuleExports,
HELPERS_NAME
) {
for (const { left } of moduleExportsAssignments) {
magicString.overwrite(left.start, left.end, exportsName);
}
magicString.prependRight(firstTopLevelModuleExportsAssignment.left.start, 'var ');
exports.push(`${exportsName} as __moduleExports`);
exportDeclarations.push(
getDefaultExportDeclaration(exportsName, defaultIsModuleExports, HELPERS_NAME)
);
}
function getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME) {
return `export default ${
defaultIsModuleExports === true
? exportedExportsName
: defaultIsModuleExports === false
? `${exportedExportsName}.default`
: `/*@__PURE__*/${HELPERS_NAME}.getDefaultExportFromCjs(${exportedExportsName})`
}`;
}
function getExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
exportsAssignmentsByName,
deconflictedExportNames,
topLevelAssignments,
moduleName,
exportsName,
exportedExportsName,
defineCompiledEsmExpressions,
HELPERS_NAME,
defaultIsModuleExports,
exportMode
) {
let deconflictedDefaultExportName;
// Collect and rewrite module.exports assignments
for (const { left } of moduleExportsAssignments) {
magicString.overwrite(left.start, left.end, `${moduleName}.exports`);
}
// Collect and rewrite named exports
for (const [exportName, { nodes }] of exportsAssignmentsByName) {
const deconflicted = deconflictedExportNames[exportName];
let needsDeclaration = true;
for (const { node, type } of nodes) {
let replacement = `${deconflicted} = ${
exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName
}.${exportName}`;
if (needsDeclaration && topLevelAssignments.has(node)) {
replacement = `var ${replacement}`;
needsDeclaration = false;
}
magicString.overwrite(node.start, node.left.end, replacement);
}
if (needsDeclaration) {
magicString.prepend(`var ${deconflicted};\n`);
}
if (exportName === 'default') {
deconflictedDefaultExportName = deconflicted;
} else {
exports.push(exportName === deconflicted ? exportName : `${deconflicted} as ${exportName}`);
}
}
const isRestorableCompiledEsm = replaceDefineCompiledEsmExpressionsAndGetIfRestorable(
defineCompiledEsmExpressions,
magicString,
exportMode,
moduleName,
exportsName
);
if (
defaultIsModuleExports === false ||
(defaultIsModuleExports === 'auto' &&
isRestorableCompiledEsm &&
moduleExportsAssignments.length === 0)
) {
// If there is no deconflictedDefaultExportName, then we use the namespace as
// fallback because there can be no "default" property on the namespace
exports.push(`${deconflictedDefaultExportName || exportedExportsName} as default`);
} else if (
defaultIsModuleExports === true ||
(!isRestorableCompiledEsm && moduleExportsAssignments.length === 0)
) {
exports.push(`${exportedExportsName} as default`);
} else {
exportDeclarations.push(
getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME)
);
}
}
function rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, exportsName) {
for (const { left } of moduleExportsAssignments) {
magicString.overwrite(left.start, left.end, exportsName);
}
}
function replaceDefineCompiledEsmExpressionsAndGetIfRestorable(
defineCompiledEsmExpressions,
magicString,
exportMode,
moduleName,
exportsName
) {
let isRestorableCompiledEsm = false;
for (const { node, type } of defineCompiledEsmExpressions) {
isRestorableCompiledEsm = true;
const moduleExportsExpression =
node.type === 'CallExpression' ? node.arguments[0] : node.left.object;
magicString.overwrite(
moduleExportsExpression.start,
moduleExportsExpression.end,
exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName
);
}
return isRestorableCompiledEsm;
}
function isRequireExpression(node, scope) {
if (!node) return false;
if (node.type !== 'CallExpression') return false;
// Weird case of `require()` or `module.require()` without arguments
if (node.arguments.length === 0) return false;
return isRequire(node.callee, scope);
}
function isRequire(node, scope) {
return (
(node.type === 'Identifier' && node.name === 'require' && !scope.contains('require')) ||
(node.type === 'MemberExpression' && isModuleRequire(node, scope))
);
}
function isModuleRequire({ object, property }, scope) {
return (
object.type === 'Identifier' &&
object.name === 'module' &&
property.type === 'Identifier' &&
property.name === 'require' &&
!scope.contains('module')
);
}
function hasDynamicArguments(node) {
return (
node.arguments.length > 1 ||
(node.arguments[0].type !== 'Literal' &&
(node.arguments[0].type !== 'TemplateLiteral' || node.arguments[0].expressions.length > 0))
);
}
const reservedMethod = { resolve: true, cache: true, main: true };
function isNodeRequirePropertyAccess(parent) {
return parent && parent.property && reservedMethod[parent.property.name];
}
function getRequireStringArg(node) {
return node.arguments[0].type === 'Literal'
? node.arguments[0].value
: node.arguments[0].quasis[0].value.cooked;
}
function getRequireHandlers() {
const requireExpressions = [];
function addRequireExpression(
sourceId,
node,
scope,
usesReturnValue,
isInsideTryBlock,
isInsideConditional,
toBeRemoved
) {
requireExpressions.push({
sourceId,
node,
scope,
usesReturnValue,
isInsideTryBlock,
isInsideConditional,
toBeRemoved
});
}
async function rewriteRequireExpressionsAndGetImportBlock(
magicString,
topLevelDeclarations,
reassignedNames,
helpersName,
dynamicRequireName,
moduleName,
exportsName,
id,
exportMode,
resolveRequireSourcesAndUpdateMeta,
needsRequireWrapper,
isEsModule,
isDynamicRequireModulesEnabled,
getIgnoreTryCatchRequireStatementMode,
commonjsMeta
) {
const imports = [];
imports.push(`import * as ${helpersName} from "${HELPERS_ID}"`);
if (dynamicRequireName) {
imports.push(
`import { ${
isDynamicRequireModulesEnabled ? CREATE_COMMONJS_REQUIRE_EXPORT : COMMONJS_REQUIRE_EXPORT
} as ${dynamicRequireName} } from "${DYNAMIC_MODULES_ID}"`
);
}
if (exportMode === 'module') {
imports.push(
`import { __module as ${moduleName} } from ${JSON.stringify(wrapId$1(id, MODULE_SUFFIX))}`,
`var ${exportsName} = ${moduleName}.exports`
);
} else if (exportMode === 'exports') {
imports.push(
`import { __exports as ${exportsName} } from ${JSON.stringify(wrapId$1(id, EXPORTS_SUFFIX))}`
);
}
const requiresBySource = collectSources(requireExpressions);
const requireTargets = await resolveRequireSourcesAndUpdateMeta(
id,
needsRequireWrapper ? IS_WRAPPED_COMMONJS : !isEsModule,
commonjsMeta,
Object.keys(requiresBySource).map((source) => {
return {
source,
isConditional: requiresBySource[source].every((require) => require.isInsideConditional)
};
})
);
processRequireExpressions(
imports,
requireTargets,
requiresBySource,
getIgnoreTryCatchRequireStatementMode,
magicString
);
return imports.length ? `${imports.join(';\n')};\n\n` : '';
}
return {
addRequireExpression,
rewriteRequireExpressionsAndGetImportBlock
};
}
function collectSources(requireExpressions) {
const requiresBySource = Object.create(null);
for (const requireExpression of requireExpressions) {
const { sourceId } = requireExpression;
if (!requiresBySource[sourceId]) {
requiresBySource[sourceId] = [];
}
const requires = requiresBySource[sourceId];
requires.push(requireExpression);
}
return requiresBySource;
}
function processRequireExpressions(
imports,
requireTargets,
requiresBySource,
getIgnoreTryCatchRequireStatementMode,
magicString
) {
const generateRequireName = getGenerateRequireName();
for (const { source, id: resolvedId, isCommonJS } of requireTargets) {
const requires = requiresBySource[source];
const name = generateRequireName(requires);
let usesRequired = false;
let needsImport = false;
for (const { node, usesReturnValue, toBeRemoved, isInsideTryBlock } of requires) {
const { canConvertRequire, shouldRemoveRequire } =
isInsideTryBlock && isWrappedId(resolvedId, EXTERNAL_SUFFIX)
? getIgnoreTryCatchRequireStatementMode(source)
: { canConvertRequire: true, shouldRemoveRequire: false };
if (shouldRemoveRequire) {
if (usesReturnValue) {
magicString.overwrite(node.start, node.end, 'undefined');
} else {
magicString.remove(toBeRemoved.start, toBeRemoved.end);
}
} else if (canConvertRequire) {
needsImport = true;
if (isCommonJS === IS_WRAPPED_COMMONJS) {
magicString.overwrite(node.start, node.end, `${name}()`);
} else if (usesReturnValue) {
usesRequired = true;
magicString.overwrite(node.start, node.end, name);
} else {
magicString.remove(toBeRemoved.start, toBeRemoved.end);
}
}
}
if (needsImport) {
if (isCommonJS === IS_WRAPPED_COMMONJS) {
imports.push(`import { __require as ${name} } from ${JSON.stringify(resolvedId)}`);
} else {
imports.push(`import ${usesRequired ? `${name} from ` : ''}${JSON.stringify(resolvedId)}`);
}
}
}
}
function getGenerateRequireName() {
let uid = 0;
return (requires) => {
let name;
const hasNameConflict = ({ scope }) => scope.contains(name);
do {
name = `require$$${uid}`;
uid += 1;
} while (requires.some(hasNameConflict));
return name;
};
}
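// Illustrative note: the generated identifiers are require$$0, require$$1, ..., skipping
// any name that is already bound in the scope of one of the rewritten require calls.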
/* eslint-disable no-param-reassign, no-shadow, no-underscore-dangle, no-continue */
const exportsPattern = /^(?:module\.)?exports(?:\.([a-zA-Z_$][a-zA-Z_$0-9]*))?$/;
const functionType = /^(?:FunctionDeclaration|FunctionExpression|ArrowFunctionExpression)$/;
// There are three different types of CommonJS modules, described by their
// "exportMode":
// - exports: Only assignments to (module.)exports properties
// - replace: A single assignment to module.exports itself
// - module: Anything else
// Special cases:
// - usesRequireWrapper
// - isWrapped
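// Concrete examples (illustrative, assuming no wrapping is needed): a module containing
// only `exports.foo = 1` gets "exports" mode, one containing only a top-level
// `module.exports = {...}` gets "replace", and one mixing both (or assigning
// module.exports in nested code) gets "module".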
async function transformCommonjs(
parse,
code,
id,
isEsModule,
ignoreGlobal,
ignoreRequire,
ignoreDynamicRequires,
getIgnoreTryCatchRequireStatementMode,
sourceMap,
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
astCache,
defaultIsModuleExports,
needsRequireWrapper,
resolveRequireSourcesAndUpdateMeta,
isRequired,
checkDynamicRequire,
commonjsMeta
) {
const ast = astCache || tryParse(parse, code, id);
const magicString = new MagicString$1(code);
const uses = {
module: false,
exports: false,
global: false,
require: false
};
const virtualDynamicRequirePath =
isDynamicRequireModulesEnabled && getVirtualPathForDynamicRequirePath(dirname$1(id), commonDir);
let scope = attachScopes(ast, 'scope');
let lexicalDepth = 0;
let programDepth = 0;
let classBodyDepth = 0;
let currentTryBlockEnd = null;
let shouldWrap = false;
const globals = new Set();
// A conditionalNode is a node for which execution is not guaranteed. If such a node is a require
// or contains nested requires, those should be handled as function calls unless there is an
// unconditional require elsewhere.
let currentConditionalNodeEnd = null;
const conditionalNodes = new Set();
const { addRequireExpression, rewriteRequireExpressionsAndGetImportBlock } = getRequireHandlers();
// See which names are assigned to. This is necessary to prevent
// illegally replacing `var foo = require('foo')` with `import foo from 'foo'`,
// where `foo` is later reassigned. (This happens in the wild. CommonJS, sigh)
const reassignedNames = new Set();
const topLevelDeclarations = [];
const skippedNodes = new Set();
const moduleAccessScopes = new Set([scope]);
const exportsAccessScopes = new Set([scope]);
const moduleExportsAssignments = [];
let firstTopLevelModuleExportsAssignment = null;
const exportsAssignmentsByName = new Map();
const topLevelAssignments = new Set();
const topLevelDefineCompiledEsmExpressions = [];
const replacedGlobal = [];
const replacedDynamicRequires = [];
const importedVariables = new Set();
const indentExclusionRanges = [];
walk$4(ast, {
enter(node, parent) {
if (skippedNodes.has(node)) {
this.skip();
return;
}
if (currentTryBlockEnd !== null && node.start > currentTryBlockEnd) {
currentTryBlockEnd = null;
}
if (currentConditionalNodeEnd !== null && node.start > currentConditionalNodeEnd) {
currentConditionalNodeEnd = null;
}
if (currentConditionalNodeEnd === null && conditionalNodes.has(node)) {
currentConditionalNodeEnd = node.end;
}
programDepth += 1;
if (node.scope) ({ scope } = node);
if (functionType.test(node.type)) lexicalDepth += 1;
if (sourceMap) {
magicString.addSourcemapLocation(node.start);
magicString.addSourcemapLocation(node.end);
}
// eslint-disable-next-line default-case
switch (node.type) {
case 'AssignmentExpression':
if (node.left.type === 'MemberExpression') {
const flattened = getKeypath(node.left);
if (!flattened || scope.contains(flattened.name)) return;
const exportsPatternMatch = exportsPattern.exec(flattened.keypath);
if (!exportsPatternMatch || flattened.keypath === 'exports') return;
const [, exportName] = exportsPatternMatch;
uses[flattened.name] = true;
// we're dealing with `module.exports = ...` or `[module.]exports.foo = ...`
if (flattened.keypath === 'module.exports') {
moduleExportsAssignments.push(node);
if (programDepth > 3) {
moduleAccessScopes.add(scope);
} else if (!firstTopLevelModuleExportsAssignment) {
firstTopLevelModuleExportsAssignment = node;
}
} else if (exportName === KEY_COMPILED_ESM) {
if (programDepth > 3) {
shouldWrap = true;
} else {
// The "type" is either "module" or "exports" to discern
// assignments to module.exports vs exports if needed
topLevelDefineCompiledEsmExpressions.push({ node, type: flattened.name });
}
} else {
const exportsAssignments = exportsAssignmentsByName.get(exportName) || {
nodes: [],
scopes: new Set()
};
exportsAssignments.nodes.push({ node, type: flattened.name });
exportsAssignments.scopes.add(scope);
exportsAccessScopes.add(scope);
exportsAssignmentsByName.set(exportName, exportsAssignments);
if (programDepth <= 3) {
topLevelAssignments.add(node);
}
}
skippedNodes.add(node.left);
} else {
for (const name of extractAssignedNames(node.left)) {
reassignedNames.add(name);
}
}
return;
case 'CallExpression': {
const defineCompiledEsmType = getDefineCompiledEsmType(node);
if (defineCompiledEsmType) {
if (programDepth === 3 && parent.type === 'ExpressionStatement') {
// skip special handling for [module.]exports until we know we render this
skippedNodes.add(node.arguments[0]);
topLevelDefineCompiledEsmExpressions.push({ node, type: defineCompiledEsmType });
} else {
shouldWrap = true;
}
return;
}
// Transform require.resolve
if (
isDynamicRequireModulesEnabled &&
node.callee.object &&
isRequire(node.callee.object, scope) &&
node.callee.property.name === 'resolve'
) {
checkDynamicRequire(node.start);
uses.require = true;
const requireNode = node.callee.object;
replacedDynamicRequires.push(requireNode);
skippedNodes.add(node.callee);
return;
}
if (!isRequireExpression(node, scope)) {
const keypath = getKeypath(node.callee);
if (keypath && importedVariables.has(keypath.name)) {
// Heuristic to deoptimize requires after a required function has been called
currentConditionalNodeEnd = Infinity;
}
return;
}
skippedNodes.add(node.callee);
uses.require = true;
if (hasDynamicArguments(node)) {
if (isDynamicRequireModulesEnabled) {
checkDynamicRequire(node.start);
}
if (!ignoreDynamicRequires) {
replacedDynamicRequires.push(node.callee);
}
return;
}
const requireStringArg = getRequireStringArg(node);
if (!ignoreRequire(requireStringArg)) {
const usesReturnValue = parent.type !== 'ExpressionStatement';
const toBeRemoved =
parent.type === 'ExpressionStatement' &&
(!currentConditionalNodeEnd ||
// We should completely remove requires directly in a try-catch
// so that Rollup can clean up the try-catch
(currentTryBlockEnd !== null && currentTryBlockEnd < currentConditionalNodeEnd))
? parent
: node;
addRequireExpression(
requireStringArg,
node,
scope,
usesReturnValue,
currentTryBlockEnd !== null,
currentConditionalNodeEnd !== null,
toBeRemoved
);
if (parent.type === 'VariableDeclarator' && parent.id.type === 'Identifier') {
for (const name of extractAssignedNames(parent.id)) {
importedVariables.add(name);
}
}
}
return;
}
case 'ClassBody':
classBodyDepth += 1;
return;
case 'ConditionalExpression':
case 'IfStatement':
// skip dead branches
if (isFalsy(node.test)) {
skippedNodes.add(node.consequent);
} else if (isTruthy(node.test)) {
if (node.alternate) {
skippedNodes.add(node.alternate);
}
} else {
conditionalNodes.add(node.consequent);
if (node.alternate) {
conditionalNodes.add(node.alternate);
}
}
return;
case 'ArrowFunctionExpression':
case 'FunctionDeclaration':
case 'FunctionExpression':
// requires in functions should be conditional unless the function is an IIFE
if (
currentConditionalNodeEnd === null &&
!(parent.type === 'CallExpression' && parent.callee === node)
) {
currentConditionalNodeEnd = node.end;
}
return;
case 'Identifier': {
const { name } = node;
if (!isReference(node, parent) || scope.contains(name)) return;
switch (name) {
case 'require':
uses.require = true;
if (isNodeRequirePropertyAccess(parent)) {
return;
}
if (!ignoreDynamicRequires) {
if (isShorthandProperty(parent)) {
magicString.prependRight(node.start, 'require: ');
}
replacedDynamicRequires.push(node);
}
return;
case 'module':
case 'exports':
shouldWrap = true;
uses[name] = true;
return;
case 'global':
uses.global = true;
if (!ignoreGlobal) {
replacedGlobal.push(node);
}
return;
case 'define':
magicString.overwrite(node.start, node.end, 'undefined', {
storeName: true
});
return;
default:
globals.add(name);
return;
}
}
case 'LogicalExpression':
// skip dead branches
if (node.operator === '&&') {
if (isFalsy(node.left)) {
skippedNodes.add(node.right);
} else if (!isTruthy(node.left)) {
conditionalNodes.add(node.right);
}
} else if (node.operator === '||') {
if (isTruthy(node.left)) {
skippedNodes.add(node.right);
} else if (!isFalsy(node.left)) {
conditionalNodes.add(node.right);
}
}
return;
case 'MemberExpression':
if (!isDynamicRequireModulesEnabled && isModuleRequire(node, scope)) {
uses.require = true;
replacedDynamicRequires.push(node);
skippedNodes.add(node.object);
skippedNodes.add(node.property);
}
return;
case 'ReturnStatement':
// if top-level return, we need to wrap it
if (lexicalDepth === 0) {
shouldWrap = true;
}
return;
case 'ThisExpression':
// rewrite top-level `this` as `commonjsHelpers.commonjsGlobal`
if (lexicalDepth === 0 && !classBodyDepth) {
uses.global = true;
if (!ignoreGlobal) {
replacedGlobal.push(node);
}
}
return;
case 'TryStatement':
if (currentTryBlockEnd === null) {
currentTryBlockEnd = node.block.end;
}
if (currentConditionalNodeEnd === null) {
currentConditionalNodeEnd = node.end;
}
return;
case 'UnaryExpression':
// rewrite `typeof module`, `typeof module.exports` and `typeof exports` (https://github.com/rollup/rollup-plugin-commonjs/issues/151)
if (node.operator === 'typeof') {
const flattened = getKeypath(node.argument);
if (!flattened) return;
if (scope.contains(flattened.name)) return;
if (
!isEsModule &&
(flattened.keypath === 'module.exports' ||
flattened.keypath === 'module' ||
flattened.keypath === 'exports')
) {
magicString.overwrite(node.start, node.end, `'object'`, {
storeName: false
});
}
}
return;
case 'VariableDeclaration':
if (!scope.parent) {
topLevelDeclarations.push(node);
}
return;
case 'TemplateElement':
if (node.value.raw.includes('\n')) {
indentExclusionRanges.push([node.start, node.end]);
}
}
},
leave(node) {
programDepth -= 1;
if (node.scope) scope = scope.parent;
if (functionType.test(node.type)) lexicalDepth -= 1;
if (node.type === 'ClassBody') classBodyDepth -= 1;
}
});
const nameBase = getName(id);
const exportsName = deconflict([...exportsAccessScopes], globals, nameBase);
const moduleName = deconflict([...moduleAccessScopes], globals, `${nameBase}Module`);
const requireName = deconflict([scope], globals, `require${capitalize(nameBase)}`);
const isRequiredName = deconflict([scope], globals, `hasRequired${capitalize(nameBase)}`);
const helpersName = deconflict([scope], globals, 'commonjsHelpers');
const dynamicRequireName =
replacedDynamicRequires.length > 0 &&
deconflict(
[scope],
globals,
isDynamicRequireModulesEnabled ? CREATE_COMMONJS_REQUIRE_EXPORT : COMMONJS_REQUIRE_EXPORT
);
const deconflictedExportNames = Object.create(null);
for (const [exportName, { scopes }] of exportsAssignmentsByName) {
deconflictedExportNames[exportName] = deconflict([...scopes], globals, exportName);
}
for (const node of replacedGlobal) {
magicString.overwrite(node.start, node.end, `${helpersName}.commonjsGlobal`, {
storeName: true
});
}
for (const node of replacedDynamicRequires) {
magicString.overwrite(
node.start,
node.end,
isDynamicRequireModulesEnabled
? `${dynamicRequireName}(${JSON.stringify(virtualDynamicRequirePath)})`
: dynamicRequireName,
{
contentOnly: true,
storeName: true
}
);
}
// We cannot wrap ES/mixed modules
shouldWrap = !isEsModule && (shouldWrap || (uses.exports && moduleExportsAssignments.length > 0));
if (
!(
shouldWrap ||
isRequired ||
needsRequireWrapper ||
uses.module ||
uses.exports ||
uses.require ||
topLevelDefineCompiledEsmExpressions.length > 0
) &&
(ignoreGlobal || !uses.global)
) {
return { meta: { commonjs: { isCommonJS: false } } };
}
let leadingComment = '';
if (code.startsWith('/*')) {
const commentEnd = code.indexOf('*/', 2) + 2;
leadingComment = `${code.slice(0, commentEnd)}\n`;
magicString.remove(0, commentEnd).trim();
}
const exportMode = isEsModule
? 'none'
: shouldWrap
? uses.module
? 'module'
: 'exports'
: firstTopLevelModuleExportsAssignment
? exportsAssignmentsByName.size === 0 && topLevelDefineCompiledEsmExpressions.length === 0
? 'replace'
: 'module'
: moduleExportsAssignments.length === 0
? 'exports'
: 'module';
const exportedExportsName =
exportMode === 'module' ? deconflict([], globals, `${nameBase}Exports`) : exportsName;
const importBlock = await rewriteRequireExpressionsAndGetImportBlock(
magicString,
topLevelDeclarations,
reassignedNames,
helpersName,
dynamicRequireName,
moduleName,
exportsName,
id,
exportMode,
resolveRequireSourcesAndUpdateMeta,
needsRequireWrapper,
isEsModule,
isDynamicRequireModulesEnabled,
getIgnoreTryCatchRequireStatementMode,
commonjsMeta
);
const usesRequireWrapper = commonjsMeta.isCommonJS === IS_WRAPPED_COMMONJS;
const exportBlock = isEsModule
? ''
: rewriteExportsAndGetExportsBlock(
magicString,
moduleName,
exportsName,
exportedExportsName,
shouldWrap,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsAssignmentsByName,
topLevelAssignments,
topLevelDefineCompiledEsmExpressions,
deconflictedExportNames,
code,
helpersName,
exportMode,
defaultIsModuleExports,
usesRequireWrapper,
requireName
);
if (shouldWrap) {
wrapCode(magicString, uses, moduleName, exportsName, indentExclusionRanges);
}
if (usesRequireWrapper) {
magicString.trim().indent('\t', {
exclude: indentExclusionRanges
});
const exported = exportMode === 'module' ? `${moduleName}.exports` : exportsName;
magicString.prepend(
`var ${isRequiredName};
function ${requireName} () {
\tif (${isRequiredName}) return ${exported};
\t${isRequiredName} = 1;
`
).append(`
\treturn ${exported};
}`);
if (exportMode === 'replace') {
magicString.prepend(`var ${exportsName};\n`);
}
}
magicString
.trim()
.prepend(leadingComment + importBlock)
.append(exportBlock);
return {
code: magicString.toString(),
map: sourceMap ? magicString.generateMap() : null,
syntheticNamedExports: isEsModule || usesRequireWrapper ? false : '__moduleExports',
meta: { commonjs: commonjsMeta }
};
}
const PLUGIN_NAME = 'commonjs';
function commonjs(options = {}) {
const {
ignoreGlobal,
ignoreDynamicRequires,
requireReturnsDefault: requireReturnsDefaultOption,
defaultIsModuleExports: defaultIsModuleExportsOption,
esmExternals
} = options;
const extensions = options.extensions || ['.js'];
const filter = createFilter$1(options.include, options.exclude);
const isPossibleCjsId = (id) => {
const extName = extname(id);
return extName === '.cjs' || (extensions.includes(extName) && filter(id));
};
const { strictRequiresFilter, detectCyclesAndConditional } = getStrictRequiresFilter(options);
const getRequireReturnsDefault =
typeof requireReturnsDefaultOption === 'function'
? requireReturnsDefaultOption
: () => requireReturnsDefaultOption;
let esmExternalIds;
const isEsmExternal =
typeof esmExternals === 'function'
? esmExternals
: Array.isArray(esmExternals)
? ((esmExternalIds = new Set(esmExternals)), (id) => esmExternalIds.has(id))
: () => esmExternals;
const getDefaultIsModuleExports =
typeof defaultIsModuleExportsOption === 'function'
? defaultIsModuleExportsOption
: () =>
typeof defaultIsModuleExportsOption === 'boolean' ? defaultIsModuleExportsOption : 'auto';
const dynamicRequireRoot =
typeof options.dynamicRequireRoot === 'string'
? resolve$3(options.dynamicRequireRoot)
: process.cwd();
const { commonDir, dynamicRequireModules } = getDynamicRequireModules(
options.dynamicRequireTargets,
dynamicRequireRoot
);
const isDynamicRequireModulesEnabled = dynamicRequireModules.size > 0;
const ignoreRequire =
typeof options.ignore === 'function'
? options.ignore
: Array.isArray(options.ignore)
? (id) => options.ignore.includes(id)
: () => false;
const getIgnoreTryCatchRequireStatementMode = (id) => {
const mode =
typeof options.ignoreTryCatch === 'function'
? options.ignoreTryCatch(id)
: Array.isArray(options.ignoreTryCatch)
? options.ignoreTryCatch.includes(id)
: typeof options.ignoreTryCatch !== 'undefined'
? options.ignoreTryCatch
: true;
return {
canConvertRequire: mode !== 'remove' && mode !== true,
shouldRemoveRequire: mode === 'remove'
};
};
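// Illustrative sketch (not part of the plugin code): how the `ignoreTryCatch` option values map to
// the mode flags returned above, assuming the option is passed through unchanged:
//   ignoreTryCatch: true  (default) -> { canConvertRequire: false, shouldRemoveRequire: false }
//   ignoreTryCatch: false           -> { canConvertRequire: true,  shouldRemoveRequire: false }
//   ignoreTryCatch: 'remove'        -> { canConvertRequire: false, shouldRemoveRequire: true }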
const { currentlyResolving, resolveId } = getResolveId(extensions, isPossibleCjsId);
const sourceMap = options.sourceMap !== false;
// Initialized in buildStart
let requireResolver;
function transformAndCheckExports(code, id) {
const normalizedId = normalizePathSlashes(id);
const { isEsModule, hasDefaultExport, hasNamedExports, ast } = analyzeTopLevelStatements(
this.parse,
code,
id
);
const commonjsMeta = this.getModuleInfo(id).meta.commonjs || {};
if (hasDefaultExport) {
commonjsMeta.hasDefaultExport = true;
}
if (hasNamedExports) {
commonjsMeta.hasNamedExports = true;
}
if (
!dynamicRequireModules.has(normalizedId) &&
(!(hasCjsKeywords(code, ignoreGlobal) || requireResolver.isRequiredId(id)) ||
(isEsModule && !options.transformMixedEsModules))
) {
commonjsMeta.isCommonJS = false;
return { meta: { commonjs: commonjsMeta } };
}
const needsRequireWrapper =
!isEsModule && (dynamicRequireModules.has(normalizedId) || strictRequiresFilter(id));
const checkDynamicRequire = (position) => {
const normalizedDynamicRequireRoot = normalizePathSlashes(dynamicRequireRoot);
if (normalizedId.indexOf(normalizedDynamicRequireRoot) !== 0) {
this.error(
{
code: 'DYNAMIC_REQUIRE_OUTSIDE_ROOT',
normalizedId,
normalizedDynamicRequireRoot,
message: `"${normalizedId}" contains dynamic require statements but it is not within the current dynamicRequireRoot "${normalizedDynamicRequireRoot}". You should set dynamicRequireRoot to "${dirname$1(
normalizedId
)}" or one of its parent directories.`
},
position
);
}
};
return transformCommonjs(
this.parse,
code,
id,
isEsModule,
ignoreGlobal || isEsModule,
ignoreRequire,
ignoreDynamicRequires && !isDynamicRequireModulesEnabled,
getIgnoreTryCatchRequireStatementMode,
sourceMap,
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
ast,
getDefaultIsModuleExports(id),
needsRequireWrapper,
requireResolver.resolveRequireSourcesAndUpdateMeta(this),
requireResolver.isRequiredId(id),
checkDynamicRequire,
commonjsMeta
);
}
return {
name: PLUGIN_NAME,
version: version$3,
options(rawOptions) {
// We inject the resolver at the beginning so that "catch-all" resolvers like node-resolve
// do not prevent our plugin from resolving entry points to proxies.
const plugins = Array.isArray(rawOptions.plugins)
? [...rawOptions.plugins]
: rawOptions.plugins
? [rawOptions.plugins]
: [];
plugins.unshift({
name: 'commonjs--resolver',
resolveId
});
return { ...rawOptions, plugins };
},
buildStart({ plugins }) {
validateVersion(this.meta.rollupVersion, peerDependencies.rollup, 'rollup');
const nodeResolve = plugins.find(({ name }) => name === 'node-resolve');
if (nodeResolve) {
validateVersion(nodeResolve.version, '^13.0.6', '@rollup/plugin-node-resolve');
}
if (options.namedExports != null) {
this.warn(
'The namedExports option from "@rollup/plugin-commonjs" is deprecated. Named exports are now handled automatically.'
);
}
requireResolver = getRequireResolver(
extensions,
detectCyclesAndConditional,
currentlyResolving
);
},
buildEnd() {
if (options.strictRequires === 'debug') {
const wrappedIds = requireResolver.getWrappedIds();
if (wrappedIds.length) {
this.warn({
code: 'WRAPPED_IDS',
ids: wrappedIds,
message: `The commonjs plugin automatically wrapped the following files:\n[\n${wrappedIds
.map((id) => `\t${JSON.stringify(relative$1(process.cwd(), id))}`)
.join(',\n')}\n]`
});
} else {
this.warn({
code: 'WRAPPED_IDS',
ids: wrappedIds,
message: 'The commonjs plugin did not wrap any files.'
});
}
}
},
load(id) {
if (id === HELPERS_ID) {
return getHelpersModule();
}
if (isWrappedId(id, MODULE_SUFFIX)) {
const name = getName(unwrapId$1(id, MODULE_SUFFIX));
return {
code: `var ${name} = {exports: {}}; export {${name} as __module}`,
meta: { commonjs: { isCommonJS: false } }
};
}
if (isWrappedId(id, EXPORTS_SUFFIX)) {
const name = getName(unwrapId$1(id, EXPORTS_SUFFIX));
return {
code: `var ${name} = {}; export {${name} as __exports}`,
meta: { commonjs: { isCommonJS: false } }
};
}
if (isWrappedId(id, EXTERNAL_SUFFIX)) {
const actualId = unwrapId$1(id, EXTERNAL_SUFFIX);
return getUnknownRequireProxy(
actualId,
isEsmExternal(actualId) ? getRequireReturnsDefault(actualId) : true
);
}
// entry suffix is just appended to not mess up relative external resolution
if (id.endsWith(ENTRY_SUFFIX)) {
const actualId = id.slice(0, -ENTRY_SUFFIX.length);
return getEntryProxy(actualId, getDefaultIsModuleExports(actualId), this.getModuleInfo);
}
if (isWrappedId(id, ES_IMPORT_SUFFIX)) {
const actualId = unwrapId$1(id, ES_IMPORT_SUFFIX);
return getEsImportProxy(actualId, getDefaultIsModuleExports(actualId));
}
if (id === DYNAMIC_MODULES_ID) {
return getDynamicModuleRegistry(
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
ignoreDynamicRequires
);
}
if (isWrappedId(id, PROXY_SUFFIX)) {
const actualId = unwrapId$1(id, PROXY_SUFFIX);
return getStaticRequireProxy(actualId, getRequireReturnsDefault(actualId), this.load);
}
return null;
},
shouldTransformCachedModule(...args) {
return requireResolver.shouldTransformCachedModule.call(this, ...args);
},
transform(code, id) {
if (!isPossibleCjsId(id)) return null;
try {
return transformAndCheckExports.call(this, code, id);
} catch (err) {
return this.error(err, err.loc);
}
}
};
}
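// Illustrative usage sketch (not part of the bundle): wiring this plugin into a Rollup config with a
// few of the options read above. The option values are hypothetical examples, not defaults.
//
//   import commonjs from '@rollup/plugin-commonjs';
//
//   export default {
//     input: 'src/main.js',
//     plugins: [
//       commonjs({
//         extensions: ['.js', '.cjs'],
//         transformMixedEsModules: true,
//         ignoreTryCatch: 'remove'
//       })
//     ]
//   };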
const comma = ','.charCodeAt(0);
const chars$1 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars$1.length; i++) {
const c = chars$1.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
function decode(mappings) {
const state = new Int32Array(5);
const decoded = [];
let index = 0;
do {
const semi = indexOf(mappings, index);
const line = [];
let sorted = true;
let lastCol = 0;
state[0] = 0;
for (let i = index; i < semi; i++) {
let seg;
i = decodeInteger(mappings, i, state, 0); // genColumn
const col = state[0];
if (col < lastCol)
sorted = false;
lastCol = col;
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
i = decodeInteger(mappings, i, state, 2); // sourceLine
i = decodeInteger(mappings, i, state, 3); // sourceColumn
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 4); // namesIndex
seg = [col, state[1], state[2], state[3], state[4]];
}
else {
seg = [col, state[1], state[2], state[3]];
}
}
else {
seg = [col];
}
line.push(seg);
}
if (!sorted)
sort(line);
decoded.push(line);
index = semi + 1;
} while (index <= mappings.length);
return decoded;
}
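// Illustrative sketch (worked out by hand, not executed by the bundle): decoding a tiny VLQ
// `mappings` string with the `decode` helper above.
//
//   decode('AAAA;;CACA');
//   // -> [ [[0, 0, 0, 0]],    generated line 0: column 0 maps to source 0, line 0, column 0
//   //      [],                generated line 1: no segments
//   //      [[1, 0, 1, 0]] ]   generated line 2: column 1 maps to source 0, line 1, column 0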
function indexOf(mappings, index) {
const idx = mappings.indexOf(';', index);
return idx === -1 ? mappings.length : idx;
}
function decodeInteger(mappings, pos, state, j) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = mappings.charCodeAt(pos++);
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
state[j] += value;
return pos;
}
function hasMoreVlq(mappings, i, length) {
if (i >= length)
return false;
return mappings.charCodeAt(i) !== comma;
}
function sort(line) {
line.sort(sortComparator$1);
}
function sortComparator$1(a, b) {
return a[0] - b[0];
}
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
* Matches the parts of a URL:
* 1. Scheme, including ":", guaranteed.
* 2. User/password, including "@", optional.
* 3. Host, guaranteed.
* 4. Port, including ":", optional.
* 5. Path, including "/", optional.
* 6. Query, including "?", optional.
* 7. Hash, including "#", optional.
*/
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
*
* 1. Host, optional.
* 2. Path, which may include "/", guaranteed.
* 3. Query, including "?", optional.
* 4. Hash, including "#", optional.
*/
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
var UrlType;
(function (UrlType) {
UrlType[UrlType["Empty"] = 1] = "Empty";
UrlType[UrlType["Hash"] = 2] = "Hash";
UrlType[UrlType["Query"] = 3] = "Query";
UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
UrlType[UrlType["Absolute"] = 7] = "Absolute";
})(UrlType || (UrlType = {}));
function isAbsoluteUrl(input) {
return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
return input.startsWith('//');
}
function isAbsolutePath(input) {
return input.startsWith('/');
}
function isFileUrl(input) {
return input.startsWith('file:');
}
function isRelative(input) {
return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
const match = urlRegex.exec(input);
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
const match = fileRegex.exec(input);
const path = match[2];
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
return {
scheme,
user,
host,
port,
path,
query,
hash,
type: UrlType.Absolute,
};
}
function parseUrl$2(input) {
if (isSchemeRelativeUrl(input)) {
const url = parseAbsoluteUrl('http:' + input);
url.scheme = '';
url.type = UrlType.SchemeRelative;
return url;
}
if (isAbsolutePath(input)) {
const url = parseAbsoluteUrl('http://foo.com' + input);
url.scheme = '';
url.host = '';
url.type = UrlType.AbsolutePath;
return url;
}
if (isFileUrl(input))
return parseFileUrl(input);
if (isAbsoluteUrl(input))
return parseAbsoluteUrl(input);
const url = parseAbsoluteUrl('http://foo.com/' + input);
url.scheme = '';
url.host = '';
url.type = input
? input.startsWith('?')
? UrlType.Query
: input.startsWith('#')
? UrlType.Hash
: UrlType.RelativePath
: UrlType.Empty;
return url;
}
function stripPathFilename(path) {
// If a path ends with a parent directory "..", then it's a relative path with excess parent
// paths. It's not a file, so we can't strip it.
if (path.endsWith('/..'))
return path;
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
function mergePaths(url, base) {
normalizePath$4(base, base.type);
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
// path).
if (url.path === '/') {
url.path = base.path;
}
else {
// Resolution happens relative to the base path's directory, not the file.
url.path = stripPathFilename(base.path) + url.path;
}
}
/**
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
* "foo/.". We need to normalize to a standard representation.
*/
function normalizePath$4(url, type) {
const rel = type <= UrlType.RelativePath;
const pieces = url.path.split('/');
// We need to preserve the first piece always, so that we output a leading slash. The item at
// pieces[0] is an empty string.
let pointer = 1;
// Positive is the number of real directories we've output, used for popping a parent directory.
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
let positive = 0;
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
// real directory, we won't need to append, unless the other conditions happen again.
let addTrailingSlash = false;
for (let i = 1; i < pieces.length; i++) {
const piece = pieces[i];
// An empty directory, could be a trailing slash, or just a double "//" in the path.
if (!piece) {
addTrailingSlash = true;
continue;
}
// If we encounter a real directory, then we don't need to append anymore.
addTrailingSlash = false;
// A current directory, which we can always drop.
if (piece === '.')
continue;
// A parent directory, we need to see if there are any real directories we can pop. Else, we
// have an excess of parents, and we'll need to keep the "..".
if (piece === '..') {
if (positive) {
addTrailingSlash = true;
positive--;
pointer--;
}
else if (rel) {
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
pieces[pointer++] = piece;
}
continue;
}
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
// any popped or dropped directories.
pieces[pointer++] = piece;
positive++;
}
let path = '';
for (let i = 1; i < pointer; i++) {
path += '/' + pieces[i];
}
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
path += '/';
}
url.path = path;
}
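// Illustrative sketch: what `normalizePath$4` does to a messy absolute path. The expected result
// below was worked out by hand from the loop above.
//
//   const url = { path: '/foo//bar/./baz/..' };
//   normalizePath$4(url, UrlType.AbsolutePath);
//   // url.path === '/foo/bar/'  (empty and "." segments dropped, "baz/.." popped, trailing slash kept)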
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
function resolve$2(input, base) {
if (!input && !base)
return '';
const url = parseUrl$2(input);
let inputType = url.type;
if (base && inputType !== UrlType.Absolute) {
const baseUrl = parseUrl$2(base);
const baseType = baseUrl.type;
switch (inputType) {
case UrlType.Empty:
url.hash = baseUrl.hash;
// fall through
case UrlType.Hash:
url.query = baseUrl.query;
// fall through
case UrlType.Query:
case UrlType.RelativePath:
mergePaths(url, baseUrl);
// fall through
case UrlType.AbsolutePath:
// The host, user, and port are joined, you can't copy one without the others.
url.user = baseUrl.user;
url.host = baseUrl.host;
url.port = baseUrl.port;
// fall through
case UrlType.SchemeRelative:
// The input doesn't have a scheme at least, so we need to copy at least that over.
url.scheme = baseUrl.scheme;
}
if (baseType > inputType)
inputType = baseType;
}
normalizePath$4(url, inputType);
const queryHash = url.query + url.hash;
switch (inputType) {
// This is impossible, because of the empty checks at the start of the function.
// case UrlType.Empty:
case UrlType.Hash:
case UrlType.Query:
return queryHash;
case UrlType.RelativePath: {
// The first char is always a "/", and we need it to be relative.
const path = url.path.slice(1);
if (!path)
return queryHash || '.';
if (isRelative(base || input) && !isRelative(path)) {
// If base started with a leading ".", or there is no base and input started with a ".",
// then we need to ensure that the relative path starts with a ".". We don't know if
// relative starts with a "..", though, so check before prepending.
return './' + path + queryHash;
}
return path + queryHash;
}
case UrlType.AbsolutePath:
return url.path + queryHash;
default:
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
}
}
function resolve$1(input, base) {
// The base is always treated as a directory, if it's not empty.
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
if (base && !base.endsWith('/'))
base += '/';
return resolve$2(input, base);
}
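// Illustrative sketch of `resolve$1`, which treats the base as a directory. Both results were worked
// out by hand from the parsing/merging logic above; they are examples, not a spec.
//
//   resolve$1('../x.js', '/a/b/');                         // -> '/a/x.js'
//   resolve$1('foo/bar.js', 'https://example.com/src/');   // -> 'https://example.com/src/foo/bar.js'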
/**
* Removes everything after the last "/", but leaves the slash.
*/
function stripFilename(path) {
if (!path)
return '';
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
const COLUMN$1 = 0;
const SOURCES_INDEX$1 = 1;
const SOURCE_LINE$1 = 2;
const SOURCE_COLUMN$1 = 3;
const NAMES_INDEX$1 = 4;
function maybeSort(mappings, owned) {
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length)
return mappings;
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
// not, we do not want to modify the consumer's input array.
if (!owned)
mappings = mappings.slice();
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
mappings[i] = sortSegments(mappings[i], owned);
}
return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
for (let i = start; i < mappings.length; i++) {
if (!isSorted(mappings[i]))
return i;
}
return mappings.length;
}
function isSorted(line) {
for (let j = 1; j < line.length; j++) {
if (line[j][COLUMN$1] < line[j - 1][COLUMN$1]) {
return false;
}
}
return true;
}
function sortSegments(line, owned) {
if (!owned)
line = line.slice();
return line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[COLUMN$1] - b[COLUMN$1];
}
let found = false;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
function binarySearch(haystack, needle, low, high) {
while (low <= high) {
const mid = low + ((high - low) >> 1);
const cmp = haystack[mid][COLUMN$1] - needle;
if (cmp === 0) {
found = true;
return mid;
}
if (cmp < 0) {
low = mid + 1;
}
else {
high = mid - 1;
}
}
found = false;
return low - 1;
}
function upperBound(haystack, needle, index) {
for (let i = index + 1; i < haystack.length; index = i++) {
if (haystack[i][COLUMN$1] !== needle)
break;
}
return index;
}
function lowerBound(haystack, needle, index) {
for (let i = index - 1; i >= 0; index = i--) {
if (haystack[i][COLUMN$1] !== needle)
break;
}
return index;
}
function memoizedState() {
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1,
};
}
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
function memoizedBinarySearch(haystack, needle, state, key) {
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle;
return lastIndex;
}
if (needle >= lastNeedle) {
// lastIndex may be -1 if the previous needle was not found.
low = lastIndex === -1 ? 0 : lastIndex;
}
else {
high = lastIndex;
}
}
state.lastKey = key;
state.lastNeedle = needle;
return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
/**
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
*/
let decodedMappings;
/**
* A low-level API to find the segment associated with a generated line/column (think, from a
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
*/
let traceSegment;
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
let originalPositionFor$1;
class TraceMap {
constructor(map, mapUrl) {
const isString = typeof map === 'string';
if (!isString && map._decodedMemo)
return map;
const parsed = (isString ? JSON.parse(map) : map);
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
this.version = version;
this.file = file;
this.names = names;
this.sourceRoot = sourceRoot;
this.sources = sources;
this.sourcesContent = sourcesContent;
const from = resolve$1(sourceRoot || '', stripFilename(mapUrl));
this.resolvedSources = sources.map((s) => resolve$1(s || '', from));
const { mappings } = parsed;
if (typeof mappings === 'string') {
this._encoded = mappings;
this._decoded = undefined;
}
else {
this._encoded = undefined;
this._decoded = maybeSort(mappings, isString);
}
this._decodedMemo = memoizedState();
this._bySources = undefined;
this._bySourceMemos = undefined;
}
}
(() => {
decodedMappings = (map) => {
return (map._decoded || (map._decoded = decode(map._encoded)));
};
traceSegment = (map, line, column) => {
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return null;
const segments = decoded[line];
const index = traceSegmentInternal(segments, map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
return index === -1 ? null : segments[index];
};
originalPositionFor$1 = (map, { line, column, bias }) => {
line--;
if (line < 0)
throw new Error(LINE_GTR_ZERO);
if (column < 0)
throw new Error(COL_GTR_EQ_ZERO);
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return OMapping(null, null, null, null);
const segments = decoded[line];
const index = traceSegmentInternal(segments, map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
if (index === -1)
return OMapping(null, null, null, null);
const segment = segments[index];
if (segment.length === 1)
return OMapping(null, null, null, null);
const { names, resolvedSources } = map;
return OMapping(resolvedSources[segment[SOURCES_INDEX$1]], segment[SOURCE_LINE$1] + 1, segment[SOURCE_COLUMN$1], segment.length === 5 ? names[segment[NAMES_INDEX$1]] : null);
};
})();
function OMapping(source, line, column, name) {
return { source, line, column, name };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
let index = memoizedBinarySearch(segments, column, memo, line);
if (found) {
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
}
else if (bias === LEAST_UPPER_BOUND)
index++;
if (index === -1 || index === segments.length)
return -1;
return index;
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
let get;
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
let put;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
class SetArray {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
}
(() => {
get = (strarr, key) => strarr._indexes[key];
put = (strarr, key) => {
// The key may or may not be present. If it is present, it's a number.
const index = get(strarr, key);
if (index !== undefined)
return index;
const { array, _indexes: indexes } = strarr;
return (indexes[key] = array.push(key) - 1);
};
})();
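// Illustrative sketch: `put` is idempotent per key and returns the key's index in the backing array.
//
//   const sources = new SetArray();
//   put(sources, 'a.js');   // -> 0
//   put(sources, 'b.js');   // -> 1
//   put(sources, 'a.js');   // -> 0 (already present)
//   get(sources, 'b.js');   // -> 1
//   sources.array;          // -> ['a.js', 'b.js']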
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const NO_NAME = -1;
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
let maybeAddSegment;
/**
* Adds/removes the content of the source file to the source map.
*/
let setSourceContent;
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
let toDecodedMap;
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
let toEncodedMap;
// This split declaration is only so that terser can eliminate the static initialization block.
let addSegmentInternal;
/**
* Provides the state to generate a sourcemap.
*/
class GenMapping {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
}
}
(() => {
maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
};
setSourceContent = (map, source, content) => {
const { _sources: sources, _sourcesContent: sourcesContent } = map;
sourcesContent[put(sources, source)] = content;
};
toDecodedMap = (map) => {
const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
removeEmptyFinalLines(mappings);
return {
version: 3,
file: file || undefined,
names: names.array,
sourceRoot: sourceRoot || undefined,
sources: sources.array,
sourcesContent,
mappings,
};
};
toEncodedMap = (map) => {
const decoded = toDecodedMap(map);
return Object.assign(Object.assign({}, decoded), { mappings: encode$1(decoded.mappings) });
};
// Internal helpers
addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
const line = getLine(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index))
return;
return insert(line, index, [genColumn]);
}
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length)
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(line, index, name
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
};
})();
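// Illustrative sketch (hypothetical file names): building a one-segment map with the helpers above.
//
//   const gen = new GenMapping({ file: 'out.js' });
//   maybeAddSegment(gen, 0, 0, 'src/input.js', 0, 0);    // generated 0:0 <- src/input.js 0:0
//   setSourceContent(gen, 'src/input.js', 'let x = 1;');
//   toEncodedMap(gen).mappings;   // -> 'AAAA'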
function getLine(mappings, index) {
for (let i = mappings.length; i <= index; i++) {
mappings[i] = [];
}
return mappings[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN])
break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0)
break;
}
if (len < length)
mappings.length = len;
}
function skipSourceless(line, index) {
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
// doesn't generate any useful information.
if (index === 0)
return true;
const prev = line[index - 1];
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
// generate any new information. Else, this segment will end the source/named segment and point to
// a sourceless position, which is useful.
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
// A source/named segment at the start of a line gives position at that genColumn
if (index === 0)
return false;
const prev = line[index - 1];
// If the previous segment is sourceless, then we're transitioning to a source.
if (prev.length === 1)
return false;
// If the previous segment maps to the exact same source position, then this segment doesn't
// provide any new position information.
return (sourcesIndex === prev[SOURCES_INDEX] &&
sourceLine === prev[SOURCE_LINE] &&
sourceColumn === prev[SOURCE_COLUMN] &&
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
}
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content) {
return { source, line, column, name, content };
}
function Source(map, sources, source, content) {
return {
map,
sources,
source,
content,
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map, sources) {
return Source(map, sources, '', null);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content) {
return Source(null, EMPTY_SOURCES, source, content);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
const gen = new GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null)
continue;
}
const { column, line, name, content, source } = traced;
maybeAddSegment(gen, i, genCol, source, line, column, name);
if (source && content != null)
setSourceContent(gen, source, content);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content);
}
const segment = traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null)
return null;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length === 1)
return SOURCELESS_MAPPING;
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value) {
if (Array.isArray(value))
return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new TraceMap(m, ''));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?');
}
}
let tree = build$2(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build$2(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx = {
importer,
depth,
source: sourceFile || '',
content: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap)
return build$2(new TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
return OriginalSource(source, sourceContent);
});
return MapSource(map, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
let SourceMap$1 = class SourceMap {
constructor(map, options) {
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
};
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader, options) {
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap$1(traceMappings(tree), opts);
}
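// Illustrative usage sketch (hypothetical maps and loader): merging a minifier's map with the map of
// the transform that produced its input. The loader returns a sourcemap to recurse into, or a falsey
// value to treat the file as an original source.
//
//   const merged = remapping(minifyMap, (file) => (file === 'transpiled.js' ? transpileMap : null));
//   JSON.parse(merged.toString());   // SourceMap v3 object tracing back to the original sources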
var src$2 = {exports: {}};
var browser$3 = {exports: {}};
/**
* Helpers.
*/
var ms$1;
var hasRequiredMs$1;
function requireMs$1 () {
if (hasRequiredMs$1) return ms$1;
hasRequiredMs$1 = 1;
var s = 1000;
var m = s * 60;
var h = m * 60;
var d = h * 24;
var w = d * 7;
var y = d * 365.25;
/**
* Parse or format the given `val`.
*
* Options:
*
* - `long` verbose formatting [false]
*
* @param {String|Number} val
* @param {Object} [options]
* @throws {Error} throw an error if val is not a non-empty string or a number
* @return {String|Number}
* @api public
*/
ms$1 = function(val, options) {
options = options || {};
var type = typeof val;
if (type === 'string' && val.length > 0) {
return parse(val);
} else if (type === 'number' && isFinite(val)) {
return options.long ? fmtLong(val) : fmtShort(val);
}
throw new Error(
'val is not a non-empty string or a valid number. val=' +
JSON.stringify(val)
);
};
/**
* Parse the given `str` and return milliseconds.
*
* @param {String} str
* @return {Number}
* @api private
*/
function parse(str) {
str = String(str);
if (str.length > 100) {
return;
}
var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
str
);
if (!match) {
return;
}
var n = parseFloat(match[1]);
var type = (match[2] || 'ms').toLowerCase();
switch (type) {
case 'years':
case 'year':
case 'yrs':
case 'yr':
case 'y':
return n * y;
case 'weeks':
case 'week':
case 'w':
return n * w;
case 'days':
case 'day':
case 'd':
return n * d;
case 'hours':
case 'hour':
case 'hrs':
case 'hr':
case 'h':
return n * h;
case 'minutes':
case 'minute':
case 'mins':
case 'min':
case 'm':
return n * m;
case 'seconds':
case 'second':
case 'secs':
case 'sec':
case 's':
return n * s;
case 'milliseconds':
case 'millisecond':
case 'msecs':
case 'msec':
case 'ms':
return n;
default:
return undefined;
}
}
/**
* Short format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtShort(ms) {
var msAbs = Math.abs(ms);
if (msAbs >= d) {
return Math.round(ms / d) + 'd';
}
if (msAbs >= h) {
return Math.round(ms / h) + 'h';
}
if (msAbs >= m) {
return Math.round(ms / m) + 'm';
}
if (msAbs >= s) {
return Math.round(ms / s) + 's';
}
return ms + 'ms';
}
/**
* Long format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtLong(ms) {
var msAbs = Math.abs(ms);
if (msAbs >= d) {
return plural(ms, msAbs, d, 'day');
}
if (msAbs >= h) {
return plural(ms, msAbs, h, 'hour');
}
if (msAbs >= m) {
return plural(ms, msAbs, m, 'minute');
}
if (msAbs >= s) {
return plural(ms, msAbs, s, 'second');
}
return ms + ' ms';
}
/**
* Pluralization helper.
*/
function plural(ms, msAbs, n, name) {
var isPlural = msAbs >= n * 1.5;
return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');
}
return ms$1;
}
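// Illustrative sketch: the two directions of the `ms` helper defined above.
//
//   ms('2 days');                // -> 172800000
//   ms(60000);                   // -> '1m'
//   ms(60000, { long: true });   // -> '1 minute'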
var common$b;
var hasRequiredCommon;
function requireCommon () {
if (hasRequiredCommon) return common$b;
hasRequiredCommon = 1;
/**
* This is the common logic for both the Node.js and web browser
* implementations of `debug()`.
*/
function setup(env) {
createDebug.debug = createDebug;
createDebug.default = createDebug;
createDebug.coerce = coerce;
createDebug.disable = disable;
createDebug.enable = enable;
createDebug.enabled = enabled;
createDebug.humanize = requireMs$1();
createDebug.destroy = destroy;
Object.keys(env).forEach(key => {
createDebug[key] = env[key];
});
/**
* The currently active debug mode names, and names to skip.
*/
createDebug.names = [];
createDebug.skips = [];
/**
* Map of special "%n" handling functions, for the debug "format" argument.
*
* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
*/
createDebug.formatters = {};
/**
* Selects a color for a debug namespace
* @param {String} namespace The namespace string for the debug instance to be colored
* @return {Number|String} An ANSI color code for the given namespace
* @api private
*/
function selectColor(namespace) {
let hash = 0;
for (let i = 0; i < namespace.length; i++) {
hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
hash |= 0; // Convert to 32bit integer
}
return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
}
createDebug.selectColor = selectColor;
/**
* Create a debugger with the given `namespace`.
*
* @param {String} namespace
* @return {Function}
* @api public
*/
function createDebug(namespace) {
let prevTime;
let enableOverride = null;
let namespacesCache;
let enabledCache;
function debug(...args) {
// Disabled?
if (!debug.enabled) {
return;
}
const self = debug;
// Set `diff` timestamp
const curr = Number(new Date());
const ms = curr - (prevTime || curr);
self.diff = ms;
self.prev = prevTime;
self.curr = curr;
prevTime = curr;
args[0] = createDebug.coerce(args[0]);
if (typeof args[0] !== 'string') {
// Anything else let's inspect with %O
args.unshift('%O');
}
// Apply any `formatters` transformations
let index = 0;
args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {
// If we encounter an escaped % then don't increase the array index
if (match === '%%') {
return '%';
}
index++;
const formatter = createDebug.formatters[format];
if (typeof formatter === 'function') {
const val = args[index];
match = formatter.call(self, val);
// Now we need to remove `args[index]` since it's inlined in the `format`
args.splice(index, 1);
index--;
}
return match;
});
// Apply env-specific formatting (colors, etc.)
createDebug.formatArgs.call(self, args);
const logFn = self.log || createDebug.log;
logFn.apply(self, args);
}
debug.namespace = namespace;
debug.useColors = createDebug.useColors();
debug.color = createDebug.selectColor(namespace);
debug.extend = extend;
debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.
Object.defineProperty(debug, 'enabled', {
enumerable: true,
configurable: false,
get: () => {
if (enableOverride !== null) {
return enableOverride;
}
if (namespacesCache !== createDebug.namespaces) {
namespacesCache = createDebug.namespaces;
enabledCache = createDebug.enabled(namespace);
}
return enabledCache;
},
set: v => {
enableOverride = v;
}
});
// Env-specific initialization logic for debug instances
if (typeof createDebug.init === 'function') {
createDebug.init(debug);
}
return debug;
}
function extend(namespace, delimiter) {
const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);
newDebug.log = this.log;
return newDebug;
}
/**
* Enables a debug mode by namespaces. This can include modes
* separated by a colon and wildcards.
*
* @param {String} namespaces
* @api public
*/
function enable(namespaces) {
createDebug.save(namespaces);
createDebug.namespaces = namespaces;
createDebug.names = [];
createDebug.skips = [];
let i;
const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
const len = split.length;
for (i = 0; i < len; i++) {
if (!split[i]) {
// ignore empty strings
continue;
}
namespaces = split[i].replace(/\*/g, '.*?');
if (namespaces[0] === '-') {
createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));
} else {
createDebug.names.push(new RegExp('^' + namespaces + '$'));
}
}
}
/**
* Disable debug output.
*
* @return {String} namespaces
* @api public
*/
function disable() {
const namespaces = [
...createDebug.names.map(toNamespace),
...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)
].join(',');
createDebug.enable('');
return namespaces;
}
/**
* Returns true if the given mode name is enabled, false otherwise.
*
* @param {String} name
* @return {Boolean}
* @api public
*/
function enabled(name) {
if (name[name.length - 1] === '*') {
return true;
}
let i;
let len;
for (i = 0, len = createDebug.skips.length; i < len; i++) {
if (createDebug.skips[i].test(name)) {
return false;
}
}
for (i = 0, len = createDebug.names.length; i < len; i++) {
if (createDebug.names[i].test(name)) {
return true;
}
}
return false;
}
/**
* Convert regexp to namespace
*
 * @param {RegExp} regexp
* @return {String} namespace
* @api private
*/
function toNamespace(regexp) {
return regexp.toString()
.substring(2, regexp.toString().length - 2)
.replace(/\.\*\?$/, '*');
}
/**
* Coerce `val`.
*
* @param {Mixed} val
* @return {Mixed}
* @api private
*/
function coerce(val) {
if (val instanceof Error) {
return val.stack || val.message;
}
return val;
}
/**
* XXX DO NOT USE. This is a temporary stub function.
* XXX It WILL be removed in the next major release.
*/
function destroy() {
console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
}
createDebug.enable(createDebug.load());
return createDebug;
}
common$b = setup;
return common$b;
}
/* eslint-env browser */
var hasRequiredBrowser$1;
function requireBrowser$1 () {
if (hasRequiredBrowser$1) return browser$3.exports;
hasRequiredBrowser$1 = 1;
(function (module, exports) {
/**
* This is the web browser implementation of `debug()`.
*/
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.storage = localstorage();
exports.destroy = (() => {
let warned = false;
return () => {
if (!warned) {
warned = true;
console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
}
};
})();
/**
* Colors.
*/
exports.colors = [
'#0000CC',
'#0000FF',
'#0033CC',
'#0033FF',
'#0066CC',
'#0066FF',
'#0099CC',
'#0099FF',
'#00CC00',
'#00CC33',
'#00CC66',
'#00CC99',
'#00CCCC',
'#00CCFF',
'#3300CC',
'#3300FF',
'#3333CC',
'#3333FF',
'#3366CC',
'#3366FF',
'#3399CC',
'#3399FF',
'#33CC00',
'#33CC33',
'#33CC66',
'#33CC99',
'#33CCCC',
'#33CCFF',
'#6600CC',
'#6600FF',
'#6633CC',
'#6633FF',
'#66CC00',
'#66CC33',
'#9900CC',
'#9900FF',
'#9933CC',
'#9933FF',
'#99CC00',
'#99CC33',
'#CC0000',
'#CC0033',
'#CC0066',
'#CC0099',
'#CC00CC',
'#CC00FF',
'#CC3300',
'#CC3333',
'#CC3366',
'#CC3399',
'#CC33CC',
'#CC33FF',
'#CC6600',
'#CC6633',
'#CC9900',
'#CC9933',
'#CCCC00',
'#CCCC33',
'#FF0000',
'#FF0033',
'#FF0066',
'#FF0099',
'#FF00CC',
'#FF00FF',
'#FF3300',
'#FF3333',
'#FF3366',
'#FF3399',
'#FF33CC',
'#FF33FF',
'#FF6600',
'#FF6633',
'#FF9900',
'#FF9933',
'#FFCC00',
'#FFCC33'
];
/**
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
* and the Firebug extension (any Firefox version) are known
* to support "%c" CSS customizations.
*
* TODO: add a `localStorage` variable to explicitly enable/disable colors
*/
// eslint-disable-next-line complexity
function useColors() {
// NB: In an Electron preload script, document will be defined but not fully
// initialized. Since we know we're in Chrome, we'll just detect this case
// explicitly
if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
return true;
}
// Internet Explorer and Edge do not support colors.
if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
return false;
}
// Is webkit? http://stackoverflow.com/a/16459606/376773
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
// Is firebug? http://stackoverflow.com/a/398120/376773
(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
// Is firefox >= v31?
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||
// Double check webkit in userAgent just in case we are in a worker
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/));
}
/**
* Colorize log arguments if enabled.
*
* @api public
*/
function formatArgs(args) {
args[0] = (this.useColors ? '%c' : '') +
this.namespace +
(this.useColors ? ' %c' : ' ') +
args[0] +
(this.useColors ? '%c ' : ' ') +
'+' + module.exports.humanize(this.diff);
if (!this.useColors) {
return;
}
const c = 'color: ' + this.color;
args.splice(1, 0, c, 'color: inherit');
// The final "%c" is somewhat tricky, because there could be other
// arguments passed either before or after the %c, so we need to
// figure out the correct index to insert the CSS into
let index = 0;
let lastC = 0;
args[0].replace(/%[a-zA-Z%]/g, match => {
if (match === '%%') {
return;
}
index++;
if (match === '%c') {
// We only are interested in the *last* %c
// (the user may have provided their own)
lastC = index;
}
});
args.splice(lastC, 0, c);
}
/**
* Invokes `console.debug()` when available.
* No-op when `console.debug` is not a "function".
* If `console.debug` is not available, falls back
* to `console.log`.
*
* @api public
*/
exports.log = console.debug || console.log || (() => {});
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
try {
if (namespaces) {
exports.storage.setItem('debug', namespaces);
} else {
exports.storage.removeItem('debug');
}
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
let r;
try {
r = exports.storage.getItem('debug');
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
}
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
if (!r && typeof process !== 'undefined' && 'env' in process) {
r = process.env.DEBUG;
}
return r;
}
/**
 * Attempts to return the global `localStorage` object.
 *
 * This is necessary because Safari throws
 * when a user disables cookies/localStorage
 * and you attempt to access it.
*
* @return {LocalStorage}
* @api private
*/
function localstorage() {
try {
// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
// The Browser also has localStorage in the global context.
return localStorage;
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
}
}
module.exports = requireCommon()(exports);
const {formatters} = module.exports;
/**
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
*/
formatters.j = function (v) {
try {
return JSON.stringify(v);
} catch (error) {
return '[UnexpectedJSONParseError]: ' + error.message;
}
};
} (browser$3, browser$3.exports));
return browser$3.exports;
}
var node$1 = {exports: {}};
/**
* Module dependencies.
*/
var hasRequiredNode$1;
function requireNode$1 () {
if (hasRequiredNode$1) return node$1.exports;
hasRequiredNode$1 = 1;
(function (module, exports) {
const tty = require$$0$3;
const util = require$$0$6;
/**
* This is the Node.js implementation of `debug()`.
*/
exports.init = init;
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.destroy = util.deprecate(
() => {},
'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'
);
/**
* Colors.
*/
exports.colors = [6, 2, 3, 4, 5, 1];
try {
// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
// eslint-disable-next-line import/no-extraneous-dependencies
const supportsColor = require('supports-color');
if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
exports.colors = [
20,
21,
26,
27,
32,
33,
38,
39,
40,
41,
42,
43,
44,
45,
56,
57,
62,
63,
68,
69,
74,
75,
76,
77,
78,
79,
80,
81,
92,
93,
98,
99,
112,
113,
128,
129,
134,
135,
148,
149,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
178,
179,
184,
185,
196,
197,
198,
199,
200,
201,
202,
203,
204,
205,
206,
207,
208,
209,
214,
215,
220,
221
];
}
} catch (error) {
// Swallow - we only care if `supports-color` is available; it doesn't have to be.
}
/**
* Build up the default `inspectOpts` object from the environment variables.
*
* $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
*/
exports.inspectOpts = Object.keys(process.env).filter(key => {
return /^debug_/i.test(key);
}).reduce((obj, key) => {
// Camel-case
const prop = key
.substring(6)
.toLowerCase()
.replace(/_([a-z])/g, (_, k) => {
return k.toUpperCase();
});
// Coerce string value into JS value
let val = process.env[key];
if (/^(yes|on|true|enabled)$/i.test(val)) {
val = true;
} else if (/^(no|off|false|disabled)$/i.test(val)) {
val = false;
} else if (val === 'null') {
val = null;
} else {
val = Number(val);
}
obj[prop] = val;
return obj;
}, {});
/**
* Is stdout a TTY? Colored output is enabled when `true`.
*/
function useColors() {
return 'colors' in exports.inspectOpts ?
Boolean(exports.inspectOpts.colors) :
tty.isatty(process.stderr.fd);
}
/**
* Adds ANSI color escape codes if enabled.
*
* @api public
*/
function formatArgs(args) {
const {namespace: name, useColors} = this;
if (useColors) {
const c = this.color;
const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c);
const prefix = ` ${colorCode};1m${name} \u001B[0m`;
args[0] = prefix + args[0].split('\n').join('\n' + prefix);
args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m');
} else {
args[0] = getDate() + name + ' ' + args[0];
}
}
function getDate() {
if (exports.inspectOpts.hideDate) {
return '';
}
return new Date().toISOString() + ' ';
}
/**
* Invokes `util.format()` with the specified arguments and writes to stderr.
*/
function log(...args) {
return process.stderr.write(util.format(...args) + '\n');
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
if (namespaces) {
process.env.DEBUG = namespaces;
} else {
// If you set a process.env field to null or undefined, it gets cast to the
// string 'null' or 'undefined'. Just delete instead.
delete process.env.DEBUG;
}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
return process.env.DEBUG;
}
/**
* Init logic for `debug` instances.
*
* Create a new `inspectOpts` object in case `useColors` is set
* differently for a particular `debug` instance.
*/
function init(debug) {
debug.inspectOpts = {};
const keys = Object.keys(exports.inspectOpts);
for (let i = 0; i < keys.length; i++) {
debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
}
}
module.exports = requireCommon()(exports);
const {formatters} = module.exports;
/**
* Map %o to `util.inspect()`, all on a single line.
*/
formatters.o = function (v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts)
.split('\n')
.map(str => str.trim())
.join(' ');
};
/**
* Map %O to `util.inspect()`, allowing multiple lines if needed.
*/
formatters.O = function (v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts);
};
} (node$1, node$1.exports));
return node$1.exports;
}
/**
* Detect Electron renderer / nwjs process, which is node, but we should
* treat as a browser.
*/
if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {
src$2.exports = requireBrowser$1();
} else {
src$2.exports = requireNode$1();
}
var srcExports$1 = src$2.exports;
var debug$g = /*@__PURE__*/getDefaultExportFromCjs(srcExports$1);
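// Illustrative usage sketch: `debug$g` is the standard `debug` factory. A namespaced logger only
// prints when its namespace is enabled, e.g. via the DEBUG environment variable.
//
//   const log = debug$g('vite:resolve');
//   log('resolved %s -> %s', source, resolved);   // printed when DEBUG=vite:* (or vite:resolve)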
let pnp;
if (process.versions.pnp) {
try {
pnp = createRequire$1(import.meta.url)('pnpapi');
}
catch { }
}
function invalidatePackageData(packageCache, pkgPath) {
const pkgDir = path$o.dirname(pkgPath);
packageCache.forEach((pkg, cacheKey) => {
if (pkg.dir === pkgDir) {
packageCache.delete(cacheKey);
}
});
}
function resolvePackageData(pkgName, basedir, preserveSymlinks = false, packageCache) {
if (pnp) {
const cacheKey = getRpdCacheKey(pkgName, basedir, preserveSymlinks);
if (packageCache?.has(cacheKey))
return packageCache.get(cacheKey);
try {
const pkg = pnp.resolveToUnqualified(pkgName, basedir, {
considerBuiltins: false,
});
if (!pkg)
return null;
const pkgData = loadPackageData(path$o.join(pkg, 'package.json'));
packageCache?.set(cacheKey, pkgData);
return pkgData;
}
catch {
return null;
}
}
const originalBasedir = basedir;
while (basedir) {
if (packageCache) {
const cached = getRpdCache(packageCache, pkgName, basedir, originalBasedir, preserveSymlinks);
if (cached)
return cached;
}
const pkg = path$o.join(basedir, 'node_modules', pkgName, 'package.json');
try {
if (fs$l.existsSync(pkg)) {
const pkgPath = preserveSymlinks ? pkg : safeRealpathSync(pkg);
const pkgData = loadPackageData(pkgPath);
if (packageCache) {
setRpdCache(packageCache, pkgData, pkgName, basedir, originalBasedir, preserveSymlinks);
}
return pkgData;
}
}
catch { }
const nextBasedir = path$o.dirname(basedir);
if (nextBasedir === basedir)
break;
basedir = nextBasedir;
}
return null;
}
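// Walks up from `basedir` and returns the data of the nearest package.json, or null.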
function findNearestPackageData(basedir, packageCache) {
const originalBasedir = basedir;
while (basedir) {
if (packageCache) {
const cached = getFnpdCache(packageCache, basedir, originalBasedir);
if (cached)
return cached;
}
const pkgPath = path$o.join(basedir, 'package.json');
try {
if (fs$l.statSync(pkgPath, { throwIfNoEntry: false })?.isFile()) {
const pkgData = loadPackageData(pkgPath);
if (packageCache) {
setFnpdCache(packageCache, pkgData, basedir, originalBasedir);
}
return pkgData;
}
}
catch { }
const nextBasedir = path$o.dirname(basedir);
if (nextBasedir === basedir)
break;
basedir = nextBasedir;
}
return null;
}
// Finds the nearest package.json with a `name` field
function findNearestMainPackageData(basedir, packageCache) {
const nearestPackage = findNearestPackageData(basedir, packageCache);
return (nearestPackage &&
(nearestPackage.data.name
? nearestPackage
: findNearestMainPackageData(path$o.dirname(nearestPackage.dir), packageCache)));
}
function loadPackageData(pkgPath) {
const data = JSON.parse(fs$l.readFileSync(pkgPath, 'utf-8'));
const pkgDir = path$o.dirname(pkgPath);
const { sideEffects } = data;
let hasSideEffects;
if (typeof sideEffects === 'boolean') {
hasSideEffects = () => sideEffects;
}
else if (Array.isArray(sideEffects)) {
const finalPackageSideEffects = sideEffects.map((sideEffect) => {
/*
* The array accepts simple glob patterns to the relevant files... Patterns like *.css, which do not include a /, will be treated like **\/*.css.
* https://webpack.js.org/guides/tree-shaking/
* https://github.com/vitejs/vite/pull/11807
*/
if (sideEffect.includes('/')) {
return sideEffect;
}
return `**/${sideEffect}`;
});
hasSideEffects = createFilter(finalPackageSideEffects, null, {
resolve: pkgDir,
});
}
else {
hasSideEffects = () => true;
}
const pkg = {
dir: pkgDir,
data,
hasSideEffects,
webResolvedImports: {},
nodeResolvedImports: {},
setResolvedCache(key, entry, targetWeb) {
if (targetWeb) {
pkg.webResolvedImports[key] = entry;
}
else {
pkg.nodeResolvedImports[key] = entry;
}
},
getResolvedCache(key, targetWeb) {
if (targetWeb) {
return pkg.webResolvedImports[key];
}
else {
return pkg.nodeResolvedImports[key];
}
},
};
return pkg;
}
function watchPackageDataPlugin(packageCache) {
// a list of files to watch before the plugin is ready
const watchQueue = new Set();
const watchedDirs = new Set();
const watchFileStub = (id) => {
watchQueue.add(id);
};
let watchFile = watchFileStub;
const setPackageData = packageCache.set.bind(packageCache);
packageCache.set = (id, pkg) => {
if (!isInNodeModules(pkg.dir) && !watchedDirs.has(pkg.dir)) {
watchedDirs.add(pkg.dir);
watchFile(path$o.join(pkg.dir, 'package.json'));
}
return setPackageData(id, pkg);
};
return {
name: 'vite:watch-package-data',
buildStart() {
watchFile = this.addWatchFile.bind(this);
watchQueue.forEach(watchFile);
watchQueue.clear();
},
buildEnd() {
watchFile = watchFileStub;
},
watchChange(id) {
if (id.endsWith('/package.json')) {
invalidatePackageData(packageCache, path$o.normalize(id));
}
},
handleHotUpdate({ file }) {
if (file.endsWith('/package.json')) {
invalidatePackageData(packageCache, path$o.normalize(file));
}
},
};
}
/**
* Get cached `resolvePackageData` value based on `basedir`. When one is found,
* and we've already traversed some directories between `basedir` and `originalBasedir`,
* we cache the value for those in-between directories as well.
*
* This ensures the fs is only read once for a shared `basedir`.
*/
function getRpdCache(packageCache, pkgName, basedir, originalBasedir, preserveSymlinks) {
const cacheKey = getRpdCacheKey(pkgName, basedir, preserveSymlinks);
const pkgData = packageCache.get(cacheKey);
if (pkgData) {
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getRpdCacheKey(pkgName, dir, preserveSymlinks), pkgData);
});
return pkgData;
}
}
function setRpdCache(packageCache, pkgData, pkgName, basedir, originalBasedir, preserveSymlinks) {
packageCache.set(getRpdCacheKey(pkgName, basedir, preserveSymlinks), pkgData);
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getRpdCacheKey(pkgName, dir, preserveSymlinks), pkgData);
});
}
// package cache key for `resolvePackageData`
function getRpdCacheKey(pkgName, basedir, preserveSymlinks) {
return `rpd_${pkgName}_${basedir}_${preserveSymlinks}`;
}
/**
* Get cached `findNearestPackageData` value based on `basedir`. When one is found,
* and we've already traversed some directories between `basedir` and `originalBasedir`,
* we cache the value for those in-between directories as well.
*
* This ensures the fs is only read once for a shared `basedir`.
*/
function getFnpdCache(packageCache, basedir, originalBasedir) {
const cacheKey = getFnpdCacheKey(basedir);
const pkgData = packageCache.get(cacheKey);
if (pkgData) {
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getFnpdCacheKey(dir), pkgData);
});
return pkgData;
}
}
function setFnpdCache(packageCache, pkgData, basedir, originalBasedir) {
packageCache.set(getFnpdCacheKey(basedir), pkgData);
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getFnpdCacheKey(dir), pkgData);
});
}
// package cache key for `findNearestPackageData`
function getFnpdCacheKey(basedir) {
return `fnpd_${basedir}`;
}
/**
* Traverse between `longerDir` (inclusive) and `shorterDir` (exclusive) and call `cb` for each dir.
* @param longerDir Longer dir path, e.g. `/User/foo/bar/baz`
* @param shorterDir Shorter dir path, e.g. `/User/foo`
*/
function traverseBetweenDirs(longerDir, shorterDir, cb) {
while (longerDir !== shorterDir) {
cb(longerDir);
longerDir = path$o.dirname(longerDir);
}
}
const createFilter = createFilter$1;
const windowsSlashRE = /\\/g;
function slash$1(p) {
return p.replace(windowsSlashRE, '/');
}
/**
* Prepend `/@id/` and replace null byte so the id is URL-safe.
* This is prepended to resolved ids that are not valid browser
* import specifiers by the importAnalysis plugin.
*/
function wrapId(id) {
return id.startsWith(VALID_ID_PREFIX)
? id
: VALID_ID_PREFIX + id.replace('\0', NULL_BYTE_PLACEHOLDER);
}
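// Illustrative example (assuming VALID_ID_PREFIX === '/@id/' and NULL_BYTE_PLACEHOLDER === '__x00__'):
// wrapId('\0virtual:my-module') === '/@id/__x00__virtual:my-module'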
/**
* Undo {@link wrapId}'s `/@id/` and null byte replacements.
*/
function unwrapId(id) {
return id.startsWith(VALID_ID_PREFIX)
? id.slice(VALID_ID_PREFIX.length).replace(NULL_BYTE_PLACEHOLDER, '\0')
: id;
}
const replaceSlashOrColonRE = /[/:]/g;
const replaceDotRE = /\./g;
const replaceNestedIdRE = /(\s*>\s*)/g;
const replaceHashRE = /#/g;
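// Flattens a (possibly nested) module id into a single filename-safe token,
// e.g. flattenId('@vue/shared > foo.js') === '@vue_shared___foo__js'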
const flattenId = (id) => id
.replace(replaceSlashOrColonRE, '_')
.replace(replaceDotRE, '__')
.replace(replaceNestedIdRE, '___')
.replace(replaceHashRE, '____');
const normalizeId = (id) => id.replace(replaceNestedIdRE, ' > ');
// Supported by Node, Deno, Bun
const NODE_BUILTIN_NAMESPACE = 'node:';
// Supported by Deno
const NPM_BUILTIN_NAMESPACE = 'npm:';
// Supported by Bun
const BUN_BUILTIN_NAMESPACE = 'bun:';
// TODO: revisit later to confirm whether the edge case of "code compiled for Node v12 but run on Node v16 on the server" is something we intend to support.
const builtins = new Set([
...builtinModules,
'assert/strict',
'diagnostics_channel',
'dns/promises',
'fs/promises',
'path/posix',
'path/win32',
'readline/promises',
'stream/consumers',
'stream/promises',
'stream/web',
'timers/promises',
'util/types',
'wasi',
]);
// Some runtimes, like Bun, inject namespaced modules here, which are not Node builtins
const nodeBuiltins = [...builtins].filter((id) => !id.includes(':'));
// TODO: Use `isBuiltin` from `node:module`, but Deno doesn't support it
function isBuiltin(id) {
if (process.versions.deno && id.startsWith(NPM_BUILTIN_NAMESPACE))
return true;
if (process.versions.bun && id.startsWith(BUN_BUILTIN_NAMESPACE))
return true;
return isNodeBuiltin(id);
}
function isNodeBuiltin(id) {
if (id.startsWith(NODE_BUILTIN_NAMESPACE))
return true;
return nodeBuiltins.includes(id);
}
function isInNodeModules(id) {
return id.includes('node_modules');
}
function moduleListContains(moduleList, id) {
return moduleList?.some((m) => m === id || id.startsWith(withTrailingSlash(m)));
}
function isOptimizable(id, optimizeDeps) {
const { extensions } = optimizeDeps;
return (OPTIMIZABLE_ENTRY_RE.test(id) ||
(extensions?.some((ext) => id.endsWith(ext)) ?? false));
}
const bareImportRE = /^(?![a-zA-Z]:)[\w@](?!.*:\/\/)/;
const deepImportRE = /^([^@][^/]*)\/|^(@[^/]+\/[^/]+)\//;
// TODO: use import()
const _require$3 = createRequire$1(import.meta.url);
// set in bin/vite.js
const filter = process.env.VITE_DEBUG_FILTER;
const DEBUG = process.env.DEBUG;
function createDebugger(namespace, options = {}) {
const log = debug$g(namespace);
const { onlyWhenFocused } = options;
let enabled = log.enabled;
if (enabled && onlyWhenFocused) {
const ns = typeof onlyWhenFocused === 'string' ? onlyWhenFocused : namespace;
enabled = !!DEBUG?.includes(ns);
}
if (enabled) {
return (...args) => {
if (!filter || args.some((a) => a?.includes?.(filter))) {
log(...args);
}
};
}
}
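// Usage sketch: the returned logger is undefined when the namespace is disabled,
// so call sites typically use optional invocation, e.g.
//   const debug = createDebugger('vite:resolve')
//   debug?.(`resolved ${id}`)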
function testCaseInsensitiveFS() {
if (!CLIENT_ENTRY.endsWith('client.mjs')) {
throw new Error(`cannot test case insensitive FS, CLIENT_ENTRY const doesn't contain client.mjs`);
}
if (!fs$l.existsSync(CLIENT_ENTRY)) {
throw new Error('cannot test case insensitive FS, CLIENT_ENTRY does not point to an existing file: ' +
CLIENT_ENTRY);
}
return fs$l.existsSync(CLIENT_ENTRY.replace('client.mjs', 'cLiEnT.mjs'));
}
function isUrl(path) {
try {
new URL$3(path);
return true;
}
catch {
return false;
}
}
const isCaseInsensitiveFS = testCaseInsensitiveFS();
const isWindows$4 = os$4.platform() === 'win32';
const VOLUME_RE = /^[A-Z]:/i;
function normalizePath$3(id) {
return path$o.posix.normalize(isWindows$4 ? slash$1(id) : id);
}
function fsPathFromId(id) {
const fsPath = normalizePath$3(id.startsWith(FS_PREFIX) ? id.slice(FS_PREFIX.length) : id);
return fsPath[0] === '/' || fsPath.match(VOLUME_RE) ? fsPath : `/${fsPath}`;
}
function fsPathFromUrl(url) {
return fsPathFromId(cleanUrl(url));
}
function withTrailingSlash(path) {
if (path[path.length - 1] !== '/') {
return `${path}/`;
}
return path;
}
/**
* Check if dir is a parent of file
*
* Warning: parameters are not validated, only works with normalized absolute paths
*
* @param dir - normalized absolute path
* @param file - normalized absolute path
* @returns true if dir is a parent of file
*/
function isParentDirectory(dir, file) {
dir = withTrailingSlash(dir);
return (file.startsWith(dir) ||
(isCaseInsensitiveFS && file.toLowerCase().startsWith(dir.toLowerCase())));
}
/**
* Check if two file names are identical
*
* Warning: parameters are not validated, only works with normalized absolute paths
*
* @param file1 - normalized absolute path
* @param file2 - normalized absolute path
* @returns true if both file paths are identical
*/
function isSameFileUri(file1, file2) {
return (file1 === file2 ||
(isCaseInsensitiveFS && file1.toLowerCase() === file2.toLowerCase()));
}
const queryRE = /\?.*$/s;
const postfixRE = /[?#].*$/s;
function cleanUrl(url) {
return url.replace(postfixRE, '');
}
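// e.g. cleanUrl('/foo.js?import&t=123#hash') === '/foo.js'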
const externalRE = /^(https?:)?\/\//;
const isExternalUrl = (url) => externalRE.test(url);
const dataUrlRE = /^\s*data:/i;
const isDataUrl = (url) => dataUrlRE.test(url);
const virtualModuleRE = /^virtual-module:.*/;
const virtualModulePrefix = 'virtual-module:';
const knownJsSrcRE = /\.(?:[jt]sx?|m[jt]s|vue|marko|svelte|astro|imba|mdx)(?:$|\?)/;
const isJSRequest = (url) => {
url = cleanUrl(url);
if (knownJsSrcRE.test(url)) {
return true;
}
if (!path$o.extname(url) && url[url.length - 1] !== '/') {
return true;
}
return false;
};
const knownTsRE = /\.(?:ts|mts|cts|tsx)(?:$|\?)/;
const isTsRequest = (url) => knownTsRE.test(url);
const importQueryRE = /(\?|&)import=?(?:&|$)/;
const directRequestRE$1 = /(\?|&)direct=?(?:&|$)/;
const internalPrefixes = [
FS_PREFIX,
VALID_ID_PREFIX,
CLIENT_PUBLIC_PATH,
ENV_PUBLIC_PATH,
];
const InternalPrefixRE = new RegExp(`^(?:${internalPrefixes.join('|')})`);
const trailingSeparatorRE = /[?&]$/;
const isImportRequest = (url) => importQueryRE.test(url);
const isInternalRequest = (url) => InternalPrefixRE.test(url);
function removeImportQuery(url) {
return url.replace(importQueryRE, '$1').replace(trailingSeparatorRE, '');
}
function removeDirectQuery(url) {
return url.replace(directRequestRE$1, '$1').replace(trailingSeparatorRE, '');
}
const replacePercentageRE = /%/g;
function injectQuery(url, queryToInject) {
// encode percents for consistent behavior with pathToFileURL
// see #2614 for details
const resolvedUrl = new URL$3(url.replace(replacePercentageRE, '%25'), 'relative:///');
const { search, hash } = resolvedUrl;
let pathname = cleanUrl(url);
pathname = isWindows$4 ? slash$1(pathname) : pathname;
return `${pathname}?${queryToInject}${search ? `&` + search.slice(1) : ''}${hash ?? ''}`;
}
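// e.g. injectQuery('/foo.js?a=1', 'import') === '/foo.js?import&a=1'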
const timestampRE = /\bt=\d{13}&?\b/;
function removeTimestampQuery(url) {
return url.replace(timestampRE, '').replace(trailingSeparatorRE, '');
}
async function asyncReplace(input, re, replacer) {
let match;
let remaining = input;
let rewritten = '';
while ((match = re.exec(remaining))) {
rewritten += remaining.slice(0, match.index);
rewritten += await replacer(match);
remaining = remaining.slice(match.index + match[0].length);
}
rewritten += remaining;
return rewritten;
}
function timeFrom(start, subtract = 0) {
const time = performance.now() - start - subtract;
const timeString = (time.toFixed(2) + `ms`).padEnd(5, ' ');
if (time < 10) {
return colors$1.green(timeString);
}
else if (time < 50) {
return colors$1.yellow(timeString);
}
else {
return colors$1.red(timeString);
}
}
/**
* pretty url for logging.
*/
function prettifyUrl(url, root) {
url = removeTimestampQuery(url);
const isAbsoluteFile = url.startsWith(root);
if (isAbsoluteFile || url.startsWith(FS_PREFIX)) {
const file = path$o.relative(root, isAbsoluteFile ? url : fsPathFromId(url));
return colors$1.dim(file);
}
else {
return colors$1.dim(url);
}
}
function isObject$2(value) {
return Object.prototype.toString.call(value) === '[object Object]';
}
function isDefined(value) {
return value != null;
}
function tryStatSync(file) {
try {
return fs$l.statSync(file, { throwIfNoEntry: false });
}
catch {
// Ignore errors
}
}
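// Walks up from `dir` and returns the full path of the first existing file among
// `fileNames`, or undefined when the filesystem root is reached without a match.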
function lookupFile(dir, fileNames) {
while (dir) {
for (const fileName of fileNames) {
const fullPath = path$o.join(dir, fileName);
if (tryStatSync(fullPath)?.isFile())
return fullPath;
}
const parentDir = path$o.dirname(dir);
if (parentDir === dir)
return;
dir = parentDir;
}
}
const splitRE = /\r?\n/;
const range = 2;
function pad$1(source, n = 2) {
const lines = source.split(splitRE);
return lines.map((l) => ` `.repeat(n) + l).join(`\n`);
}
function posToNumber(source, pos) {
if (typeof pos === 'number')
return pos;
const lines = source.split(splitRE);
const { line, column } = pos;
let start = 0;
for (let i = 0; i < line - 1 && i < lines.length; i++) {
start += lines[i].length + 1;
}
return start + column;
}
function numberToPos(source, offset) {
if (typeof offset !== 'number')
return offset;
if (offset > source.length) {
throw new Error(`offset is longer than source length! offset ${offset} > length ${source.length}`);
}
const lines = source.split(splitRE);
let counted = 0;
let line = 0;
let column = 0;
for (; line < lines.length; line++) {
const lineLength = lines[line].length + 1;
if (counted + lineLength >= offset) {
column = offset - counted + 1;
break;
}
counted += lineLength;
}
return { line: line + 1, column };
}
function generateCodeFrame(source, start = 0, end) {
start = posToNumber(source, start);
end = end || start;
const lines = source.split(splitRE);
let count = 0;
const res = [];
for (let i = 0; i < lines.length; i++) {
count += lines[i].length + 1;
if (count >= start) {
for (let j = i - range; j <= i + range || end > count; j++) {
if (j < 0 || j >= lines.length)
continue;
const line = j + 1;
res.push(`${line}${' '.repeat(Math.max(3 - String(line).length, 0))}| ${lines[j]}`);
const lineLength = lines[j].length;
if (j === i) {
// push underline
const pad = Math.max(start - (count - lineLength) + 1, 0);
const length = Math.max(1, end > count ? lineLength - pad : end - start);
res.push(` | ` + ' '.repeat(pad) + '^'.repeat(length));
}
else if (j > i) {
if (end > count) {
const length = Math.max(Math.min(end - count, lineLength), 1);
res.push(` | ` + '^'.repeat(length));
}
count += lineLength + 1;
}
}
break;
}
}
return res.join('\n');
}
function isFileReadable(filename) {
try {
// The "throwIfNoEntry" is a performance optimization for cases where the file does not exist
if (!fs$l.statSync(filename, { throwIfNoEntry: false })) {
return false;
}
// Check if current process has read permission to the file
fs$l.accessSync(filename, fs$l.constants.R_OK);
return true;
}
catch {
return false;
}
}
const splitFirstDirRE = /(.+?)[\\/](.+)/;
/**
* Delete every file and subdirectory. **The given directory must exist.**
* Pass an optional `skip` array to preserve files under the root directory.
*/
function emptyDir(dir, skip) {
const skipInDir = [];
let nested = null;
if (skip?.length) {
for (const file of skip) {
if (path$o.dirname(file) !== '.') {
const matched = file.match(splitFirstDirRE);
if (matched) {
nested ?? (nested = new Map());
const [, nestedDir, skipPath] = matched;
let nestedSkip = nested.get(nestedDir);
if (!nestedSkip) {
nestedSkip = [];
nested.set(nestedDir, nestedSkip);
}
if (!nestedSkip.includes(skipPath)) {
nestedSkip.push(skipPath);
}
}
}
else {
skipInDir.push(file);
}
}
}
for (const file of fs$l.readdirSync(dir)) {
if (skipInDir.includes(file)) {
continue;
}
if (nested?.has(file)) {
emptyDir(path$o.resolve(dir, file), nested.get(file));
}
else {
fs$l.rmSync(path$o.resolve(dir, file), { recursive: true, force: true });
}
}
}
function copyDir(srcDir, destDir) {
fs$l.mkdirSync(destDir, { recursive: true });
for (const file of fs$l.readdirSync(srcDir)) {
const srcFile = path$o.resolve(srcDir, file);
if (srcFile === destDir) {
continue;
}
const destFile = path$o.resolve(destDir, file);
const stat = fs$l.statSync(srcFile);
if (stat.isDirectory()) {
copyDir(srcFile, destFile);
}
else {
fs$l.copyFileSync(srcFile, destFile);
}
}
}
// `fs.realpathSync.native` resolves differently in Windows network drive,
// causing file read errors. skip for now.
// https://github.com/nodejs/node/issues/37737
let safeRealpathSync = isWindows$4
? windowsSafeRealPathSync
: fs$l.realpathSync.native;
// Based on https://github.com/larrybahr/windows-network-drive
// MIT License, Copyright (c) 2017 Larry Bahr
const windowsNetworkMap = new Map();
function windowsMappedRealpathSync(path) {
const realPath = fs$l.realpathSync.native(path);
if (realPath.startsWith('\\\\')) {
for (const [network, volume] of windowsNetworkMap) {
if (realPath.startsWith(network))
return realPath.replace(network, volume);
}
}
return realPath;
}
const parseNetUseRE = /^(\w+)? +(\w:) +([^ ]+)\s/;
let firstSafeRealPathSyncRun = false;
function windowsSafeRealPathSync(path) {
if (!firstSafeRealPathSyncRun) {
optimizeSafeRealPathSync();
firstSafeRealPathSyncRun = true;
}
return fs$l.realpathSync(path);
}
function optimizeSafeRealPathSync() {
// Skip if using Node <16.18 due to MAX_PATH issue: https://github.com/vitejs/vite/issues/12931
const nodeVersion = process.versions.node.split('.').map(Number);
if (nodeVersion[0] < 16 || (nodeVersion[0] === 16 && nodeVersion[1] < 18)) {
safeRealpathSync = fs$l.realpathSync;
return;
}
// Check the availability of `fs.realpathSync.native`.
// On Windows virtual and RAM disks that bypass the Volume Mount Manager (created by
// programs such as ImDisk), it throws the error EISDIR: illegal operation on a directory.
try {
fs$l.realpathSync.native(path$o.resolve('./'));
}
catch (error) {
if (error.message.includes('EISDIR: illegal operation on a directory')) {
safeRealpathSync = fs$l.realpathSync;
return;
}
}
exec('net use', (error, stdout) => {
if (error)
return;
const lines = stdout.split('\n');
// OK Y: \\NETWORKA\Foo Microsoft Windows Network
// OK Z: \\NETWORKA\Bar Microsoft Windows Network
for (const line of lines) {
const m = line.match(parseNetUseRE);
if (m)
windowsNetworkMap.set(m[3], m[2]);
}
if (windowsNetworkMap.size === 0) {
safeRealpathSync = fs$l.realpathSync.native;
}
else {
safeRealpathSync = windowsMappedRealpathSync;
}
});
}
function ensureWatchedFile(watcher, file, root) {
if (file &&
// only need to watch if out of root
!file.startsWith(withTrailingSlash(root)) &&
// some rollup plugins use null bytes for private resolved Ids
!file.includes('\0') &&
fs$l.existsSync(file)) {
// resolve file to normalized system path
watcher.add(path$o.resolve(file));
}
}
const escapedSpaceCharacters = /( |\\t|\\n|\\f|\\r)+/g;
const imageSetUrlRE = /^(?:[\w\-]+\(.*?\)|'.*?'|".*?"|\S*)/;
function reduceSrcset(ret) {
return ret.reduce((prev, { url, descriptor }, index) => {
descriptor ?? (descriptor = '');
return (prev +=
url + ` ${descriptor}${index === ret.length - 1 ? '' : ', '}`);
}, '');
}
function splitSrcSetDescriptor(srcs) {
return splitSrcSet(srcs)
.map((s) => {
const src = s.replace(escapedSpaceCharacters, ' ').trim();
const [url] = imageSetUrlRE.exec(src) || [''];
return {
url,
descriptor: src?.slice(url.length).trim(),
};
})
.filter(({ url }) => !!url);
}
function processSrcSet(srcs, replacer) {
return Promise.all(splitSrcSetDescriptor(srcs).map(async ({ url, descriptor }) => ({
url: await replacer({ url, descriptor }),
descriptor,
}))).then((ret) => reduceSrcset(ret));
}
function processSrcSetSync(srcs, replacer) {
return reduceSrcset(splitSrcSetDescriptor(srcs).map(({ url, descriptor }) => ({
url: replacer({ url, descriptor }),
descriptor,
})));
}
const cleanSrcSetRE = /(?:url|image|gradient|cross-fade)\([^)]*\)|"([^"]|(?<=\\)")*"|'([^']|(?<=\\)')*'/g;
function splitSrcSet(srcs) {
const parts = [];
// There could be a ',' inside of url(data:...), linear-gradient(...) or "data:..."
const cleanedSrcs = srcs.replace(cleanSrcSetRE, blankReplacer);
let startIndex = 0;
let splitIndex;
do {
splitIndex = cleanedSrcs.indexOf(',', startIndex);
parts.push(srcs.slice(startIndex, splitIndex !== -1 ? splitIndex : undefined));
startIndex = splitIndex + 1;
} while (splitIndex !== -1);
return parts;
}
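// e.g. splitSrcSet('a.png 1x, b.png 2x') === ['a.png 1x', ' b.png 2x']
// (commas inside url(...), gradients or quoted strings are not treated as separators)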
const windowsDriveRE = /^[A-Z]:/;
const replaceWindowsDriveRE = /^([A-Z]):\//;
const linuxAbsolutePathRE = /^\/[^/]/;
function escapeToLinuxLikePath(path) {
if (windowsDriveRE.test(path)) {
return path.replace(replaceWindowsDriveRE, '/windows/$1/');
}
if (linuxAbsolutePathRE.test(path)) {
return `/linux${path}`;
}
return path;
}
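// e.g. escapeToLinuxLikePath('C:/foo/bar') === '/windows/C/foo/bar'
//      escapeToLinuxLikePath('/home/user') === '/linux/home/user'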
const revertWindowsDriveRE = /^\/windows\/([A-Z])\//;
function unescapeToLinuxLikePath(path) {
if (path.startsWith('/linux/')) {
return path.slice('/linux'.length);
}
if (path.startsWith('/windows/')) {
return path.replace(revertWindowsDriveRE, '$1:/');
}
return path;
}
// based on https://github.com/sveltejs/svelte/blob/abf11bb02b2afbd3e4cac509a0f70e318c306364/src/compiler/utils/mapped_code.ts#L221
const nullSourceMap = {
names: [],
sources: [],
mappings: '',
version: 3,
};
function combineSourcemaps(filename, sourcemapList) {
if (sourcemapList.length === 0 ||
sourcemapList.every((m) => m.sources.length === 0)) {
return { ...nullSourceMap };
}
// Hack: parsing breaks with normalized absolute paths on Windows (C:/path/to/something),
// so escape them to Linux-like paths.
// Also avoid mutating the input here to avoid breaking plugins that use a cache to generate sourcemaps, like Vue (see #7442).
sourcemapList = sourcemapList.map((sourcemap) => {
const newSourcemaps = { ...sourcemap };
newSourcemaps.sources = sourcemap.sources.map((source) => source ? escapeToLinuxLikePath(source) : null);
if (sourcemap.sourceRoot) {
newSourcemaps.sourceRoot = escapeToLinuxLikePath(sourcemap.sourceRoot);
}
return newSourcemaps;
});
const escapedFilename = escapeToLinuxLikePath(filename);
// We don't declare type here so we can convert/fake/map as RawSourceMap
let map; //: SourceMap
let mapIndex = 1;
const useArrayInterface = sourcemapList.slice(0, -1).find((m) => m.sources.length !== 1) === undefined;
if (useArrayInterface) {
map = remapping(sourcemapList, () => null);
}
else {
map = remapping(sourcemapList[0], function loader(sourcefile) {
if (sourcefile === escapedFilename && sourcemapList[mapIndex]) {
return sourcemapList[mapIndex++];
}
else {
return null;
}
});
}
if (!map.file) {
delete map.file;
}
// unescape the previous hack
map.sources = map.sources.map((source) => source ? unescapeToLinuxLikePath(source) : source);
map.file = filename;
return map;
}
function unique(arr) {
return Array.from(new Set(arr));
}
/**
* Returns the resolved localhost address when the `dns.lookup` result differs from DNS
*
* The `dns.lookup` result is the same when defaultResultOrder is `verbatim`.
* Even if defaultResultOrder is `ipv4first`, the `dns.lookup` result may be the same,
* for example when IPv6 is not supported on that machine/network.
*/
async function getLocalhostAddressIfDiffersFromDNS() {
const [nodeResult, dnsResult] = await Promise.all([
promises.lookup('localhost'),
promises.lookup('localhost', { verbatim: true }),
]);
const isSame = nodeResult.family === dnsResult.family &&
nodeResult.address === dnsResult.address;
return isSame ? undefined : nodeResult.address;
}
function diffDnsOrderChange(oldUrls, newUrls) {
return !(oldUrls === newUrls ||
(oldUrls &&
newUrls &&
arrayEqual(oldUrls.local, newUrls.local) &&
arrayEqual(oldUrls.network, newUrls.network)));
}
async function resolveHostname(optionsHost) {
let host;
if (optionsHost === undefined || optionsHost === false) {
// Use a secure default
host = 'localhost';
}
else if (optionsHost === true) {
// If passed --host in the CLI without arguments
host = undefined; // undefined typically means 0.0.0.0 or :: (listen on all IPs)
}
else {
host = optionsHost;
}
// Set host name to localhost when possible
let name = host === undefined || wildcardHosts.has(host) ? 'localhost' : host;
if (host === 'localhost') {
// See #8647 for more details.
const localhostAddr = await getLocalhostAddressIfDiffersFromDNS();
if (localhostAddr) {
name = localhostAddr;
}
}
return { host, name };
}
async function resolveServerUrls(server, options, config) {
const address = server.address();
const isAddressInfo = (x) => x?.address;
if (!isAddressInfo(address)) {
return { local: [], network: [] };
}
const local = [];
const network = [];
const hostname = await resolveHostname(options.host);
const protocol = options.https ? 'https' : 'http';
const port = address.port;
const base = config.rawBase === './' || config.rawBase === '' ? '/' : config.rawBase;
if (hostname.host !== undefined && !wildcardHosts.has(hostname.host)) {
let hostnameName = hostname.name;
// ipv6 host
if (hostnameName.includes(':')) {
hostnameName = `[${hostnameName}]`;
}
const address = `${protocol}://${hostnameName}:${port}${base}`;
if (loopbackHosts.has(hostname.host)) {
local.push(address);
}
else {
network.push(address);
}
}
else {
Object.values(os$4.networkInterfaces())
.flatMap((nInterface) => nInterface ?? [])
.filter((detail) => detail &&
detail.address &&
(detail.family === 'IPv4' ||
// @ts-expect-error Node 18.0 - 18.3 returns number
detail.family === 4))
.forEach((detail) => {
let host = detail.address.replace('127.0.0.1', hostname.name);
// ipv6 host
if (host.includes(':')) {
host = `[${host}]`;
}
const url = `${protocol}://${host}:${port}${base}`;
if (detail.address.includes('127.0.0.1')) {
local.push(url);
}
else {
network.push(url);
}
});
}
return { local, network };
}
function arraify(target) {
return Array.isArray(target) ? target : [target];
}
// Taken from https://stackoverflow.com/a/36328890
const multilineCommentsRE$1 = /\/\*[^*]*\*+(?:[^/*][^*]*\*+)*\//g;
const singlelineCommentsRE$1 = /\/\/.*/g;
const requestQuerySplitRE = /\?(?!.*[/|}])/;
// @ts-expect-error jest only exists when running Jest
const usingDynamicImport = typeof jest === 'undefined';
/**
* Dynamically import files. It will make sure it's not being compiled away by TS/Rollup.
*
* As a temporary workaround for Jest's lack of stable ESM support, we fall back to require
* if we're in a Jest environment.
* See https://github.com/vitejs/vite/pull/5197#issuecomment-938054077
*
* @param file File path to import.
*/
const dynamicImport = usingDynamicImport
? new Function('file', 'return import(file)')
: _require$3;
function parseRequest(id) {
const [_, search] = id.split(requestQuerySplitRE, 2);
if (!search) {
return null;
}
return Object.fromEntries(new URLSearchParams(search));
}
const blankReplacer = (match) => ' '.repeat(match.length);
function getHash(text) {
return createHash$2('sha256').update(text).digest('hex').substring(0, 8);
}
const _dirname = path$o.dirname(fileURLToPath(import.meta.url));
const requireResolveFromRootWithFallback = (root, id) => {
// check existence first, so if the package is not found,
// it won't be cached by Node.js, since there isn't a way to invalidate that cache:
// https://github.com/nodejs/node/issues/44663
const found = resolvePackageData(id, root) || resolvePackageData(id, _dirname);
if (!found) {
const error = new Error(`${JSON.stringify(id)} not found.`);
error.code = 'MODULE_NOT_FOUND';
throw error;
}
// actually resolve
// Search in the root directory first, and fallback to the default require paths.
return _require$3.resolve(id, { paths: [root, _dirname] });
};
function emptyCssComments(raw) {
return raw.replace(multilineCommentsRE$1, (s) => ' '.repeat(s.length));
}
function removeComments(raw) {
return raw.replace(multilineCommentsRE$1, '').replace(singlelineCommentsRE$1, '');
}
function mergeConfigRecursively(defaults, overrides, rootPath) {
const merged = { ...defaults };
for (const key in overrides) {
const value = overrides[key];
if (value == null) {
continue;
}
const existing = merged[key];
if (existing == null) {
merged[key] = value;
continue;
}
// fields that require special handling
if (key === 'alias' && (rootPath === 'resolve' || rootPath === '')) {
merged[key] = mergeAlias(existing, value);
continue;
}
else if (key === 'assetsInclude' && rootPath === '') {
merged[key] = [].concat(existing, value);
continue;
}
else if (key === 'noExternal' &&
rootPath === 'ssr' &&
(existing === true || value === true)) {
merged[key] = true;
continue;
}
if (Array.isArray(existing) || Array.isArray(value)) {
merged[key] = [...arraify(existing ?? []), ...arraify(value ?? [])];
continue;
}
if (isObject$2(existing) && isObject$2(value)) {
merged[key] = mergeConfigRecursively(existing, value, rootPath ? `${rootPath}.${key}` : key);
continue;
}
merged[key] = value;
}
return merged;
}
function mergeConfig(defaults, overrides, isRoot = true) {
if (typeof defaults === 'function' || typeof overrides === 'function') {
throw new Error(`Cannot merge config in form of callback`);
}
return mergeConfigRecursively(defaults, overrides, isRoot ? '' : '.');
}
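// Usage sketch:
//   mergeConfig({ resolve: { alias: { a: '/a' } } }, { resolve: { alias: { b: '/b' } } })
//   // => { resolve: { alias: { a: '/a', b: '/b' } } }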
function mergeAlias(a, b) {
if (!a)
return b;
if (!b)
return a;
if (isObject$2(a) && isObject$2(b)) {
return { ...a, ...b };
}
// the order is flipped because the alias is resolved from top-down,
// where the later should have higher priority
return [...normalizeAlias(b), ...normalizeAlias(a)];
}
function normalizeAlias(o = []) {
return Array.isArray(o)
? o.map(normalizeSingleAlias)
: Object.keys(o).map((find) => normalizeSingleAlias({
find,
replacement: o[find],
}));
}
// https://github.com/vitejs/vite/issues/1363
// work around https://github.com/rollup/plugins/issues/759
function normalizeSingleAlias({ find, replacement, customResolver, }) {
if (typeof find === 'string' &&
find[find.length - 1] === '/' &&
replacement[replacement.length - 1] === '/') {
find = find.slice(0, find.length - 1);
replacement = replacement.slice(0, replacement.length - 1);
}
const alias = {
find,
replacement,
};
if (customResolver) {
alias.customResolver = customResolver;
}
return alias;
}
/**
* Builds the transform result for transpiled code whose line numbers aren't altered,
* so we can skip sourcemap generation during dev
*/
function transformStableResult(s, id, config) {
return {
code: s.toString(),
map: config.command === 'build' && config.build.sourcemap
? s.generateMap({ hires: 'boundary', source: id })
: null,
};
}
async function asyncFlatten(arr) {
do {
arr = (await Promise.all(arr)).flat(Infinity);
} while (arr.some((v) => v?.then));
return arr;
}
// strip UTF-8 BOM
function stripBomTag(content) {
if (content.charCodeAt(0) === 0xfeff) {
return content.slice(1);
}
return content;
}
const windowsDrivePathPrefixRE = /^[A-Za-z]:[/\\]/;
/**
* path.isAbsolute also returns true for drive-relative paths on Windows (e.g. /something).
* This function returns false for them, but true for absolute paths (e.g. C:/something).
*/
const isNonDriveRelativeAbsolutePath = (p) => {
if (!isWindows$4)
return p[0] === '/';
return windowsDrivePathPrefixRE.test(p);
};
/**
* Determine if a file is being requested with the correct case, to ensure
* consistent behaviour between dev and prod and across operating systems.
*/
function shouldServeFile(filePath, root) {
// can skip case check on Linux
if (!isCaseInsensitiveFS)
return true;
return hasCorrectCase(filePath, root);
}
/**
* Note that we can't use realpath here, because we don't want to follow
* symlinks.
*/
function hasCorrectCase(file, assets) {
if (file === assets)
return true;
const parent = path$o.dirname(file);
if (fs$l.readdirSync(parent).includes(path$o.basename(file))) {
return hasCorrectCase(parent, assets);
}
return false;
}
function joinUrlSegments(a, b) {
if (!a || !b) {
return a || b || '';
}
if (a[a.length - 1] === '/') {
a = a.substring(0, a.length - 1);
}
if (b[0] !== '/') {
b = '/' + b;
}
return a + b;
}
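// e.g. joinUrlSegments('/base/', 'assets/x.js') === '/base/assets/x.js'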
function removeLeadingSlash(str) {
return str[0] === '/' ? str.slice(1) : str;
}
function stripBase(path, base) {
if (path === base) {
return '/';
}
const devBase = withTrailingSlash(base);
return path.startsWith(devBase) ? path.slice(devBase.length - 1) : path;
}
function arrayEqual(a, b) {
if (a === b)
return true;
if (a.length !== b.length)
return false;
for (let i = 0; i < a.length; i++) {
if (a[i] !== b[i])
return false;
}
return true;
}
function evalValue(rawValue) {
const fn = new Function(`
var console, exports, global, module, process, require
return (\n${rawValue}\n)
`);
return fn();
}
function getNpmPackageName(importPath) {
const parts = importPath.split('/');
if (parts[0][0] === '@') {
if (!parts[1])
return null;
return `${parts[0]}/${parts[1]}`;
}
else {
return parts[0];
}
}
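// e.g. getNpmPackageName('@vitejs/plugin-vue/dist/index.js') === '@vitejs/plugin-vue'
//      getNpmPackageName('lodash/merge') === 'lodash'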
const escapeRegexRE = /[-/\\^$*+?.()|[\]{}]/g;
function escapeRegex(str) {
return str.replace(escapeRegexRE, '\\$&');
}
function getPackageManagerCommand(type = 'install') {
const packageManager = process.env.npm_config_user_agent?.split(' ')[0].split('/')[0] || 'npm';
switch (type) {
case 'install':
return packageManager === 'npm' ? 'npm install' : `${packageManager} add`;
case 'uninstall':
return packageManager === 'npm'
? 'npm uninstall'
: `${packageManager} remove`;
case 'update':
return packageManager === 'yarn'
? 'yarn upgrade'
: `${packageManager} update`;
default:
throw new TypeError(`Unknown command type: ${type}`);
}
}
/* eslint no-console: 0 */
const LogLevels = {
silent: 0,
error: 1,
warn: 2,
info: 3,
};
let lastType;
let lastMsg;
let sameCount = 0;
function clearScreen() {
const repeatCount = process.stdout.rows - 2;
const blank = repeatCount > 0 ? '\n'.repeat(repeatCount) : '';
console.log(blank);
readline.cursorTo(process.stdout, 0, 0);
readline.clearScreenDown(process.stdout);
}
function createLogger(level = 'info', options = {}) {
if (options.customLogger) {
return options.customLogger;
}
const timeFormatter = new Intl.DateTimeFormat(undefined, {
hour: 'numeric',
minute: 'numeric',
second: 'numeric',
});
const loggedErrors = new WeakSet();
const { prefix = '[vite]', allowClearScreen = true } = options;
const thresh = LogLevels[level];
const canClearScreen = allowClearScreen && process.stdout.isTTY && !process.env.CI;
const clear = canClearScreen ? clearScreen : () => { };
function output(type, msg, options = {}) {
if (thresh >= LogLevels[type]) {
const method = type === 'info' ? 'log' : type;
const format = () => {
if (options.timestamp) {
const tag = type === 'info'
? colors$1.cyan(colors$1.bold(prefix))
: type === 'warn'
? colors$1.yellow(colors$1.bold(prefix))
: colors$1.red(colors$1.bold(prefix));
return `${colors$1.dim(timeFormatter.format(new Date()))} ${tag} ${msg}`;
}
else {
return msg;
}
};
if (options.error) {
loggedErrors.add(options.error);
}
if (canClearScreen) {
if (type === lastType && msg === lastMsg) {
sameCount++;
clear();
console[method](format(), colors$1.yellow(`(x${sameCount + 1})`));
}
else {
sameCount = 0;
lastMsg = msg;
lastType = type;
if (options.clear) {
clear();
}
console[method](format());
}
}
else {
console[method](format());
}
}
}
const warnedMessages = new Set();
const logger = {
hasWarned: false,
info(msg, opts) {
output('info', msg, opts);
},
warn(msg, opts) {
logger.hasWarned = true;
output('warn', msg, opts);
},
warnOnce(msg, opts) {
if (warnedMessages.has(msg))
return;
logger.hasWarned = true;
output('warn', msg, opts);
warnedMessages.add(msg);
},
error(msg, opts) {
logger.hasWarned = true;
output('error', msg, opts);
},
clearScreen(type) {
if (thresh >= LogLevels[type]) {
clear();
}
},
hasErrorLogged(error) {
return loggedErrors.has(error);
},
};
return logger;
}
function printServerUrls(urls, optionsHost, info) {
const colorUrl = (url) => colors$1.cyan(url.replace(/:(\d+)\//, (_, port) => `:${colors$1.bold(port)}/`));
for (const url of urls.local) {
info(` ${colors$1.green('➜')} ${colors$1.bold('Local')}: ${colorUrl(url)}`);
}
for (const url of urls.network) {
info(` ${colors$1.green('➜')} ${colors$1.bold('Network')}: ${colorUrl(url)}`);
}
if (urls.network.length === 0 && optionsHost === undefined) {
info(colors$1.dim(` ${colors$1.green('➜')} ${colors$1.bold('Network')}: use `) +
colors$1.bold('--host') +
colors$1.dim(' to expose'));
}
}
const groups = [
{ name: 'Assets', color: colors$1.green },
{ name: 'CSS', color: colors$1.magenta },
{ name: 'JS', color: colors$1.cyan },
];
const COMPRESSIBLE_ASSETS_RE = /\.(?:html|json|svg|txt|xml|xhtml)$/;
function buildReporterPlugin(config) {
const compress = promisify$4(gzip);
const chunkLimit = config.build.chunkSizeWarningLimit;
const numberFormatter = new Intl.NumberFormat('en', {
maximumFractionDigits: 2,
minimumFractionDigits: 2,
});
const displaySize = (bytes) => {
return `${numberFormatter.format(bytes / 1000)} kB`;
};
const tty = process.stdout.isTTY && !process.env.CI;
const shouldLogInfo = LogLevels[config.logLevel || 'info'] >= LogLevels.info;
let hasTransformed = false;
let hasRenderedChunk = false;
let hasCompressChunk = false;
let transformedCount = 0;
let chunkCount = 0;
let compressedCount = 0;
let startTime = Date.now();
async function getCompressedSize(code) {
if (config.build.ssr || !config.build.reportCompressedSize) {
return null;
}
if (shouldLogInfo && !hasCompressChunk) {
if (!tty) {
config.logger.info('computing gzip size...');
}
else {
writeLine('computing gzip size (0)...');
}
hasCompressChunk = true;
}
const compressed = await compress(typeof code === 'string' ? code : Buffer.from(code));
compressedCount++;
if (shouldLogInfo && tty) {
writeLine(`computing gzip size (${compressedCount})...`);
}
return compressed.length;
}
const logTransform = throttle((id) => {
writeLine(`transforming (${transformedCount}) ${colors$1.dim(path$o.relative(config.root, id))}`);
});
return {
name: 'vite:reporter',
transform(_, id) {
transformedCount++;
if (shouldLogInfo) {
if (!tty) {
if (!hasTransformed) {
config.logger.info(`transforming...`);
}
}
else {
if (id.includes(`?`))
return;
logTransform(id);
}
hasTransformed = true;
}
return null;
},
options() {
startTime = Date.now();
},
buildStart() {
transformedCount = 0;
},
buildEnd() {
if (shouldLogInfo) {
if (tty) {
clearLine();
}
config.logger.info(`${colors$1.green(`✓`)} ${transformedCount} modules transformed.`);
}
},
renderStart() {
chunkCount = 0;
compressedCount = 0;
},
renderChunk(code, chunk, options) {
if (!options.inlineDynamicImports) {
for (const id of chunk.moduleIds) {
const module = this.getModuleInfo(id);
if (!module)
continue;
// When a dynamic importer shares a chunk with the imported module,
// warn that the dynamically imported module will not be moved to another chunk (#12850).
if (module.importers.length && module.dynamicImporters.length) {
// Filter out the intersection of dynamic importers and sibling modules in
// the same chunk. The intersecting dynamic importers' dynamic import is not
// expected to work. Note we're only detecting the direct ineffective
// dynamic import here.
const detectedIneffectiveDynamicImport = module.dynamicImporters.some((id) => !isInNodeModules(id) && chunk.moduleIds.includes(id));
if (detectedIneffectiveDynamicImport) {
this.warn(`\n(!) ${module.id} is dynamically imported by ${module.dynamicImporters.join(', ')} but also statically imported by ${module.importers.join(', ')}, dynamic import will not move module into another chunk.\n`);
}
}
}
}
chunkCount++;
if (shouldLogInfo) {
if (!tty) {
if (!hasRenderedChunk) {
config.logger.info('rendering chunks...');
}
}
else {
writeLine(`rendering chunks (${chunkCount})...`);
}
hasRenderedChunk = true;
}
return null;
},
generateBundle() {
if (shouldLogInfo && tty)
clearLine();
},
async writeBundle({ dir: outDir }, output) {
let hasLargeChunks = false;
if (shouldLogInfo) {
const entries = (await Promise.all(Object.values(output).map(async (chunk) => {
if (chunk.type === 'chunk') {
return {
name: chunk.fileName,
group: 'JS',
size: chunk.code.length,
compressedSize: await getCompressedSize(chunk.code),
mapSize: chunk.map ? chunk.map.toString().length : null,
};
}
else {
if (chunk.fileName.endsWith('.map'))
return null;
const isCSS = chunk.fileName.endsWith('.css');
const isCompressible = isCSS || COMPRESSIBLE_ASSETS_RE.test(chunk.fileName);
return {
name: chunk.fileName,
group: isCSS ? 'CSS' : 'Assets',
size: chunk.source.length,
mapSize: null,
compressedSize: isCompressible
? await getCompressedSize(chunk.source)
: null,
};
}
}))).filter(isDefined);
if (tty)
clearLine();
let longest = 0;
let biggestSize = 0;
let biggestMap = 0;
let biggestCompressSize = 0;
for (const entry of entries) {
if (entry.name.length > longest)
longest = entry.name.length;
if (entry.size > biggestSize)
biggestSize = entry.size;
if (entry.mapSize && entry.mapSize > biggestMap) {
biggestMap = entry.mapSize;
}
if (entry.compressedSize &&
entry.compressedSize > biggestCompressSize) {
biggestCompressSize = entry.compressedSize;
}
}
const sizePad = displaySize(biggestSize).length;
const mapPad = displaySize(biggestMap).length;
const compressPad = displaySize(biggestCompressSize).length;
const relativeOutDir = normalizePath$3(path$o.relative(config.root, path$o.resolve(config.root, outDir ?? config.build.outDir)));
const assetsDir = path$o.join(config.build.assetsDir, '/');
for (const group of groups) {
const filtered = entries.filter((e) => e.group === group.name);
if (!filtered.length)
continue;
for (const entry of filtered.sort((a, z) => a.size - z.size)) {
const isLarge = group.name === 'JS' && entry.size / 1000 > chunkLimit;
if (isLarge)
hasLargeChunks = true;
const sizeColor = isLarge ? colors$1.yellow : colors$1.dim;
let log = colors$1.dim(withTrailingSlash(relativeOutDir));
log +=
!config.build.lib &&
entry.name.startsWith(withTrailingSlash(assetsDir))
? colors$1.dim(assetsDir) +
group.color(entry.name
.slice(assetsDir.length)
.padEnd(longest + 2 - assetsDir.length))
: group.color(entry.name.padEnd(longest + 2));
log += colors$1.bold(sizeColor(displaySize(entry.size).padStart(sizePad)));
if (entry.compressedSize) {
log += colors$1.dim(` │ gzip: ${displaySize(entry.compressedSize).padStart(compressPad)}`);
}
if (entry.mapSize) {
log += colors$1.dim(` │ map: ${displaySize(entry.mapSize).padStart(mapPad)}`);
}
config.logger.info(log);
}
}
}
else {
hasLargeChunks = Object.values(output).some((chunk) => {
return chunk.type === 'chunk' && chunk.code.length / 1000 > chunkLimit;
});
}
if (hasLargeChunks &&
config.build.minify &&
!config.build.lib &&
!config.build.ssr) {
config.logger.warn(colors$1.yellow(`\n(!) Some chunks are larger than ${chunkLimit} kBs after minification. Consider:\n` +
`- Using dynamic import() to code-split the application\n` +
`- Use build.rollupOptions.output.manualChunks to improve chunking: https://rollupjs.org/configuration-options/#output-manualchunks\n` +
`- Adjust chunk size limit for this warning via build.chunkSizeWarningLimit.`));
}
},
closeBundle() {
if (shouldLogInfo && !config.build.watch) {
config.logger.info(`${colors$1.green(`✓ built in ${displayTime(Date.now() - startTime)}`)}`);
}
},
};
}
function writeLine(output) {
clearLine();
if (output.length < process.stdout.columns) {
process.stdout.write(output);
}
else {
process.stdout.write(output.substring(0, process.stdout.columns - 1));
}
}
function clearLine() {
process.stdout.clearLine(0);
process.stdout.cursorTo(0);
}
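// Leading-edge throttle: runs `fn` immediately, then ignores further calls for 100ms.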
function throttle(fn) {
let timerHandle = null;
return (...args) => {
if (timerHandle)
return;
fn(...args);
timerHandle = setTimeout(() => {
timerHandle = null;
}, 100);
};
}
function displayTime(time) {
// display: {X}ms
if (time < 1000) {
return `${time}ms`;
}
time = time / 1000;
// display: {X}s
if (time < 60) {
return `${time.toFixed(2)}s`;
}
const mins = parseInt((time / 60).toString());
const seconds = time % 60;
// display: {X}m {Y}s
return `${mins}m${seconds < 1 ? '' : ` ${seconds.toFixed(0)}s`}`;
}
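// e.g. displayTime(850) === '850ms', displayTime(83000) === '1m 23s'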
// src/find.ts
async function find(filename, options) {
let dir = require$$0$4.dirname(require$$0$4.resolve(filename));
const root = (options == null ? void 0 : options.root) ? require$$0$4.resolve(options.root) : null;
while (dir) {
const tsconfig = await tsconfigInDir(dir, options);
if (tsconfig) {
return tsconfig;
} else {
if (root === dir) {
break;
}
const parent = require$$0$4.dirname(dir);
if (parent === dir) {
break;
} else {
dir = parent;
}
}
}
throw new Error(`no tsconfig file found for ${filename}`);
}
async function tsconfigInDir(dir, options) {
const tsconfig = require$$0$4.join(dir, "tsconfig.json");
if (options == null ? void 0 : options.tsConfigPaths) {
return options.tsConfigPaths.has(tsconfig) ? tsconfig : void 0;
}
try {
const stat = await promises$1.stat(tsconfig);
if (stat.isFile() || stat.isFIFO()) {
return tsconfig;
}
} catch (e) {
if (e.code !== "ENOENT") {
throw e;
}
}
}
var sep = require$$0$4.sep;
async function findAll(dir, options) {
const state = {
files: [],
calls: 0,
skip: options == null ? void 0 : options.skip,
err: false
};
return new Promise((resolve, reject) => {
walk$3(require$$0$4.resolve(dir), state, (err, files) => err ? reject(err) : resolve(files));
});
}
function walk$3(dir, state, done) {
if (state.err) {
return;
}
state.calls++;
readdir$4(dir, { withFileTypes: true }, (err, entries = []) => {
var _a;
if (state.err) {
return;
}
if (err && !(err.code === "ENOENT" || err.code === "EACCES" || err.code === "EPERM")) {
state.err = true;
done(err);
} else {
for (const ent of entries) {
if (ent.isDirectory() && !((_a = state.skip) == null ? void 0 : _a.call(state, ent.name))) {
walk$3(`${dir}${sep}${ent.name}`, state, done);
} else if (ent.isFile() && ent.name === "tsconfig.json") {
state.files.push(`${dir}${sep}tsconfig.json`);
}
}
if (--state.calls === 0) {
if (!state.err) {
done(null, state.files);
}
}
}
});
}
// src/to-json.ts
function toJson(tsconfigJson) {
const stripped = stripDanglingComma(stripJsonComments(stripBom(tsconfigJson)));
if (stripped.trim() === "") {
return "{}";
} else {
return stripped;
}
}
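// Removes trailing commas before '}' or ']' outside of strings,
// e.g. stripDanglingComma('{"a": 1,}') === '{"a": 1 }'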
function stripDanglingComma(pseudoJson) {
let insideString = false;
let offset = 0;
let result = "";
let danglingCommaPos = null;
for (let i = 0; i < pseudoJson.length; i++) {
const currentCharacter = pseudoJson[i];
if (currentCharacter === '"') {
const escaped = isEscaped(pseudoJson, i);
if (!escaped) {
insideString = !insideString;
}
}
if (insideString) {
danglingCommaPos = null;
continue;
}
if (currentCharacter === ",") {
danglingCommaPos = i;
continue;
}
if (danglingCommaPos) {
if (currentCharacter === "}" || currentCharacter === "]") {
result += pseudoJson.slice(offset, danglingCommaPos) + " ";
offset = danglingCommaPos + 1;
danglingCommaPos = null;
} else if (!currentCharacter.match(/\s/)) {
danglingCommaPos = null;
}
}
}
return result + pseudoJson.substring(offset);
}
function isEscaped(jsonString, quotePosition) {
let index = quotePosition - 1;
let backslashCount = 0;
while (jsonString[index] === "\\") {
index -= 1;
backslashCount += 1;
}
return Boolean(backslashCount % 2);
}
function strip(string, start, end) {
return string.slice(start, end).replace(/\S/g, " ");
}
var singleComment = Symbol("singleComment");
var multiComment = Symbol("multiComment");
function stripJsonComments(jsonString) {
let isInsideString = false;
let isInsideComment = false;
let offset = 0;
let result = "";
for (let index = 0; index < jsonString.length; index++) {
const currentCharacter = jsonString[index];
const nextCharacter = jsonString[index + 1];
if (!isInsideComment && currentCharacter === '"') {
const escaped = isEscaped(jsonString, index);
if (!escaped) {
isInsideString = !isInsideString;
}
}
if (isInsideString) {
continue;
}
if (!isInsideComment && currentCharacter + nextCharacter === "//") {
result += jsonString.slice(offset, index);
offset = index;
isInsideComment = singleComment;
index++;
} else if (isInsideComment === singleComment && currentCharacter + nextCharacter === "\r\n") {
index++;
isInsideComment = false;
result += strip(jsonString, offset, index);
offset = index;
} else if (isInsideComment === singleComment && currentCharacter === "\n") {
isInsideComment = false;
result += strip(jsonString, offset, index);
offset = index;
} else if (!isInsideComment && currentCharacter + nextCharacter === "/*") {
result += jsonString.slice(offset, index);
offset = index;
isInsideComment = multiComment;
index++;
} else if (isInsideComment === multiComment && currentCharacter + nextCharacter === "*/") {
index++;
isInsideComment = false;
result += strip(jsonString, offset, index + 1);
offset = index + 1;
}
}
return result + (isInsideComment ? strip(jsonString.slice(offset)) : jsonString.slice(offset));
}
function stripBom(string) {
if (string.charCodeAt(0) === 65279) {
return string.slice(1);
}
return string;
}
var POSIX_SEP_RE = new RegExp("\\" + require$$0$4.posix.sep, "g");
var NATIVE_SEP_RE = new RegExp("\\" + require$$0$4.sep, "g");
var PATTERN_REGEX_CACHE = /* @__PURE__ */ new Map();
var GLOB_ALL_PATTERN = `**/*`;
var DEFAULT_EXTENSIONS = [".ts", ".tsx", ".mts", ".cts"];
var DEFAULT_EXTENSIONS_RE_GROUP = `\\.(?:${DEFAULT_EXTENSIONS.map((ext) => ext.substring(1)).join(
"|"
)})`;
new Function("path", "return import(path).then(m => m.default)");
async function resolveTSConfig(filename) {
if (require$$0$4.extname(filename) !== ".json") {
return;
}
const tsconfig = require$$0$4.resolve(filename);
try {
const stat = await promises$1.stat(tsconfig);
if (stat.isFile() || stat.isFIFO()) {
return tsconfig;
}
} catch (e) {
if (e.code !== "ENOENT") {
throw e;
}
}
throw new Error(`no tsconfig file found for ${filename}`);
}
function posix2native(filename) {
return require$$0$4.posix.sep !== require$$0$4.sep && filename.includes(require$$0$4.posix.sep) ? filename.replace(POSIX_SEP_RE, require$$0$4.sep) : filename;
}
function native2posix(filename) {
return require$$0$4.posix.sep !== require$$0$4.sep && filename.includes(require$$0$4.sep) ? filename.replace(NATIVE_SEP_RE, require$$0$4.posix.sep) : filename;
}
function resolve2posix(dir, filename) {
if (require$$0$4.sep === require$$0$4.posix.sep) {
return dir ? require$$0$4.resolve(dir, filename) : require$$0$4.resolve(filename);
}
return native2posix(
dir ? require$$0$4.resolve(posix2native(dir), posix2native(filename)) : require$$0$4.resolve(posix2native(filename))
);
}
function resolveReferencedTSConfigFiles(result) {
const dir = require$$0$4.dirname(result.tsconfigFile);
return result.tsconfig.references.map((ref) => {
const refPath = ref.path.endsWith(".json") ? ref.path : require$$0$4.join(ref.path, "tsconfig.json");
return resolve2posix(dir, refPath);
});
}
function resolveSolutionTSConfig(filename, result) {
if (result.referenced && DEFAULT_EXTENSIONS.some((ext) => filename.endsWith(ext)) && !isIncluded(filename, result)) {
const solutionTSConfig = result.referenced.find(
(referenced) => isIncluded(filename, referenced)
);
if (solutionTSConfig) {
return {
...solutionTSConfig,
solution: result
};
}
}
return result;
}
function isIncluded(filename, result) {
const dir = native2posix(require$$0$4.dirname(result.tsconfigFile));
const files = (result.tsconfig.files || []).map((file) => resolve2posix(dir, file));
const absoluteFilename = resolve2posix(null, filename);
if (files.includes(filename)) {
return true;
}
const isIncluded2 = isGlobMatch(
absoluteFilename,
dir,
result.tsconfig.include || (result.tsconfig.files ? [] : [GLOB_ALL_PATTERN])
);
if (isIncluded2) {
const isExcluded = isGlobMatch(absoluteFilename, dir, result.tsconfig.exclude || []);
return !isExcluded;
}
return false;
}
function isGlobMatch(filename, dir, patterns) {
return patterns.some((pattern) => {
let lastWildcardIndex = pattern.length;
let hasWildcard = false;
for (let i = pattern.length - 1; i > -1; i--) {
if (pattern[i] === "*" || pattern[i] === "?") {
lastWildcardIndex = i;
hasWildcard = true;
break;
}
}
if (lastWildcardIndex < pattern.length - 1 && !filename.endsWith(pattern.slice(lastWildcardIndex + 1))) {
return false;
}
if (pattern.endsWith("*") && !DEFAULT_EXTENSIONS.some((ext) => filename.endsWith(ext))) {
return false;
}
if (pattern === GLOB_ALL_PATTERN) {
return filename.startsWith(`${dir}/`);
}
const resolvedPattern = resolve2posix(dir, pattern);
let firstWildcardIndex = -1;
for (let i = 0; i < resolvedPattern.length; i++) {
if (resolvedPattern[i] === "*" || resolvedPattern[i] === "?") {
firstWildcardIndex = i;
hasWildcard = true;
break;
}
}
if (firstWildcardIndex > 1 && !filename.startsWith(resolvedPattern.slice(0, firstWildcardIndex - 1))) {
return false;
}
if (!hasWildcard) {
return filename === resolvedPattern;
}
if (PATTERN_REGEX_CACHE.has(resolvedPattern)) {
return PATTERN_REGEX_CACHE.get(resolvedPattern).test(filename);
}
const regex = pattern2regex(resolvedPattern);
PATTERN_REGEX_CACHE.set(resolvedPattern, regex);
return regex.test(filename);
});
}
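// Converts a resolved glob pattern to a RegExp; as a sketch derived from the code below,
// pattern2regex('/src/**/*') yields /^\/src\/(?:[^\/]*\/)*[^\/]*\.(?:ts|tsx|mts|cts)$/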
function pattern2regex(resolvedPattern) {
let regexStr = "^";
for (let i = 0; i < resolvedPattern.length; i++) {
const char = resolvedPattern[i];
if (char === "?") {
regexStr += "[^\\/]";
continue;
}
if (char === "*") {
if (resolvedPattern[i + 1] === "*" && resolvedPattern[i + 2] === "/") {
i += 2;
regexStr += "(?:[^\\/]*\\/)*";
continue;
}
regexStr += "[^\\/]*";
continue;
}
if ("/.+^${}()|[]\\".includes(char)) {
regexStr += `\\`;
}
regexStr += char;
}
if (resolvedPattern.endsWith("*")) {
regexStr += DEFAULT_EXTENSIONS_RE_GROUP;
}
regexStr += "$";
return new RegExp(regexStr);
}
// src/parse.ts
async function parse$f(filename, options) {
const cache = options == null ? void 0 : options.cache;
if (cache == null ? void 0 : cache.has(filename)) {
return cache.get(filename);
}
let tsconfigFile;
if (options == null ? void 0 : options.resolveWithEmptyIfConfigNotFound) {
try {
tsconfigFile = await resolveTSConfig(filename) || await find(filename, options);
} catch (e) {
const notFoundResult = {
tsconfigFile: "no_tsconfig_file_found",
tsconfig: {}
};
cache == null ? void 0 : cache.set(filename, notFoundResult);
return notFoundResult;
}
} else {
tsconfigFile = await resolveTSConfig(filename) || await find(filename, options);
}
let result;
if (cache == null ? void 0 : cache.has(tsconfigFile)) {
result = cache.get(tsconfigFile);
} else {
result = await parseFile$1(tsconfigFile, cache);
await Promise.all([parseExtends(result, cache), parseReferences(result, cache)]);
cache == null ? void 0 : cache.set(tsconfigFile, result);
}
result = resolveSolutionTSConfig(filename, result);
cache == null ? void 0 : cache.set(filename, result);
return result;
}
async function parseFile$1(tsconfigFile, cache) {
if (cache == null ? void 0 : cache.has(tsconfigFile)) {
return cache.get(tsconfigFile);
}
try {
const tsconfigJson = await promises$1.readFile(tsconfigFile, "utf-8");
const json = toJson(tsconfigJson);
const result = {
tsconfigFile,
tsconfig: normalizeTSConfig(JSON.parse(json), require$$0$4.dirname(tsconfigFile))
};
cache == null ? void 0 : cache.set(tsconfigFile, result);
return result;
} catch (e) {
throw new TSConfckParseError(
`parsing ${tsconfigFile} failed: ${e}`,
"PARSE_FILE",
tsconfigFile,
e
);
}
}
function normalizeTSConfig(tsconfig, dir) {
var _a;
if (((_a = tsconfig.compilerOptions) == null ? void 0 : _a.baseUrl) && !require$$0$4.isAbsolute(tsconfig.compilerOptions.baseUrl)) {
tsconfig.compilerOptions.baseUrl = resolve2posix(dir, tsconfig.compilerOptions.baseUrl);
}
return tsconfig;
}
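// Illustrative example (comment only): for a tsconfig at "/proj/tsconfig.json" with
// compilerOptions.baseUrl set to "./src", normalizeTSConfig rewrites baseUrl to the
// posix-resolved absolute path (roughly "/proj/src"), so later path math does not
// depend on the process cwd. resolve2posix is defined elsewhere in this bundle.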
async function parseReferences(result, cache) {
if (!result.tsconfig.references) {
return;
}
const referencedFiles = resolveReferencedTSConfigFiles(result);
const referenced = await Promise.all(referencedFiles.map((file) => parseFile$1(file, cache)));
await Promise.all(referenced.map((ref) => parseExtends(ref, cache)));
result.referenced = referenced;
}
async function parseExtends(result, cache) {
if (!result.tsconfig.extends) {
return;
}
const extended = [
{ tsconfigFile: result.tsconfigFile, tsconfig: JSON.parse(JSON.stringify(result.tsconfig)) }
];
let pos = 0;
const extendsPath = [];
let currentBranchDepth = 0;
while (pos < extended.length) {
const extending = extended[pos];
extendsPath.push(extending.tsconfigFile);
if (extending.tsconfig.extends) {
currentBranchDepth += 1;
let resolvedExtends;
if (!Array.isArray(extending.tsconfig.extends)) {
resolvedExtends = [resolveExtends(extending.tsconfig.extends, extending.tsconfigFile)];
} else {
resolvedExtends = extending.tsconfig.extends.reverse().map((ex) => resolveExtends(ex, extending.tsconfigFile));
}
const circularExtends = resolvedExtends.find(
(tsconfigFile) => extendsPath.includes(tsconfigFile)
);
if (circularExtends) {
const circle = extendsPath.concat([circularExtends]).join(" -> ");
throw new TSConfckParseError(
`Circular dependency in "extends": ${circle}`,
"EXTENDS_CIRCULAR",
result.tsconfigFile
);
}
extended.splice(
pos + 1,
0,
...await Promise.all(resolvedExtends.map((file) => parseFile$1(file, cache)))
);
} else {
extendsPath.splice(-currentBranchDepth);
currentBranchDepth = 0;
}
pos = pos + 1;
}
result.extended = extended;
for (const ext of result.extended.slice(1)) {
extendTSConfig(result, ext);
}
}
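// Illustrative walk-through (comment only): given a config with
//   { "extends": "./tsconfig.base.json" }
// whose base config extends yet another file, the `extended` list starts with a deep
// copy of the root config and is expanded in place as each "extends" target is parsed;
// circular chains are rejected with an EXTENDS_CIRCULAR error. extendTSConfig() is then
// applied to every entry after the first, and because it only copies keys the extending
// config has not defined yet, configs earlier in the list win over the ones they extend.
// Array "extends" entries are reversed before insertion so that entries appearing later
// in the array end up closer to the extending config.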
function resolveExtends(extended, from) {
let error;
try {
return createRequire$2(from).resolve(extended);
} catch (e) {
error = e;
}
if (!require$$0$4.isAbsolute(extended) && !extended.startsWith("./") && !extended.startsWith("../")) {
try {
const fallbackExtended = require$$0$4.join(extended, "tsconfig.json");
return createRequire$2(from).resolve(fallbackExtended);
} catch (e) {
error = e;
}
}
throw new TSConfckParseError(
`failed to resolve "extends":"${extended}" in ${from}`,
"EXTENDS_RESOLVE",
from,
error
);
}
var EXTENDABLE_KEYS = [
"compilerOptions",
"files",
"include",
"exclude",
"watchOptions",
"compileOnSave",
"typeAcquisition",
"buildOptions"
];
function extendTSConfig(extending, extended) {
const extendingConfig = extending.tsconfig;
const extendedConfig = extended.tsconfig;
const relativePath = native2posix(
require$$0$4.relative(require$$0$4.dirname(extending.tsconfigFile), require$$0$4.dirname(extended.tsconfigFile))
);
for (const key of Object.keys(extendedConfig).filter((key2) => EXTENDABLE_KEYS.includes(key2))) {
if (key === "compilerOptions") {
if (!extendingConfig.compilerOptions) {
extendingConfig.compilerOptions = {};
}
for (const option of Object.keys(extendedConfig.compilerOptions)) {
if (Object.prototype.hasOwnProperty.call(extendingConfig.compilerOptions, option)) {
continue;
}
extendingConfig.compilerOptions[option] = rebaseRelative(
option,
extendedConfig.compilerOptions[option],
relativePath
);
}
} else if (extendingConfig[key] === void 0) {
if (key === "watchOptions") {
extendingConfig.watchOptions = {};
for (const option of Object.keys(extendedConfig.watchOptions)) {
extendingConfig.watchOptions[option] = rebaseRelative(
option,
extendedConfig.watchOptions[option],
relativePath
);
}
} else {
extendingConfig[key] = rebaseRelative(key, extendedConfig[key], relativePath);
}
}
}
}
var REBASE_KEYS = [
// root
"files",
"include",
"exclude",
// compilerOptions
"baseUrl",
"rootDir",
"rootDirs",
"typeRoots",
"outDir",
"outFile",
"declarationDir",
// watchOptions
"excludeDirectories",
"excludeFiles"
];
function rebaseRelative(key, value, prependPath) {
if (!REBASE_KEYS.includes(key)) {
return value;
}
if (Array.isArray(value)) {
return value.map((x) => rebasePath(x, prependPath));
} else {
return rebasePath(value, prependPath);
}
}
function rebasePath(value, prependPath) {
if (require$$0$4.isAbsolute(value)) {
return value;
} else {
return require$$0$4.posix.normalize(require$$0$4.posix.join(prependPath, value));
}
}
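// Illustrative example (comment only): when "/proj/app/tsconfig.json" extends
// "/proj/shared/tsconfig.base.json", relativePath is "../shared", so an inherited
// "include": ["src"] is rebased to "../shared/src" and an inherited "outDir": "./dist"
// becomes "../shared/dist", while absolute paths are returned unchanged.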
var TSConfckParseError = class _TSConfckParseError extends Error {
constructor(message, code, tsconfigFile, cause) {
super(message);
Object.setPrototypeOf(this, _TSConfckParseError.prototype);
this.name = _TSConfckParseError.name;
this.code = code;
this.cause = cause;
this.tsconfigFile = tsconfigFile;
}
};
// https://github.com/vitejs/vite/issues/2820#issuecomment-812495079
const ROOT_FILES = [
// '.git',
// https://pnpm.io/workspaces/
'pnpm-workspace.yaml',
// https://rushjs.io/pages/advanced/config_files/
// 'rush.json',
// https://nx.dev/latest/react/getting-started/nx-setup
// 'workspace.json',
// 'nx.json',
// https://github.com/lerna/lerna#lernajson
'lerna.json',
];
// npm: https://docs.npmjs.com/cli/v7/using-npm/workspaces#installing-workspaces
// yarn: https://classic.yarnpkg.com/en/docs/workspaces/#toc-how-to-use-it
function hasWorkspacePackageJSON(root) {
const path = join$2(root, 'package.json');
if (!isFileReadable(path)) {
return false;
}
try {
const content = JSON.parse(fs$l.readFileSync(path, 'utf-8')) || {};
return !!content.workspaces;
}
catch {
return false;
}
}
function hasRootFile(root) {
return ROOT_FILES.some((file) => fs$l.existsSync(join$2(root, file)));
}
function hasPackageJSON(root) {
const path = join$2(root, 'package.json');
return fs$l.existsSync(path);
}
/**
* Search up for the nearest `package.json`
*/
function searchForPackageRoot(current, root = current) {
if (hasPackageJSON(current))
return current;
const dir = dirname$2(current);
// reach the fs root
if (!dir || dir === current)
return root;
return searchForPackageRoot(dir, root);
}
/**
* Search up for the nearest workspace root
*/
function searchForWorkspaceRoot(current, root = searchForPackageRoot(current)) {
if (hasRootFile(current))
return current;
if (hasWorkspacePackageJSON(current))
return current;
const dir = dirname$2(current);
// reach the fs root
if (!dir || dir === current)
return root;
return searchForWorkspaceRoot(dir, root);
}
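// Illustrative example (comment only): for a file under "/repo/packages/app/src",
// searchForPackageRoot() walks up until it finds a package.json (say
// "/repo/packages/app"), and searchForWorkspaceRoot() keeps walking up and returns
// "/repo" if that directory contains pnpm-workspace.yaml, lerna.json, or a
// package.json with a "workspaces" field; otherwise it falls back to the package root.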
const debug$f = createDebugger('vite:esbuild');
const INJECT_HELPERS_IIFE_RE = /^(.*?)((?:const|var)\s+\S+\s*=\s*function\s*\([^)]*\)\s*\{\s*"use strict";)/s;
const INJECT_HELPERS_UMD_RE = /^(.*?)(\(function\([^)]*\)\s*\{.+?amd.+?function\([^)]*\)\s*\{\s*"use strict";)/s;
const validExtensionRE = /\.\w+$/;
const jsxExtensionsRE = /\.(?:j|t)sx\b/;
let server;
async function transformWithEsbuild(code, filename, options, inMap) {
let loader = options?.loader;
if (!loader) {
// if the id ends with a valid ext, use it (e.g. vue blocks)
// otherwise, clean up the query before checking the ext
const ext = path$o
.extname(validExtensionRE.test(filename) ? filename : cleanUrl(filename))
.slice(1);
if (ext === 'cjs' || ext === 'mjs') {
loader = 'js';
}
else if (ext === 'cts' || ext === 'mts') {
loader = 'ts';
}
else {
loader = ext;
}
}
let tsconfigRaw = options?.tsconfigRaw;
const fallbackSupported = {};
// if options provide tsconfigRaw as a string, it takes the highest precedence
if (typeof tsconfigRaw !== 'string') {
// these fields would affect the compilation result
// https://esbuild.github.io/content-types/#tsconfig-json
const meaningfulFields = [
'alwaysStrict',
'experimentalDecorators',
'importsNotUsedAsValues',
'jsx',
'jsxFactory',
'jsxFragmentFactory',
'jsxImportSource',
'preserveValueImports',
'target',
'useDefineForClassFields',
'verbatimModuleSyntax',
];
const compilerOptionsForFile = {};
if (loader === 'ts' || loader === 'tsx') {
const loadedTsconfig = await loadTsconfigJsonForFile(filename);
const loadedCompilerOptions = loadedTsconfig.compilerOptions ?? {};
for (const field of meaningfulFields) {
if (field in loadedCompilerOptions) {
// @ts-expect-error TypeScript can't tell they are of the same type
compilerOptionsForFile[field] = loadedCompilerOptions[field];
}
}
}
const compilerOptions = {
...compilerOptionsForFile,
...tsconfigRaw?.compilerOptions,
};
// esbuild uses `useDefineForClassFields: true` when `tsconfig.compilerOptions.target` isn't declared
// but we want `useDefineForClassFields: false` when `tsconfig.compilerOptions.target` isn't declared
// to align with TypeScript's behavior
if (compilerOptions.useDefineForClassFields === undefined &&
compilerOptions.target === undefined) {
compilerOptions.useDefineForClassFields = false;
}
// esbuild v0.18 only transforms decorators when `experimentalDecorators` is set to `true`.
// To preserve compat with the esbuild breaking change, we set `experimentalDecorators` to
// `true` by default if it's unset.
// TODO: Remove this in Vite 5
if (compilerOptions.experimentalDecorators === undefined) {
compilerOptions.experimentalDecorators = true;
}
// Compat with esbuild 0.17 where static properties are transpiled to
// static blocks when `useDefineForClassFields` is false. Its support
// is not great yet, so disable it for now.
// TODO: Remove this in Vite 5, don't pass hardcoded `esnext` target
// to `transformWithEsbuild` in the esbuild plugin.
if (compilerOptions.useDefineForClassFields !== true) {
fallbackSupported['class-static-blocks'] = false;
}
// esbuild uses tsconfig fields when both the normal options and tsconfig are set
// but we want to prioritize the normal options
if (options) {
options.jsx && (compilerOptions.jsx = undefined);
options.jsxFactory && (compilerOptions.jsxFactory = undefined);
options.jsxFragment && (compilerOptions.jsxFragmentFactory = undefined);
options.jsxImportSource && (compilerOptions.jsxImportSource = undefined);
}
tsconfigRaw = {
...tsconfigRaw,
compilerOptions,
};
}
const resolvedOptions = {
sourcemap: true,
// ensure source file name contains full query
sourcefile: filename,
...options,
loader,
tsconfigRaw,
supported: {
...fallbackSupported,
...options?.supported,
},
};
// Some projects in the ecosystem are calling this function with an ESBuildOptions
// object and esbuild throws an error for extra fields
// @ts-expect-error include exists in ESBuildOptions
delete resolvedOptions.include;
// @ts-expect-error exclude exists in ESBuildOptions
delete resolvedOptions.exclude;
// @ts-expect-error jsxInject exists in ESBuildOptions
delete resolvedOptions.jsxInject;
try {
const result = await transform$1(code, resolvedOptions);
let map;
if (inMap && resolvedOptions.sourcemap) {
const nextMap = JSON.parse(result.map);
nextMap.sourcesContent = [];
map = combineSourcemaps(filename, [
nextMap,
inMap,
]);
}
else {
map =
resolvedOptions.sourcemap && resolvedOptions.sourcemap !== 'inline'
? JSON.parse(result.map)
: { mappings: '' };
}
return {
...result,
map,
};
}
catch (e) {
debug$f?.(`esbuild error with options used: `, resolvedOptions);
// patch error information
if (e.errors) {
e.frame = '';
e.errors.forEach((m) => {
if (m.text === 'Experimental decorators are not currently enabled') {
m.text +=
'. Vite 4.4+ now uses esbuild 0.18 and you need to enable them by adding "experimentalDecorators": true in your "tsconfig.json" file.';
}
e.frame += `\n` + prettifyMessage(m, code);
});
e.loc = e.errors[0].location;
}
throw e;
}
}
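// Illustrative usage (comment only): the plugins below call this as
//   const result = await transformWithEsbuild(code, id, transformOptions)
// and consume result.code, result.map and result.warnings. The tsconfig.json on disk is
// only consulted for 'ts'/'tsx' loaders, while a string `tsconfigRaw` passed via options
// skips the merging above and is forwarded to esbuild untouched.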
function esbuildPlugin(config) {
const options = config.esbuild;
const { jsxInject, include, exclude, ...esbuildTransformOptions } = options;
const filter = createFilter(include || /\.(m?ts|[jt]sx)$/, exclude || /\.js$/);
// Remove optimization options for dev as we only need to transpile them,
// and for build as the final optimization is in `buildEsbuildPlugin`
const transformOptions = {
target: 'esnext',
charset: 'utf8',
...esbuildTransformOptions,
minify: false,
minifyIdentifiers: false,
minifySyntax: false,
minifyWhitespace: false,
treeShaking: false,
// keepNames is not needed when minify is disabled.
// Also transforming multiple times with keepNames enabled breaks
// tree-shaking. (#9164)
keepNames: false,
};
initTSConfck(config.root);
return {
name: 'vite:esbuild',
configureServer(_server) {
server = _server;
server.watcher
.on('add', reloadOnTsconfigChange)
.on('change', reloadOnTsconfigChange)
.on('unlink', reloadOnTsconfigChange);
},
buildEnd() {
// recycle server to avoid preventing Node self-exit (#6815)
server = null;
},
async transform(code, id) {
if (filter(id) || filter(cleanUrl(id))) {
const result = await transformWithEsbuild(code, id, transformOptions);
if (result.warnings.length) {
result.warnings.forEach((m) => {
this.warn(prettifyMessage(m, code));
});
}
if (jsxInject && jsxExtensionsRE.test(id)) {
result.code = jsxInject + ';' + result.code;
}
return {
code: result.code,
map: result.map,
};
}
},
};
}
const rollupToEsbuildFormatMap = {
es: 'esm',
cjs: 'cjs',
// passing `var Lib = (() => {})()` to esbuild with format = "iife"
// will turn it to `(() => { var Lib = (() => {})() })()`,
// so we remove the format config to tell esbuild not to do this
//
// although esbuild doesn't change the format, there is still a possibility
// that `{ treeShaking: true }` removes a top-level no-side-effect variable
// like `var Lib = 1`, which becomes empty output after esbuild transforms it,
// but thankfully rollup does not do this optimization for now
iife: undefined,
};
const buildEsbuildPlugin = (config) => {
initTSConfck(config.root);
return {
name: 'vite:esbuild-transpile',
async renderChunk(code, chunk, opts) {
// @ts-expect-error injected by @vitejs/plugin-legacy
if (opts.__vite_skip_esbuild__) {
return null;
}
const options = resolveEsbuildTranspileOptions(config, opts.format);
if (!options) {
return null;
}
const res = await transformWithEsbuild(code, chunk.fileName, options);
if (config.build.lib) {
// #7188, esbuild adds helpers out of the UMD and IIFE wrappers, and the
// names are minified, potentially causing collisions with other globals.
// We use a regex to inject the helpers inside the wrappers.
// We don't need to create a MagicString here because both the helpers and
// the headers don't modify the sourcemap
const injectHelpers = opts.format === 'umd'
? INJECT_HELPERS_UMD_RE
: opts.format === 'iife'
? INJECT_HELPERS_IIFE_RE
: undefined;
if (injectHelpers) {
res.code = res.code.replace(injectHelpers, (_, helpers, header) => header + helpers);
}
}
return res;
},
};
};
function resolveEsbuildTranspileOptions(config, format) {
const target = config.build.target;
const minify = config.build.minify === 'esbuild';
if ((!target || target === 'esnext') && !minify) {
return null;
}
// Do not minify whitespace for ES lib output since that would remove
// pure annotations and break tree-shaking
// https://github.com/vuejs/core/issues/2860#issuecomment-926882793
const isEsLibBuild = config.build.lib && format === 'es';
const esbuildOptions = config.esbuild || {};
const options = {
charset: 'utf8',
...esbuildOptions,
target: target || undefined,
format: rollupToEsbuildFormatMap[format],
// the final build should always support dynamic import and import.meta.
// if they need to be polyfilled, plugin-legacy should be used.
// plugin-legacy detects these two features when checking for modern code.
supported: {
'dynamic-import': true,
'import-meta': true,
...esbuildOptions.supported,
},
};
// If no minify, disable all minify options
if (!minify) {
return {
...options,
minify: false,
minifyIdentifiers: false,
minifySyntax: false,
minifyWhitespace: false,
treeShaking: false,
};
}
// If the user enabled fine-grained minify options, minify with their options instead
if (options.minifyIdentifiers != null ||
options.minifySyntax != null ||
options.minifyWhitespace != null) {
if (isEsLibBuild) {
// Disable minify whitespace as it breaks tree-shaking
return {
...options,
minify: false,
minifyIdentifiers: options.minifyIdentifiers ?? true,
minifySyntax: options.minifySyntax ?? true,
minifyWhitespace: false,
treeShaking: true,
};
}
else {
return {
...options,
minify: false,
minifyIdentifiers: options.minifyIdentifiers ?? true,
minifySyntax: options.minifySyntax ?? true,
minifyWhitespace: options.minifyWhitespace ?? true,
treeShaking: true,
};
}
}
// Else apply default minify options
if (isEsLibBuild) {
// Minify all except whitespace as it breaks tree-shaking
return {
...options,
minify: false,
minifyIdentifiers: true,
minifySyntax: true,
minifyWhitespace: false,
treeShaking: true,
};
}
else {
return {
...options,
minify: true,
treeShaking: true,
};
}
}
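// Summary of the decision above (comment only): with target 'esnext' (or unset) and a
// non-esbuild minifier the function returns null, so renderChunk skips the transform;
// ES lib builds always keep minifyWhitespace off so pure annotations survive for
// tree-shaking; otherwise the user's fine-grained minify* flags win, and full
// `minify: true` is only used when no fine-grained flag was set and the build is not
// an ES lib build.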
function prettifyMessage(m, code) {
let res = colors$1.yellow(m.text);
if (m.location) {
const lines = code.split(/\r?\n/g);
const line = Number(m.location.line);
const column = Number(m.location.column);
const offset = lines
.slice(0, line - 1)
.map((l) => l.length)
.reduce((total, l) => total + l + 1, 0) + column;
res += `\n` + generateCodeFrame(code, offset, offset + 1);
}
return res + `\n`;
}
let tsconfckRoot;
let tsconfckParseOptions = { resolveWithEmptyIfConfigNotFound: true };
function initTSConfck(root, force = false) {
// bail if already cached
if (!force && root === tsconfckRoot)
return;
const workspaceRoot = searchForWorkspaceRoot(root);
tsconfckRoot = root;
tsconfckParseOptions = initTSConfckParseOptions(workspaceRoot);
// cached as the options value itself once the promise is resolved
tsconfckParseOptions.then((options) => {
if (root === tsconfckRoot) {
tsconfckParseOptions = options;
}
});
}
async function initTSConfckParseOptions(workspaceRoot) {
const start = debug$f ? performance.now() : 0;
const options = {
cache: new Map(),
root: workspaceRoot,
tsConfigPaths: new Set(await findAll(workspaceRoot, {
skip: (dir) => dir === 'node_modules' || dir === '.git',
})),
resolveWithEmptyIfConfigNotFound: true,
};
debug$f?.(timeFrom(start), 'tsconfck init', colors$1.dim(workspaceRoot));
return options;
}
async function loadTsconfigJsonForFile(filename) {
try {
const result = await parse$f(filename, await tsconfckParseOptions);
// tsconfig could be out of root, make sure it is watched on dev
if (server && result.tsconfigFile !== 'no_tsconfig_file_found') {
ensureWatchedFile(server.watcher, result.tsconfigFile, server.config.root);
}
return result.tsconfig;
}
catch (e) {
if (e instanceof TSConfckParseError) {
// tsconfig could be out of root, make sure it is watched on dev
if (server && e.tsconfigFile) {
ensureWatchedFile(server.watcher, e.tsconfigFile, server.config.root);
}
}
throw e;
}
}
async function reloadOnTsconfigChange(changedFile) {
// server could be closed externally after a file change is detected
if (!server)
return;
// any tsconfig.json that's added in the workspace could be closer to a code file than a previously cached one
// any json file in the tsconfig cache could have been used to compile ts
if (path$o.basename(changedFile) === 'tsconfig.json' ||
(changedFile.endsWith('.json') &&
(await tsconfckParseOptions)?.cache?.has(changedFile))) {
server.config.logger.info(`changed tsconfig file detected: ${changedFile} - Clearing cache and forcing full-reload to ensure TypeScript is compiled with updated config values.`, { clear: server.config.clearScreen, timestamp: true });
// clear module graph to remove code compiled with outdated config
server.moduleGraph.invalidateAll();
// reset tsconfck so that recompilation works with up-to-date configs
initTSConfck(server.config.root, true);
// server may not be available if vite config is updated at the same time
if (server) {
// force full reload
server.ws.send({
type: 'full-reload',
path: '*',
});
}
}
}
var dist$1 = {};
var __importDefault = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(dist$1, "__esModule", { value: true });
var Worker_1 = dist$1.Worker = void 0;
const os_1 = __importDefault(require$$2);
const worker_threads_1 = require$$1;
class Worker {
constructor(fn, options = {}) {
this.code = genWorkerCode(fn);
this.max = options.max || Math.max(1, os_1.default.cpus().length - 1);
this.pool = [];
this.idlePool = [];
this.queue = [];
}
async run(...args) {
const worker = await this._getAvailableWorker();
return new Promise((resolve, reject) => {
worker.currentResolve = resolve;
worker.currentReject = reject;
worker.postMessage(args);
});
}
stop() {
this.pool.forEach((w) => w.unref());
this.queue.forEach(([_, reject]) => reject(new Error('Main worker pool stopped before a worker was available.')));
this.pool = [];
this.idlePool = [];
this.queue = [];
}
async _getAvailableWorker() {
// has idle one?
if (this.idlePool.length) {
return this.idlePool.shift();
}
// can spawn more?
if (this.pool.length < this.max) {
const worker = new worker_threads_1.Worker(this.code, { eval: true });
worker.on('message', (res) => {
worker.currentResolve && worker.currentResolve(res);
worker.currentResolve = null;
this._assignDoneWorker(worker);
});
worker.on('error', (err) => {
worker.currentReject && worker.currentReject(err);
worker.currentReject = null;
});
worker.on('exit', (code) => {
const i = this.pool.indexOf(worker);
if (i > -1)
this.pool.splice(i, 1);
if (code !== 0 && worker.currentReject) {
worker.currentReject(new Error(`Worker stopped with non-zero exit code ${code}`));
worker.currentReject = null;
}
});
this.pool.push(worker);
return worker;
}
// no one is available, we have to wait
let resolve;
let reject;
const onWorkerAvailablePromise = new Promise((r, rj) => {
resolve = r;
reject = rj;
});
this.queue.push([resolve, reject]);
return onWorkerAvailablePromise;
}
_assignDoneWorker(worker) {
// someone's waiting already?
if (this.queue.length) {
const [resolve] = this.queue.shift();
resolve(worker);
return;
}
// take a rest.
this.idlePool.push(worker);
}
}
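// Illustrative usage (comment only, mirroring the terser plugin below; `heavyWork` is a
// stand-in for any self-contained function):
//   const pool = new Worker_1(async (input) => heavyWork(input), { max: 2 })
//   const result = await pool.run(someInput)
//   pool.stop()
// Calls go to an idle worker_threads Worker; a new one is spawned while the pool is
// below `max` (default: cpu count - 1, at least 1) and callers queue once saturated.
// The fn is stringified into the worker via genWorkerCode, so it must not close over
// outer variables.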
Worker_1 = dist$1.Worker = Worker;
function genWorkerCode(fn) {
return `
const doWork = ${fn.toString()}
const { parentPort } = require('worker_threads')
parentPort.on('message', async (args) => {
const res = await doWork(...args)
parentPort.postMessage(res)
})
`;
}
let terserPath;
const loadTerserPath = (root) => {
if (terserPath)
return terserPath;
try {
terserPath = requireResolveFromRootWithFallback(root, 'terser');
}
catch (e) {
if (e.code === 'MODULE_NOT_FOUND') {
throw new Error('terser not found. Since Vite v3, terser has become an optional dependency. You need to install it.');
}
else {
const message = new Error(`terser failed to load:\n${e.message}`);
message.stack = e.stack + '\n' + message.stack;
throw message;
}
}
return terserPath;
};
function terserPlugin(config) {
const makeWorker = () => new Worker_1(async (terserPath, code, options) => {
// test fails when using `import`. maybe related: https://github.com/nodejs/node/issues/43205
// eslint-disable-next-line no-restricted-globals -- this function runs inside cjs
const terser = require(terserPath);
return terser.minify(code, options);
});
let worker;
return {
name: 'vite:terser',
async renderChunk(code, _chunk, outputOptions) {
// This plugin is included for any non-false value of config.build.minify,
// so that normal chunks can use the preferred minifier, and legacy chunks
// can use terser.
if (config.build.minify !== 'terser' &&
// @ts-expect-error injected by @vitejs/plugin-legacy
!outputOptions.__vite_force_terser__) {
return null;
}
// Do not minify ES lib output since that would remove pure annotations
// and break tree-shaking.
if (config.build.lib && outputOptions.format === 'es') {
return null;
}
// Lazy load worker.
worker || (worker = makeWorker());
const terserPath = loadTerserPath(config.root);
const res = await worker.run(terserPath, code, {
safari10: true,
...config.build.terserOptions,
sourceMap: !!outputOptions.sourcemap,
module: outputOptions.format.startsWith('es'),
toplevel: outputOptions.format === 'cjs',
});
return {
code: res.code,
map: res.map,
};
},
closeBundle() {
worker?.stop();
},
};
}
var json = JSON;
var isArray$1 = Array.isArray || function (x) {
return {}.toString.call(x) === '[object Array]';
};
var objectKeys = Object.keys || function (obj) {
var has = Object.prototype.hasOwnProperty || function () { return true; };
var keys = [];
for (var key in obj) {
if (has.call(obj, key)) { keys.push(key); }
}
return keys;
};
var jsonStableStringify = function (obj, opts) {
if (!opts) { opts = {}; }
if (typeof opts === 'function') { opts = { cmp: opts }; }
var space = opts.space || '';
if (typeof space === 'number') { space = Array(space + 1).join(' '); }
var cycles = typeof opts.cycles === 'boolean' ? opts.cycles : false;
var replacer = opts.replacer || function (key, value) { return value; };
var cmp = opts.cmp && (function (f) {
return function (node) {
return function (a, b) {
var aobj = { key: a, value: node[a] };
var bobj = { key: b, value: node[b] };
return f(aobj, bobj);
};
};
}(opts.cmp));
var seen = [];
return (function stringify(parent, key, node, level) {
var indent = space ? '\n' + new Array(level + 1).join(space) : '';
var colonSeparator = space ? ': ' : ':';
if (node && node.toJSON && typeof node.toJSON === 'function') {
node = node.toJSON();
}
node = replacer.call(parent, key, node);
if (node === undefined) {
return;
}
if (typeof node !== 'object' || node === null) {
return json.stringify(node);
}
if (isArray$1(node)) {
var out = [];
for (var i = 0; i < node.length; i++) {
var item = stringify(node, i, node[i], level + 1) || json.stringify(null);
out.push(indent + space + item);
}
return '[' + out.join(',') + indent + ']';
}
if (seen.indexOf(node) !== -1) {
if (cycles) { return json.stringify('__cycle__'); }
throw new TypeError('Converting circular structure to JSON');
} else { seen.push(node); }
var keys = objectKeys(node).sort(cmp && cmp(node));
var out = [];
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
var value = stringify(node, key, node[key], level + 1);
if (!value) { continue; }
var keyValue = json.stringify(key)
+ colonSeparator
+ value;
out.push(indent + space + keyValue);
}
seen.splice(seen.indexOf(node), 1);
return '{' + out.join(',') + indent + '}';
}({ '': obj }, '', obj, 0));
};
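// Illustrative example (comment only): jsonStableStringify({ b: 1, a: { d: 2, c: 3 } })
// produces '{"a":{"c":3,"d":2},"b":1}'; keys are emitted in sorted order at every
// level, so structurally-equal objects always serialize to the same string. Circular
// structures throw unless opts.cycles is true, in which case they serialize as
// '"__cycle__"'.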
var jsonStableStringify$1 = /*@__PURE__*/getDefaultExportFromCjs(jsonStableStringify);
const mimes$1 = {
"ez": "application/andrew-inset",
"aw": "application/applixware",
"atom": "application/atom+xml",
"atomcat": "application/atomcat+xml",
"atomdeleted": "application/atomdeleted+xml",
"atomsvc": "application/atomsvc+xml",
"dwd": "application/atsc-dwd+xml",
"held": "application/atsc-held+xml",
"rsat": "application/atsc-rsat+xml",
"bdoc": "application/bdoc",
"xcs": "application/calendar+xml",
"ccxml": "application/ccxml+xml",
"cdfx": "application/cdfx+xml",
"cdmia": "application/cdmi-capability",
"cdmic": "application/cdmi-container",
"cdmid": "application/cdmi-domain",
"cdmio": "application/cdmi-object",
"cdmiq": "application/cdmi-queue",
"cu": "application/cu-seeme",
"mpd": "application/dash+xml",
"davmount": "application/davmount+xml",
"dbk": "application/docbook+xml",
"dssc": "application/dssc+der",
"xdssc": "application/dssc+xml",
"es": "application/ecmascript",
"ecma": "application/ecmascript",
"emma": "application/emma+xml",
"emotionml": "application/emotionml+xml",
"epub": "application/epub+zip",
"exi": "application/exi",
"fdt": "application/fdt+xml",
"pfr": "application/font-tdpfr",
"geojson": "application/geo+json",
"gml": "application/gml+xml",
"gpx": "application/gpx+xml",
"gxf": "application/gxf",
"gz": "application/gzip",
"hjson": "application/hjson",
"stk": "application/hyperstudio",
"ink": "application/inkml+xml",
"inkml": "application/inkml+xml",
"ipfix": "application/ipfix",
"its": "application/its+xml",
"jar": "application/java-archive",
"war": "application/java-archive",
"ear": "application/java-archive",
"ser": "application/java-serialized-object",
"class": "application/java-vm",
"js": "application/javascript",
"mjs": "application/javascript",
"json": "application/json",
"map": "application/json",
"json5": "application/json5",
"jsonml": "application/jsonml+json",
"jsonld": "application/ld+json",
"lgr": "application/lgr+xml",
"lostxml": "application/lost+xml",
"hqx": "application/mac-binhex40",
"cpt": "application/mac-compactpro",
"mads": "application/mads+xml",
"webmanifest": "application/manifest+json",
"mrc": "application/marc",
"mrcx": "application/marcxml+xml",
"ma": "application/mathematica",
"nb": "application/mathematica",
"mb": "application/mathematica",
"mathml": "application/mathml+xml",
"mbox": "application/mbox",
"mscml": "application/mediaservercontrol+xml",
"metalink": "application/metalink+xml",
"meta4": "application/metalink4+xml",
"mets": "application/mets+xml",
"maei": "application/mmt-aei+xml",
"musd": "application/mmt-usd+xml",
"mods": "application/mods+xml",
"m21": "application/mp21",
"mp21": "application/mp21",
"mp4s": "application/mp4",
"m4p": "application/mp4",
"doc": "application/msword",
"dot": "application/msword",
"mxf": "application/mxf",
"nq": "application/n-quads",
"nt": "application/n-triples",
"cjs": "application/node",
"bin": "application/octet-stream",
"dms": "application/octet-stream",
"lrf": "application/octet-stream",
"mar": "application/octet-stream",
"so": "application/octet-stream",
"dist": "application/octet-stream",
"distz": "application/octet-stream",
"pkg": "application/octet-stream",
"bpk": "application/octet-stream",
"dump": "application/octet-stream",
"elc": "application/octet-stream",
"deploy": "application/octet-stream",
"exe": "application/octet-stream",
"dll": "application/octet-stream",
"deb": "application/octet-stream",
"dmg": "application/octet-stream",
"iso": "application/octet-stream",
"img": "application/octet-stream",
"msi": "application/octet-stream",
"msp": "application/octet-stream",
"msm": "application/octet-stream",
"buffer": "application/octet-stream",
"oda": "application/oda",
"opf": "application/oebps-package+xml",
"ogx": "application/ogg",
"omdoc": "application/omdoc+xml",
"onetoc": "application/onenote",
"onetoc2": "application/onenote",
"onetmp": "application/onenote",
"onepkg": "application/onenote",
"oxps": "application/oxps",
"relo": "application/p2p-overlay+xml",
"xer": "application/patch-ops-error+xml",
"pdf": "application/pdf",
"pgp": "application/pgp-encrypted",
"asc": "application/pgp-signature",
"sig": "application/pgp-signature",
"prf": "application/pics-rules",
"p10": "application/pkcs10",
"p7m": "application/pkcs7-mime",
"p7c": "application/pkcs7-mime",
"p7s": "application/pkcs7-signature",
"p8": "application/pkcs8",
"ac": "application/pkix-attr-cert",
"cer": "application/pkix-cert",
"crl": "application/pkix-crl",
"pkipath": "application/pkix-pkipath",
"pki": "application/pkixcmp",
"pls": "application/pls+xml",
"ai": "application/postscript",
"eps": "application/postscript",
"ps": "application/postscript",
"provx": "application/provenance+xml",
"cww": "application/prs.cww",
"pskcxml": "application/pskc+xml",
"raml": "application/raml+yaml",
"rdf": "application/rdf+xml",
"owl": "application/rdf+xml",
"rif": "application/reginfo+xml",
"rnc": "application/relax-ng-compact-syntax",
"rl": "application/resource-lists+xml",
"rld": "application/resource-lists-diff+xml",
"rs": "application/rls-services+xml",
"rapd": "application/route-apd+xml",
"sls": "application/route-s-tsid+xml",
"rusd": "application/route-usd+xml",
"gbr": "application/rpki-ghostbusters",
"mft": "application/rpki-manifest",
"roa": "application/rpki-roa",
"rsd": "application/rsd+xml",
"rss": "application/rss+xml",
"rtf": "application/rtf",
"sbml": "application/sbml+xml",
"scq": "application/scvp-cv-request",
"scs": "application/scvp-cv-response",
"spq": "application/scvp-vp-request",
"spp": "application/scvp-vp-response",
"sdp": "application/sdp",
"senmlx": "application/senml+xml",
"sensmlx": "application/sensml+xml",
"setpay": "application/set-payment-initiation",
"setreg": "application/set-registration-initiation",
"shf": "application/shf+xml",
"siv": "application/sieve",
"sieve": "application/sieve",
"smi": "application/smil+xml",
"smil": "application/smil+xml",
"rq": "application/sparql-query",
"srx": "application/sparql-results+xml",
"gram": "application/srgs",
"grxml": "application/srgs+xml",
"sru": "application/sru+xml",
"ssdl": "application/ssdl+xml",
"ssml": "application/ssml+xml",
"swidtag": "application/swid+xml",
"tei": "application/tei+xml",
"teicorpus": "application/tei+xml",
"tfi": "application/thraud+xml",
"tsd": "application/timestamped-data",
"toml": "application/toml",
"trig": "application/trig",
"ttml": "application/ttml+xml",
"ubj": "application/ubjson",
"rsheet": "application/urc-ressheet+xml",
"td": "application/urc-targetdesc+xml",
"vxml": "application/voicexml+xml",
"wasm": "application/wasm",
"wgt": "application/widget",
"hlp": "application/winhlp",
"wsdl": "application/wsdl+xml",
"wspolicy": "application/wspolicy+xml",
"xaml": "application/xaml+xml",
"xav": "application/xcap-att+xml",
"xca": "application/xcap-caps+xml",
"xdf": "application/xcap-diff+xml",
"xel": "application/xcap-el+xml",
"xns": "application/xcap-ns+xml",
"xenc": "application/xenc+xml",
"xhtml": "application/xhtml+xml",
"xht": "application/xhtml+xml",
"xlf": "application/xliff+xml",
"xml": "application/xml",
"xsl": "application/xml",
"xsd": "application/xml",
"rng": "application/xml",
"dtd": "application/xml-dtd",
"xop": "application/xop+xml",
"xpl": "application/xproc+xml",
"xslt": "application/xml",
"xspf": "application/xspf+xml",
"mxml": "application/xv+xml",
"xhvml": "application/xv+xml",
"xvml": "application/xv+xml",
"xvm": "application/xv+xml",
"yang": "application/yang",
"yin": "application/yin+xml",
"zip": "application/zip",
"3gpp": "video/3gpp",
"adp": "audio/adpcm",
"amr": "audio/amr",
"au": "audio/basic",
"snd": "audio/basic",
"mid": "audio/midi",
"midi": "audio/midi",
"kar": "audio/midi",
"rmi": "audio/midi",
"mxmf": "audio/mobile-xmf",
"mp3": "audio/mpeg",
"m4a": "audio/mp4",
"mp4a": "audio/mp4",
"mpga": "audio/mpeg",
"mp2": "audio/mpeg",
"mp2a": "audio/mpeg",
"m2a": "audio/mpeg",
"m3a": "audio/mpeg",
"oga": "audio/ogg",
"ogg": "audio/ogg",
"spx": "audio/ogg",
"opus": "audio/ogg",
"s3m": "audio/s3m",
"sil": "audio/silk",
"wav": "audio/wav",
"weba": "audio/webm",
"xm": "audio/xm",
"ttc": "font/collection",
"otf": "font/otf",
"ttf": "font/ttf",
"woff": "font/woff",
"woff2": "font/woff2",
"exr": "image/aces",
"apng": "image/apng",
"avif": "image/avif",
"bmp": "image/bmp",
"cgm": "image/cgm",
"drle": "image/dicom-rle",
"emf": "image/emf",
"fits": "image/fits",
"g3": "image/g3fax",
"gif": "image/gif",
"heic": "image/heic",
"heics": "image/heic-sequence",
"heif": "image/heif",
"heifs": "image/heif-sequence",
"hej2": "image/hej2k",
"hsj2": "image/hsj2",
"ief": "image/ief",
"jls": "image/jls",
"jp2": "image/jp2",
"jpg2": "image/jp2",
"jpeg": "image/jpeg",
"jpg": "image/jpeg",
"jpe": "image/jpeg",
"jph": "image/jph",
"jhc": "image/jphc",
"jpm": "image/jpm",
"jpx": "image/jpx",
"jpf": "image/jpx",
"jxr": "image/jxr",
"jxra": "image/jxra",
"jxrs": "image/jxrs",
"jxs": "image/jxs",
"jxsc": "image/jxsc",
"jxsi": "image/jxsi",
"jxss": "image/jxss",
"ktx": "image/ktx",
"ktx2": "image/ktx2",
"png": "image/png",
"btif": "image/prs.btif",
"pti": "image/prs.pti",
"sgi": "image/sgi",
"svg": "image/svg+xml",
"svgz": "image/svg+xml",
"t38": "image/t38",
"tif": "image/tiff",
"tiff": "image/tiff",
"tfx": "image/tiff-fx",
"webp": "image/webp",
"wmf": "image/wmf",
"disposition-notification": "message/disposition-notification",
"u8msg": "message/global",
"u8dsn": "message/global-delivery-status",
"u8mdn": "message/global-disposition-notification",
"u8hdr": "message/global-headers",
"eml": "message/rfc822",
"mime": "message/rfc822",
"3mf": "model/3mf",
"gltf": "model/gltf+json",
"glb": "model/gltf-binary",
"igs": "model/iges",
"iges": "model/iges",
"msh": "model/mesh",
"mesh": "model/mesh",
"silo": "model/mesh",
"mtl": "model/mtl",
"obj": "model/obj",
"stpz": "model/step+zip",
"stpxz": "model/step-xml+zip",
"stl": "model/stl",
"wrl": "model/vrml",
"vrml": "model/vrml",
"x3db": "model/x3d+fastinfoset",
"x3dbz": "model/x3d+binary",
"x3dv": "model/x3d-vrml",
"x3dvz": "model/x3d+vrml",
"x3d": "model/x3d+xml",
"x3dz": "model/x3d+xml",
"appcache": "text/cache-manifest",
"manifest": "text/cache-manifest",
"ics": "text/calendar",
"ifb": "text/calendar",
"coffee": "text/coffeescript",
"litcoffee": "text/coffeescript",
"css": "text/css",
"csv": "text/csv",
"html": "text/html",
"htm": "text/html",
"shtml": "text/html",
"jade": "text/jade",
"jsx": "text/jsx",
"less": "text/less",
"markdown": "text/markdown",
"md": "text/markdown",
"mml": "text/mathml",
"mdx": "text/mdx",
"n3": "text/n3",
"txt": "text/plain",
"text": "text/plain",
"conf": "text/plain",
"def": "text/plain",
"list": "text/plain",
"log": "text/plain",
"in": "text/plain",
"ini": "text/plain",
"dsc": "text/prs.lines.tag",
"rtx": "text/richtext",
"sgml": "text/sgml",
"sgm": "text/sgml",
"shex": "text/shex",
"slim": "text/slim",
"slm": "text/slim",
"spdx": "text/spdx",
"stylus": "text/stylus",
"styl": "text/stylus",
"tsv": "text/tab-separated-values",
"t": "text/troff",
"tr": "text/troff",
"roff": "text/troff",
"man": "text/troff",
"me": "text/troff",
"ms": "text/troff",
"ttl": "text/turtle",
"uri": "text/uri-list",
"uris": "text/uri-list",
"urls": "text/uri-list",
"vcard": "text/vcard",
"vtt": "text/vtt",
"yaml": "text/yaml",
"yml": "text/yaml",
"3gp": "video/3gpp",
"3g2": "video/3gpp2",
"h261": "video/h261",
"h263": "video/h263",
"h264": "video/h264",
"m4s": "video/iso.segment",
"jpgv": "video/jpeg",
"jpgm": "image/jpm",
"mj2": "video/mj2",
"mjp2": "video/mj2",
"ts": "video/mp2t",
"mp4": "video/mp4",
"mp4v": "video/mp4",
"mpg4": "video/mp4",
"mpeg": "video/mpeg",
"mpg": "video/mpeg",
"mpe": "video/mpeg",
"m1v": "video/mpeg",
"m2v": "video/mpeg",
"ogv": "video/ogg",
"qt": "video/quicktime",
"mov": "video/quicktime",
"webm": "video/webm"
};
function lookup(extn) {
let tmp = ('' + extn).trim().toLowerCase();
let idx = tmp.lastIndexOf('.');
return mimes$1[!~idx ? tmp : tmp.substring(++idx)];
}
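// Illustrative examples (comment only): lookup('index.HTML') -> 'text/html' (the input
// is trimmed and lower-cased and only the part after the last '.' is used), and
// lookup('css') -> 'text/css' because an extension without a dot is looked up as-is.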
class BitSet {
constructor(arg) {
this.bits = arg instanceof BitSet ? arg.bits.slice() : [];
}
add(n) {
this.bits[n >> 5] |= 1 << (n & 31);
}
has(n) {
return !!(this.bits[n >> 5] & (1 << (n & 31)));
}
}
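// Illustrative example (comment only): add(37) sets bit 5 of this.bits[1]
// (37 >> 5 === 1, 37 & 31 === 5), so each 32-bit integer in `bits` tracks 32 indices;
// has(37) reads the same bit back.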
class Chunk {
constructor(start, end, content) {
this.start = start;
this.end = end;
this.original = content;
this.intro = '';
this.outro = '';
this.content = content;
this.storeName = false;
this.edited = false;
{
this.previous = null;
this.next = null;
}
}
appendLeft(content) {
this.outro += content;
}
appendRight(content) {
this.intro = this.intro + content;
}
clone() {
const chunk = new Chunk(this.start, this.end, this.original);
chunk.intro = this.intro;
chunk.outro = this.outro;
chunk.content = this.content;
chunk.storeName = this.storeName;
chunk.edited = this.edited;
return chunk;
}
contains(index) {
return this.start < index && index < this.end;
}
eachNext(fn) {
let chunk = this;
while (chunk) {
fn(chunk);
chunk = chunk.next;
}
}
eachPrevious(fn) {
let chunk = this;
while (chunk) {
fn(chunk);
chunk = chunk.previous;
}
}
edit(content, storeName, contentOnly) {
this.content = content;
if (!contentOnly) {
this.intro = '';
this.outro = '';
}
this.storeName = storeName;
this.edited = true;
return this;
}
prependLeft(content) {
this.outro = content + this.outro;
}
prependRight(content) {
this.intro = content + this.intro;
}
split(index) {
const sliceIndex = index - this.start;
const originalBefore = this.original.slice(0, sliceIndex);
const originalAfter = this.original.slice(sliceIndex);
this.original = originalBefore;
const newChunk = new Chunk(index, this.end, originalAfter);
newChunk.outro = this.outro;
this.outro = '';
this.end = index;
if (this.edited) {
// TODO is this block necessary?...
newChunk.edit('', false);
this.content = '';
} else {
this.content = originalBefore;
}
newChunk.next = this.next;
if (newChunk.next) newChunk.next.previous = newChunk;
newChunk.previous = this;
this.next = newChunk;
return newChunk;
}
toString() {
return this.intro + this.content + this.outro;
}
trimEnd(rx) {
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
const trimmed = this.content.replace(rx, '');
if (trimmed.length) {
if (trimmed !== this.content) {
this.split(this.start + trimmed.length).edit('', undefined, true);
}
return true;
} else {
this.edit('', undefined, true);
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
}
}
trimStart(rx) {
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
const trimmed = this.content.replace(rx, '');
if (trimmed.length) {
if (trimmed !== this.content) {
this.split(this.end - trimmed.length);
this.edit('', undefined, true);
}
return true;
} else {
this.edit('', undefined, true);
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
}
}
}
function getBtoa() {
if (typeof window !== 'undefined' && typeof window.btoa === 'function') {
return (str) => window.btoa(unescape(encodeURIComponent(str)));
} else if (typeof Buffer === 'function') {
return (str) => Buffer.from(str, 'utf-8').toString('base64');
} else {
return () => {
throw new Error('Unsupported environment: `window.btoa` or `Buffer` should be supported.');
};
}
}
const btoa$1 = /*#__PURE__*/ getBtoa();
class SourceMap {
constructor(properties) {
this.version = 3;
this.file = properties.file;
this.sources = properties.sources;
this.sourcesContent = properties.sourcesContent;
this.names = properties.names;
this.mappings = encode$1(properties.mappings);
if (typeof properties.x_google_ignoreList !== 'undefined') {
this.x_google_ignoreList = properties.x_google_ignoreList;
}
}
toString() {
return JSON.stringify(this);
}
toUrl() {
return 'data:application/json;charset=utf-8;base64,' + btoa$1(this.toString());
}
}
function guessIndent(code) {
const lines = code.split('\n');
const tabbed = lines.filter((line) => /^\t+/.test(line));
const spaced = lines.filter((line) => /^ {2,}/.test(line));
if (tabbed.length === 0 && spaced.length === 0) {
return null;
}
// More lines tabbed than spaced? Assume tabs, and
// default to tabs in the case of a tie (or nothing
// to go on)
if (tabbed.length >= spaced.length) {
return '\t';
}
// Otherwise, we need to guess the indentation unit (number of spaces)
const min = spaced.reduce((previous, current) => {
const numSpaces = /^ +/.exec(current)[0].length;
return Math.min(numSpaces, previous);
}, Infinity);
return new Array(min + 1).join(' ');
}
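// Illustrative example (comment only): for a source whose indented lines use two and
// four leading spaces and no tabs, guessIndent returns '  ' (the smallest run of
// leading spaces); if at least as many lines start with tabs as with spaces it returns
// '\t', and it returns null when nothing is indented.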
function getRelativePath(from, to) {
const fromParts = from.split(/[/\\]/);
const toParts = to.split(/[/\\]/);
fromParts.pop(); // get dirname
while (fromParts[0] === toParts[0]) {
fromParts.shift();
toParts.shift();
}
if (fromParts.length) {
let i = fromParts.length;
while (i--) fromParts[i] = '..';
}
return fromParts.concat(toParts).join('/');
}
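// Illustrative example (comment only): getRelativePath('src/a/b.js', 'src/c/d.js')
// drops the common 'src' prefix, replaces the remaining 'a' segment of the from-side
// with '..', and returns '../c/d.js'.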
const toString$2 = Object.prototype.toString;
function isObject$1(thing) {
return toString$2.call(thing) === '[object Object]';
}
function getLocator(source) {
const originalLines = source.split('\n');
const lineOffsets = [];
for (let i = 0, pos = 0; i < originalLines.length; i++) {
lineOffsets.push(pos);
pos += originalLines[i].length + 1;
}
return function locate(index) {
let i = 0;
let j = lineOffsets.length;
while (i < j) {
const m = (i + j) >> 1;
if (index < lineOffsets[m]) {
j = m;
} else {
i = m + 1;
}
}
const line = i - 1;
const column = index - lineOffsets[line];
return { line, column };
};
}
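// Illustrative example (comment only): for the source 'ab\ncd' the line offsets are
// [0, 3], so locate(3) binary-searches to { line: 1, column: 0 } (the 'c') and
// locate(1) yields { line: 0, column: 1 }; lines and columns are zero-based.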
const wordRegex = /\w/;
class Mappings {
constructor(hires) {
this.hires = hires;
this.generatedCodeLine = 0;
this.generatedCodeColumn = 0;
this.raw = [];
this.rawSegments = this.raw[this.generatedCodeLine] = [];
this.pending = null;
}
addEdit(sourceIndex, content, loc, nameIndex) {
if (content.length) {
const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column];
if (nameIndex >= 0) {
segment.push(nameIndex);
}
this.rawSegments.push(segment);
} else if (this.pending) {
this.rawSegments.push(this.pending);
}
this.advance(content);
this.pending = null;
}
addUneditedChunk(sourceIndex, chunk, original, loc, sourcemapLocations) {
let originalCharIndex = chunk.start;
let first = true;
// when iterating each char, check if it's in a word boundary
let charInHiresBoundary = false;
while (originalCharIndex < chunk.end) {
if (this.hires || first || sourcemapLocations.has(originalCharIndex)) {
const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column];
if (this.hires === 'boundary') {
// in hires "boundary", group segments per word boundary than per char
if (wordRegex.test(original[originalCharIndex])) {
// for first char in the boundary found, start the boundary by pushing a segment
if (!charInHiresBoundary) {
this.rawSegments.push(segment);
charInHiresBoundary = true;
}
} else {
// for non-word char, end the boundary by pushing a segment
this.rawSegments.push(segment);
charInHiresBoundary = false;
}
} else {
this.rawSegments.push(segment);
}
}
if (original[originalCharIndex] === '\n') {
loc.line += 1;
loc.column = 0;
this.generatedCodeLine += 1;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
this.generatedCodeColumn = 0;
first = true;
} else {
loc.column += 1;
this.generatedCodeColumn += 1;
first = false;
}
originalCharIndex += 1;
}
this.pending = null;
}
advance(str) {
if (!str) return;
const lines = str.split('\n');
if (lines.length > 1) {
for (let i = 0; i < lines.length - 1; i++) {
this.generatedCodeLine++;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
}
this.generatedCodeColumn = 0;
}
this.generatedCodeColumn += lines[lines.length - 1].length;
}
}
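// Note on the segment shape used above (comment only): each entry pushed into
// rawSegments is a decoded source-map segment
//   [generatedColumn, sourceIndex, originalLine, originalColumn, nameIndex?]
// grouped per generated line in `raw`; SourceMap below encodes this structure via
// encode$1 when the map is serialized.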
const n$1 = '\n';
const warned = {
insertLeft: false,
insertRight: false,
storeName: false,
};
class MagicString {
constructor(string, options = {}) {
const chunk = new Chunk(0, string.length, string);
Object.defineProperties(this, {
original: { writable: true, value: string },
outro: { writable: true, value: '' },
intro: { writable: true, value: '' },
firstChunk: { writable: true, value: chunk },
lastChunk: { writable: true, value: chunk },
lastSearchedChunk: { writable: true, value: chunk },
byStart: { writable: true, value: {} },
byEnd: { writable: true, value: {} },
filename: { writable: true, value: options.filename },
indentExclusionRanges: { writable: true, value: options.indentExclusionRanges },
sourcemapLocations: { writable: true, value: new BitSet() },
storedNames: { writable: true, value: {} },
indentStr: { writable: true, value: undefined },
ignoreList: { writable: true, value: options.ignoreList },
});
this.byStart[0] = chunk;
this.byEnd[string.length] = chunk;
}
addSourcemapLocation(char) {
this.sourcemapLocations.add(char);
}
append(content) {
if (typeof content !== 'string') throw new TypeError('outro content must be a string');
this.outro += content;
return this;
}
appendLeft(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byEnd[index];
if (chunk) {
chunk.appendLeft(content);
} else {
this.intro += content;
}
return this;
}
appendRight(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byStart[index];
if (chunk) {
chunk.appendRight(content);
} else {
this.outro += content;
}
return this;
}
clone() {
const cloned = new MagicString(this.original, { filename: this.filename });
let originalChunk = this.firstChunk;
let clonedChunk = (cloned.firstChunk = cloned.lastSearchedChunk = originalChunk.clone());
while (originalChunk) {
cloned.byStart[clonedChunk.start] = clonedChunk;
cloned.byEnd[clonedChunk.end] = clonedChunk;
const nextOriginalChunk = originalChunk.next;
const nextClonedChunk = nextOriginalChunk && nextOriginalChunk.clone();
if (nextClonedChunk) {
clonedChunk.next = nextClonedChunk;
nextClonedChunk.previous = clonedChunk;
clonedChunk = nextClonedChunk;
}
originalChunk = nextOriginalChunk;
}
cloned.lastChunk = clonedChunk;
if (this.indentExclusionRanges) {
cloned.indentExclusionRanges = this.indentExclusionRanges.slice();
}
cloned.sourcemapLocations = new BitSet(this.sourcemapLocations);
cloned.intro = this.intro;
cloned.outro = this.outro;
return cloned;
}
generateDecodedMap(options) {
options = options || {};
const sourceIndex = 0;
const names = Object.keys(this.storedNames);
const mappings = new Mappings(options.hires);
const locate = getLocator(this.original);
if (this.intro) {
mappings.advance(this.intro);
}
this.firstChunk.eachNext((chunk) => {
const loc = locate(chunk.start);
if (chunk.intro.length) mappings.advance(chunk.intro);
if (chunk.edited) {
mappings.addEdit(
sourceIndex,
chunk.content,
loc,
chunk.storeName ? names.indexOf(chunk.original) : -1,
);
} else {
mappings.addUneditedChunk(sourceIndex, chunk, this.original, loc, this.sourcemapLocations);
}
if (chunk.outro.length) mappings.advance(chunk.outro);
});
return {
file: options.file ? options.file.split(/[/\\]/).pop() : undefined,
sources: [
options.source ? getRelativePath(options.file || '', options.source) : options.file || '',
],
sourcesContent: options.includeContent ? [this.original] : undefined,
names,
mappings: mappings.raw,
x_google_ignoreList: this.ignoreList ? [sourceIndex] : undefined,
};
}
generateMap(options) {
return new SourceMap(this.generateDecodedMap(options));
}
_ensureindentStr() {
if (this.indentStr === undefined) {
this.indentStr = guessIndent(this.original);
}
}
_getRawIndentString() {
this._ensureindentStr();
return this.indentStr;
}
getIndentString() {
this._ensureindentStr();
return this.indentStr === null ? '\t' : this.indentStr;
}
indent(indentStr, options) {
const pattern = /^[^\r\n]/gm;
if (isObject$1(indentStr)) {
options = indentStr;
indentStr = undefined;
}
if (indentStr === undefined) {
this._ensureindentStr();
indentStr = this.indentStr || '\t';
}
if (indentStr === '') return this; // noop
options = options || {};
// Process exclusion ranges
const isExcluded = {};
if (options.exclude) {
const exclusions =
typeof options.exclude[0] === 'number' ? [options.exclude] : options.exclude;
exclusions.forEach((exclusion) => {
for (let i = exclusion[0]; i < exclusion[1]; i += 1) {
isExcluded[i] = true;
}
});
}
let shouldIndentNextCharacter = options.indentStart !== false;
const replacer = (match) => {
if (shouldIndentNextCharacter) return `${indentStr}${match}`;
shouldIndentNextCharacter = true;
return match;
};
this.intro = this.intro.replace(pattern, replacer);
let charIndex = 0;
let chunk = this.firstChunk;
while (chunk) {
const end = chunk.end;
if (chunk.edited) {
if (!isExcluded[charIndex]) {
chunk.content = chunk.content.replace(pattern, replacer);
if (chunk.content.length) {
shouldIndentNextCharacter = chunk.content[chunk.content.length - 1] === '\n';
}
}
} else {
charIndex = chunk.start;
while (charIndex < end) {
if (!isExcluded[charIndex]) {
const char = this.original[charIndex];
if (char === '\n') {
shouldIndentNextCharacter = true;
} else if (char !== '\r' && shouldIndentNextCharacter) {
shouldIndentNextCharacter = false;
if (charIndex === chunk.start) {
chunk.prependRight(indentStr);
} else {
this._splitChunk(chunk, charIndex);
chunk = chunk.next;
chunk.prependRight(indentStr);
}
}
}
charIndex += 1;
}
}
charIndex = chunk.end;
chunk = chunk.next;
}
this.outro = this.outro.replace(pattern, replacer);
return this;
}
insert() {
throw new Error(
'magicString.insert(...) is deprecated. Use prependRight(...) or appendLeft(...)',
);
}
insertLeft(index, content) {
if (!warned.insertLeft) {
console.warn(
'magicString.insertLeft(...) is deprecated. Use magicString.appendLeft(...) instead',
); // eslint-disable-line no-console
warned.insertLeft = true;
}
return this.appendLeft(index, content);
}
insertRight(index, content) {
if (!warned.insertRight) {
console.warn(
'magicString.insertRight(...) is deprecated. Use magicString.prependRight(...) instead',
); // eslint-disable-line no-console
warned.insertRight = true;
}
return this.prependRight(index, content);
}
move(start, end, index) {
if (index >= start && index <= end) throw new Error('Cannot move a selection inside itself');
this._split(start);
this._split(end);
this._split(index);
const first = this.byStart[start];
const last = this.byEnd[end];
const oldLeft = first.previous;
const oldRight = last.next;
const newRight = this.byStart[index];
if (!newRight && last === this.lastChunk) return this;
const newLeft = newRight ? newRight.previous : this.lastChunk;
if (oldLeft) oldLeft.next = oldRight;
if (oldRight) oldRight.previous = oldLeft;
if (newLeft) newLeft.next = first;
if (newRight) newRight.previous = last;
if (!first.previous) this.firstChunk = last.next;
if (!last.next) {
this.lastChunk = first.previous;
this.lastChunk.next = null;
}
first.previous = newLeft;
last.next = newRight || null;
if (!newLeft) this.firstChunk = first;
if (!newRight) this.lastChunk = last;
return this;
}
overwrite(start, end, content, options) {
options = options || {};
return this.update(start, end, content, { ...options, overwrite: !options.contentOnly });
}
update(start, end, content, options) {
if (typeof content !== 'string') throw new TypeError('replacement content must be a string');
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
if (end > this.original.length) throw new Error('end is out of bounds');
if (start === end)
throw new Error(
'Cannot overwrite a zero-length range use appendLeft or prependRight instead',
);
this._split(start);
this._split(end);
if (options === true) {
if (!warned.storeName) {
console.warn(
'The final argument to magicString.overwrite(...) should be an options object. See https://github.com/rich-harris/magic-string',
); // eslint-disable-line no-console
warned.storeName = true;
}
options = { storeName: true };
}
const storeName = options !== undefined ? options.storeName : false;
const overwrite = options !== undefined ? options.overwrite : false;
if (storeName) {
const original = this.original.slice(start, end);
Object.defineProperty(this.storedNames, original, {
writable: true,
value: true,
enumerable: true,
});
}
const first = this.byStart[start];
const last = this.byEnd[end];
if (first) {
let chunk = first;
while (chunk !== last) {
if (chunk.next !== this.byStart[chunk.end]) {
throw new Error('Cannot overwrite across a split point');
}
chunk = chunk.next;
chunk.edit('', false);
}
first.edit(content, storeName, !overwrite);
} else {
// must be inserting at the end
const newChunk = new Chunk(start, end, '').edit(content, storeName);
// TODO last chunk in the array may not be the last chunk, if it's moved...
last.next = newChunk;
newChunk.previous = last;
}
return this;
}
prepend(content) {
if (typeof content !== 'string') throw new TypeError('outro content must be a string');
this.intro = content + this.intro;
return this;
}
prependLeft(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byEnd[index];
if (chunk) {
chunk.prependLeft(content);
} else {
this.intro = content + this.intro;
}
return this;
}
prependRight(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byStart[index];
if (chunk) {
chunk.prependRight(content);
} else {
this.outro = content + this.outro;
}
return this;
}
remove(start, end) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
if (start === end) return this;
if (start < 0 || end > this.original.length) throw new Error('Character is out of bounds');
if (start > end) throw new Error('end must be greater than start');
this._split(start);
this._split(end);
let chunk = this.byStart[start];
while (chunk) {
chunk.intro = '';
chunk.outro = '';
chunk.edit('');
chunk = end > chunk.end ? this.byStart[chunk.end] : null;
}
return this;
}
lastChar() {
if (this.outro.length) return this.outro[this.outro.length - 1];
let chunk = this.lastChunk;
do {
if (chunk.outro.length) return chunk.outro[chunk.outro.length - 1];
if (chunk.content.length) return chunk.content[chunk.content.length - 1];
if (chunk.intro.length) return chunk.intro[chunk.intro.length - 1];
} while ((chunk = chunk.previous));
if (this.intro.length) return this.intro[this.intro.length - 1];
return '';
}
lastLine() {
let lineIndex = this.outro.lastIndexOf(n$1);
if (lineIndex !== -1) return this.outro.substr(lineIndex + 1);
let lineStr = this.outro;
let chunk = this.lastChunk;
do {
if (chunk.outro.length > 0) {
lineIndex = chunk.outro.lastIndexOf(n$1);
if (lineIndex !== -1) return chunk.outro.substr(lineIndex + 1) + lineStr;
lineStr = chunk.outro + lineStr;
}
if (chunk.content.length > 0) {
lineIndex = chunk.content.lastIndexOf(n$1);
if (lineIndex !== -1) return chunk.content.substr(lineIndex + 1) + lineStr;
lineStr = chunk.content + lineStr;
}
if (chunk.intro.length > 0) {
lineIndex = chunk.intro.lastIndexOf(n$1);
if (lineIndex !== -1) return chunk.intro.substr(lineIndex + 1) + lineStr;
lineStr = chunk.intro + lineStr;
}
} while ((chunk = chunk.previous));
lineIndex = this.intro.lastIndexOf(n$1);
if (lineIndex !== -1) return this.intro.substr(lineIndex + 1) + lineStr;
return this.intro + lineStr;
}
slice(start = 0, end = this.original.length) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
let result = '';
// find start chunk
let chunk = this.firstChunk;
while (chunk && (chunk.start > start || chunk.end <= start)) {
// found end chunk before start
if (chunk.start < end && chunk.end >= end) {
return result;
}
chunk = chunk.next;
}
if (chunk && chunk.edited && chunk.start !== start)
throw new Error(`Cannot use replaced character ${start} as slice start anchor.`);
const startChunk = chunk;
while (chunk) {
if (chunk.intro && (startChunk !== chunk || chunk.start === start)) {
result += chunk.intro;
}
const containsEnd = chunk.start < end && chunk.end >= end;
if (containsEnd && chunk.edited && chunk.end !== end)
throw new Error(`Cannot use replaced character ${end} as slice end anchor.`);
const sliceStart = startChunk === chunk ? start - chunk.start : 0;
const sliceEnd = containsEnd ? chunk.content.length + end - chunk.end : chunk.content.length;
result += chunk.content.slice(sliceStart, sliceEnd);
if (chunk.outro && (!containsEnd || chunk.end === end)) {
result += chunk.outro;
}
if (containsEnd) {
break;
}
chunk = chunk.next;
}
return result;
}
// TODO deprecate this? not really very useful
snip(start, end) {
const clone = this.clone();
clone.remove(0, start);
clone.remove(end, clone.original.length);
return clone;
}
_split(index) {
if (this.byStart[index] || this.byEnd[index]) return;
let chunk = this.lastSearchedChunk;
const searchForward = index > chunk.end;
while (chunk) {
if (chunk.contains(index)) return this._splitChunk(chunk, index);
chunk = searchForward ? this.byStart[chunk.end] : this.byEnd[chunk.start];
}
}
_splitChunk(chunk, index) {
if (chunk.edited && chunk.content.length) {
// zero-length edited chunks are a special case (overlapping replacements)
const loc = getLocator(this.original)(index);
throw new Error(
`Cannot split a chunk that has already been edited (${loc.line}:${loc.column} "${chunk.original}")`,
);
}
const newChunk = chunk.split(index);
this.byEnd[index] = chunk;
this.byStart[index] = newChunk;
this.byEnd[newChunk.end] = newChunk;
if (chunk === this.lastChunk) this.lastChunk = newChunk;
this.lastSearchedChunk = chunk;
return true;
}
toString() {
let str = this.intro;
let chunk = this.firstChunk;
while (chunk) {
str += chunk.toString();
chunk = chunk.next;
}
return str + this.outro;
}
isEmpty() {
let chunk = this.firstChunk;
do {
if (
(chunk.intro.length && chunk.intro.trim()) ||
(chunk.content.length && chunk.content.trim()) ||
(chunk.outro.length && chunk.outro.trim())
)
return false;
} while ((chunk = chunk.next));
return true;
}
length() {
let chunk = this.firstChunk;
let length = 0;
do {
length += chunk.intro.length + chunk.content.length + chunk.outro.length;
} while ((chunk = chunk.next));
return length;
}
trimLines() {
return this.trim('[\\r\\n]');
}
trim(charType) {
return this.trimStart(charType).trimEnd(charType);
}
trimEndAborted(charType) {
const rx = new RegExp((charType || '\\s') + '+$');
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
let chunk = this.lastChunk;
do {
const end = chunk.end;
const aborted = chunk.trimEnd(rx);
// if chunk was trimmed, we have a new lastChunk
if (chunk.end !== end) {
if (this.lastChunk === chunk) {
this.lastChunk = chunk.next;
}
this.byEnd[chunk.end] = chunk;
this.byStart[chunk.next.start] = chunk.next;
this.byEnd[chunk.next.end] = chunk.next;
}
if (aborted) return true;
chunk = chunk.previous;
} while (chunk);
return false;
}
trimEnd(charType) {
this.trimEndAborted(charType);
return this;
}
trimStartAborted(charType) {
const rx = new RegExp('^' + (charType || '\\s') + '+');
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
let chunk = this.firstChunk;
do {
const end = chunk.end;
const aborted = chunk.trimStart(rx);
if (chunk.end !== end) {
// special case...
if (chunk === this.lastChunk) this.lastChunk = chunk.next;
this.byEnd[chunk.end] = chunk;
this.byStart[chunk.next.start] = chunk.next;
this.byEnd[chunk.next.end] = chunk.next;
}
if (aborted) return true;
chunk = chunk.next;
} while (chunk);
return false;
}
trimStart(charType) {
this.trimStartAborted(charType);
return this;
}
hasChanged() {
return this.original !== this.toString();
}
_replaceRegexp(searchValue, replacement) {
function getReplacement(match, str) {
if (typeof replacement === 'string') {
return replacement.replace(/\$(\$|&|\d+)/g, (_, i) => {
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace#specifying_a_string_as_a_parameter
if (i === '$') return '$';
if (i === '&') return match[0];
const num = +i;
if (num < match.length) return match[+i];
return `$${i}`;
});
} else {
return replacement(...match, match.index, str, match.groups);
}
}
function matchAll(re, str) {
let match;
const matches = [];
while ((match = re.exec(str))) {
matches.push(match);
}
return matches;
}
if (searchValue.global) {
const matches = matchAll(searchValue, this.original);
matches.forEach((match) => {
if (match.index != null)
this.overwrite(
match.index,
match.index + match[0].length,
getReplacement(match, this.original),
);
});
} else {
const match = this.original.match(searchValue);
if (match && match.index != null)
this.overwrite(
match.index,
match.index + match[0].length,
getReplacement(match, this.original),
);
}
return this;
}
_replaceString(string, replacement) {
const { original } = this;
const index = original.indexOf(string);
if (index !== -1) {
this.overwrite(index, index + string.length, replacement);
}
return this;
}
replace(searchValue, replacement) {
if (typeof searchValue === 'string') {
return this._replaceString(searchValue, replacement);
}
return this._replaceRegexp(searchValue, replacement);
}
_replaceAllString(string, replacement) {
const { original } = this;
const stringLength = string.length;
for (
let index = original.indexOf(string);
index !== -1;
index = original.indexOf(string, index + stringLength)
) {
this.overwrite(index, index + stringLength, replacement);
}
return this;
}
replaceAll(searchValue, replacement) {
if (typeof searchValue === 'string') {
return this._replaceAllString(searchValue, replacement);
}
if (!searchValue.global) {
throw new TypeError(
'MagicString.prototype.replaceAll called with a non-global RegExp argument',
);
}
return this._replaceRegexp(searchValue, replacement);
}
}
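// A minimal usage sketch of the MagicString API defined above; the values are
// illustrative only and this snippet is not executed as part of the bundle:
//
//   const s = new MagicString('const answer = 40;');
//   s.overwrite(15, 17, '42');   // replace the original range 15..17 ('40')
//   s.prepend('// generated\n'); // add text before everything else
//   s.toString();                // -> '// generated\nconst answer = 42;'
//   s.generateMap();             // sourcemap describing the edits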
const assetUrlRE = /__VITE_ASSET__([a-z\d]+)__(?:\$_(.*?)__)?/g;
const rawRE = /(?:\?|&)raw(?:&|$)/;
const urlRE = /(\?|&)url(?:&|$)/;
const jsSourceMapRE = /\.[cm]?js\.map$/;
const unnededFinalQueryCharRE = /[?&]$/;
const assetCache = new WeakMap();
const generatedAssets = new WeakMap();
// add our own dictionary entries by assigning directly onto mrmime's mime table
function registerCustomMime() {
// https://github.com/lukeed/mrmime/issues/3
mimes$1['ico'] = 'image/x-icon';
// https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Containers#flac
mimes$1['flac'] = 'audio/flac';
// the aac entry is not yet released in mime-db/mrmime: https://github.com/jshttp/mime-db/commit/c9242a9b7d4bb25d7a0c9244adec74aeef08d8a1
mimes$1['aac'] = 'audio/aac';
// https://wiki.xiph.org/MIME_Types_and_File_Extensions#.opus_-_audio/ogg
mimes$1['opus'] = 'audio/ogg';
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types
mimes$1['eot'] = 'application/vnd.ms-fontobject';
}
function renderAssetUrlInJS(ctx, config, chunk, opts, code) {
const toRelativeRuntime = createToImportMetaURLBasedRelativeRuntime(opts.format, config.isWorker);
let match;
let s;
// Urls added with JS using e.g.
// imgElement.src = "__VITE_ASSET__5aa0ddc0__" are using quotes
// Urls added in CSS that is imported in JS end up like
// var inlined = ".inlined{color:green;background:url(__VITE_ASSET__5aa0ddc0__)}\n";
// In both cases, the wrapping should already be fine
assetUrlRE.lastIndex = 0;
while ((match = assetUrlRE.exec(code))) {
s || (s = new MagicString(code));
const [full, referenceId, postfix = ''] = match;
const file = ctx.getFileName(referenceId);
chunk.viteMetadata.importedAssets.add(cleanUrl(file));
const filename = file + postfix;
const replacement = toOutputFilePathInJS(filename, 'asset', chunk.fileName, 'js', config, toRelativeRuntime);
const replacementString = typeof replacement === 'string'
? JSON.stringify(replacement).slice(1, -1)
: `"+${replacement.runtime}+"`;
s.update(match.index, match.index + full.length, replacementString);
}
// Replace __VITE_PUBLIC_ASSET__5aa0ddc0__ with absolute paths
const publicAssetUrlMap = publicAssetUrlCache.get(config);
publicAssetUrlRE.lastIndex = 0;
while ((match = publicAssetUrlRE.exec(code))) {
s || (s = new MagicString(code));
const [full, hash] = match;
const publicUrl = publicAssetUrlMap.get(hash).slice(1);
const replacement = toOutputFilePathInJS(publicUrl, 'public', chunk.fileName, 'js', config, toRelativeRuntime);
const replacementString = typeof replacement === 'string'
? JSON.stringify(replacement).slice(1, -1)
: `"+${replacement.runtime}+"`;
s.update(match.index, match.index + full.length, replacementString);
}
return s;
}
/**
* Also supports loading plain strings with import text from './foo.txt?raw'
*/
function assetPlugin(config) {
registerCustomMime();
return {
name: 'vite:asset',
buildStart() {
assetCache.set(config, new Map());
generatedAssets.set(config, new Map());
},
resolveId(id) {
if (!config.assetsInclude(cleanUrl(id)) && !urlRE.test(id)) {
return;
}
// imports to absolute urls pointing to files in /public
// will fail to resolve in the main resolver. handle them here.
const publicFile = checkPublicFile(id, config);
if (publicFile) {
return id;
}
},
async load(id) {
if (id[0] === '\0') {
// Rollup convention, this id should be handled by the
// plugin that marked it with \0
return;
}
// raw requests, read from disk
if (rawRE.test(id)) {
const file = checkPublicFile(id, config) || cleanUrl(id);
// raw query, read file and return as string
return `export default ${JSON.stringify(await fsp.readFile(file, 'utf-8'))}`;
}
if (!config.assetsInclude(cleanUrl(id)) && !urlRE.test(id)) {
return;
}
id = id.replace(urlRE, '$1').replace(unnededFinalQueryCharRE, '');
const url = await fileToUrl(id, config, this);
return `export default ${JSON.stringify(url)}`;
},
renderChunk(code, chunk, opts) {
const s = renderAssetUrlInJS(this, config, chunk, opts, code);
if (s) {
return {
code: s.toString(),
map: config.build.sourcemap
? s.generateMap({ hires: 'boundary' })
: null,
};
}
else {
return null;
}
},
generateBundle(_, bundle) {
// do not emit assets for SSR build
if (config.command === 'build' &&
config.build.ssr &&
!config.build.ssrEmitAssets) {
for (const file in bundle) {
if (bundle[file].type === 'asset' &&
!file.endsWith('ssr-manifest.json') &&
!jsSourceMapRE.test(file)) {
delete bundle[file];
}
}
}
},
};
}
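// Rough examples of imports handled by vite:asset (the emitted URLs are illustrative):
//   import txt from './notes.txt?raw'  // default export is the file contents as a string
//   import url from './logo.png?url'   // default export is the resolved asset URL
//   import img from './logo.png'       // same as ?url for types matched by assetsInclude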
function checkPublicFile(url, { publicDir }) {
// note: if the file is in /public, the resolver would have returned it
// as-is, so it's not going to be a fully resolved path.
if (!publicDir || url[0] !== '/') {
return;
}
const publicFile = path$o.join(publicDir, cleanUrl(url));
if (!normalizePath$3(publicFile).startsWith(withTrailingSlash(normalizePath$3(publicDir)))) {
// can happen if URL starts with '../'
return;
}
if (fs$l.existsSync(publicFile)) {
return publicFile;
}
else {
return;
}
}
async function fileToUrl(id, config, ctx) {
if (config.command === 'serve') {
return fileToDevUrl(id, config);
}
else {
return fileToBuiltUrl(id, config, ctx);
}
}
function fileToDevUrl(id, config) {
let rtn;
if (checkPublicFile(id, config)) {
// in public dir, keep the url as-is
rtn = id;
}
else if (id.startsWith(withTrailingSlash(config.root))) {
// in project root, infer short public path
rtn = '/' + path$o.posix.relative(config.root, id);
}
else {
// outside of project root, use absolute fs path
// (this is specially handled by the serve static middleware)
rtn = path$o.posix.join(FS_PREFIX, id);
}
const base = joinUrlSegments(config.server?.origin ?? '', config.base);
return joinUrlSegments(base, removeLeadingSlash(rtn));
}
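// Rough illustration of the three fileToDevUrl branches above (paths are hypothetical):
//   - file in publicDir:        '/favicon.ico'          -> kept as-is
//   - file under config.root:   '<root>/src/logo.png'   -> '/src/logo.png'
//   - file outside config.root: '/elsewhere/logo.png'   -> FS_PREFIX-based URL such as '/@fs/elsewhere/logo.png'
// The result is then joined with server.origin (if any) and config.base.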
function getPublicAssetFilename(hash, config) {
return publicAssetUrlCache.get(config)?.get(hash);
}
const publicAssetUrlCache = new WeakMap();
const publicAssetUrlRE = /__VITE_PUBLIC_ASSET__([a-z\d]{8})__/g;
function publicFileToBuiltUrl(url, config) {
if (config.command !== 'build') {
// We don't need relative base or renderBuiltUrl support during dev
return joinUrlSegments(config.base, url);
}
const hash = getHash(url);
let cache = publicAssetUrlCache.get(config);
if (!cache) {
cache = new Map();
publicAssetUrlCache.set(config, cache);
}
if (!cache.get(hash)) {
cache.set(hash, url);
}
return `__VITE_PUBLIC_ASSET__${hash}__`;
}
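// Sketch of the build-time flow for public assets (the hash shown is made up):
//   publicFileToBuiltUrl('/img/logo.png', config) -> '__VITE_PUBLIC_ASSET__a1b2c3d4__'
// The placeholder is later matched by publicAssetUrlRE in renderAssetUrlInJS
// and replaced with the final URL via toOutputFilePathInJS.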
const GIT_LFS_PREFIX = Buffer$1.from('version https://git-lfs.github.com');
function isGitLfsPlaceholder(content) {
if (content.length < GIT_LFS_PREFIX.length)
return false;
// Check whether the content begins with the characteristic string of Git LFS placeholders
return GIT_LFS_PREFIX.compare(content, 0, GIT_LFS_PREFIX.length) === 0;
}
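// A Git LFS pointer file (checked out instead of the real binary) typically starts with:
//   version https://git-lfs.github.com/spec/v1
// which is what the prefix comparison above detects.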
/**
* Register an asset to be emitted as part of the bundle (if necessary)
* and returns the resolved public URL
*/
async function fileToBuiltUrl(id, config, pluginContext, skipPublicCheck = false) {
if (!skipPublicCheck && checkPublicFile(id, config)) {
return publicFileToBuiltUrl(id, config);
}
const cache = assetCache.get(config);
const cached = cache.get(id);
if (cached) {
return cached;
}
const file = cleanUrl(id);
const content = await fsp.readFile(file);
let url;
if (config.build.lib ||
(!file.endsWith('.svg') &&
!file.endsWith('.html') &&
content.length < Number(config.build.assetsInlineLimit) &&
!isGitLfsPlaceholder(content))) {
if (config.build.lib && isGitLfsPlaceholder(content)) {
config.logger.warn(colors$1.yellow(`Inlined file ${id} was not downloaded via Git LFS`));
}
const mimeType = lookup(file) ?? 'application/octet-stream';
// base64 inlined as a string
url = `data:${mimeType};base64,${content.toString('base64')}`;
}
else {
// emit as asset
const { search, hash } = parse$i(id);
const postfix = (search || '') + (hash || '');
const referenceId = pluginContext.emitFile({
// Ignore directory structure for asset file names
name: path$o.basename(file),
type: 'asset',
source: content,
});
const originalName = normalizePath$3(path$o.relative(config.root, file));
generatedAssets.get(config).set(referenceId, { originalName });
url = `__VITE_ASSET__${referenceId}__${postfix ? `$_${postfix}__` : ``}`; // TODO_BASE
}
cache.set(id, url);
return url;
}
async function urlToBuiltUrl(url, importer, config, pluginContext) {
if (checkPublicFile(url, config)) {
return publicFileToBuiltUrl(url, config);
}
const file = url[0] === '/'
? path$o.join(config.root, url)
: path$o.join(path$o.dirname(importer), url);
return fileToBuiltUrl(file, config, pluginContext,
// skip public check since we just did it above
true);
}
function manifestPlugin(config) {
const manifest = {};
let outputCount;
return {
name: 'vite:manifest',
buildStart() {
outputCount = 0;
},
generateBundle({ format }, bundle) {
function getChunkName(chunk) {
if (chunk.facadeModuleId) {
let name = normalizePath$3(path$o.relative(config.root, chunk.facadeModuleId));
if (format === 'system' && !chunk.name.includes('-legacy')) {
const ext = path$o.extname(name);
const endPos = ext.length !== 0 ? -ext.length : undefined;
name = name.slice(0, endPos) + `-legacy` + ext;
}
return name.replace(/\0/g, '');
}
else {
return `_` + path$o.basename(chunk.fileName);
}
}
function getInternalImports(imports) {
const filteredImports = [];
for (const file of imports) {
if (bundle[file] === undefined) {
continue;
}
filteredImports.push(getChunkName(bundle[file]));
}
return filteredImports;
}
function createChunk(chunk) {
const manifestChunk = {
file: chunk.fileName,
};
if (chunk.facadeModuleId) {
manifestChunk.src = getChunkName(chunk);
}
if (chunk.isEntry) {
manifestChunk.isEntry = true;
}
if (chunk.isDynamicEntry) {
manifestChunk.isDynamicEntry = true;
}
if (chunk.imports.length) {
const internalImports = getInternalImports(chunk.imports);
if (internalImports.length > 0) {
manifestChunk.imports = internalImports;
}
}
if (chunk.dynamicImports.length) {
const internalImports = getInternalImports(chunk.dynamicImports);
if (internalImports.length > 0) {
manifestChunk.dynamicImports = internalImports;
}
}
if (chunk.viteMetadata?.importedCss.size) {
manifestChunk.css = [...chunk.viteMetadata.importedCss];
}
if (chunk.viteMetadata?.importedAssets.size) {
manifestChunk.assets = [...chunk.viteMetadata.importedAssets];
}
return manifestChunk;
}
function createAsset(asset, src, isEntry) {
const manifestChunk = {
file: asset.fileName,
src,
};
if (isEntry)
manifestChunk.isEntry = true;
return manifestChunk;
}
const fileNameToAssetMeta = new Map();
const assets = generatedAssets.get(config);
assets.forEach((asset, referenceId) => {
const fileName = this.getFileName(referenceId);
fileNameToAssetMeta.set(fileName, asset);
});
const fileNameToAsset = new Map();
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === 'chunk') {
manifest[getChunkName(chunk)] = createChunk(chunk);
}
else if (chunk.type === 'asset' && typeof chunk.name === 'string') {
// Add every unique asset to the manifest, keyed by its original name
const assetMeta = fileNameToAssetMeta.get(chunk.fileName);
const src = assetMeta?.originalName ?? chunk.name;
const asset = createAsset(chunk, src, assetMeta?.isEntry);
manifest[src] = asset;
fileNameToAsset.set(chunk.fileName, asset);
}
}
// Add deduplicated assets to the manifest
assets.forEach(({ originalName }, referenceId) => {
if (!manifest[originalName]) {
const fileName = this.getFileName(referenceId);
const asset = fileNameToAsset.get(fileName);
if (asset) {
manifest[originalName] = asset;
}
}
});
outputCount++;
const output = config.build.rollupOptions?.output;
const outputLength = Array.isArray(output) ? output.length : 1;
if (outputCount >= outputLength) {
this.emitFile({
fileName: typeof config.build.manifest === 'string'
? config.build.manifest
: 'manifest.json',
type: 'asset',
source: jsonStableStringify$1(manifest, { space: 2 }),
});
}
},
};
}
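// Illustrative shape of the emitted manifest.json (file names and hashes are made up):
//   {
//     "src/main.ts": {
//       "file": "assets/main-4b71c3e9.js",
//       "src": "src/main.ts",
//       "isEntry": true,
//       "imports": ["_vendor-6d2f8c11.js"],
//       "css": ["assets/main-8e0d3b52.css"]
//     },
//     "_vendor-6d2f8c11.js": { "file": "assets/vendor-6d2f8c11.js" }
//   }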
// This is based on @rollup/plugin-data-uri
// MIT Licensed https://github.com/rollup/plugins/blob/master/LICENSE
// ref https://github.com/vitejs/vite/issues/1428#issuecomment-757033808
const dataUriRE = /^([^/]+\/[^;,]+)(;base64)?,([\s\S]*)$/;
const base64RE = /base64/i;
const dataUriPrefix = `\0/@data-uri/`;
/**
* Build only, since importing from a data URI works natively.
*/
function dataURIPlugin() {
let resolved;
return {
name: 'vite:data-uri',
buildStart() {
resolved = new Map();
},
resolveId(id) {
if (!dataUriRE.test(id)) {
return;
}
const uri = new URL$3(id);
if (uri.protocol !== 'data:') {
return;
}
const match = uri.pathname.match(dataUriRE);
if (!match) {
return;
}
const [, mime, format, data] = match;
if (mime !== 'text/javascript') {
throw new Error(`data URI with non-JavaScript mime type is not supported. If you're using legacy JavaScript MIME types (such as 'application/javascript'), please use 'text/javascript' instead.`);
}
// decode data
const base64 = format && base64RE.test(format.substring(1));
const content = base64
? Buffer.from(data, 'base64').toString('utf-8')
: data;
resolved.set(id, content);
return dataUriPrefix + id;
},
load(id) {
if (id.startsWith(dataUriPrefix)) {
return resolved.get(id.slice(dataUriPrefix.length));
}
},
};
}
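// Example of an import this plugin resolves at build time (it works natively at runtime):
//   import msg from 'data:text/javascript,export default "hi"';
// Only the 'text/javascript' MIME type is accepted; base64-encoded payloads are decoded first.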
/* es-module-lexer 1.3.0 */
const A=1===new Uint8Array(new Uint16Array([1]).buffer)[0];function parse$e(E,g="@"){if(!C)return init.then((()=>parse$e(E)));const I=E.length+1,o=(C.__heap_base.value||C.__heap_base)+4*I-C.memory.buffer.byteLength;o>0&&C.memory.grow(Math.ceil(o/65536));const D=C.sa(I-1);if((A?B:Q)(E,new Uint16Array(C.memory.buffer,D,I)),!C.parse())throw Object.assign(new Error(`Parse error ${g}:${E.slice(0,C.e()).split("\n").length}:${C.e()-E.lastIndexOf("\n",C.e()-1)}`),{idx:C.e()});const K=[],k=[];for(;C.ri();){const A=C.is(),Q=C.ie(),B=C.ai(),g=C.id(),I=C.ss(),o=C.se();let D;C.ip()&&(D=J(E.slice(-1===g?A-1:A,-1===g?Q+1:Q))),K.push({n:D,s:A,e:Q,ss:I,se:o,d:g,a:B});}for(;C.re();){const A=C.es(),Q=C.ee(),B=C.els(),g=C.ele(),I=E.slice(A,Q),o=I[0],D=B<0?void 0:E.slice(B,g),K=D?D[0]:"";k.push({s:A,e:Q,ls:B,le:g,n:'"'===o||"'"===o?J(I):I,ln:'"'===K||"'"===K?J(D):D});}function J(A){try{return (0, eval)(A)}catch(A){}}return [K,k,!!C.f()]}function Q(A,Q){const B=A.length;let C=0;for(;C<B;){const B=A.charCodeAt(C);Q[C++]=(255&B)<<8|B>>>8;}}function B(A,Q){const B=A.length;let C=0;for(;C<B;)Q[C]=A.charCodeAt(C++);}let C;const init=WebAssembly.compile((E="AGFzbQEAAAABKghgAX8Bf2AEf39/fwBgAAF/YAAAYAF/AGADf39/AX9gAn9/AX9gAn9/AAMvLgABAQICAgICAgICAgICAgICAgIAAwMDBAQAAAADAAAAAAMDAAUGAAAABwAGAgUEBQFwAQEBBQMBAAEGDwJ/AUGw8gALfwBBsPIACwdwEwZtZW1vcnkCAAJzYQAAAWUAAwJpcwAEAmllAAUCc3MABgJzZQAHAmFpAAgCaWQACQJpcAAKAmVzAAsCZWUADANlbHMADQNlbGUADgJyaQAPAnJlABABZgARBXBhcnNlABILX19oZWFwX2Jhc2UDAQqsPS5oAQF/QQAgADYC9AlBACgC0AkiASAAQQF0aiIAQQA7AQBBACAAQQJqIgA2AvgJQQAgADYC/AlBAEEANgLUCUEAQQA2AuQJQQBBADYC3AlBAEEANgLYCUEAQQA2AuwJQQBBADYC4AkgAQufAQEDf0EAKALkCSEEQQBBACgC/AkiBTYC5AlBACAENgLoCUEAIAVBIGo2AvwJIARBHGpB1AkgBBsgBTYCAEEAKALICSEEQQAoAsQJIQYgBSABNgIAIAUgADYCCCAFIAIgAkECakEAIAYgA0YbIAQgA0YbNgIMIAUgAzYCFCAFQQA2AhAgBSACNgIEIAVBADYCHCAFQQAoAsQJIANGOgAYC1YBAX9BACgC7AkiBEEQakHYCSAEG0EAKAL8CSIENgIAQQAgBDYC7AlBACAEQRRqNgL8CSAEQQA2AhAgBCADNgIMIAQgAjYCCCAEIAE2AgQgBCAANgIACwgAQQAoAoAKCxUAQQAoAtwJKAIAQQAoAtAJa0EBdQseAQF/QQAoAtwJKAIEIgBBACgC0AlrQQF1QX8gABsLFQBBACgC3AkoAghBACgC0AlrQQF1Cx4BAX9BACgC3AkoAgwiAEEAKALQCWtBAXVBfyAAGwseAQF/QQAoAtwJKAIQIgBBACgC0AlrQQF1QX8gABsLOwEBfwJAQQAoAtwJKAIUIgBBACgCxAlHDQBBfw8LAkAgAEEAKALICUcNAEF+DwsgAEEAKALQCWtBAXULCwBBACgC3AktABgLFQBBACgC4AkoAgBBACgC0AlrQQF1CxUAQQAoAuAJKAIEQQAoAtAJa0EBdQseAQF/QQAoAuAJKAIIIgBBACgC0AlrQQF1QX8gABsLHgEBf0EAKALgCSgCDCIAQQAoAtAJa0EBdUF/IAAbCyUBAX9BAEEAKALcCSIAQRxqQdQJIAAbKAIAIgA2AtwJIABBAEcLJQEBf0EAQQAoAuAJIgBBEGpB2AkgABsoAgAiADYC4AkgAEEARwsIAEEALQCECgvmDAEGfyMAQYDQAGsiACQAQQBBAToAhApBAEEAKALMCTYCjApBAEEAKALQCUF+aiIBNgKgCkEAIAFBACgC9AlBAXRqIgI2AqQKQQBBADsBhgpBAEEAOwGICkEAQQA6AJAKQQBBADYCgApBAEEAOgDwCUEAIABBgBBqNgKUCkEAIAA2ApgKQQBBADoAnAoCQAJAAkACQANAQQAgAUECaiIDNgKgCiABIAJPDQECQCADLwEAIgJBd2pBBUkNAAJAAkACQAJAAkAgAkGbf2oOBQEICAgCAAsgAkEgRg0EIAJBL0YNAyACQTtGDQIMBwtBAC8BiAoNASADEBNFDQEgAUEEakGCCEEKEC0NARAUQQAtAIQKDQFBAEEAKAKgCiIBNgKMCgwHCyADEBNFDQAgAUEEakGMCEEKEC0NABAVC0EAQQAoAqAKNgKMCgwBCwJAIAEvAQQiA0EqRg0AIANBL0cNBBAWDAELQQEQFwtBACgCpAohAkEAKAKgCiEBDAALC0EAIQIgAyEBQQAtAPAJDQIMAQtBACABNgKgCkEAQQA6AIQKCwNAQQAgAUECaiIDNgKgCgJAAkACQAJAAkACQAJAAkACQCABQQAoAqQKTw0AIAMvAQAiAkF3akEFSQ0IAkACQAJAAkACQAJAAkACQAJAAkAgAkFgag4KEhEGEREREQUBAgALAkACQAJAAkAgAkGgf2oOCgsUFAMUARQUFAIACyACQYV/ag4DBRMGCQtBAC8BiAoNEiADEBNFDRIgAUEEakGCCEEKEC0NEhAUDBILIAMQE0UNESABQQRqQYwIQQoQLQ0REBUMEQsgAxATRQ0QIAEpAARC7ICEg7COwDlSDRAgAS8BDCIDQXdqIgFBF0sNDkEBIAF0QZ+AgARxRQ0ODA8LQQBBAC8BiAoiAUEBajsBiApBACgClAogAUEDdGoiAUEBNgIAIAFBACgCjAo2AgQMDwtBAC8BiAoiAkUNC0EAIAJBf2oiBDsBiApBAC8BhgoiAkUNDiACQQJ0QQAoApgKakF8aigCACIFKAIUQQAoApQKIARB//8
DcUEDdGooAgRHDQ4CQCAFKAIEDQAgBSADNgIEC0EAIAJBf2o7AYYKIAUgAUEEajYCDAwOCwJAQQAoAowKIgEvAQBBKUcNAEEAKALkCSIDRQ0AIAMoAgQgAUcNAEEAQQAoAugJIgM2AuQJAkAgA0UNACADQQA2AhwMAQtBAEEANgLUCQtBAEEALwGICiIDQQFqOwGICkEAKAKUCiADQQN0aiIDQQZBAkEALQCcChs2AgAgAyABNgIEQQBBADoAnAoMDQtBAC8BiAoiAUUNCUEAIAFBf2oiATsBiApBACgClAogAUH//wNxQQN0aigCAEEERg0EDAwLQScQGAwLC0EiEBgMCgsgAkEvRw0JAkACQCABLwEEIgFBKkYNACABQS9HDQEQFgwMC0EBEBcMCwsCQAJAQQAoAowKIgEvAQAiAxAZRQ0AAkACQCADQVVqDgQACAEDCAsgAUF+ai8BAEErRg0GDAcLIAFBfmovAQBBLUYNBQwGCwJAIANB/QBGDQAgA0EpRw0FQQAoApQKQQAvAYgKQQN0aigCBBAaRQ0FDAYLQQAoApQKQQAvAYgKQQN0aiICKAIEEBsNBSACKAIAQQZGDQUMBAsgAUF+ai8BAEFQakH//wNxQQpJDQMMBAtBACgClApBAC8BiAoiAUEDdCIDakEAKAKMCjYCBEEAIAFBAWo7AYgKQQAoApQKIANqQQM2AgALEBwMBwtBAC0A8AlBAC8BhgpBAC8BiApyckUhAgwJCyABEB0NACADRQ0AIANBL0ZBAC0AkApBAEdxDQAgAUF+aiEBQQAoAtAJIQICQANAIAFBAmoiBCACTQ0BQQAgATYCjAogAS8BACEDIAFBfmoiBCEBIAMQHkUNAAsgBEECaiEEC0EBIQUgA0H//wNxEB9FDQEgBEF+aiEBAkADQCABQQJqIgMgAk0NAUEAIAE2AowKIAEvAQAhAyABQX5qIgQhASADEB8NAAsgBEECaiEDCyADECBFDQEQIUEAQQA6AJAKDAULECFBACEFC0EAIAU6AJAKDAMLECJBACECDAULIANBoAFHDQELQQBBAToAnAoLQQBBACgCoAo2AowKC0EAKAKgCiEBDAALCyAAQYDQAGokACACCxoAAkBBACgC0AkgAEcNAEEBDwsgAEF+ahAjC/IKAQZ/QQBBACgCoAoiAEEMaiIBNgKgCkEAKALsCSECQQEQJyEDAkACQAJAAkACQAJAAkACQAJAQQAoAqAKIgQgAUcNACADECZFDQELAkACQAJAAkACQAJAAkAgA0EqRg0AIANB+wBHDQFBACAEQQJqNgKgCkEBECchBEEAKAKgCiEFA0ACQAJAIARB//8DcSIDQSJGDQAgA0EnRg0AIAMQKhpBACgCoAohAwwBCyADEBhBAEEAKAKgCkECaiIDNgKgCgtBARAnGgJAIAUgAxArIgRBLEcNAEEAQQAoAqAKQQJqNgKgCkEBECchBAtBACgCoAohAyAEQf0ARg0DIAMgBUYNDyADIQUgA0EAKAKkCk0NAAwPCwtBACAEQQJqNgKgCkEBECcaQQAoAqAKIgMgAxArGgwCC0EAQQA6AIQKAkACQAJAAkACQAJAIANBn39qDgwCCwQBCwMLCwsLCwUACyADQfYARg0EDAoLQQAgBEEOaiIDNgKgCgJAAkACQEEBECdBn39qDgYAEgISEgESC0EAKAKgCiIFKQACQvOA5IPgjcAxUg0RIAUvAQoQH0UNEUEAIAVBCmo2AqAKQQAQJxoLQQAoAqAKIgVBAmpBoghBDhAtDRAgBS8BECICQXdqIgFBF0sNDUEBIAF0QZ+AgARxRQ0NDA4LQQAoAqAKIgUpAAJC7ICEg7COwDlSDQ8gBS8BCiICQXdqIgFBF00NBgwKC0EAIARBCmo2AqAKQQAQJxpBACgCoAohBAtBACAEQRBqNgKgCgJAQQEQJyIEQSpHDQBBAEEAKAKgCkECajYCoApBARAnIQQLQQAoAqAKIQMgBBAqGiADQQAoAqAKIgQgAyAEEAJBAEEAKAKgCkF+ajYCoAoPCwJAIAQpAAJC7ICEg7COwDlSDQAgBC8BChAeRQ0AQQAgBEEKajYCoApBARAnIQRBACgCoAohAyAEECoaIANBACgCoAoiBCADIAQQAkEAQQAoAqAKQX5qNgKgCg8LQQAgBEEEaiIENgKgCgtBACAEQQZqNgKgCkEAQQA6AIQKQQEQJyEEQQAoAqAKIQMgBBAqIQRBACgCoAohAiAEQd//A3EiAUHbAEcNA0EAIAJBAmo2AqAKQQEQJyEFQQAoAqAKIQNBACEEDAQLQQAgA0ECajYCoAoLQQEQJyEEQQAoAqAKIQMCQCAEQeYARw0AIANBAmpBnAhBBhAtDQBBACADQQhqNgKgCiAAQQEQJxApIAJBEGpB2AkgAhshAwNAIAMoAgAiA0UNBSADQgA3AgggA0EQaiEDDAALC0EAIANBfmo2AqAKDAMLQQEgAXRBn4CABHFFDQMMBAtBASEECwNAAkACQCAEDgIAAQELIAVB//8DcRAqGkEBIQQMAQsCQAJAQQAoAqAKIgQgA0YNACADIAQgAyAEEAJBARAnIQQCQCABQdsARw0AIARBIHJB/QBGDQQLQQAoAqAKIQMCQCAEQSxHDQBBACADQQJqNgKgCkEBECchBUEAKAKgCiEDIAVBIHJB+wBHDQILQQAgA0F+ajYCoAoLIAFB2wBHDQJBACACQX5qNgKgCg8LQQAhBAwACwsPCyACQaABRg0AIAJB+wBHDQQLQQAgBUEKajYCoApBARAnIgVB+wBGDQMMAgsCQCACQVhqDgMBAwEACyACQaABRw0CC0EAIAVBEGo2AqAKAkBBARAnIgVBKkcNAEEAQQAoAqAKQQJqNgKgCkEBECchBQsgBUEoRg0BC0EAKAKgCiEBIAUQKhpBACgCoAoiBSABTQ0AIAQgAyABIAUQAkEAQQAoAqAKQX5qNgKgCg8LIAQgA0EAQQAQAkEAIARBDGo2AqAKDwsQIgvUBgEEf0EAQQAoAqAKIgBBDGoiATYCoAoCQAJAAkACQAJAAkACQAJAAkACQEEBECciAkFZag4IBAIBBAEBAQMACyACQSJGDQMgAkH7AEYNBAtBACgCoAogAUcNAkEAIABBCmo2AqAKDwtBACgClApBAC8BiAoiAkEDdGoiAUEAKAKgCjYCBEEAIAJBAWo7AYgKIAFBBTYCAEEAKAKMCi8BAEEuRg0DQQBBACgCoAoiAUECajYCoApBARAnIQIgAEEAKAKgCkEAIAEQAUEAQQAvAYYKIgFBAWo7AYYKQQAoApgKIAFBAnRqQQAoAuQJNgIAAkAgAkEiRg0AIAJBJ0YNAEEAQQAoAqAKQX5qNgKgCg8LIAIQGEEAQQAoAqAKQQJqIgI2AqAKAkACQAJAQQEQJ0FXag4EAQICAAILQQBBACgCoApBAmo2AqAKQQEQJxpBACgC5AkiASACNgIEIAFBAToAGCABQQAoAqAKIgI2AhBBACACQX5qNgKgCg8LQQAoAuQJIgEgAjYCBCABQQ
E6ABhBAEEALwGICkF/ajsBiAogAUEAKAKgCkECajYCDEEAQQAvAYYKQX9qOwGGCg8LQQBBACgCoApBfmo2AqAKDwtBAEEAKAKgCkECajYCoApBARAnQe0ARw0CQQAoAqAKIgJBAmpBlghBBhAtDQICQEEAKAKMCiIBECgNACABLwEAQS5GDQMLIAAgACACQQhqQQAoAsgJEAEPC0EALwGICg0CQQAoAqAKIQJBACgCpAohAwNAIAIgA08NBQJAAkAgAi8BACIBQSdGDQAgAUEiRw0BCyAAIAEQKQ8LQQAgAkECaiICNgKgCgwACwtBACgCoAohAkEALwGICg0CAkADQAJAAkACQCACQQAoAqQKTw0AQQEQJyICQSJGDQEgAkEnRg0BIAJB/QBHDQJBAEEAKAKgCkECajYCoAoLQQEQJyEBQQAoAqAKIQICQCABQeYARw0AIAJBAmpBnAhBBhAtDQgLQQAgAkEIajYCoApBARAnIgJBIkYNAyACQSdGDQMMBwsgAhAYC0EAQQAoAqAKQQJqIgI2AqAKDAALCyAAIAIQKQsPC0EAQQAoAqAKQX5qNgKgCg8LQQAgAkF+ajYCoAoPCxAiC0cBA39BACgCoApBAmohAEEAKAKkCiEBAkADQCAAIgJBfmogAU8NASACQQJqIQAgAi8BAEF2ag4EAQAAAQALC0EAIAI2AqAKC5gBAQN/QQBBACgCoAoiAUECajYCoAogAUEGaiEBQQAoAqQKIQIDQAJAAkACQCABQXxqIAJPDQAgAUF+ai8BACEDAkACQCAADQAgA0EqRg0BIANBdmoOBAIEBAIECyADQSpHDQMLIAEvAQBBL0cNAkEAIAFBfmo2AqAKDAELIAFBfmohAQtBACABNgKgCg8LIAFBAmohAQwACwuIAQEEf0EAKAKgCiEBQQAoAqQKIQICQAJAA0AgASIDQQJqIQEgAyACTw0BIAEvAQAiBCAARg0CAkAgBEHcAEYNACAEQXZqDgQCAQECAQsgA0EEaiEBIAMvAQRBDUcNACADQQZqIAEgAy8BBkEKRhshAQwACwtBACABNgKgChAiDwtBACABNgKgCgtsAQF/AkACQCAAQV9qIgFBBUsNAEEBIAF0QTFxDQELIABBRmpB//8DcUEGSQ0AIABBKUcgAEFYakH//wNxQQdJcQ0AAkAgAEGlf2oOBAEAAAEACyAAQf0ARyAAQYV/akH//wNxQQRJcQ8LQQELLgEBf0EBIQECQCAAQZYJQQUQJA0AIABBoAlBAxAkDQAgAEGmCUECECQhAQsgAQuDAQECf0EBIQECQAJAAkACQAJAAkAgAC8BACICQUVqDgQFBAQBAAsCQCACQZt/ag4EAwQEAgALIAJBKUYNBCACQfkARw0DIABBfmpBsglBBhAkDwsgAEF+ai8BAEE9Rg8LIABBfmpBqglBBBAkDwsgAEF+akG+CUEDECQPC0EAIQELIAEL3gEBBH9BACgCoAohAEEAKAKkCiEBAkACQAJAA0AgACICQQJqIQAgAiABTw0BAkACQAJAIAAvAQAiA0Gkf2oOBQIDAwMBAAsgA0EkRw0CIAIvAQRB+wBHDQJBACACQQRqIgA2AqAKQQBBAC8BiAoiAkEBajsBiApBACgClAogAkEDdGoiAkEENgIAIAIgADYCBA8LQQAgADYCoApBAEEALwGICkF/aiIAOwGICkEAKAKUCiAAQf//A3FBA3RqKAIAQQNHDQMMBAsgAkEEaiEADAALC0EAIAA2AqAKCxAiCwu0AwECf0EAIQECQAJAAkACQAJAAkACQAJAAkACQCAALwEAQZx/ag4UAAECCQkJCQMJCQQFCQkGCQcJCQgJCwJAAkAgAEF+ai8BAEGXf2oOBAAKCgEKCyAAQXxqQboIQQIQJA8LIABBfGpBvghBAxAkDwsCQAJAAkAgAEF+ai8BAEGNf2oOAwABAgoLAkAgAEF8ai8BACICQeEARg0AIAJB7ABHDQogAEF6akHlABAlDwsgAEF6akHjABAlDwsgAEF8akHECEEEECQPCyAAQXxqQcwIQQYQJA8LIABBfmovAQBB7wBHDQYgAEF8ai8BAEHlAEcNBgJAIABBemovAQAiAkHwAEYNACACQeMARw0HIABBeGpB2AhBBhAkDwsgAEF4akHkCEECECQPCyAAQX5qQegIQQQQJA8LQQEhASAAQX5qIgBB6QAQJQ0EIABB8AhBBRAkDwsgAEF+akHkABAlDwsgAEF+akH6CEEHECQPCyAAQX5qQYgJQQQQJA8LAkAgAEF+ai8BACICQe8ARg0AIAJB5QBHDQEgAEF8akHuABAlDwsgAEF8akGQCUEDECQhAQsgAQs0AQF/QQEhAQJAIABBd2pB//8DcUEFSQ0AIABBgAFyQaABRg0AIABBLkcgABAmcSEBCyABCzABAX8CQAJAIABBd2oiAUEXSw0AQQEgAXRBjYCABHENAQsgAEGgAUYNAEEADwtBAQtOAQJ/QQAhAQJAAkAgAC8BACICQeUARg0AIAJB6wBHDQEgAEF+akHoCEEEECQPCyAAQX5qLwEAQfUARw0AIABBfGpBzAhBBhAkIQELIAELcAECfwJAAkADQEEAQQAoAqAKIgBBAmoiATYCoAogAEEAKAKkCk8NAQJAAkACQCABLwEAIgFBpX9qDgIBAgALAkAgAUF2ag4EBAMDBAALIAFBL0cNAgwECxAsGgwBC0EAIABBBGo2AqAKDAALCxAiCws1AQF/QQBBAToA8AlBACgCoAohAEEAQQAoAqQKQQJqNgKgCkEAIABBACgC0AlrQQF1NgKACgtDAQJ/QQEhAQJAIAAvAQAiAkF3akH//wNxQQVJDQAgAkGAAXJBoAFGDQBBACEBIAIQJkUNACACQS5HIAAQKHIPCyABC0YBA39BACEDAkAgACACQQF0IgJrIgRBAmoiAEEAKALQCSIFSQ0AIAAgASACEC0NAAJAIAAgBUcNAEEBDwsgBBAjIQMLIAMLPQECf0EAIQICQEEAKALQCSIDIABLDQAgAC8BACABRw0AAkAgAyAARw0AQQEPCyAAQX5qLwEAEB4hAgsgAgtoAQJ/QQEhAQJAAkAgAEFfaiICQQVLDQBBASACdEExcQ0BCyAAQfj/A3FBKEYNACAAQUZqQf//A3FBBkkNAAJAIABBpX9qIgJBA0sNACACQQFHDQELIABBhX9qQf//A3FBBEkhAQsgAQucAQEDf0EAKAKgCiEBAkADQAJAAkAgAS8BACICQS9HDQACQCABLwECIgFBKkYNACABQS9HDQQQFgwCCyAAEBcMAQsCQAJAIABFDQAgAkF3aiIBQRdLDQFBASABdEGfgIAEcUUNAQwCCyACEB9FDQMMAQsgAkGgAUcNAgtBAEEAKAKgCiIDQQJqIgE2AqAKIANBACgCpApJDQALCyACCzEBAX9BACEBAkAgAC8BAEEuRw0AIABBfmovAQBBLkcNACAAQXxqLwEAQS5GIQELIAELiQQBAX8CQCABQSJGDQAgAUEnRg0AECIPC0EAKAKgCiECI
AEQGCAAIAJBAmpBACgCoApBACgCxAkQAUEAQQAoAqAKQQJqNgKgCgJAAkACQAJAQQAQJyIBQeEARg0AIAFB9wBGDQFBACgCoAohAQwCC0EAKAKgCiIBQQJqQbAIQQoQLQ0BQQYhAAwCC0EAKAKgCiIBLwECQekARw0AIAEvAQRB9ABHDQBBBCEAIAEvAQZB6ABGDQELQQAgAUF+ajYCoAoPC0EAIAEgAEEBdGo2AqAKAkBBARAnQfsARg0AQQAgATYCoAoPC0EAKAKgCiICIQADQEEAIABBAmo2AqAKAkACQAJAQQEQJyIAQSJGDQAgAEEnRw0BQScQGEEAQQAoAqAKQQJqNgKgCkEBECchAAwCC0EiEBhBAEEAKAKgCkECajYCoApBARAnIQAMAQsgABAqIQALAkAgAEE6Rg0AQQAgATYCoAoPC0EAQQAoAqAKQQJqNgKgCgJAQQEQJyIAQSJGDQAgAEEnRg0AQQAgATYCoAoPCyAAEBhBAEEAKAKgCkECajYCoAoCQAJAQQEQJyIAQSxGDQAgAEH9AEYNAUEAIAE2AqAKDwtBAEEAKAKgCkECajYCoApBARAnQf0ARg0AQQAoAqAKIQAMAQsLQQAoAuQJIgEgAjYCECABQQAoAqAKQQJqNgIMC20BAn8CQAJAA0ACQCAAQf//A3EiAUF3aiICQRdLDQBBASACdEGfgIAEcQ0CCyABQaABRg0BIAAhAiABECYNAkEAIQJBAEEAKAKgCiIAQQJqNgKgCiAALwECIgANAAwCCwsgACECCyACQf//A3ELqwEBBH8CQAJAQQAoAqAKIgIvAQAiA0HhAEYNACABIQQgACEFDAELQQAgAkEEajYCoApBARAnIQJBACgCoAohBQJAAkAgAkEiRg0AIAJBJ0YNACACECoaQQAoAqAKIQQMAQsgAhAYQQBBACgCoApBAmoiBDYCoAoLQQEQJyEDQQAoAqAKIQILAkAgAiAFRg0AIAUgBEEAIAAgACABRiICG0EAIAEgAhsQAgsgAwtyAQR/QQAoAqAKIQBBACgCpAohAQJAAkADQCAAQQJqIQIgACABTw0BAkACQCACLwEAIgNBpH9qDgIBBAALIAIhACADQXZqDgQCAQECAQsgAEEEaiEADAALC0EAIAI2AqAKECJBAA8LQQAgAjYCoApB3QALSQEDf0EAIQMCQCACRQ0AAkADQCAALQAAIgQgAS0AACIFRw0BIAFBAWohASAAQQFqIQAgAkF/aiICDQAMAgsLIAQgBWshAwsgAwsL4gECAEGACAvEAQAAeABwAG8AcgB0AG0AcABvAHIAdABlAHQAYQByAG8AbQB1AG4AYwB0AGkAbwBuAHMAcwBlAHIAdAB2AG8AeQBpAGUAZABlAGwAZQBjAG8AbgB0AGkAbgBpAG4AcwB0AGEAbgB0AHkAYgByAGUAYQByAGUAdAB1AHIAZABlAGIAdQBnAGcAZQBhAHcAYQBpAHQAaAByAHcAaABpAGwAZQBmAG8AcgBpAGYAYwBhAHQAYwBmAGkAbgBhAGwAbABlAGwAcwAAQcQJCxABAAAAAgAAAAAEAAAwOQAA","undefined"!=typeof Buffer?Buffer.from(E,"base64"):Uint8Array.from(atob(E),(A=>A.charCodeAt(0))))).then(WebAssembly.instantiate).then((({exports:A})=>{C=A;}));var E;
var convertSourceMap$1 = {};
(function (exports) {
Object.defineProperty(exports, 'commentRegex', {
get: function getCommentRegex () {
// Groups: 1: media type, 2: MIME type, 3: charset, 4: encoding, 5: data.
return /^\s*?\/[\/\*][@#]\s+?sourceMappingURL=data:(((?:application|text)\/json)(?:;charset=([^;,]+?)?)?)?(?:;(base64))?,(.*?)$/mg;
}
});
Object.defineProperty(exports, 'mapFileCommentRegex', {
get: function getMapFileCommentRegex () {
// Matches sourceMappingURL in either // or /* comment styles.
return /(?:\/\/[@#][ \t]+?sourceMappingURL=([^\s'"`]+?)[ \t]*?$)|(?:\/\*[@#][ \t]+sourceMappingURL=([^*]+?)[ \t]*?(?:\*\/){1}[ \t]*?$)/mg;
}
});
var decodeBase64;
if (typeof Buffer !== 'undefined') {
if (typeof Buffer.from === 'function') {
decodeBase64 = decodeBase64WithBufferFrom;
} else {
decodeBase64 = decodeBase64WithNewBuffer;
}
} else {
decodeBase64 = decodeBase64WithAtob;
}
function decodeBase64WithBufferFrom(base64) {
return Buffer.from(base64, 'base64').toString();
}
function decodeBase64WithNewBuffer(base64) {
// guard against numeric input, which cannot be base64-decoded
if (typeof base64 === 'number') {
throw new TypeError('The value to decode must not be of type number.');
}
return new Buffer(base64, 'base64').toString();
}
function decodeBase64WithAtob(base64) {
return decodeURIComponent(escape(atob(base64)));
}
function stripComment(sm) {
return sm.split(',').pop();
}
function readFromFileMap(sm, read) {
var r = exports.mapFileCommentRegex.exec(sm);
// for some odd reason //# .. captures in 1 and /* .. */ in 2
var filename = r[1] || r[2];
try {
var sm = read(filename);
if (sm != null && typeof sm.catch === 'function') {
return sm.catch(throwError);
} else {
return sm;
}
} catch (e) {
throwError(e);
}
function throwError(e) {
throw new Error('An error occurred while trying to read the map file at ' + filename + '\n' + e.stack);
}
}
function Converter (sm, opts) {
opts = opts || {};
if (opts.hasComment) {
sm = stripComment(sm);
}
if (opts.encoding === 'base64') {
sm = decodeBase64(sm);
} else if (opts.encoding === 'uri') {
sm = decodeURIComponent(sm);
}
if (opts.isJSON || opts.encoding) {
sm = JSON.parse(sm);
}
this.sourcemap = sm;
}
Converter.prototype.toJSON = function (space) {
return JSON.stringify(this.sourcemap, null, space);
};
if (typeof Buffer !== 'undefined') {
if (typeof Buffer.from === 'function') {
Converter.prototype.toBase64 = encodeBase64WithBufferFrom;
} else {
Converter.prototype.toBase64 = encodeBase64WithNewBuffer;
}
} else {
Converter.prototype.toBase64 = encodeBase64WithBtoa;
}
function encodeBase64WithBufferFrom() {
var json = this.toJSON();
return Buffer.from(json, 'utf8').toString('base64');
}
function encodeBase64WithNewBuffer() {
var json = this.toJSON();
if (typeof json === 'number') {
throw new TypeError('The json to encode must not be of type number.');
}
return new Buffer(json, 'utf8').toString('base64');
}
function encodeBase64WithBtoa() {
var json = this.toJSON();
return btoa(unescape(encodeURIComponent(json)));
}
Converter.prototype.toURI = function () {
var json = this.toJSON();
return encodeURIComponent(json);
};
Converter.prototype.toComment = function (options) {
var encoding, content, data;
if (options != null && options.encoding === 'uri') {
encoding = '';
content = this.toURI();
} else {
encoding = ';base64';
content = this.toBase64();
}
data = 'sourceMappingURL=data:application/json;charset=utf-8' + encoding + ',' + content;
return options != null && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
};
// returns copy instead of original
Converter.prototype.toObject = function () {
return JSON.parse(this.toJSON());
};
Converter.prototype.addProperty = function (key, value) {
if (this.sourcemap.hasOwnProperty(key)) throw new Error('property "' + key + '" already exists on the sourcemap, use set property instead');
return this.setProperty(key, value);
};
Converter.prototype.setProperty = function (key, value) {
this.sourcemap[key] = value;
return this;
};
Converter.prototype.getProperty = function (key) {
return this.sourcemap[key];
};
exports.fromObject = function (obj) {
return new Converter(obj);
};
exports.fromJSON = function (json) {
return new Converter(json, { isJSON: true });
};
exports.fromURI = function (uri) {
return new Converter(uri, { encoding: 'uri' });
};
exports.fromBase64 = function (base64) {
return new Converter(base64, { encoding: 'base64' });
};
exports.fromComment = function (comment) {
var m, encoding;
comment = comment
.replace(/^\/\*/g, '//')
.replace(/\*\/$/g, '');
m = exports.commentRegex.exec(comment);
encoding = m && m[4] || 'uri';
return new Converter(comment, { encoding: encoding, hasComment: true });
};
function makeConverter(sm) {
return new Converter(sm, { isJSON: true });
}
exports.fromMapFileComment = function (comment, read) {
if (typeof read === 'string') {
throw new Error(
'String directory paths are no longer supported with `fromMapFileComment`\n' +
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
)
}
var sm = readFromFileMap(comment, read);
if (sm != null && typeof sm.then === 'function') {
return sm.then(makeConverter);
} else {
return makeConverter(sm);
}
};
// Finds last sourcemap comment in file or returns null if none was found
exports.fromSource = function (content) {
var m = content.match(exports.commentRegex);
return m ? exports.fromComment(m.pop()) : null;
};
// Finds last sourcemap comment in file or returns null if none was found
exports.fromMapFileSource = function (content, read) {
if (typeof read === 'string') {
throw new Error(
'String directory paths are no longer supported with `fromMapFileSource`\n' +
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
)
}
var m = content.match(exports.mapFileCommentRegex);
return m ? exports.fromMapFileComment(m.pop(), read) : null;
};
exports.removeComments = function (src) {
return src.replace(exports.commentRegex, '');
};
exports.removeMapFileComments = function (src) {
return src.replace(exports.mapFileCommentRegex, '');
};
exports.generateMapFileComment = function (file, options) {
var data = 'sourceMappingURL=' + file;
return options && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
};
} (convertSourceMap$1));
var convertSourceMap = /*@__PURE__*/getDefaultExportFromCjs(convertSourceMap$1);
const debug$e = createDebugger('vite:sourcemap', {
onlyWhenFocused: true,
});
// Virtual modules should be prefixed with a null byte to avoid a
// false positive "missing source" warning. We also check for certain
// prefixes used for special handling in esbuildDepPlugin.
const virtualSourceRE = /^(?:dep:|browser-external:|virtual:)|\0/;
async function injectSourcesContent(map, file, logger) {
let sourceRoot;
try {
// The source root is undefined for virtual modules and permission errors.
sourceRoot = await fsp.realpath(path$o.resolve(path$o.dirname(file), map.sourceRoot || ''));
}
catch { }
const missingSources = [];
const sourcesContent = map.sourcesContent || [];
await Promise.all(map.sources.map(async (sourcePath, index) => {
let content = null;
if (sourcePath && !virtualSourceRE.test(sourcePath)) {
sourcePath = decodeURI(sourcePath);
if (sourceRoot) {
sourcePath = path$o.resolve(sourceRoot, sourcePath);
}
// inject content from source file when sourcesContent is null
content =
sourcesContent[index] ??
(await fsp.readFile(sourcePath, 'utf-8').catch(() => {
missingSources.push(sourcePath);
return null;
}));
}
sourcesContent[index] = content;
}));
map.sourcesContent = sourcesContent;
// Use this command…
// DEBUG="vite:sourcemap" vite build
// …to log the missing sources.
if (missingSources.length) {
logger.warnOnce(`Sourcemap for "${file}" points to missing source files`);
debug$e?.(`Missing sources:\n ` + missingSources.join(`\n `));
}
}
function genSourceMapUrl(map) {
if (typeof map !== 'string') {
map = JSON.stringify(map);
}
return `data:application/json;base64,${Buffer.from(map).toString('base64')}`;
}
function getCodeWithSourcemap(type, code, map) {
if (debug$e) {
code += `\n/*${JSON.stringify(map, null, 2).replace(/\*\//g, '*\\/')}*/\n`;
}
if (type === 'js') {
code += `\n//# sourceMappingURL=${genSourceMapUrl(map)}`;
}
else if (type === 'css') {
code += `\n/*# sourceMappingURL=${genSourceMapUrl(map)} */`;
}
return code;
}
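// getCodeWithSourcemap appends an inline, base64-encoded sourcemap comment, e.g.:
//   JS:  code + '\n//# sourceMappingURL=data:application/json;base64,<encoded map>'
//   CSS: code + '\n/*# sourceMappingURL=data:application/json;base64,<encoded map> */'
// (the <encoded map> payload shown here is only a placeholder).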
function applySourcemapIgnoreList(map, sourcemapPath, sourcemapIgnoreList, logger) {
let { x_google_ignoreList } = map;
if (x_google_ignoreList === undefined) {
x_google_ignoreList = [];
}
for (let sourcesIndex = 0; sourcesIndex < map.sources.length; ++sourcesIndex) {
const sourcePath = map.sources[sourcesIndex];
if (!sourcePath)
continue;
const ignoreList = sourcemapIgnoreList(path$o.isAbsolute(sourcePath)
? sourcePath
: path$o.resolve(path$o.dirname(sourcemapPath), sourcePath), sourcemapPath);
if (logger && typeof ignoreList !== 'boolean') {
logger.warn('sourcemapIgnoreList function must return a boolean.');
}
if (ignoreList && !x_google_ignoreList.includes(sourcesIndex)) {
x_google_ignoreList.push(sourcesIndex);
}
}
if (x_google_ignoreList.length > 0) {
if (!map.x_google_ignoreList)
map.x_google_ignoreList = x_google_ignoreList;
}
}
var tasks = {};
var utils$g = {};
var array$1 = {};
Object.defineProperty(array$1, "__esModule", { value: true });
array$1.splitWhen = array$1.flatten = void 0;
function flatten$1(items) {
return items.reduce((collection, item) => [].concat(collection, item), []);
}
array$1.flatten = flatten$1;
function splitWhen(items, predicate) {
const result = [[]];
let groupIndex = 0;
for (const item of items) {
if (predicate(item)) {
groupIndex++;
result[groupIndex] = [];
}
else {
result[groupIndex].push(item);
}
}
return result;
}
array$1.splitWhen = splitWhen;
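// Quick illustrations of the helpers above:
//   splitWhen(['a', 'b', '|', 'c'], (item) => item === '|') -> [['a', 'b'], ['c']]
//   (the matching separator itself is dropped)
//   flatten([['a'], ['b', 'c']]) -> ['a', 'b', 'c'] (flattens one level deep only)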
var errno$1 = {};
Object.defineProperty(errno$1, "__esModule", { value: true });
errno$1.isEnoentCodeError = void 0;
function isEnoentCodeError(error) {
return error.code === 'ENOENT';
}
errno$1.isEnoentCodeError = isEnoentCodeError;
var fs$h = {};
Object.defineProperty(fs$h, "__esModule", { value: true });
fs$h.createDirentFromStats = void 0;
let DirentFromStats$1 = class DirentFromStats {
constructor(name, stats) {
this.name = name;
this.isBlockDevice = stats.isBlockDevice.bind(stats);
this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
this.isDirectory = stats.isDirectory.bind(stats);
this.isFIFO = stats.isFIFO.bind(stats);
this.isFile = stats.isFile.bind(stats);
this.isSocket = stats.isSocket.bind(stats);
this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
}
};
function createDirentFromStats$1(name, stats) {
return new DirentFromStats$1(name, stats);
}
fs$h.createDirentFromStats = createDirentFromStats$1;
var path$h = {};
Object.defineProperty(path$h, "__esModule", { value: true });
path$h.convertPosixPathToPattern = path$h.convertWindowsPathToPattern = path$h.convertPathToPattern = path$h.escapePosixPath = path$h.escapeWindowsPath = path$h.escape = path$h.removeLeadingDotSegment = path$h.makeAbsolute = path$h.unixify = void 0;
const os$3 = require$$2;
const path$g = require$$0$4;
const IS_WINDOWS_PLATFORM = os$3.platform() === 'win32';
const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
/**
* All non-escaped special characters.
* Posix: ()*?[\]{|}, !+@ before (, ! at the beginning, \\ before non-special characters.
* Windows: (){}, !+@ before (, ! at the beginning.
*/
const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g;
const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([(){}]|^!|[!+@](?=\())/g;
/**
* The device path (\\.\ or \\?\).
* https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths
*/
const DOS_DEVICE_PATH_RE = /^\\\\([.?])/;
/**
* All backslashes except those escaping special characters.
* Windows: !()+@{}
* https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions
*/
const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@{}])/g;
/**
* Designed to work only with simple paths: `dir\\file`.
*/
function unixify(filepath) {
return filepath.replace(/\\/g, '/');
}
path$h.unixify = unixify;
function makeAbsolute(cwd, filepath) {
return path$g.resolve(cwd, filepath);
}
path$h.makeAbsolute = makeAbsolute;
function removeLeadingDotSegment(entry) {
// We do not use `startsWith` because this is 10x slower than current implementation for some cases.
// eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
if (entry.charAt(0) === '.') {
const secondCharacter = entry.charAt(1);
if (secondCharacter === '/' || secondCharacter === '\\') {
return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);
}
}
return entry;
}
path$h.removeLeadingDotSegment = removeLeadingDotSegment;
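// e.g. removeLeadingDotSegment('./dir/file.js') -> 'dir/file.js'
//      removeLeadingDotSegment('.gitignore')    -> '.gitignore' (no separator after the dot, left as-is)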
path$h.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath;
function escapeWindowsPath(pattern) {
return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
}
path$h.escapeWindowsPath = escapeWindowsPath;
function escapePosixPath(pattern) {
return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
}
path$h.escapePosixPath = escapePosixPath;
path$h.convertPathToPattern = IS_WINDOWS_PLATFORM ? convertWindowsPathToPattern : convertPosixPathToPattern;
function convertWindowsPathToPattern(filepath) {
return escapeWindowsPath(filepath)
.replace(DOS_DEVICE_PATH_RE, '//$1')
.replace(WINDOWS_BACKSLASHES_RE, '/');
}
path$h.convertWindowsPathToPattern = convertWindowsPathToPattern;
function convertPosixPathToPattern(filepath) {
return escapePosixPath(filepath);
}
path$h.convertPosixPathToPattern = convertPosixPathToPattern;
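// Illustration (raw characters, not JS string literals): the Windows path
//   C:\Program Files (x86)\pkg
// becomes the pattern
//   C:/Program Files \(x86\)/pkg
// (separators flipped, glob-special parentheses escaped); on POSIX only escaping is applied.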
var pattern$1 = {};
/*!
* is-extglob <https://github.com/jonschlinkert/is-extglob>
*
* Copyright (c) 2014-2016, Jon Schlinkert.
* Licensed under the MIT License.
*/
var isExtglob$1 = function isExtglob(str) {
if (typeof str !== 'string' || str === '') {
return false;
}
var match;
while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) {
if (match[2]) return true;
str = str.slice(match.index + match[0].length);
}
return false;
};
/*!
* is-glob <https://github.com/jonschlinkert/is-glob>
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
var isExtglob = isExtglob$1;
var chars = { '{': '}', '(': ')', '[': ']'};
var strictCheck = function(str) {
if (str[0] === '!') {
return true;
}
var index = 0;
var pipeIndex = -2;
var closeSquareIndex = -2;
var closeCurlyIndex = -2;
var closeParenIndex = -2;
var backSlashIndex = -2;
while (index < str.length) {
if (str[index] === '*') {
return true;
}
if (str[index + 1] === '?' && /[\].+)]/.test(str[index])) {
return true;
}
if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') {
if (closeSquareIndex < index) {
closeSquareIndex = str.indexOf(']', index);
}
if (closeSquareIndex > index) {
if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) {
return true;
}
backSlashIndex = str.indexOf('\\', index);
if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) {
return true;
}
}
}
if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') {
closeCurlyIndex = str.indexOf('}', index);
if (closeCurlyIndex > index) {
backSlashIndex = str.indexOf('\\', index);
if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) {
return true;
}
}
}
if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') {
closeParenIndex = str.indexOf(')', index);
if (closeParenIndex > index) {
backSlashIndex = str.indexOf('\\', index);
if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) {
return true;
}
}
}
if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') {
if (pipeIndex < index) {
pipeIndex = str.indexOf('|', index);
}
if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') {
closeParenIndex = str.indexOf(')', pipeIndex);
if (closeParenIndex > pipeIndex) {
backSlashIndex = str.indexOf('\\', pipeIndex);
if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) {
return true;
}
}
}
}
if (str[index] === '\\') {
var open = str[index + 1];
index += 2;
var close = chars[open];
if (close) {
var n = str.indexOf(close, index);
if (n !== -1) {
index = n + 1;
}
}
if (str[index] === '!') {
return true;
}
} else {
index++;
}
}
return false;
};
var relaxedCheck = function(str) {
if (str[0] === '!') {
return true;
}
var index = 0;
while (index < str.length) {
if (/[*?{}()[\]]/.test(str[index])) {
return true;
}
if (str[index] === '\\') {
var open = str[index + 1];
index += 2;
var close = chars[open];
if (close) {
var n = str.indexOf(close, index);
if (n !== -1) {
index = n + 1;
}
}
if (str[index] === '!') {
return true;
}
} else {
index++;
}
}
return false;
};
var isGlob$2 = function isGlob(str, options) {
if (typeof str !== 'string' || str === '') {
return false;
}
if (isExtglob(str)) {
return true;
}
var check = strictCheck;
// optionally relax check
if (options && options.strict === false) {
check = relaxedCheck;
}
return check(str);
};
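// e.g. isGlob('src/**/*.js') -> true, isGlob('src/index.js') -> false
// Passing { strict: false } switches to the more permissive relaxedCheck above.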
var isGlob$1 = isGlob$2;
var pathPosixDirname = require$$0$4.posix.dirname;
var isWin32 = require$$2.platform() === 'win32';
var slash = '/';
var backslash = /\\/g;
var enclosure = /[\{\[].*[\}\]]$/;
var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/;
var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g;
/**
* @param {string} str
* @param {Object} opts
* @param {boolean} [opts.flipBackslashes=true]
* @returns {string}
*/
var globParent$2 = function globParent(str, opts) {
var options = Object.assign({ flipBackslashes: true }, opts);
// flip windows path separators
if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {
str = str.replace(backslash, slash);
}
// special case for strings ending in enclosure containing path separator
if (enclosure.test(str)) {
str += slash;
}
// preserves full path in case of trailing path separator
str += 'a';
// remove path parts that are globby
do {
str = pathPosixDirname(str);
} while (isGlob$1(str) || globby.test(str));
// remove escape chars and return result
return str.replace(escaped, '$1');
};
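// globParent extracts the non-glob parent directory of a pattern, e.g.
//   globParent('path/to/*.js')        -> 'path/to'
//   globParent('path/{to,from}/*.js') -> 'path'
// (examples assume default options; on Windows, backslashes are flipped to slashes first).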
var utils$f = {};
(function (exports) {
exports.isInteger = num => {
if (typeof num === 'number') {
return Number.isInteger(num);
}
if (typeof num === 'string' && num.trim() !== '') {
return Number.isInteger(Number(num));
}
return false;
};
/**
* Find a node of the given type
*/
exports.find = (node, type) => node.nodes.find(node => node.type === type);
/**
* Returns true if expanding the range from min to max (by step) would exceed the given limit
*/
exports.exceedsLimit = (min, max, step = 1, limit) => {
if (limit === false) return false;
if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
return ((Number(max) - Number(min)) / Number(step)) >= limit;
};
/**
* Escape the given node with '\\' before node.value
*/
exports.escapeNode = (block, n = 0, type) => {
let node = block.nodes[n];
if (!node) return;
if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
if (node.escaped !== true) {
node.value = '\\' + node.value;
node.escaped = true;
}
}
};
/**
* Returns true if the given brace node should be enclosed in literal braces
*/
exports.encloseBrace = node => {
if (node.type !== 'brace') return false;
if ((node.commas >> 0 + node.ranges >> 0) === 0) {
node.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a brace node is invalid.
*/
exports.isInvalidBrace = block => {
if (block.type !== 'brace') return false;
if (block.invalid === true || block.dollar) return true;
if ((block.commas >> 0 + block.ranges >> 0) === 0) {
block.invalid = true;
return true;
}
if (block.open !== true || block.close !== true) {
block.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a node is an open or close node
*/
exports.isOpenOrClose = node => {
if (node.type === 'open' || node.type === 'close') {
return true;
}
return node.open === true || node.close === true;
};
/**
* Reduce an array of text nodes.
*/
exports.reduce = nodes => nodes.reduce((acc, node) => {
if (node.type === 'text') acc.push(node.value);
if (node.type === 'range') node.type = 'text';
return acc;
}, []);
/**
* Flatten an array
*/
exports.flatten = (...args) => {
const result = [];
const flat = arr => {
for (let i = 0; i < arr.length; i++) {
let ele = arr[i];
Array.isArray(ele) ? flat(ele) : ele !== void 0 && result.push(ele);
}
return result;
};
flat(args);
return result;
};
} (utils$f));
const utils$e = utils$f;
var stringify$7 = (ast, options = {}) => {
let stringify = (node, parent = {}) => {
let invalidBlock = options.escapeInvalid && utils$e.isInvalidBrace(parent);
let invalidNode = node.invalid === true && options.escapeInvalid === true;
let output = '';
if (node.value) {
if ((invalidBlock || invalidNode) && utils$e.isOpenOrClose(node)) {
return '\\' + node.value;
}
return node.value;
}
if (node.value) {
return node.value;
}
if (node.nodes) {
for (let child of node.nodes) {
output += stringify(child);
}
}
return output;
};
return stringify(ast);
};
/*!
* is-number <https://github.com/jonschlinkert/is-number>
*
* Copyright (c) 2014-present, Jon Schlinkert.
* Released under the MIT License.
*/
var isNumber$2 = function(num) {
if (typeof num === 'number') {
return num - num === 0;
}
if (typeof num === 'string' && num.trim() !== '') {
return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);
}
return false;
};
/*!
* to-regex-range <https://github.com/micromatch/to-regex-range>
*
* Copyright (c) 2015-present, Jon Schlinkert.
* Released under the MIT License.
*/
const isNumber$1 = isNumber$2;
const toRegexRange$1 = (min, max, options) => {
if (isNumber$1(min) === false) {
throw new TypeError('toRegexRange: expected the first argument to be a number');
}
if (max === void 0 || min === max) {
return String(min);
}
if (isNumber$1(max) === false) {
throw new TypeError('toRegexRange: expected the second argument to be a number.');
}
let opts = { relaxZeros: true, ...options };
if (typeof opts.strictZeros === 'boolean') {
opts.relaxZeros = opts.strictZeros === false;
}
let relax = String(opts.relaxZeros);
let shorthand = String(opts.shorthand);
let capture = String(opts.capture);
let wrap = String(opts.wrap);
let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;
if (toRegexRange$1.cache.hasOwnProperty(cacheKey)) {
return toRegexRange$1.cache[cacheKey].result;
}
let a = Math.min(min, max);
let b = Math.max(min, max);
if (Math.abs(a - b) === 1) {
let result = min + '|' + max;
if (opts.capture) {
return `(${result})`;
}
if (opts.wrap === false) {
return result;
}
return `(?:${result})`;
}
let isPadded = hasPadding(min) || hasPadding(max);
let state = { min, max, a, b };
let positives = [];
let negatives = [];
if (isPadded) {
state.isPadded = isPadded;
state.maxLen = String(state.max).length;
}
if (a < 0) {
let newMin = b < 0 ? Math.abs(b) : 1;
negatives = splitToPatterns(newMin, Math.abs(a), state, opts);
a = state.a = 0;
}
if (b >= 0) {
positives = splitToPatterns(a, b, state, opts);
}
state.negatives = negatives;
state.positives = positives;
state.result = collatePatterns(negatives, positives);
if (opts.capture === true) {
state.result = `(${state.result})`;
} else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {
state.result = `(?:${state.result})`;
}
toRegexRange$1.cache[cacheKey] = state;
return state.result;
};
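// Illustrative outputs (exact form may differ depending on options):
//   toRegexRange(5, 5)  -> '5'
//   toRegexRange(1, 10) -> something like '(?:[1-9]|10)'
// Results are memoized in toRegexRange.cache (set up further below).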
function collatePatterns(neg, pos, options) {
let onlyNegative = filterPatterns(neg, pos, '-', false) || [];
let onlyPositive = filterPatterns(pos, neg, '', false) || [];
let intersected = filterPatterns(neg, pos, '-?', true) || [];
let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);
return subpatterns.join('|');
}
function splitToRanges(min, max) {
let nines = 1;
let zeros = 1;
let stop = countNines(min, nines);
let stops = new Set([max]);
while (min <= stop && stop <= max) {
stops.add(stop);
nines += 1;
stop = countNines(min, nines);
}
stop = countZeros(max + 1, zeros) - 1;
while (min < stop && stop <= max) {
stops.add(stop);
zeros += 1;
stop = countZeros(max + 1, zeros) - 1;
}
stops = [...stops];
stops.sort(compare);
return stops;
}
/**
* Convert a range to a regex pattern
* @param {Number} `start`
* @param {Number} `stop`
* @return {String}
*/
function rangeToPattern(start, stop, options) {
if (start === stop) {
return { pattern: start, count: [], digits: 0 };
}
let zipped = zip(start, stop);
let digits = zipped.length;
let pattern = '';
let count = 0;
for (let i = 0; i < digits; i++) {
let [startDigit, stopDigit] = zipped[i];
if (startDigit === stopDigit) {
pattern += startDigit;
} else if (startDigit !== '0' || stopDigit !== '9') {
pattern += toCharacterClass(startDigit, stopDigit);
} else {
count++;
}
}
if (count) {
pattern += options.shorthand === true ? '\\d' : '[0-9]';
}
return { pattern, count: [count], digits };
}
function splitToPatterns(min, max, tok, options) {
let ranges = splitToRanges(min, max);
let tokens = [];
let start = min;
let prev;
for (let i = 0; i < ranges.length; i++) {
let max = ranges[i];
let obj = rangeToPattern(String(start), String(max), options);
let zeros = '';
if (!tok.isPadded && prev && prev.pattern === obj.pattern) {
if (prev.count.length > 1) {
prev.count.pop();
}
prev.count.push(obj.count[0]);
prev.string = prev.pattern + toQuantifier(prev.count);
start = max + 1;
continue;
}
if (tok.isPadded) {
zeros = padZeros(max, tok, options);
}
obj.string = zeros + obj.pattern + toQuantifier(obj.count);
tokens.push(obj);
start = max + 1;
prev = obj;
}
return tokens;
}
function filterPatterns(arr, comparison, prefix, intersection, options) {
let result = [];
for (let ele of arr) {
let { string } = ele;
// only push if _both_ are negative...
if (!intersection && !contains(comparison, 'string', string)) {
result.push(prefix + string);
}
// or _both_ are positive
if (intersection && contains(comparison, 'string', string)) {
result.push(prefix + string);
}
}
return result;
}
/**
* Zip strings
*/
function zip(a, b) {
let arr = [];
for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);
return arr;
}
function compare(a, b) {
return a > b ? 1 : b > a ? -1 : 0;
}
function contains(arr, key, val) {
return arr.some(ele => ele[key] === val);
}
function countNines(min, len) {
return Number(String(min).slice(0, -len) + '9'.repeat(len));
}
function countZeros(integer, zeros) {
return integer - (integer % Math.pow(10, zeros));
}
function toQuantifier(digits) {
let [start = 0, stop = ''] = digits;
if (stop || start > 1) {
return `{${start + (stop ? ',' + stop : '')}}`;
}
return '';
}
function toCharacterClass(a, b, options) {
return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;
}
function hasPadding(str) {
return /^-?(0+)\d/.test(str);
}
function padZeros(value, tok, options) {
if (!tok.isPadded) {
return value;
}
let diff = Math.abs(tok.maxLen - String(value).length);
let relax = options.relaxZeros !== false;
switch (diff) {
case 0:
return '';
case 1:
return relax ? '0?' : '0';
case 2:
return relax ? '0{0,2}' : '00';
default: {
return relax ? `0{0,${diff}}` : `0{${diff}}`;
}
}
}
/**
* Cache
*/
toRegexRange$1.cache = {};
toRegexRange$1.clearCache = () => (toRegexRange$1.cache = {});
/**
* Expose `toRegexRange`
*/
var toRegexRange_1 = toRegexRange$1;
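/**
 * Illustrative usage of `toRegexRange$1` (a sketch derived from the
 * implementation above; results are memoized on `toRegexRange$1.cache`):
 *
 * ```js
 * toRegexRange_1(5, 5);   //=> '5'        — equal bounds return the number itself
 * toRegexRange_1(1, 2);   //=> '(?:1|2)'  — adjacent values become an alternation
 * toRegexRange_1(1, 9);   //=> '[1-9]'    — wider single-digit spans use a character class
 * ```
 */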
/*!
* fill-range <https://github.com/jonschlinkert/fill-range>
*
* Copyright (c) 2014-present, Jon Schlinkert.
* Licensed under the MIT License.
*/
const util$1 = require$$0$6;
const toRegexRange = toRegexRange_1;
const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
const transform = toNumber => {
return value => toNumber === true ? Number(value) : String(value);
};
const isValidValue = value => {
return typeof value === 'number' || (typeof value === 'string' && value !== '');
};
const isNumber = num => Number.isInteger(+num);
const zeros = input => {
let value = `${input}`;
let index = -1;
if (value[0] === '-') value = value.slice(1);
if (value === '0') return false;
while (value[++index] === '0');
return index > 0;
};
const stringify$6 = (start, end, options) => {
if (typeof start === 'string' || typeof end === 'string') {
return true;
}
return options.stringify === true;
};
const pad = (input, maxLength, toNumber) => {
if (maxLength > 0) {
let dash = input[0] === '-' ? '-' : '';
if (dash) input = input.slice(1);
input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));
}
if (toNumber === false) {
return String(input);
}
return input;
};
const toMaxLen = (input, maxLength) => {
let negative = input[0] === '-' ? '-' : '';
if (negative) {
input = input.slice(1);
maxLength--;
}
while (input.length < maxLength) input = '0' + input;
return negative ? ('-' + input) : input;
};
const toSequence = (parts, options) => {
parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
let prefix = options.capture ? '' : '?:';
let positives = '';
let negatives = '';
let result;
if (parts.positives.length) {
positives = parts.positives.join('|');
}
if (parts.negatives.length) {
negatives = `-(${prefix}${parts.negatives.join('|')})`;
}
if (positives && negatives) {
result = `${positives}|${negatives}`;
} else {
result = positives || negatives;
}
if (options.wrap) {
return `(${prefix}${result})`;
}
return result;
};
const toRange = (a, b, isNumbers, options) => {
if (isNumbers) {
return toRegexRange(a, b, { wrap: false, ...options });
}
let start = String.fromCharCode(a);
if (a === b) return start;
let stop = String.fromCharCode(b);
return `[${start}-${stop}]`;
};
const toRegex = (start, end, options) => {
if (Array.isArray(start)) {
let wrap = options.wrap === true;
let prefix = options.capture ? '' : '?:';
return wrap ? `(${prefix}${start.join('|')})` : start.join('|');
}
return toRegexRange(start, end, options);
};
const rangeError = (...args) => {
return new RangeError('Invalid range arguments: ' + util$1.inspect(...args));
};
const invalidRange = (start, end, options) => {
if (options.strictRanges === true) throw rangeError([start, end]);
return [];
};
const invalidStep = (step, options) => {
if (options.strictRanges === true) {
throw new TypeError(`Expected step "${step}" to be a number`);
}
return [];
};
const fillNumbers = (start, end, step = 1, options = {}) => {
let a = Number(start);
let b = Number(end);
if (!Number.isInteger(a) || !Number.isInteger(b)) {
if (options.strictRanges === true) throw rangeError([start, end]);
return [];
}
// fix negative zero
if (a === 0) a = 0;
if (b === 0) b = 0;
let descending = a > b;
let startString = String(start);
let endString = String(end);
let stepString = String(step);
step = Math.max(Math.abs(step), 1);
let padded = zeros(startString) || zeros(endString) || zeros(stepString);
let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;
let toNumber = padded === false && stringify$6(start, end, options) === false;
let format = options.transform || transform(toNumber);
if (options.toRegex && step === 1) {
return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);
}
let parts = { negatives: [], positives: [] };
let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));
let range = [];
let index = 0;
while (descending ? a >= b : a <= b) {
if (options.toRegex === true && step > 1) {
push(a);
} else {
range.push(pad(format(a, index), maxLen, toNumber));
}
a = descending ? a - step : a + step;
index++;
}
if (options.toRegex === true) {
return step > 1
? toSequence(parts, options)
: toRegex(range, null, { wrap: false, ...options });
}
return range;
};
const fillLetters = (start, end, step = 1, options = {}) => {
if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {
return invalidRange(start, end, options);
}
let format = options.transform || (val => String.fromCharCode(val));
let a = `${start}`.charCodeAt(0);
let b = `${end}`.charCodeAt(0);
let descending = a > b;
let min = Math.min(a, b);
let max = Math.max(a, b);
if (options.toRegex && step === 1) {
return toRange(min, max, false, options);
}
let range = [];
let index = 0;
while (descending ? a >= b : a <= b) {
range.push(format(a, index));
a = descending ? a - step : a + step;
index++;
}
if (options.toRegex === true) {
return toRegex(range, null, { wrap: false, ...options });
}
return range;
};
const fill$2 = (start, end, step, options = {}) => {
if (end == null && isValidValue(start)) {
return [start];
}
if (!isValidValue(start) || !isValidValue(end)) {
return invalidRange(start, end, options);
}
if (typeof step === 'function') {
return fill$2(start, end, 1, { transform: step });
}
if (isObject(step)) {
return fill$2(start, end, 0, step);
}
let opts = { ...options };
if (opts.capture === true) opts.wrap = true;
step = step || opts.step || 1;
if (!isNumber(step)) {
if (step != null && !isObject(step)) return invalidStep(step, opts);
return fill$2(start, end, 1, step);
}
if (isNumber(start) && isNumber(end)) {
return fillNumbers(start, end, step, opts);
}
return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);
};
var fillRange = fill$2;
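/**
 * Illustrative usage of `fillRange` (a sketch based on the implementation
 * above; the regex form delegates to `toRegexRange`):
 *
 * ```js
 * fillRange(1, 5);                    //=> [1, 2, 3, 4, 5]
 * fillRange('a', 'e');                //=> ['a', 'b', 'c', 'd', 'e']
 * fillRange(1, 10, 3);                //=> [1, 4, 7, 10]
 * fillRange('01', '03');              //=> ['01', '02', '03'] — zero-padding is preserved
 * fillRange(1, 3, { toRegex: true }); //=> '[1-3]'
 * ```
 */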
const fill$1 = fillRange;
const utils$d = utils$f;
const compile$1 = (ast, options = {}) => {
let walk = (node, parent = {}) => {
let invalidBlock = utils$d.isInvalidBrace(parent);
let invalidNode = node.invalid === true && options.escapeInvalid === true;
let invalid = invalidBlock === true || invalidNode === true;
let prefix = options.escapeInvalid === true ? '\\' : '';
let output = '';
if (node.isOpen === true) {
return prefix + node.value;
}
if (node.isClose === true) {
return prefix + node.value;
}
if (node.type === 'open') {
return invalid ? (prefix + node.value) : '(';
}
if (node.type === 'close') {
return invalid ? (prefix + node.value) : ')';
}
if (node.type === 'comma') {
return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');
}
if (node.value) {
return node.value;
}
if (node.nodes && node.ranges > 0) {
let args = utils$d.reduce(node.nodes);
let range = fill$1(...args, { ...options, wrap: false, toRegex: true });
if (range.length !== 0) {
return args.length > 1 && range.length > 1 ? `(${range})` : range;
}
}
if (node.nodes) {
for (let child of node.nodes) {
output += walk(child, node);
}
}
return output;
};
return walk(ast);
};
var compile_1 = compile$1;
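/**
 * Illustrative sketch of `compile$1` (using the brace parser and `braces`
 * facade defined later in this bundle): instead of expanding a brace pattern
 * into every combination, it emits a single regex-compatible source string.
 *
 * ```js
 * compile_1(braces$2.parse('{a,b,c}')); //=> '(a|b|c)'
 * compile_1(braces$2.parse('{1..3}'));  //=> '([1-3])' (approximate form)
 * ```
 */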
const fill = fillRange;
const stringify$5 = stringify$7;
const utils$c = utils$f;
const append$1 = (queue = '', stash = '', enclose = false) => {
let result = [];
queue = [].concat(queue);
stash = [].concat(stash);
if (!stash.length) return queue;
if (!queue.length) {
return enclose ? utils$c.flatten(stash).map(ele => `{${ele}}`) : stash;
}
for (let item of queue) {
if (Array.isArray(item)) {
for (let value of item) {
result.push(append$1(value, stash, enclose));
}
} else {
for (let ele of stash) {
if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
result.push(Array.isArray(ele) ? append$1(item, ele, enclose) : (item + ele));
}
}
}
return utils$c.flatten(result);
};
const expand$2 = (ast, options = {}) => {
let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;
let walk = (node, parent = {}) => {
node.queue = [];
let p = parent;
let q = parent.queue;
while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
p = p.parent;
q = p.queue;
}
if (node.invalid || node.dollar) {
q.push(append$1(q.pop(), stringify$5(node, options)));
return;
}
if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
q.push(append$1(q.pop(), ['{}']));
return;
}
if (node.nodes && node.ranges > 0) {
let args = utils$c.reduce(node.nodes);
if (utils$c.exceedsLimit(...args, options.step, rangeLimit)) {
throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
}
let range = fill(...args, options);
if (range.length === 0) {
range = stringify$5(node, options);
}
q.push(append$1(q.pop(), range));
node.nodes = [];
return;
}
let enclose = utils$c.encloseBrace(node);
let queue = node.queue;
let block = node;
while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
block = block.parent;
queue = block.queue;
}
for (let i = 0; i < node.nodes.length; i++) {
let child = node.nodes[i];
if (child.type === 'comma' && node.type === 'brace') {
if (i === 1) queue.push('');
queue.push('');
continue;
}
if (child.type === 'close') {
q.push(append$1(q.pop(), queue, enclose));
continue;
}
if (child.value && child.type !== 'open') {
queue.push(append$1(queue.pop(), child.value));
continue;
}
if (child.nodes) {
walk(child, node);
}
}
return queue;
};
return utils$c.flatten(walk(ast));
};
var expand_1$1 = expand$2;
var constants$3 = {
MAX_LENGTH: 1024 * 64,
// Digits
CHAR_0: '0', /* 0 */
CHAR_9: '9', /* 9 */
// Alphabet chars.
CHAR_UPPERCASE_A: 'A', /* A */
CHAR_LOWERCASE_A: 'a', /* a */
CHAR_UPPERCASE_Z: 'Z', /* Z */
CHAR_LOWERCASE_Z: 'z', /* z */
CHAR_LEFT_PARENTHESES: '(', /* ( */
CHAR_RIGHT_PARENTHESES: ')', /* ) */
CHAR_ASTERISK: '*', /* * */
// Non-alphabetic chars.
CHAR_AMPERSAND: '&', /* & */
CHAR_AT: '@', /* @ */
CHAR_BACKSLASH: '\\', /* \ */
CHAR_BACKTICK: '`', /* ` */
CHAR_CARRIAGE_RETURN: '\r', /* \r */
CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
CHAR_COLON: ':', /* : */
CHAR_COMMA: ',', /* , */
CHAR_DOLLAR: '$', /* $ */
CHAR_DOT: '.', /* . */
CHAR_DOUBLE_QUOTE: '"', /* " */
CHAR_EQUAL: '=', /* = */
CHAR_EXCLAMATION_MARK: '!', /* ! */
CHAR_FORM_FEED: '\f', /* \f */
CHAR_FORWARD_SLASH: '/', /* / */
CHAR_HASH: '#', /* # */
CHAR_HYPHEN_MINUS: '-', /* - */
CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
CHAR_LEFT_CURLY_BRACE: '{', /* { */
CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
CHAR_LINE_FEED: '\n', /* \n */
CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
CHAR_PERCENT: '%', /* % */
CHAR_PLUS: '+', /* + */
CHAR_QUESTION_MARK: '?', /* ? */
CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
CHAR_RIGHT_CURLY_BRACE: '}', /* } */
CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
CHAR_SEMICOLON: ';', /* ; */
CHAR_SINGLE_QUOTE: '\'', /* ' */
CHAR_SPACE: ' ', /* */
CHAR_TAB: '\t', /* \t */
CHAR_UNDERSCORE: '_', /* _ */
CHAR_VERTICAL_LINE: '|', /* | */
CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
};
const stringify$4 = stringify$7;
/**
* Constants
*/
const {
MAX_LENGTH,
CHAR_BACKSLASH, /* \ */
CHAR_BACKTICK, /* ` */
CHAR_COMMA, /* , */
CHAR_DOT, /* . */
CHAR_LEFT_PARENTHESES, /* ( */
CHAR_RIGHT_PARENTHESES, /* ) */
CHAR_LEFT_CURLY_BRACE, /* { */
CHAR_RIGHT_CURLY_BRACE, /* } */
CHAR_LEFT_SQUARE_BRACKET, /* [ */
CHAR_RIGHT_SQUARE_BRACKET, /* ] */
CHAR_DOUBLE_QUOTE, /* " */
CHAR_SINGLE_QUOTE, /* ' */
CHAR_NO_BREAK_SPACE,
CHAR_ZERO_WIDTH_NOBREAK_SPACE
} = constants$3;
/**
* parse
*/
const parse$d = (input, options = {}) => {
if (typeof input !== 'string') {
throw new TypeError('Expected a string');
}
let opts = options || {};
let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
if (input.length > max) {
throw new SyntaxError(`Input length (${input.length}) exceeds max characters (${max})`);
}
let ast = { type: 'root', input, nodes: [] };
let stack = [ast];
let block = ast;
let prev = ast;
let brackets = 0;
let length = input.length;
let index = 0;
let depth = 0;
let value;
/**
* Helpers
*/
const advance = () => input[index++];
const push = node => {
if (node.type === 'text' && prev.type === 'dot') {
prev.type = 'text';
}
if (prev && prev.type === 'text' && node.type === 'text') {
prev.value += node.value;
return;
}
block.nodes.push(node);
node.parent = block;
node.prev = prev;
prev = node;
return node;
};
push({ type: 'bos' });
while (index < length) {
block = stack[stack.length - 1];
value = advance();
/**
* Invalid chars
*/
if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
continue;
}
/**
* Escaped chars
*/
if (value === CHAR_BACKSLASH) {
push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
continue;
}
/**
* Right square bracket (literal): ']'
*/
if (value === CHAR_RIGHT_SQUARE_BRACKET) {
push({ type: 'text', value: '\\' + value });
continue;
}
/**
* Left square bracket: '['
*/
if (value === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
let next;
while (index < length && (next = advance())) {
value += next;
if (next === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
continue;
}
if (next === CHAR_BACKSLASH) {
value += advance();
continue;
}
if (next === CHAR_RIGHT_SQUARE_BRACKET) {
brackets--;
if (brackets === 0) {
break;
}
}
}
push({ type: 'text', value });
continue;
}
/**
* Parentheses
*/
if (value === CHAR_LEFT_PARENTHESES) {
block = push({ type: 'paren', nodes: [] });
stack.push(block);
push({ type: 'text', value });
continue;
}
if (value === CHAR_RIGHT_PARENTHESES) {
if (block.type !== 'paren') {
push({ type: 'text', value });
continue;
}
block = stack.pop();
push({ type: 'text', value });
block = stack[stack.length - 1];
continue;
}
/**
* Quotes: '|"|`
*/
if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
let open = value;
let next;
if (options.keepQuotes !== true) {
value = '';
}
while (index < length && (next = advance())) {
if (next === CHAR_BACKSLASH) {
value += next + advance();
continue;
}
if (next === open) {
if (options.keepQuotes === true) value += next;
break;
}
value += next;
}
push({ type: 'text', value });
continue;
}
/**
* Left curly brace: '{'
*/
if (value === CHAR_LEFT_CURLY_BRACE) {
depth++;
let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
let brace = {
type: 'brace',
open: true,
close: false,
dollar,
depth,
commas: 0,
ranges: 0,
nodes: []
};
block = push(brace);
stack.push(block);
push({ type: 'open', value });
continue;
}
/**
* Right curly brace: '}'
*/
if (value === CHAR_RIGHT_CURLY_BRACE) {
if (block.type !== 'brace') {
push({ type: 'text', value });
continue;
}
let type = 'close';
block = stack.pop();
block.close = true;
push({ type, value });
depth--;
block = stack[stack.length - 1];
continue;
}
/**
* Comma: ','
*/
if (value === CHAR_COMMA && depth > 0) {
if (block.ranges > 0) {
block.ranges = 0;
let open = block.nodes.shift();
block.nodes = [open, { type: 'text', value: stringify$4(block) }];
}
push({ type: 'comma', value });
block.commas++;
continue;
}
/**
* Dot: '.'
*/
if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
let siblings = block.nodes;
if (depth === 0 || siblings.length === 0) {
push({ type: 'text', value });
continue;
}
if (prev.type === 'dot') {
block.range = [];
prev.value += value;
prev.type = 'range';
if (block.nodes.length !== 3 && block.nodes.length !== 5) {
block.invalid = true;
block.ranges = 0;
prev.type = 'text';
continue;
}
block.ranges++;
block.args = [];
continue;
}
if (prev.type === 'range') {
siblings.pop();
let before = siblings[siblings.length - 1];
before.value += prev.value + value;
prev = before;
block.ranges--;
continue;
}
push({ type: 'dot', value });
continue;
}
/**
* Text
*/
push({ type: 'text', value });
}
// Mark imbalanced braces and brackets as invalid
do {
block = stack.pop();
if (block.type !== 'root') {
block.nodes.forEach(node => {
if (!node.nodes) {
if (node.type === 'open') node.isOpen = true;
if (node.type === 'close') node.isClose = true;
if (!node.nodes) node.type = 'text';
node.invalid = true;
}
});
// get the location of the block on parent.nodes (block's siblings)
let parent = stack[stack.length - 1];
let index = parent.nodes.indexOf(block);
// replace the (invalid) block with its nodes
parent.nodes.splice(index, 1, ...block.nodes);
}
} while (stack.length > 0);
push({ type: 'eos' });
return ast;
};
var parse_1$2 = parse$d;
const stringify$3 = stringify$7;
const compile = compile_1;
const expand$1 = expand_1$1;
const parse$c = parse_1$2;
/**
* Expand the given pattern or create a regex-compatible string.
*
* ```js
* const braces = require('braces');
* console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
* console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
* ```
* @param {String} `str`
* @param {Object} `options`
 * @return {Array}
* @api public
*/
const braces$2 = (input, options = {}) => {
let output = [];
if (Array.isArray(input)) {
for (let pattern of input) {
let result = braces$2.create(pattern, options);
if (Array.isArray(result)) {
output.push(...result);
} else {
output.push(result);
}
}
} else {
output = [].concat(braces$2.create(input, options));
}
if (options && options.expand === true && options.nodupes === true) {
output = [...new Set(output)];
}
return output;
};
/**
* Parse the given `str` with the given `options`.
*
* ```js
* // braces.parse(pattern, [, options]);
* const ast = braces.parse('a/{b,c}/d');
* console.log(ast);
* ```
* @param {String} pattern Brace pattern to parse
* @param {Object} options
* @return {Object} Returns an AST
* @api public
*/
braces$2.parse = (input, options = {}) => parse$c(input, options);
/**
* Creates a braces string from an AST, or an AST node.
*
* ```js
* const braces = require('braces');
* let ast = braces.parse('foo/{a,b}/bar');
* console.log(stringify(ast.nodes[2])); //=> '{a,b}'
* ```
* @param {String} `input` Brace pattern or AST.
* @param {Object} `options`
 * @return {String} Returns a brace pattern string.
* @api public
*/
braces$2.stringify = (input, options = {}) => {
if (typeof input === 'string') {
return stringify$3(braces$2.parse(input, options), options);
}
return stringify$3(input, options);
};
/**
* Compiles a brace pattern into a regex-compatible, optimized string.
* This method is called by the main [braces](#braces) function by default.
*
* ```js
* const braces = require('braces');
* console.log(braces.compile('a/{b,c}/d'));
* //=> ['a/(b|c)/d']
* ```
* @param {String} `input` Brace pattern or AST.
* @param {Object} `options`
 * @return {String} Returns a regex-compatible, optimized string.
* @api public
*/
braces$2.compile = (input, options = {}) => {
if (typeof input === 'string') {
input = braces$2.parse(input, options);
}
return compile(input, options);
};
/**
* Expands a brace pattern into an array. This method is called by the
* main [braces](#braces) function when `options.expand` is true. Before
* using this method it's recommended that you read the [performance notes](#performance))
* and advantages of using [.compile](#compile) instead.
*
* ```js
* const braces = require('braces');
* console.log(braces.expand('a/{b,c}/d'));
* //=> ['a/b/d', 'a/c/d'];
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces$2.expand = (input, options = {}) => {
if (typeof input === 'string') {
input = braces$2.parse(input, options);
}
let result = expand$1(input, options);
// filter out empty strings if specified
if (options.noempty === true) {
result = result.filter(Boolean);
}
// filter out duplicates if specified
if (options.nodupes === true) {
result = [...new Set(result)];
}
return result;
};
/**
* Processes a brace pattern and returns either an expanded array
 * (if `options.expand` is true) or a highly optimized regex-compatible string.
* This method is called by the main [braces](#braces) function.
*
* ```js
* const braces = require('braces');
* console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
* //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces$2.create = (input, options = {}) => {
if (input === '' || input.length < 3) {
return [input];
}
return options.expand !== true
? braces$2.compile(input, options)
: braces$2.expand(input, options);
};
/**
* Expose "braces"
*/
var braces_1 = braces$2;
const util = require$$0$6;
const braces$1 = braces_1;
const picomatch$2 = picomatch$3;
const utils$b = utils$k;
const isEmptyString = val => val === '' || val === './';
/**
* Returns an array of strings that match one or more glob patterns.
*
* ```js
* const mm = require('micromatch');
* // mm(list, patterns[, options]);
*
* console.log(mm(['a.js', 'a.txt'], ['*.js']));
* //=> [ 'a.js' ]
* ```
* @param {String|Array<string>} `list` List of strings to match.
* @param {String|Array<string>} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options)
* @return {Array} Returns an array of matches
* @summary false
* @api public
*/
const micromatch$1 = (list, patterns, options) => {
patterns = [].concat(patterns);
list = [].concat(list);
let omit = new Set();
let keep = new Set();
let items = new Set();
let negatives = 0;
let onResult = state => {
items.add(state.output);
if (options && options.onResult) {
options.onResult(state);
}
};
for (let i = 0; i < patterns.length; i++) {
let isMatch = picomatch$2(String(patterns[i]), { ...options, onResult }, true);
let negated = isMatch.state.negated || isMatch.state.negatedExtglob;
if (negated) negatives++;
for (let item of list) {
let matched = isMatch(item, true);
let match = negated ? !matched.isMatch : matched.isMatch;
if (!match) continue;
if (negated) {
omit.add(matched.output);
} else {
omit.delete(matched.output);
keep.add(matched.output);
}
}
}
let result = negatives === patterns.length ? [...items] : [...keep];
let matches = result.filter(item => !omit.has(item));
if (options && matches.length === 0) {
if (options.failglob === true) {
throw new Error(`No matches found for "${patterns.join(', ')}"`);
}
if (options.nonull === true || options.nullglob === true) {
return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns;
}
}
return matches;
};
/**
* Backwards compatibility
*/
micromatch$1.match = micromatch$1;
/**
* Returns a matcher function from the given glob `pattern` and `options`.
* The returned function takes a string to match as its only argument and returns
* true if the string is a match.
*
* ```js
* const mm = require('micromatch');
* // mm.matcher(pattern[, options]);
*
* const isMatch = mm.matcher('*.!(*a)');
* console.log(isMatch('a.a')); //=> false
* console.log(isMatch('a.b')); //=> true
* ```
* @param {String} `pattern` Glob pattern
* @param {Object} `options`
* @return {Function} Returns a matcher function.
* @api public
*/
micromatch$1.matcher = (pattern, options) => picomatch$2(pattern, options);
/**
* Returns true if **any** of the given glob `patterns` match the specified `string`.
*
* ```js
* const mm = require('micromatch');
* // mm.isMatch(string, patterns[, options]);
*
* console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true
* console.log(mm.isMatch('a.a', 'b.*')); //=> false
* ```
* @param {String} `str` The string to test.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `[options]` See available [options](#options).
* @return {Boolean} Returns true if any patterns match `str`
* @api public
*/
micromatch$1.isMatch = (str, patterns, options) => picomatch$2(patterns, options)(str);
/**
* Backwards compatibility
*/
micromatch$1.any = micromatch$1.isMatch;
/**
* Returns a list of strings that _**do not match any**_ of the given `patterns`.
*
* ```js
* const mm = require('micromatch');
* // mm.not(list, patterns[, options]);
*
* console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));
* //=> ['b.b', 'c.c']
* ```
* @param {Array} `list` Array of strings to match.
* @param {String|Array} `patterns` One or more glob pattern to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Array} Returns an array of strings that **do not match** the given patterns.
* @api public
*/
micromatch$1.not = (list, patterns, options = {}) => {
patterns = [].concat(patterns).map(String);
let result = new Set();
let items = [];
let onResult = state => {
if (options.onResult) options.onResult(state);
items.push(state.output);
};
let matches = new Set(micromatch$1(list, patterns, { ...options, onResult }));
for (let item of items) {
if (!matches.has(item)) {
result.add(item);
}
}
return [...result];
};
/**
* Returns true if the given `string` contains the given pattern. Similar
* to [.isMatch](#isMatch) but the pattern can match any part of the string.
*
* ```js
* var mm = require('micromatch');
* // mm.contains(string, pattern[, options]);
*
* console.log(mm.contains('aa/bb/cc', '*b'));
* //=> true
* console.log(mm.contains('aa/bb/cc', '*d'));
* //=> false
* ```
* @param {String} `str` The string to match.
* @param {String|Array} `patterns` Glob pattern to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Boolean} Returns true if any of the patterns matches any part of `str`.
* @api public
*/
micromatch$1.contains = (str, pattern, options) => {
if (typeof str !== 'string') {
throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
}
if (Array.isArray(pattern)) {
return pattern.some(p => micromatch$1.contains(str, p, options));
}
if (typeof pattern === 'string') {
if (isEmptyString(str) || isEmptyString(pattern)) {
return false;
}
if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {
return true;
}
}
return micromatch$1.isMatch(str, pattern, { ...options, contains: true });
};
/**
* Filter the keys of the given object with the given `glob` pattern
* and `options`. Does not attempt to match nested keys. If you need this feature,
* use [glob-object][] instead.
*
* ```js
* const mm = require('micromatch');
* // mm.matchKeys(object, patterns[, options]);
*
* const obj = { aa: 'a', ab: 'b', ac: 'c' };
* console.log(mm.matchKeys(obj, '*b'));
* //=> { ab: 'b' }
* ```
* @param {Object} `object` The object with keys to filter.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Object} Returns an object with only keys that match the given patterns.
* @api public
*/
micromatch$1.matchKeys = (obj, patterns, options) => {
if (!utils$b.isObject(obj)) {
throw new TypeError('Expected the first argument to be an object');
}
let keys = micromatch$1(Object.keys(obj), patterns, options);
let res = {};
for (let key of keys) res[key] = obj[key];
return res;
};
/**
* Returns true if some of the strings in the given `list` match any of the given glob `patterns`.
*
* ```js
* const mm = require('micromatch');
* // mm.some(list, patterns[, options]);
*
* console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
* // true
* console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));
* // false
* ```
* @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
 * @return {Boolean} Returns true if any of the `patterns` match any of the strings in `list`
* @api public
*/
micromatch$1.some = (list, patterns, options) => {
let items = [].concat(list);
for (let pattern of [].concat(patterns)) {
let isMatch = picomatch$2(String(pattern), options);
if (items.some(item => isMatch(item))) {
return true;
}
}
return false;
};
/**
* Returns true if every string in the given `list` matches
* any of the given glob `patterns`.
*
* ```js
* const mm = require('micromatch');
* // mm.every(list, patterns[, options]);
*
* console.log(mm.every('foo.js', ['foo.js']));
* // true
* console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));
* // true
* console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
* // false
* console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));
* // false
* ```
* @param {String|Array} `list` The string or array of strings to test.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
 * @return {Boolean} Returns true if all `patterns` match all of the strings in `list`
* @api public
*/
micromatch$1.every = (list, patterns, options) => {
let items = [].concat(list);
for (let pattern of [].concat(patterns)) {
let isMatch = picomatch$2(String(pattern), options);
if (!items.every(item => isMatch(item))) {
return false;
}
}
return true;
};
/**
* Returns true if **all** of the given `patterns` match
* the specified string.
*
* ```js
* const mm = require('micromatch');
* // mm.all(string, patterns[, options]);
*
* console.log(mm.all('foo.js', ['foo.js']));
* // true
*
* console.log(mm.all('foo.js', ['*.js', '!foo.js']));
* // false
*
* console.log(mm.all('foo.js', ['*.js', 'foo.js']));
* // true
*
* console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));
* // true
* ```
* @param {String|Array} `str` The string to test.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
 * @return {Boolean} Returns true if all `patterns` match `str`
* @api public
*/
micromatch$1.all = (str, patterns, options) => {
if (typeof str !== 'string') {
throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
}
return [].concat(patterns).every(p => picomatch$2(p, options)(str));
};
/**
 * Returns an array of matches captured by `pattern` in `string`, or `null` if the pattern did not match.
*
* ```js
* const mm = require('micromatch');
* // mm.capture(pattern, string[, options]);
*
* console.log(mm.capture('test/*.js', 'test/foo.js'));
* //=> ['foo']
* console.log(mm.capture('test/*.js', 'foo/bar.css'));
* //=> null
* ```
* @param {String} `glob` Glob pattern to use for matching.
* @param {String} `input` String to match
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`.
* @api public
*/
micromatch$1.capture = (glob, input, options) => {
let posix = utils$b.isWindows(options);
let regex = picomatch$2.makeRe(String(glob), { ...options, capture: true });
let match = regex.exec(posix ? utils$b.toPosixSlashes(input) : input);
if (match) {
return match.slice(1).map(v => v === void 0 ? '' : v);
}
};
/**
* Create a regular expression from the given glob `pattern`.
*
* ```js
* const mm = require('micromatch');
* // mm.makeRe(pattern[, options]);
*
* console.log(mm.makeRe('*.js'));
* //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/
* ```
* @param {String} `pattern` A glob pattern to convert to regex.
* @param {Object} `options`
* @return {RegExp} Returns a regex created from the given pattern.
* @api public
*/
micromatch$1.makeRe = (...args) => picomatch$2.makeRe(...args);
/**
* Scan a glob pattern to separate the pattern into segments. Used
* by the [split](#split) method.
*
* ```js
* const mm = require('micromatch');
* const state = mm.scan(pattern[, options]);
* ```
* @param {String} `pattern`
* @param {Object} `options`
 * @return {Object} Returns an object with the scanned pattern segments and related state.
* @api public
*/
micromatch$1.scan = (...args) => picomatch$2.scan(...args);
/**
* Parse a glob pattern to create the source string for a regular
* expression.
*
* ```js
* const mm = require('micromatch');
* const state = mm.parse(pattern[, options]);
* ```
* @param {String} `glob`
* @param {Object} `options`
* @return {Object} Returns an object with useful properties and output to be used as regex source string.
* @api public
*/
micromatch$1.parse = (patterns, options) => {
let res = [];
for (let pattern of [].concat(patterns || [])) {
for (let str of braces$1(String(pattern), options)) {
res.push(picomatch$2.parse(str, options));
}
}
return res;
};
/**
* Process the given brace `pattern`.
*
* ```js
* const { braces } = require('micromatch');
* console.log(braces('foo/{a,b,c}/bar'));
* //=> [ 'foo/(a|b|c)/bar' ]
*
* console.log(braces('foo/{a,b,c}/bar', { expand: true }));
* //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]
* ```
* @param {String} `pattern` String with brace pattern to process.
* @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options.
* @return {Array}
* @api public
*/
micromatch$1.braces = (pattern, options) => {
if (typeof pattern !== 'string') throw new TypeError('Expected a string');
if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) {
return [pattern];
}
return braces$1(pattern, options);
};
/**
* Expand braces
*/
micromatch$1.braceExpand = (pattern, options) => {
if (typeof pattern !== 'string') throw new TypeError('Expected a string');
return micromatch$1.braces(pattern, { ...options, expand: true });
};
/**
* Expose micromatch
*/
var micromatch_1 = micromatch$1;
var micromatch$2 = /*@__PURE__*/getDefaultExportFromCjs(micromatch_1);
Object.defineProperty(pattern$1, "__esModule", { value: true });
pattern$1.removeDuplicateSlashes = pattern$1.matchAny = pattern$1.convertPatternsToRe = pattern$1.makeRe = pattern$1.getPatternParts = pattern$1.expandBraceExpansion = pattern$1.expandPatternsWithBraceExpansion = pattern$1.isAffectDepthOfReadingPattern = pattern$1.endsWithSlashGlobStar = pattern$1.hasGlobStar = pattern$1.getBaseDirectory = pattern$1.isPatternRelatedToParentDirectory = pattern$1.getPatternsOutsideCurrentDirectory = pattern$1.getPatternsInsideCurrentDirectory = pattern$1.getPositivePatterns = pattern$1.getNegativePatterns = pattern$1.isPositivePattern = pattern$1.isNegativePattern = pattern$1.convertToNegativePattern = pattern$1.convertToPositivePattern = pattern$1.isDynamicPattern = pattern$1.isStaticPattern = void 0;
const path$f = require$$0$4;
const globParent$1 = globParent$2;
const micromatch = micromatch_1;
const GLOBSTAR$1 = '**';
const ESCAPE_SYMBOL = '\\';
const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;
const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/;
const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/;
const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/;
const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./;
/**
* Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string.
* The latter is due to the presence of the device path at the beginning of the UNC path.
*/
const DOUBLE_SLASH_RE$1 = /(?!^)\/{2,}/g;
function isStaticPattern(pattern, options = {}) {
return !isDynamicPattern(pattern, options);
}
pattern$1.isStaticPattern = isStaticPattern;
function isDynamicPattern(pattern, options = {}) {
/**
* A special case with an empty string is necessary for matching patterns that start with a forward slash.
* An empty string cannot be a dynamic pattern.
* For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.
*/
if (pattern === '') {
return false;
}
/**
 * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check
 * the filepath directly (without reading the directory).
*/
if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {
return true;
}
if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {
return true;
}
if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {
return true;
}
if (options.braceExpansion !== false && hasBraceExpansion(pattern)) {
return true;
}
return false;
}
pattern$1.isDynamicPattern = isDynamicPattern;
function hasBraceExpansion(pattern) {
const openingBraceIndex = pattern.indexOf('{');
if (openingBraceIndex === -1) {
return false;
}
const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1);
if (closingBraceIndex === -1) {
return false;
}
const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex);
return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent);
}
function convertToPositivePattern(pattern) {
return isNegativePattern(pattern) ? pattern.slice(1) : pattern;
}
pattern$1.convertToPositivePattern = convertToPositivePattern;
function convertToNegativePattern(pattern) {
return '!' + pattern;
}
pattern$1.convertToNegativePattern = convertToNegativePattern;
function isNegativePattern(pattern) {
return pattern.startsWith('!') && pattern[1] !== '(';
}
pattern$1.isNegativePattern = isNegativePattern;
function isPositivePattern(pattern) {
return !isNegativePattern(pattern);
}
pattern$1.isPositivePattern = isPositivePattern;
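/**
 * Illustrative behavior of the sign helpers above (derived directly from
 * `isNegativePattern`): a leading `!` marks a pattern as negative unless it
 * opens an extglob group.
 *
 * ```js
 * isNegativePattern('!src/**');         //=> true
 * isNegativePattern('!(a|b)');          //=> false — extglob, not a negation
 * convertToPositivePattern('!src/**');  //=> 'src/**'
 * convertToNegativePattern('src/**');   //=> '!src/**'
 * ```
 */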
function getNegativePatterns(patterns) {
return patterns.filter(isNegativePattern);
}
pattern$1.getNegativePatterns = getNegativePatterns;
function getPositivePatterns$1(patterns) {
return patterns.filter(isPositivePattern);
}
pattern$1.getPositivePatterns = getPositivePatterns$1;
/**
* Returns patterns that can be applied inside the current directory.
*
* @example
* // ['./*', '*', 'a/*']
* getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*'])
*/
function getPatternsInsideCurrentDirectory(patterns) {
return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern));
}
pattern$1.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory;
/**
* Returns patterns to be expanded relative to (outside) the current directory.
*
* @example
* // ['../*', './../*']
 * getPatternsOutsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*'])
*/
function getPatternsOutsideCurrentDirectory(patterns) {
return patterns.filter(isPatternRelatedToParentDirectory);
}
pattern$1.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory;
function isPatternRelatedToParentDirectory(pattern) {
return pattern.startsWith('..') || pattern.startsWith('./..');
}
pattern$1.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory;
function getBaseDirectory(pattern) {
return globParent$1(pattern, { flipBackslashes: false });
}
pattern$1.getBaseDirectory = getBaseDirectory;
function hasGlobStar(pattern) {
return pattern.includes(GLOBSTAR$1);
}
pattern$1.hasGlobStar = hasGlobStar;
function endsWithSlashGlobStar(pattern) {
return pattern.endsWith('/' + GLOBSTAR$1);
}
pattern$1.endsWithSlashGlobStar = endsWithSlashGlobStar;
function isAffectDepthOfReadingPattern(pattern) {
const basename = path$f.basename(pattern);
return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);
}
pattern$1.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;
function expandPatternsWithBraceExpansion(patterns) {
return patterns.reduce((collection, pattern) => {
return collection.concat(expandBraceExpansion(pattern));
}, []);
}
pattern$1.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;
function expandBraceExpansion(pattern) {
const patterns = micromatch.braces(pattern, { expand: true, nodupes: true });
/**
* Sort the patterns by length so that the same depth patterns are processed side by side.
 * `a/{b,}/{c,}/*` expands to `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']`
*/
patterns.sort((a, b) => a.length - b.length);
/**
* Micromatch can return an empty string in the case of patterns like `{a,}`.
*/
return patterns.filter((pattern) => pattern !== '');
}
pattern$1.expandBraceExpansion = expandBraceExpansion;
function getPatternParts(pattern, options) {
let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));
/**
* The scan method returns an empty array in some cases.
* See micromatch/picomatch#58 for more details.
*/
if (parts.length === 0) {
parts = [pattern];
}
/**
* The scan method does not return an empty part for the pattern with a forward slash.
* This is another part of micromatch/picomatch#58.
*/
if (parts[0].startsWith('/')) {
parts[0] = parts[0].slice(1);
parts.unshift('');
}
return parts;
}
pattern$1.getPatternParts = getPatternParts;
function makeRe(pattern, options) {
return micromatch.makeRe(pattern, options);
}
pattern$1.makeRe = makeRe;
function convertPatternsToRe(patterns, options) {
return patterns.map((pattern) => makeRe(pattern, options));
}
pattern$1.convertPatternsToRe = convertPatternsToRe;
function matchAny(entry, patternsRe) {
return patternsRe.some((patternRe) => patternRe.test(entry));
}
pattern$1.matchAny = matchAny;
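/**
 * Illustrative usage of `convertPatternsToRe` and `matchAny` (a sketch; the
 * exact regular expressions come from micromatch and may differ in form):
 *
 * ```js
 * const regexes = convertPatternsToRe(['*.js', 'src/**'], {});
 * matchAny('index.js', regexes);   //=> true
 * matchAny('readme.md', regexes);  //=> false
 * ```
 */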
/**
* This package only works with forward slashes as a path separator.
 * Because of this, we cannot use the standard `path.normalize` method, because on the Windows platform it would use backslashes.
*/
function removeDuplicateSlashes(pattern) {
return pattern.replace(DOUBLE_SLASH_RE$1, '/');
}
pattern$1.removeDuplicateSlashes = removeDuplicateSlashes;
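/**
 * Illustrative behavior of `removeDuplicateSlashes` (derived from
 * `DOUBLE_SLASH_RE$1`, which never matches at the start of the string, so a
 * UNC-style prefix keeps its two leading slashes):
 *
 * ```js
 * removeDuplicateSlashes('a//b///c');          //=> 'a/b/c'
 * removeDuplicateSlashes('//server/share//x'); //=> '//server/share/x'
 * ```
 */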
var stream$4 = {};
/*
* merge2
* https://github.com/teambition/merge2
*
* Copyright (c) 2014-2020 Teambition
* Licensed under the MIT license.
*/
const Stream = require$$0$7;
const PassThrough = Stream.PassThrough;
const slice = Array.prototype.slice;
var merge2_1 = merge2$1;
function merge2$1 () {
const streamsQueue = [];
const args = slice.call(arguments);
let merging = false;
let options = args[args.length - 1];
if (options && !Array.isArray(options) && options.pipe == null) {
args.pop();
} else {
options = {};
}
const doEnd = options.end !== false;
const doPipeError = options.pipeError === true;
if (options.objectMode == null) {
options.objectMode = true;
}
if (options.highWaterMark == null) {
options.highWaterMark = 64 * 1024;
}
const mergedStream = PassThrough(options);
function addStream () {
for (let i = 0, len = arguments.length; i < len; i++) {
streamsQueue.push(pauseStreams(arguments[i], options));
}
mergeStream();
return this
}
function mergeStream () {
if (merging) {
return
}
merging = true;
let streams = streamsQueue.shift();
if (!streams) {
process.nextTick(endStream);
return
}
if (!Array.isArray(streams)) {
streams = [streams];
}
let pipesCount = streams.length + 1;
function next () {
if (--pipesCount > 0) {
return
}
merging = false;
mergeStream();
}
function pipe (stream) {
function onend () {
stream.removeListener('merge2UnpipeEnd', onend);
stream.removeListener('end', onend);
if (doPipeError) {
stream.removeListener('error', onerror);
}
next();
}
function onerror (err) {
mergedStream.emit('error', err);
}
// skip ended stream
if (stream._readableState.endEmitted) {
return next()
}
stream.on('merge2UnpipeEnd', onend);
stream.on('end', onend);
if (doPipeError) {
stream.on('error', onerror);
}
stream.pipe(mergedStream, { end: false });
// compatible for old stream
stream.resume();
}
for (let i = 0; i < streams.length; i++) {
pipe(streams[i]);
}
next();
}
function endStream () {
merging = false;
// emit 'queueDrain' when all streams have been merged.
mergedStream.emit('queueDrain');
if (doEnd) {
mergedStream.end();
}
}
mergedStream.setMaxListeners(0);
mergedStream.add = addStream;
mergedStream.on('unpipe', function (stream) {
stream.emit('merge2UnpipeEnd');
});
if (args.length) {
addStream.apply(null, args);
}
return mergedStream
}
// check and pause streams for pipe.
function pauseStreams (streams, options) {
if (!Array.isArray(streams)) {
// Backwards-compat with old-style streams
if (!streams._readableState && streams.pipe) {
streams = streams.pipe(PassThrough(options));
}
if (!streams._readableState || !streams.pause || !streams.pipe) {
throw new Error('Only readable streams can be merged.')
}
streams.pause();
} else {
for (let i = 0, len = streams.length; i < len; i++) {
streams[i] = pauseStreams(streams[i], options);
}
}
return streams
}
Object.defineProperty(stream$4, "__esModule", { value: true });
stream$4.merge = void 0;
const merge2 = merge2_1;
function merge$1(streams) {
const mergedStream = merge2(streams);
streams.forEach((stream) => {
stream.once('error', (error) => mergedStream.emit('error', error));
});
mergedStream.once('close', () => propagateCloseEventToSources(streams));
mergedStream.once('end', () => propagateCloseEventToSources(streams));
return mergedStream;
}
stream$4.merge = merge$1;
function propagateCloseEventToSources(streams) {
streams.forEach((stream) => stream.emit('close'));
}
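/**
 * Illustrative usage of `merge$1` (a sketch; the stream names are
 * hypothetical): it merges several readable streams via merge2, re-emits any
 * source error on the merged stream, and propagates `close`/`end` back to the
 * sources.
 *
 * ```js
 * const merged = merge$1([streamForTaskA, streamForTaskB]);
 * merged.on('error', (error) => console.error(error));
 * merged.on('data', (entry) => console.log(entry));
 * ```
 */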
var string$2 = {};
Object.defineProperty(string$2, "__esModule", { value: true });
string$2.isEmpty = string$2.isString = void 0;
function isString(input) {
return typeof input === 'string';
}
string$2.isString = isString;
function isEmpty$1(input) {
return input === '';
}
string$2.isEmpty = isEmpty$1;
Object.defineProperty(utils$g, "__esModule", { value: true });
utils$g.string = utils$g.stream = utils$g.pattern = utils$g.path = utils$g.fs = utils$g.errno = utils$g.array = void 0;
const array = array$1;
utils$g.array = array;
const errno = errno$1;
utils$g.errno = errno;
const fs$g = fs$h;
utils$g.fs = fs$g;
const path$e = path$h;
utils$g.path = path$e;
const pattern = pattern$1;
utils$g.pattern = pattern;
const stream$3 = stream$4;
utils$g.stream = stream$3;
const string$1 = string$2;
utils$g.string = string$1;
Object.defineProperty(tasks, "__esModule", { value: true });
tasks.convertPatternGroupToTask = tasks.convertPatternGroupsToTasks = tasks.groupPatternsByBaseDirectory = tasks.getNegativePatternsAsPositive = tasks.getPositivePatterns = tasks.convertPatternsToTasks = tasks.generate = void 0;
const utils$a = utils$g;
function generate(input, settings) {
const patterns = processPatterns(input, settings);
const ignore = processPatterns(settings.ignore, settings);
const positivePatterns = getPositivePatterns(patterns);
const negativePatterns = getNegativePatternsAsPositive(patterns, ignore);
const staticPatterns = positivePatterns.filter((pattern) => utils$a.pattern.isStaticPattern(pattern, settings));
const dynamicPatterns = positivePatterns.filter((pattern) => utils$a.pattern.isDynamicPattern(pattern, settings));
const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);
const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);
return staticTasks.concat(dynamicTasks);
}
tasks.generate = generate;
function processPatterns(input, settings) {
let patterns = input;
/**
* The original pattern like `{,*,**,a/*}` can lead to problems checking the depth when matching entry
* and some problems with the micromatch package (see fast-glob issues: #365, #394).
*
* To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown
* in matching in the case of a large set of patterns after expansion.
*/
if (settings.braceExpansion) {
patterns = utils$a.pattern.expandPatternsWithBraceExpansion(patterns);
}
/**
* If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used
* at any nesting level.
*
* We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change
* the pattern in the filter before creating a regular expression. There is no need to change the patterns
* in the application. Only on the input.
*/
if (settings.baseNameMatch) {
patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`);
}
/**
* This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion.
*/
return patterns.map((pattern) => utils$a.pattern.removeDuplicateSlashes(pattern));
}
/**
* Returns tasks grouped by basic pattern directories.
*
* Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately.
* This is necessary because directory traversal starts at the base directory and goes deeper.
*/
function convertPatternsToTasks(positive, negative, dynamic) {
const tasks = [];
const patternsOutsideCurrentDirectory = utils$a.pattern.getPatternsOutsideCurrentDirectory(positive);
const patternsInsideCurrentDirectory = utils$a.pattern.getPatternsInsideCurrentDirectory(positive);
const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory);
const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory);
tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic));
/*
* For the sake of reducing future accesses to the file system, we merge all tasks within the current directory
* into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest.
*/
if ('.' in insideCurrentDirectoryGroup) {
tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic));
}
else {
tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic));
}
return tasks;
}
tasks.convertPatternsToTasks = convertPatternsToTasks;
function getPositivePatterns(patterns) {
return utils$a.pattern.getPositivePatterns(patterns);
}
tasks.getPositivePatterns = getPositivePatterns;
function getNegativePatternsAsPositive(patterns, ignore) {
const negative = utils$a.pattern.getNegativePatterns(patterns).concat(ignore);
const positive = negative.map(utils$a.pattern.convertToPositivePattern);
return positive;
}
tasks.getNegativePatternsAsPositive = getNegativePatternsAsPositive;
function groupPatternsByBaseDirectory(patterns) {
const group = {};
return patterns.reduce((collection, pattern) => {
const base = utils$a.pattern.getBaseDirectory(pattern);
if (base in collection) {
collection[base].push(pattern);
}
else {
collection[base] = [pattern];
}
return collection;
}, group);
}
tasks.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;
function convertPatternGroupsToTasks(positive, negative, dynamic) {
return Object.keys(positive).map((base) => {
return convertPatternGroupToTask(base, positive[base], negative, dynamic);
});
}
tasks.convertPatternGroupsToTasks = convertPatternGroupsToTasks;
function convertPatternGroupToTask(base, positive, negative, dynamic) {
return {
dynamic,
positive,
negative,
base,
patterns: [].concat(positive, negative.map(utils$a.pattern.convertToNegativePattern))
};
}
tasks.convertPatternGroupToTask = convertPatternGroupToTask;
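/**
 * Illustrative shape of a generated task (the values are hypothetical; the
 * fields mirror `convertPatternGroupToTask` above):
 *
 * ```js
 * // generate(['src/*.js', '!src/ignored.js'], settings) produces entries like:
 * // {
 * //   dynamic: true,
 * //   base: 'src',
 * //   positive: ['src/*.js'],
 * //   negative: ['src/ignored.js'],
 * //   patterns: ['src/*.js', '!src/ignored.js']
 * // }
 * ```
 */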
var async$7 = {};
var async$6 = {};
var out$3 = {};
var async$5 = {};
var async$4 = {};
var out$2 = {};
var async$3 = {};
var out$1 = {};
var async$2 = {};
Object.defineProperty(async$2, "__esModule", { value: true });
async$2.read = void 0;
function read$3(path, settings, callback) {
settings.fs.lstat(path, (lstatError, lstat) => {
if (lstatError !== null) {
callFailureCallback$2(callback, lstatError);
return;
}
if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
callSuccessCallback$2(callback, lstat);
return;
}
settings.fs.stat(path, (statError, stat) => {
if (statError !== null) {
if (settings.throwErrorOnBrokenSymbolicLink) {
callFailureCallback$2(callback, statError);
return;
}
callSuccessCallback$2(callback, lstat);
return;
}
if (settings.markSymbolicLink) {
stat.isSymbolicLink = () => true;
}
callSuccessCallback$2(callback, stat);
});
});
}
async$2.read = read$3;
function callFailureCallback$2(callback, error) {
callback(error);
}
function callSuccessCallback$2(callback, result) {
callback(null, result);
}
var sync$8 = {};
Object.defineProperty(sync$8, "__esModule", { value: true });
sync$8.read = void 0;
function read$2(path, settings) {
const lstat = settings.fs.lstatSync(path);
if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
return lstat;
}
try {
const stat = settings.fs.statSync(path);
if (settings.markSymbolicLink) {
stat.isSymbolicLink = () => true;
}
return stat;
}
catch (error) {
if (!settings.throwErrorOnBrokenSymbolicLink) {
return lstat;
}
throw error;
}
}
sync$8.read = read$2;
var settings$3 = {};
var fs$f = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = require$$0__default;
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;
} (fs$f));
Object.defineProperty(settings$3, "__esModule", { value: true });
const fs$e = fs$f;
let Settings$2 = class Settings {
constructor(_options = {}) {
this._options = _options;
this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);
this.fs = fs$e.createFileSystemAdapter(this._options.fs);
this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
};
settings$3.default = Settings$2;
Object.defineProperty(out$1, "__esModule", { value: true });
out$1.statSync = out$1.stat = out$1.Settings = void 0;
const async$1 = async$2;
const sync$7 = sync$8;
const settings_1$3 = settings$3;
out$1.Settings = settings_1$3.default;
function stat$4(path, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
async$1.read(path, getSettings$2(), optionsOrSettingsOrCallback);
return;
}
async$1.read(path, getSettings$2(optionsOrSettingsOrCallback), callback);
}
out$1.stat = stat$4;
function statSync(path, optionsOrSettings) {
const settings = getSettings$2(optionsOrSettings);
return sync$7.read(path, settings);
}
out$1.statSync = statSync;
function getSettings$2(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1$3.default) {
return settingsOrOptions;
}
return new settings_1$3.default(settingsOrOptions);
}
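/**
 * Illustrative usage of the `stat`/`statSync` wrappers above (a sketch; the
 * path is hypothetical): both accept either an options object or a prepared
 * `Settings` instance.
 *
 * ```js
 * out$1.stat('some/path', { followSymbolicLink: false }, (error, stats) => {
 *   if (error !== null) return;
 *   console.log(stats.isDirectory());
 * });
 * const stats = out$1.statSync('some/path');
 * ```
 */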
/*! queue-microtask. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
let promise;
var queueMicrotask_1 = typeof queueMicrotask === 'function'
? queueMicrotask.bind(typeof window !== 'undefined' ? window : commonjsGlobal)
// reuse resolved promise, and allocate it lazily
: cb => (promise || (promise = Promise.resolve()))
.then(cb)
.catch(err => setTimeout(() => { throw err }, 0));
/*! run-parallel. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
var runParallel_1 = runParallel;
const queueMicrotask$1 = queueMicrotask_1;
function runParallel (tasks, cb) {
let results, pending, keys;
let isSync = true;
if (Array.isArray(tasks)) {
results = [];
pending = tasks.length;
} else {
keys = Object.keys(tasks);
results = {};
pending = keys.length;
}
function done (err) {
function end () {
if (cb) cb(err, results);
cb = null;
}
if (isSync) queueMicrotask$1(end);
else end();
}
function each (i, err, result) {
results[i] = result;
if (--pending === 0 || err) {
done(err);
}
}
if (!pending) {
// empty
done(null);
} else if (keys) {
// object
keys.forEach(function (key) {
tasks[key](function (err, result) { each(key, err, result); });
});
} else {
// array
tasks.forEach(function (task, i) {
task(function (err, result) { each(i, err, result); });
});
}
isSync = false;
}
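// Illustrative sketch (not part of the upstream run-parallel code): tasks are node-style
// functions that receive a callback, and the final callback gets the collected results in
// the same shape (array in, array out). exampleRunParallelUsage is a hypothetical helper.
function exampleRunParallelUsage(done) {
    runParallel([
        (cb) => cb(null, 'first'),
        (cb) => cb(null, 'second')
    ], (err, results) => {
        // results === ['first', 'second'] when neither task passed an error
        done(err, results);
    });
}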
var constants$2 = {};
Object.defineProperty(constants$2, "__esModule", { value: true });
constants$2.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;
const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');
if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) {
throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`);
}
const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);
const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);
const SUPPORTED_MAJOR_VERSION = 10;
const SUPPORTED_MINOR_VERSION = 10;
const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;
const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;
/**
 * Is `true` for Node.js 10.10 and greater.
*/
constants$2.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;
var utils$9 = {};
var fs$d = {};
Object.defineProperty(fs$d, "__esModule", { value: true });
fs$d.createDirentFromStats = void 0;
class DirentFromStats {
constructor(name, stats) {
this.name = name;
this.isBlockDevice = stats.isBlockDevice.bind(stats);
this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
this.isDirectory = stats.isDirectory.bind(stats);
this.isFIFO = stats.isFIFO.bind(stats);
this.isFile = stats.isFile.bind(stats);
this.isSocket = stats.isSocket.bind(stats);
this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
}
}
function createDirentFromStats(name, stats) {
return new DirentFromStats(name, stats);
}
fs$d.createDirentFromStats = createDirentFromStats;
Object.defineProperty(utils$9, "__esModule", { value: true });
utils$9.fs = void 0;
const fs$c = fs$d;
utils$9.fs = fs$c;
var common$a = {};
Object.defineProperty(common$a, "__esModule", { value: true });
common$a.joinPathSegments = void 0;
function joinPathSegments$1(a, b, separator) {
/**
* Correctly handles the case when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`)
* that already ends with the separator, so the separator is not duplicated.
*/
if (a.endsWith(separator)) {
return a + b;
}
return a + separator + b;
}
common$a.joinPathSegments = joinPathSegments$1;
Object.defineProperty(async$3, "__esModule", { value: true });
async$3.readdir = async$3.readdirWithFileTypes = async$3.read = void 0;
const fsStat$5 = out$1;
const rpl = runParallel_1;
const constants_1$1 = constants$2;
const utils$8 = utils$9;
const common$9 = common$a;
function read$1(directory, settings, callback) {
if (!settings.stats && constants_1$1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
readdirWithFileTypes$1(directory, settings, callback);
return;
}
readdir$3(directory, settings, callback);
}
async$3.read = read$1;
function readdirWithFileTypes$1(directory, settings, callback) {
settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {
if (readdirError !== null) {
callFailureCallback$1(callback, readdirError);
return;
}
const entries = dirents.map((dirent) => ({
dirent,
name: dirent.name,
path: common$9.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
}));
if (!settings.followSymbolicLinks) {
callSuccessCallback$1(callback, entries);
return;
}
const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));
rpl(tasks, (rplError, rplEntries) => {
if (rplError !== null) {
callFailureCallback$1(callback, rplError);
return;
}
callSuccessCallback$1(callback, rplEntries);
});
});
}
async$3.readdirWithFileTypes = readdirWithFileTypes$1;
function makeRplTaskEntry(entry, settings) {
return (done) => {
if (!entry.dirent.isSymbolicLink()) {
done(null, entry);
return;
}
settings.fs.stat(entry.path, (statError, stats) => {
if (statError !== null) {
if (settings.throwErrorOnBrokenSymbolicLink) {
done(statError);
return;
}
done(null, entry);
return;
}
entry.dirent = utils$8.fs.createDirentFromStats(entry.name, stats);
done(null, entry);
});
};
}
function readdir$3(directory, settings, callback) {
settings.fs.readdir(directory, (readdirError, names) => {
if (readdirError !== null) {
callFailureCallback$1(callback, readdirError);
return;
}
const tasks = names.map((name) => {
const path = common$9.joinPathSegments(directory, name, settings.pathSegmentSeparator);
return (done) => {
fsStat$5.stat(path, settings.fsStatSettings, (error, stats) => {
if (error !== null) {
done(error);
return;
}
const entry = {
name,
path,
dirent: utils$8.fs.createDirentFromStats(name, stats)
};
if (settings.stats) {
entry.stats = stats;
}
done(null, entry);
});
};
});
rpl(tasks, (rplError, entries) => {
if (rplError !== null) {
callFailureCallback$1(callback, rplError);
return;
}
callSuccessCallback$1(callback, entries);
});
});
}
async$3.readdir = readdir$3;
function callFailureCallback$1(callback, error) {
callback(error);
}
function callSuccessCallback$1(callback, result) {
callback(null, result);
}
var sync$6 = {};
Object.defineProperty(sync$6, "__esModule", { value: true });
sync$6.readdir = sync$6.readdirWithFileTypes = sync$6.read = void 0;
const fsStat$4 = out$1;
const constants_1 = constants$2;
const utils$7 = utils$9;
const common$8 = common$a;
function read(directory, settings) {
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
return readdirWithFileTypes(directory, settings);
}
return readdir$2(directory, settings);
}
sync$6.read = read;
function readdirWithFileTypes(directory, settings) {
const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });
return dirents.map((dirent) => {
const entry = {
dirent,
name: dirent.name,
path: common$8.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
};
if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {
try {
const stats = settings.fs.statSync(entry.path);
entry.dirent = utils$7.fs.createDirentFromStats(entry.name, stats);
}
catch (error) {
if (settings.throwErrorOnBrokenSymbolicLink) {
throw error;
}
}
}
return entry;
});
}
sync$6.readdirWithFileTypes = readdirWithFileTypes;
function readdir$2(directory, settings) {
const names = settings.fs.readdirSync(directory);
return names.map((name) => {
const entryPath = common$8.joinPathSegments(directory, name, settings.pathSegmentSeparator);
const stats = fsStat$4.statSync(entryPath, settings.fsStatSettings);
const entry = {
name,
path: entryPath,
dirent: utils$7.fs.createDirentFromStats(name, stats)
};
if (settings.stats) {
entry.stats = stats;
}
return entry;
});
}
sync$6.readdir = readdir$2;
var settings$2 = {};
var fs$b = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = require$$0__default;
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync,
readdir: fs.readdir,
readdirSync: fs.readdirSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;
} (fs$b));
Object.defineProperty(settings$2, "__esModule", { value: true });
const path$d = require$$0$4;
const fsStat$3 = out$1;
const fs$a = fs$b;
let Settings$1 = class Settings {
constructor(_options = {}) {
this._options = _options;
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
this.fs = fs$a.createFileSystemAdapter(this._options.fs);
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path$d.sep);
this.stats = this._getValue(this._options.stats, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
this.fsStatSettings = new fsStat$3.Settings({
followSymbolicLink: this.followSymbolicLinks,
fs: this.fs,
throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink
});
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
};
settings$2.default = Settings$1;
Object.defineProperty(out$2, "__esModule", { value: true });
out$2.Settings = out$2.scandirSync = out$2.scandir = void 0;
const async = async$3;
const sync$5 = sync$6;
const settings_1$2 = settings$2;
out$2.Settings = settings_1$2.default;
function scandir(path, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
async.read(path, getSettings$1(), optionsOrSettingsOrCallback);
return;
}
async.read(path, getSettings$1(optionsOrSettingsOrCallback), callback);
}
out$2.scandir = scandir;
function scandirSync(path, optionsOrSettings) {
const settings = getSettings$1(optionsOrSettings);
return sync$5.read(path, settings);
}
out$2.scandirSync = scandirSync;
function getSettings$1(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1$2.default) {
return settingsOrOptions;
}
return new settings_1$2.default(settingsOrOptions);
}
var queue = {exports: {}};
function reusify$1 (Constructor) {
var head = new Constructor();
var tail = head;
function get () {
var current = head;
if (current.next) {
head = current.next;
} else {
head = new Constructor();
tail = head;
}
current.next = null;
return current
}
function release (obj) {
tail.next = obj;
tail = obj;
}
return {
get: get,
release: release
}
}
var reusify_1 = reusify$1;
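// Illustrative sketch (not part of the upstream reusify code): the constructor handed to
// reusify must produce objects that carry a `next` field; get() hands out a pooled instance
// (allocating only when the pool is empty) and release() returns it for later reuse.
// ExamplePooled and examplePoolRoundTrip are hypothetical names used only for this sketch.
function ExamplePooled() {
    this.payload = null;
    this.next = null;
}
function examplePoolRoundTrip() {
    const pool = reusify$1(ExamplePooled);
    const item = pool.get(); // freshly allocated on first use
    item.payload = 42;
    item.payload = null;     // reset per-use state before giving the object back
    pool.release(item);      // item re-enters the pool and is handed out by a later get()
    return pool.get();
}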
/* eslint-disable no-var */
var reusify = reusify_1;
function fastqueue (context, worker, concurrency) {
if (typeof context === 'function') {
concurrency = worker;
worker = context;
context = null;
}
if (concurrency < 1) {
throw new Error('fastqueue concurrency must be equal to or greater than 1')
}
var cache = reusify(Task);
var queueHead = null;
var queueTail = null;
var _running = 0;
var errorHandler = null;
var self = {
push: push,
drain: noop$4,
saturated: noop$4,
pause: pause,
paused: false,
concurrency: concurrency,
running: running,
resume: resume,
idle: idle,
length: length,
getQueue: getQueue,
unshift: unshift,
empty: noop$4,
kill: kill,
killAndDrain: killAndDrain,
error: error
};
return self
function running () {
return _running
}
function pause () {
self.paused = true;
}
function length () {
var current = queueHead;
var counter = 0;
while (current) {
current = current.next;
counter++;
}
return counter
}
function getQueue () {
var current = queueHead;
var tasks = [];
while (current) {
tasks.push(current.value);
current = current.next;
}
return tasks
}
function resume () {
if (!self.paused) return
self.paused = false;
for (var i = 0; i < self.concurrency; i++) {
_running++;
release();
}
}
function idle () {
return _running === 0 && self.length() === 0
}
function push (value, done) {
var current = cache.get();
current.context = context;
current.release = release;
current.value = value;
current.callback = done || noop$4;
current.errorHandler = errorHandler;
if (_running === self.concurrency || self.paused) {
if (queueTail) {
queueTail.next = current;
queueTail = current;
} else {
queueHead = current;
queueTail = current;
self.saturated();
}
} else {
_running++;
worker.call(context, current.value, current.worked);
}
}
function unshift (value, done) {
var current = cache.get();
current.context = context;
current.release = release;
current.value = value;
current.callback = done || noop$4;
if (_running === self.concurrency || self.paused) {
if (queueHead) {
current.next = queueHead;
queueHead = current;
} else {
queueHead = current;
queueTail = current;
self.saturated();
}
} else {
_running++;
worker.call(context, current.value, current.worked);
}
}
function release (holder) {
if (holder) {
cache.release(holder);
}
var next = queueHead;
if (next) {
if (!self.paused) {
if (queueTail === queueHead) {
queueTail = null;
}
queueHead = next.next;
next.next = null;
worker.call(context, next.value, next.worked);
if (queueTail === null) {
self.empty();
}
} else {
_running--;
}
} else if (--_running === 0) {
self.drain();
}
}
function kill () {
queueHead = null;
queueTail = null;
self.drain = noop$4;
}
function killAndDrain () {
queueHead = null;
queueTail = null;
self.drain();
self.drain = noop$4;
}
function error (handler) {
errorHandler = handler;
}
}
function noop$4 () {}
function Task () {
this.value = null;
this.callback = noop$4;
this.next = null;
this.release = noop$4;
this.context = null;
this.errorHandler = null;
var self = this;
this.worked = function worked (err, result) {
var callback = self.callback;
var errorHandler = self.errorHandler;
var val = self.value;
self.value = null;
self.callback = noop$4;
if (self.errorHandler) {
errorHandler(err, val);
}
callback.call(self.context, err, result);
self.release(self);
};
}
function queueAsPromised (context, worker, concurrency) {
if (typeof context === 'function') {
concurrency = worker;
worker = context;
context = null;
}
function asyncWrapper (arg, cb) {
worker.call(this, arg)
.then(function (res) {
cb(null, res);
}, cb);
}
var queue = fastqueue(context, asyncWrapper, concurrency);
var pushCb = queue.push;
var unshiftCb = queue.unshift;
queue.push = push;
queue.unshift = unshift;
queue.drained = drained;
return queue
function push (value) {
var p = new Promise(function (resolve, reject) {
pushCb(value, function (err, result) {
if (err) {
reject(err);
return
}
resolve(result);
});
});
// Let's fork the promise chain to
// make the error bubble up to the user but
// not lead to an unhandledRejection
p.catch(noop$4);
return p
}
function unshift (value) {
var p = new Promise(function (resolve, reject) {
unshiftCb(value, function (err, result) {
if (err) {
reject(err);
return
}
resolve(result);
});
});
// Let's fork the promise chain to
// make the error bubble up to the user but
// not lead to an unhandledRejection
p.catch(noop$4);
return p
}
function drained () {
var previousDrain = queue.drain;
var p = new Promise(function (resolve) {
queue.drain = function () {
previousDrain();
resolve();
};
});
return p
}
}
queue.exports = fastqueue;
queue.exports.promise = queueAsPromised;
var queueExports = queue.exports;
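// Illustrative sketch (not part of the upstream fastq code): queueExports builds a
// callback-based queue, while queueExports.promise wraps the same machinery behind promises.
// exampleWorker and the concurrency of 2 are hypothetical values chosen for this sketch.
async function exampleFastqUsage() {
    const exampleWorker = async (task) => task * 2;
    const q = queueExports.promise(exampleWorker, 2); // at most two tasks run concurrently
    const result = q.push(21); // a promise that resolves with the worker's result (42)
    await q.drained();         // resolves once every task pushed so far has finished
    return result;
}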
var common$7 = {};
Object.defineProperty(common$7, "__esModule", { value: true });
common$7.joinPathSegments = common$7.replacePathSegmentSeparator = common$7.isAppliedFilter = common$7.isFatalError = void 0;
function isFatalError(settings, error) {
if (settings.errorFilter === null) {
return true;
}
return !settings.errorFilter(error);
}
common$7.isFatalError = isFatalError;
function isAppliedFilter(filter, value) {
return filter === null || filter(value);
}
common$7.isAppliedFilter = isAppliedFilter;
function replacePathSegmentSeparator(filepath, separator) {
return filepath.split(/[/\\]/).join(separator);
}
common$7.replacePathSegmentSeparator = replacePathSegmentSeparator;
function joinPathSegments(a, b, separator) {
if (a === '') {
return b;
}
/**
* Correctly handles the case when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`)
* that already ends with the separator, so the separator is not duplicated.
*/
if (a.endsWith(separator)) {
return a + b;
}
return a + separator + b;
}
common$7.joinPathSegments = joinPathSegments;
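// Illustrative examples (not part of the upstream code) of the root handling described above:
// joinPathSegments('', 'src', '/')  === 'src'   (an empty base keeps the path as-is)
// joinPathSegments('/', 'src', '/') === '/src'  (no duplicated separator after a root)
// joinPathSegments('a', 'b', '/')   === 'a/b'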
var reader$1 = {};
Object.defineProperty(reader$1, "__esModule", { value: true });
const common$6 = common$7;
let Reader$1 = class Reader {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._root = common$6.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);
}
};
reader$1.default = Reader$1;
Object.defineProperty(async$4, "__esModule", { value: true });
const events_1 = require$$0$5;
const fsScandir$2 = out$2;
const fastq = queueExports;
const common$5 = common$7;
const reader_1$4 = reader$1;
class AsyncReader extends reader_1$4.default {
constructor(_root, _settings) {
super(_root, _settings);
this._settings = _settings;
this._scandir = fsScandir$2.scandir;
this._emitter = new events_1.EventEmitter();
this._queue = fastq(this._worker.bind(this), this._settings.concurrency);
this._isFatalError = false;
this._isDestroyed = false;
this._queue.drain = () => {
if (!this._isFatalError) {
this._emitter.emit('end');
}
};
}
read() {
this._isFatalError = false;
this._isDestroyed = false;
setImmediate(() => {
this._pushToQueue(this._root, this._settings.basePath);
});
return this._emitter;
}
get isDestroyed() {
return this._isDestroyed;
}
destroy() {
if (this._isDestroyed) {
throw new Error('The reader is already destroyed');
}
this._isDestroyed = true;
this._queue.killAndDrain();
}
onEntry(callback) {
this._emitter.on('entry', callback);
}
onError(callback) {
this._emitter.once('error', callback);
}
onEnd(callback) {
this._emitter.once('end', callback);
}
_pushToQueue(directory, base) {
const queueItem = { directory, base };
this._queue.push(queueItem, (error) => {
if (error !== null) {
this._handleError(error);
}
});
}
_worker(item, done) {
this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {
if (error !== null) {
done(error, undefined);
return;
}
for (const entry of entries) {
this._handleEntry(entry, item.base);
}
done(null, undefined);
});
}
_handleError(error) {
if (this._isDestroyed || !common$5.isFatalError(this._settings, error)) {
return;
}
this._isFatalError = true;
this._isDestroyed = true;
this._emitter.emit('error', error);
}
_handleEntry(entry, base) {
if (this._isDestroyed || this._isFatalError) {
return;
}
const fullpath = entry.path;
if (base !== undefined) {
entry.path = common$5.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
}
if (common$5.isAppliedFilter(this._settings.entryFilter, entry)) {
this._emitEntry(entry);
}
if (entry.dirent.isDirectory() && common$5.isAppliedFilter(this._settings.deepFilter, entry)) {
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
}
}
_emitEntry(entry) {
this._emitter.emit('entry', entry);
}
}
async$4.default = AsyncReader;
Object.defineProperty(async$5, "__esModule", { value: true });
const async_1$4 = async$4;
class AsyncProvider {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._reader = new async_1$4.default(this._root, this._settings);
this._storage = [];
}
read(callback) {
this._reader.onError((error) => {
callFailureCallback(callback, error);
});
this._reader.onEntry((entry) => {
this._storage.push(entry);
});
this._reader.onEnd(() => {
callSuccessCallback(callback, this._storage);
});
this._reader.read();
}
}
async$5.default = AsyncProvider;
function callFailureCallback(callback, error) {
callback(error);
}
function callSuccessCallback(callback, entries) {
callback(null, entries);
}
var stream$2 = {};
Object.defineProperty(stream$2, "__esModule", { value: true });
const stream_1$5 = require$$0$7;
const async_1$3 = async$4;
class StreamProvider {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._reader = new async_1$3.default(this._root, this._settings);
this._stream = new stream_1$5.Readable({
objectMode: true,
read: () => { },
destroy: () => {
if (!this._reader.isDestroyed) {
this._reader.destroy();
}
}
});
}
read() {
this._reader.onError((error) => {
this._stream.emit('error', error);
});
this._reader.onEntry((entry) => {
this._stream.push(entry);
});
this._reader.onEnd(() => {
this._stream.push(null);
});
this._reader.read();
return this._stream;
}
}
stream$2.default = StreamProvider;
var sync$4 = {};
var sync$3 = {};
Object.defineProperty(sync$3, "__esModule", { value: true });
const fsScandir$1 = out$2;
const common$4 = common$7;
const reader_1$3 = reader$1;
class SyncReader extends reader_1$3.default {
constructor() {
super(...arguments);
this._scandir = fsScandir$1.scandirSync;
this._storage = [];
this._queue = new Set();
}
read() {
this._pushToQueue(this._root, this._settings.basePath);
this._handleQueue();
return this._storage;
}
_pushToQueue(directory, base) {
this._queue.add({ directory, base });
}
_handleQueue() {
for (const item of this._queue.values()) {
this._handleDirectory(item.directory, item.base);
}
}
_handleDirectory(directory, base) {
try {
const entries = this._scandir(directory, this._settings.fsScandirSettings);
for (const entry of entries) {
this._handleEntry(entry, base);
}
}
catch (error) {
this._handleError(error);
}
}
_handleError(error) {
if (!common$4.isFatalError(this._settings, error)) {
return;
}
throw error;
}
_handleEntry(entry, base) {
const fullpath = entry.path;
if (base !== undefined) {
entry.path = common$4.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
}
if (common$4.isAppliedFilter(this._settings.entryFilter, entry)) {
this._pushToStorage(entry);
}
if (entry.dirent.isDirectory() && common$4.isAppliedFilter(this._settings.deepFilter, entry)) {
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
}
}
_pushToStorage(entry) {
this._storage.push(entry);
}
}
sync$3.default = SyncReader;
Object.defineProperty(sync$4, "__esModule", { value: true });
const sync_1$3 = sync$3;
class SyncProvider {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._reader = new sync_1$3.default(this._root, this._settings);
}
read() {
return this._reader.read();
}
}
sync$4.default = SyncProvider;
var settings$1 = {};
Object.defineProperty(settings$1, "__esModule", { value: true });
const path$c = require$$0$4;
const fsScandir = out$2;
class Settings {
constructor(_options = {}) {
this._options = _options;
this.basePath = this._getValue(this._options.basePath, undefined);
this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY);
this.deepFilter = this._getValue(this._options.deepFilter, null);
this.entryFilter = this._getValue(this._options.entryFilter, null);
this.errorFilter = this._getValue(this._options.errorFilter, null);
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path$c.sep);
this.fsScandirSettings = new fsScandir.Settings({
followSymbolicLinks: this._options.followSymbolicLinks,
fs: this._options.fs,
pathSegmentSeparator: this._options.pathSegmentSeparator,
stats: this._options.stats,
throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink
});
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
}
settings$1.default = Settings;
Object.defineProperty(out$3, "__esModule", { value: true });
out$3.Settings = out$3.walkStream = out$3.walkSync = out$3.walk = void 0;
const async_1$2 = async$5;
const stream_1$4 = stream$2;
const sync_1$2 = sync$4;
const settings_1$1 = settings$1;
out$3.Settings = settings_1$1.default;
function walk$2(directory, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
new async_1$2.default(directory, getSettings()).read(optionsOrSettingsOrCallback);
return;
}
new async_1$2.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);
}
out$3.walk = walk$2;
function walkSync(directory, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
const provider = new sync_1$2.default(directory, settings);
return provider.read();
}
out$3.walkSync = walkSync;
function walkStream(directory, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
const provider = new stream_1$4.default(directory, settings);
return provider.read();
}
out$3.walkStream = walkStream;
function getSettings(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1$1.default) {
return settingsOrOptions;
}
return new settings_1$1.default(settingsOrOptions);
}
var reader = {};
Object.defineProperty(reader, "__esModule", { value: true });
const path$b = require$$0$4;
const fsStat$2 = out$1;
const utils$6 = utils$g;
class Reader {
constructor(_settings) {
this._settings = _settings;
this._fsStatSettings = new fsStat$2.Settings({
followSymbolicLink: this._settings.followSymbolicLinks,
fs: this._settings.fs,
throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks
});
}
_getFullEntryPath(filepath) {
return path$b.resolve(this._settings.cwd, filepath);
}
_makeEntry(stats, pattern) {
const entry = {
name: pattern,
path: pattern,
dirent: utils$6.fs.createDirentFromStats(pattern, stats)
};
if (this._settings.stats) {
entry.stats = stats;
}
return entry;
}
_isFatalError(error) {
return !utils$6.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;
}
}
reader.default = Reader;
var stream$1 = {};
Object.defineProperty(stream$1, "__esModule", { value: true });
const stream_1$3 = require$$0$7;
const fsStat$1 = out$1;
const fsWalk$2 = out$3;
const reader_1$2 = reader;
class ReaderStream extends reader_1$2.default {
constructor() {
super(...arguments);
this._walkStream = fsWalk$2.walkStream;
this._stat = fsStat$1.stat;
}
dynamic(root, options) {
return this._walkStream(root, options);
}
static(patterns, options) {
const filepaths = patterns.map(this._getFullEntryPath, this);
const stream = new stream_1$3.PassThrough({ objectMode: true });
stream._write = (index, _enc, done) => {
return this._getEntry(filepaths[index], patterns[index], options)
.then((entry) => {
if (entry !== null && options.entryFilter(entry)) {
stream.push(entry);
}
if (index === filepaths.length - 1) {
stream.end();
}
done();
})
.catch(done);
};
for (let i = 0; i < filepaths.length; i++) {
stream.write(i);
}
return stream;
}
_getEntry(filepath, pattern, options) {
return this._getStat(filepath)
.then((stats) => this._makeEntry(stats, pattern))
.catch((error) => {
if (options.errorFilter(error)) {
return null;
}
throw error;
});
}
_getStat(filepath) {
return new Promise((resolve, reject) => {
this._stat(filepath, this._fsStatSettings, (error, stats) => {
return error === null ? resolve(stats) : reject(error);
});
});
}
}
stream$1.default = ReaderStream;
Object.defineProperty(async$6, "__esModule", { value: true });
const fsWalk$1 = out$3;
const reader_1$1 = reader;
const stream_1$2 = stream$1;
class ReaderAsync extends reader_1$1.default {
constructor() {
super(...arguments);
this._walkAsync = fsWalk$1.walk;
this._readerStream = new stream_1$2.default(this._settings);
}
dynamic(root, options) {
return new Promise((resolve, reject) => {
this._walkAsync(root, options, (error, entries) => {
if (error === null) {
resolve(entries);
}
else {
reject(error);
}
});
});
}
async static(patterns, options) {
const entries = [];
const stream = this._readerStream.static(patterns, options);
// After #235, replace it with an asynchronous iterator.
return new Promise((resolve, reject) => {
stream.once('error', reject);
stream.on('data', (entry) => entries.push(entry));
stream.once('end', () => resolve(entries));
});
}
}
async$6.default = ReaderAsync;
var provider = {};
var deep = {};
var partial = {};
var matcher = {};
Object.defineProperty(matcher, "__esModule", { value: true });
const utils$5 = utils$g;
class Matcher {
constructor(_patterns, _settings, _micromatchOptions) {
this._patterns = _patterns;
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
this._storage = [];
this._fillStorage();
}
_fillStorage() {
for (const pattern of this._patterns) {
const segments = this._getPatternSegments(pattern);
const sections = this._splitSegmentsIntoSections(segments);
this._storage.push({
complete: sections.length <= 1,
pattern,
segments,
sections
});
}
}
_getPatternSegments(pattern) {
const parts = utils$5.pattern.getPatternParts(pattern, this._micromatchOptions);
return parts.map((part) => {
const dynamic = utils$5.pattern.isDynamicPattern(part, this._settings);
if (!dynamic) {
return {
dynamic: false,
pattern: part
};
}
return {
dynamic: true,
pattern: part,
patternRe: utils$5.pattern.makeRe(part, this._micromatchOptions)
};
});
}
_splitSegmentsIntoSections(segments) {
return utils$5.array.splitWhen(segments, (segment) => segment.dynamic && utils$5.pattern.hasGlobStar(segment.pattern));
}
}
matcher.default = Matcher;
Object.defineProperty(partial, "__esModule", { value: true });
const matcher_1 = matcher;
class PartialMatcher extends matcher_1.default {
match(filepath) {
const parts = filepath.split('/');
const levels = parts.length;
const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels);
for (const pattern of patterns) {
const section = pattern.sections[0];
/**
* In this case, the pattern has a globstar and we must read all directories unconditionally,
* but only if the level has reached the end of the first group.
*
* fixtures/{a,b}/**
* ^ true/false ^ always true
*/
if (!pattern.complete && levels > section.length) {
return true;
}
const match = parts.every((part, index) => {
const segment = pattern.segments[index];
if (segment.dynamic && segment.patternRe.test(part)) {
return true;
}
if (!segment.dynamic && segment.pattern === part) {
return true;
}
return false;
});
if (match) {
return true;
}
}
return false;
}
}
partial.default = PartialMatcher;
Object.defineProperty(deep, "__esModule", { value: true });
const utils$4 = utils$g;
const partial_1 = partial;
class DeepFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
}
getFilter(basePath, positive, negative) {
const matcher = this._getMatcher(positive);
const negativeRe = this._getNegativePatternsRe(negative);
return (entry) => this._filter(basePath, entry, matcher, negativeRe);
}
_getMatcher(patterns) {
return new partial_1.default(patterns, this._settings, this._micromatchOptions);
}
_getNegativePatternsRe(patterns) {
const affectDepthOfReadingPatterns = patterns.filter(utils$4.pattern.isAffectDepthOfReadingPattern);
return utils$4.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
}
_filter(basePath, entry, matcher, negativeRe) {
if (this._isSkippedByDeep(basePath, entry.path)) {
return false;
}
if (this._isSkippedSymbolicLink(entry)) {
return false;
}
const filepath = utils$4.path.removeLeadingDotSegment(entry.path);
if (this._isSkippedByPositivePatterns(filepath, matcher)) {
return false;
}
return this._isSkippedByNegativePatterns(filepath, negativeRe);
}
_isSkippedByDeep(basePath, entryPath) {
/**
* Avoid unnecessary depth calculations when it doesn't matter.
*/
if (this._settings.deep === Infinity) {
return false;
}
return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
}
_getEntryLevel(basePath, entryPath) {
const entryPathDepth = entryPath.split('/').length;
if (basePath === '') {
return entryPathDepth;
}
const basePathDepth = basePath.split('/').length;
return entryPathDepth - basePathDepth;
}
_isSkippedSymbolicLink(entry) {
return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
}
_isSkippedByPositivePatterns(entryPath, matcher) {
return !this._settings.baseNameMatch && !matcher.match(entryPath);
}
_isSkippedByNegativePatterns(entryPath, patternsRe) {
return !utils$4.pattern.matchAny(entryPath, patternsRe);
}
}
deep.default = DeepFilter;
var entry$1 = {};
Object.defineProperty(entry$1, "__esModule", { value: true });
const utils$3 = utils$g;
class EntryFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
this.index = new Map();
}
getFilter(positive, negative) {
const positiveRe = utils$3.pattern.convertPatternsToRe(positive, this._micromatchOptions);
const negativeRe = utils$3.pattern.convertPatternsToRe(negative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true }));
return (entry) => this._filter(entry, positiveRe, negativeRe);
}
_filter(entry, positiveRe, negativeRe) {
const filepath = utils$3.path.removeLeadingDotSegment(entry.path);
if (this._settings.unique && this._isDuplicateEntry(filepath)) {
return false;
}
if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {
return false;
}
if (this._isSkippedByAbsoluteNegativePatterns(filepath, negativeRe)) {
return false;
}
const isDirectory = entry.dirent.isDirectory();
const isMatched = this._isMatchToPatterns(filepath, positiveRe, isDirectory) && !this._isMatchToPatterns(filepath, negativeRe, isDirectory);
if (this._settings.unique && isMatched) {
this._createIndexRecord(filepath);
}
return isMatched;
}
_isDuplicateEntry(filepath) {
return this.index.has(filepath);
}
_createIndexRecord(filepath) {
this.index.set(filepath, undefined);
}
_onlyFileFilter(entry) {
return this._settings.onlyFiles && !entry.dirent.isFile();
}
_onlyDirectoryFilter(entry) {
return this._settings.onlyDirectories && !entry.dirent.isDirectory();
}
_isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {
if (!this._settings.absolute) {
return false;
}
const fullpath = utils$3.path.makeAbsolute(this._settings.cwd, entryPath);
return utils$3.pattern.matchAny(fullpath, patternsRe);
}
_isMatchToPatterns(filepath, patternsRe, isDirectory) {
// Trying to match files and directories by patterns.
const isMatched = utils$3.pattern.matchAny(filepath, patternsRe);
// A pattern with a trailing slash can be used for directory matching.
// To apply such a pattern, we need to add a trailing slash to the path.
if (!isMatched && isDirectory) {
return utils$3.pattern.matchAny(filepath + '/', patternsRe);
}
return isMatched;
}
}
entry$1.default = EntryFilter;
var error$2 = {};
Object.defineProperty(error$2, "__esModule", { value: true });
const utils$2 = utils$g;
class ErrorFilter {
constructor(_settings) {
this._settings = _settings;
}
getFilter() {
return (error) => this._isNonFatalError(error);
}
_isNonFatalError(error) {
return utils$2.errno.isEnoentCodeError(error) || this._settings.suppressErrors;
}
}
error$2.default = ErrorFilter;
var entry = {};
Object.defineProperty(entry, "__esModule", { value: true });
const utils$1 = utils$g;
class EntryTransformer {
constructor(_settings) {
this._settings = _settings;
}
getTransformer() {
return (entry) => this._transform(entry);
}
_transform(entry) {
let filepath = entry.path;
if (this._settings.absolute) {
filepath = utils$1.path.makeAbsolute(this._settings.cwd, filepath);
filepath = utils$1.path.unixify(filepath);
}
if (this._settings.markDirectories && entry.dirent.isDirectory()) {
filepath += '/';
}
if (!this._settings.objectMode) {
return filepath;
}
return Object.assign(Object.assign({}, entry), { path: filepath });
}
}
entry.default = EntryTransformer;
Object.defineProperty(provider, "__esModule", { value: true });
const path$a = require$$0$4;
const deep_1 = deep;
const entry_1 = entry$1;
const error_1 = error$2;
const entry_2 = entry;
class Provider {
constructor(_settings) {
this._settings = _settings;
this.errorFilter = new error_1.default(this._settings);
this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());
this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());
this.entryTransformer = new entry_2.default(this._settings);
}
_getRootDirectory(task) {
return path$a.resolve(this._settings.cwd, task.base);
}
_getReaderOptions(task) {
const basePath = task.base === '.' ? '' : task.base;
return {
basePath,
pathSegmentSeparator: '/',
concurrency: this._settings.concurrency,
deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),
entryFilter: this.entryFilter.getFilter(task.positive, task.negative),
errorFilter: this.errorFilter.getFilter(),
followSymbolicLinks: this._settings.followSymbolicLinks,
fs: this._settings.fs,
stats: this._settings.stats,
throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,
transform: this.entryTransformer.getTransformer()
};
}
_getMicromatchOptions() {
return {
dot: this._settings.dot,
matchBase: this._settings.baseNameMatch,
nobrace: !this._settings.braceExpansion,
nocase: !this._settings.caseSensitiveMatch,
noext: !this._settings.extglob,
noglobstar: !this._settings.globstar,
posix: true,
strictSlashes: false
};
}
}
provider.default = Provider;
Object.defineProperty(async$7, "__esModule", { value: true });
const async_1$1 = async$6;
const provider_1$2 = provider;
class ProviderAsync extends provider_1$2.default {
constructor() {
super(...arguments);
this._reader = new async_1$1.default(this._settings);
}
async read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const entries = await this.api(root, task, options);
return entries.map((entry) => options.transform(entry));
}
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
async$7.default = ProviderAsync;
var stream = {};
Object.defineProperty(stream, "__esModule", { value: true });
const stream_1$1 = require$$0$7;
const stream_2 = stream$1;
const provider_1$1 = provider;
class ProviderStream extends provider_1$1.default {
constructor() {
super(...arguments);
this._reader = new stream_2.default(this._settings);
}
read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const source = this.api(root, task, options);
const destination = new stream_1$1.Readable({ objectMode: true, read: () => { } });
source
.once('error', (error) => destination.emit('error', error))
.on('data', (entry) => destination.emit('data', options.transform(entry)))
.once('end', () => destination.emit('end'));
destination
.once('close', () => source.destroy());
return destination;
}
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
stream.default = ProviderStream;
var sync$2 = {};
var sync$1 = {};
Object.defineProperty(sync$1, "__esModule", { value: true });
const fsStat = out$1;
const fsWalk = out$3;
const reader_1 = reader;
class ReaderSync extends reader_1.default {
constructor() {
super(...arguments);
this._walkSync = fsWalk.walkSync;
this._statSync = fsStat.statSync;
}
dynamic(root, options) {
return this._walkSync(root, options);
}
static(patterns, options) {
const entries = [];
for (const pattern of patterns) {
const filepath = this._getFullEntryPath(pattern);
const entry = this._getEntry(filepath, pattern, options);
if (entry === null || !options.entryFilter(entry)) {
continue;
}
entries.push(entry);
}
return entries;
}
_getEntry(filepath, pattern, options) {
try {
const stats = this._getStat(filepath);
return this._makeEntry(stats, pattern);
}
catch (error) {
if (options.errorFilter(error)) {
return null;
}
throw error;
}
}
_getStat(filepath) {
return this._statSync(filepath, this._fsStatSettings);
}
}
sync$1.default = ReaderSync;
Object.defineProperty(sync$2, "__esModule", { value: true });
const sync_1$1 = sync$1;
const provider_1 = provider;
class ProviderSync extends provider_1.default {
constructor() {
super(...arguments);
this._reader = new sync_1$1.default(this._settings);
}
read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const entries = this.api(root, task, options);
return entries.map(options.transform);
}
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
sync$2.default = ProviderSync;
var settings = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
const fs = require$$0__default;
const os = require$$2;
/**
 * The `os.cpus` method can return an empty array. We expect the number of cores to be at least one.
* https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107
*/
const CPU_COUNT = Math.max(os.cpus().length, 1);
exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
lstatSync: fs.lstatSync,
stat: fs.stat,
statSync: fs.statSync,
readdir: fs.readdir,
readdirSync: fs.readdirSync
};
class Settings {
constructor(_options = {}) {
this._options = _options;
this.absolute = this._getValue(this._options.absolute, false);
this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);
this.braceExpansion = this._getValue(this._options.braceExpansion, true);
this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);
this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);
this.cwd = this._getValue(this._options.cwd, process.cwd());
this.deep = this._getValue(this._options.deep, Infinity);
this.dot = this._getValue(this._options.dot, false);
this.extglob = this._getValue(this._options.extglob, true);
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);
this.fs = this._getFileSystemMethods(this._options.fs);
this.globstar = this._getValue(this._options.globstar, true);
this.ignore = this._getValue(this._options.ignore, []);
this.markDirectories = this._getValue(this._options.markDirectories, false);
this.objectMode = this._getValue(this._options.objectMode, false);
this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);
this.onlyFiles = this._getValue(this._options.onlyFiles, true);
this.stats = this._getValue(this._options.stats, false);
this.suppressErrors = this._getValue(this._options.suppressErrors, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);
this.unique = this._getValue(this._options.unique, true);
if (this.onlyDirectories) {
this.onlyFiles = false;
}
if (this.stats) {
this.objectMode = true;
}
// Remove the cast to the array in the next major (#404).
this.ignore = [].concat(this.ignore);
}
_getValue(option, value) {
return option === undefined ? value : option;
}
_getFileSystemMethods(methods = {}) {
return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);
}
}
exports.default = Settings;
} (settings));
const taskManager = tasks;
const async_1 = async$7;
const stream_1 = stream;
const sync_1 = sync$2;
const settings_1 = settings;
const utils = utils$g;
async function FastGlob(source, options) {
assertPatternsInput(source);
const works = getWorks(source, async_1.default, options);
const result = await Promise.all(works);
return utils.array.flatten(result);
}
// https://github.com/typescript-eslint/typescript-eslint/issues/60
// eslint-disable-next-line no-redeclare
(function (FastGlob) {
FastGlob.glob = FastGlob;
FastGlob.globSync = sync;
FastGlob.globStream = stream;
FastGlob.async = FastGlob;
function sync(source, options) {
assertPatternsInput(source);
const works = getWorks(source, sync_1.default, options);
return utils.array.flatten(works);
}
FastGlob.sync = sync;
function stream(source, options) {
assertPatternsInput(source);
const works = getWorks(source, stream_1.default, options);
/**
* The stream returned by the provider cannot work with an asynchronous iterator.
* To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.
 * This affects performance (+25%). I don't see a better solution right now.
*/
return utils.stream.merge(works);
}
FastGlob.stream = stream;
function generateTasks(source, options) {
assertPatternsInput(source);
const patterns = [].concat(source);
const settings = new settings_1.default(options);
return taskManager.generate(patterns, settings);
}
FastGlob.generateTasks = generateTasks;
function isDynamicPattern(source, options) {
assertPatternsInput(source);
const settings = new settings_1.default(options);
return utils.pattern.isDynamicPattern(source, settings);
}
FastGlob.isDynamicPattern = isDynamicPattern;
function escapePath(source) {
assertPatternsInput(source);
return utils.path.escape(source);
}
FastGlob.escapePath = escapePath;
function convertPathToPattern(source) {
assertPatternsInput(source);
return utils.path.convertPathToPattern(source);
}
FastGlob.convertPathToPattern = convertPathToPattern;
(function (posix) {
function escapePath(source) {
assertPatternsInput(source);
return utils.path.escapePosixPath(source);
}
posix.escapePath = escapePath;
function convertPathToPattern(source) {
assertPatternsInput(source);
return utils.path.convertPosixPathToPattern(source);
}
posix.convertPathToPattern = convertPathToPattern;
})(FastGlob.posix || (FastGlob.posix = {}));
(function (win32) {
function escapePath(source) {
assertPatternsInput(source);
return utils.path.escapeWindowsPath(source);
}
win32.escapePath = escapePath;
function convertPathToPattern(source) {
assertPatternsInput(source);
return utils.path.convertWindowsPathToPattern(source);
}
win32.convertPathToPattern = convertPathToPattern;
})(FastGlob.win32 || (FastGlob.win32 = {}));
})(FastGlob || (FastGlob = {}));
function getWorks(source, _Provider, options) {
const patterns = [].concat(source);
const settings = new settings_1.default(options);
const tasks = taskManager.generate(patterns, settings);
const provider = new _Provider(settings);
return tasks.map(provider.read, provider);
}
function assertPatternsInput(input) {
const source = [].concat(input);
const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));
if (!isValidSource) {
throw new TypeError('Patterns must be a string (non empty) or an array of strings');
}
}
var out = FastGlob;
var glob = /*@__PURE__*/getDefaultExportFromCjs(out);
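// Illustrative sketch (not part of the upstream fast-glob code): `glob` is the promise-based
// entry point; the pattern and the `ignore` option below are hypothetical values.
async function exampleGlobUsage() {
    // Resolves with the matching paths relative to the current working directory.
    return glob(['src/**/*.ts'], { ignore: ['**/node_modules/**'] });
}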
function e(e,n,r){throw new Error(r?`No known conditions for "${n}" specifier in "${e}" package`:`Missing "${n}" specifier in "${e}" package`)}function n(n,i,o,f){let s,u,l=r(n,o),c=function(e){let n=new Set(["default",...e.conditions||[]]);return e.unsafe||n.add(e.require?"require":"import"),e.unsafe||n.add(e.browser?"browser":"node"),n}(f||{}),a=i[l];if(void 0===a){let e,n,r,t;for(t in i)n&&t.length<n.length||("/"===t[t.length-1]&&l.startsWith(t)?(u=l.substring(t.length),n=t):t.length>1&&(r=t.indexOf("*",1),~r&&(e=RegExp("^"+t.substring(0,r)+"(.*)"+t.substring(1+r)).exec(l),e&&e[1]&&(u=e[1],n=t))));a=i[n];}return a||e(n,l),s=t(a,c),s||e(n,l,1),u&&function(e,n){let r,t=0,i=e.length,o=/[*]/g,f=/[/]$/;for(;t<i;t++)e[t]=o.test(r=e[t])?r.replace(o,n):f.test(r)?r+n:r;}(s,u),s}function r(e,n,r){if(e===n||"."===n)return ".";let t=e+"/",i=t.length,o=n.slice(0,i)===t,f=o?n.slice(i):n;return "#"===f[0]?f:o||!r?"./"===f.slice(0,2)?f:"./"+f:f}function t(e,n,r){if(e){if("string"==typeof e)return r&&r.add(e),[e];let i,o;if(Array.isArray(e)){for(o=r||new Set,i=0;i<e.length;i++)t(e[i],n,o);if(!r&&o.size)return [...o]}else for(i in e)if(n.has(i))return t(e[i],n,r)}}function o(e,r,t){let i,o=e.exports;if(o){if("string"==typeof o)o={".":o};else for(i in o){"."!==i[0]&&(o={".":o});break}return n(e.name,o,r||".",t)}}function f(e,r,t){if(e.imports)return n(e.name,e.imports,r,t)}
// This file was generated. Do not modify manually!
var astralIdentifierCodes = [509, 0, 227, 0, 150, 4, 294, 9, 1368, 2, 2, 1, 6, 3, 41, 2, 5, 0, 166, 1, 574, 3, 9, 9, 370, 1, 81, 2, 71, 10, 50, 3, 123, 2, 54, 14, 32, 10, 3, 1, 11, 3, 46, 10, 8, 0, 46, 9, 7, 2, 37, 13, 2, 9, 6, 1, 45, 0, 13, 2, 49, 13, 9, 3, 2, 11, 83, 11, 7, 0, 3, 0, 158, 11, 6, 9, 7, 3, 56, 1, 2, 6, 3, 1, 3, 2, 10, 0, 11, 1, 3, 6, 4, 4, 193, 17, 10, 9, 5, 0, 82, 19, 13, 9, 214, 6, 3, 8, 28, 1, 83, 16, 16, 9, 82, 12, 9, 9, 84, 14, 5, 9, 243, 14, 166, 9, 71, 5, 2, 1, 3, 3, 2, 0, 2, 1, 13, 9, 120, 6, 3, 6, 4, 0, 29, 9, 41, 6, 2, 3, 9, 0, 10, 10, 47, 15, 406, 7, 2, 7, 17, 9, 57, 21, 2, 13, 123, 5, 4, 0, 2, 1, 2, 6, 2, 0, 9, 9, 49, 4, 2, 1, 2, 4, 9, 9, 330, 3, 10, 1, 2, 0, 49, 6, 4, 4, 14, 9, 5351, 0, 7, 14, 13835, 9, 87, 9, 39, 4, 60, 6, 26, 9, 1014, 0, 2, 54, 8, 3, 82, 0, 12, 1, 19628, 1, 4706, 45, 3, 22, 543, 4, 4, 5, 9, 7, 3, 6, 31, 3, 149, 2, 1418, 49, 513, 54, 5, 49, 9, 0, 15, 0, 23, 4, 2, 14, 1361, 6, 2, 16, 3, 6, 2, 1, 2, 4, 101, 0, 161, 6, 10, 9, 357, 0, 62, 13, 499, 13, 983, 6, 110, 6, 6, 9, 4759, 9, 787719, 239];
// This file was generated. Do not modify manually!
var astralIdentifierStartCodes = [0, 11, 2, 25, 2, 18, 2, 1, 2, 14, 3, 13, 35, 122, 70, 52, 268, 28, 4, 48, 48, 31, 14, 29, 6, 37, 11, 29, 3, 35, 5, 7, 2, 4, 43, 157, 19, 35, 5, 35, 5, 39, 9, 51, 13, 10, 2, 14, 2, 6, 2, 1, 2, 10, 2, 14, 2, 6, 2, 1, 68, 310, 10, 21, 11, 7, 25, 5, 2, 41, 2, 8, 70, 5, 3, 0, 2, 43, 2, 1, 4, 0, 3, 22, 11, 22, 10, 30, 66, 18, 2, 1, 11, 21, 11, 25, 71, 55, 7, 1, 65, 0, 16, 3, 2, 2, 2, 28, 43, 28, 4, 28, 36, 7, 2, 27, 28, 53, 11, 21, 11, 18, 14, 17, 111, 72, 56, 50, 14, 50, 14, 35, 349, 41, 7, 1, 79, 28, 11, 0, 9, 21, 43, 17, 47, 20, 28, 22, 13, 52, 58, 1, 3, 0, 14, 44, 33, 24, 27, 35, 30, 0, 3, 0, 9, 34, 4, 0, 13, 47, 15, 3, 22, 0, 2, 0, 36, 17, 2, 24, 20, 1, 64, 6, 2, 0, 2, 3, 2, 14, 2, 9, 8, 46, 39, 7, 3, 1, 3, 21, 2, 6, 2, 1, 2, 4, 4, 0, 19, 0, 13, 4, 159, 52, 19, 3, 21, 2, 31, 47, 21, 1, 2, 0, 185, 46, 42, 3, 37, 47, 21, 0, 60, 42, 14, 0, 72, 26, 38, 6, 186, 43, 117, 63, 32, 7, 3, 0, 3, 7, 2, 1, 2, 23, 16, 0, 2, 0, 95, 7, 3, 38, 17, 0, 2, 0, 29, 0, 11, 39, 8, 0, 22, 0, 12, 45, 20, 0, 19, 72, 264, 8, 2, 36, 18, 0, 50, 29, 113, 6, 2, 1, 2, 37, 22, 0, 26, 5, 2, 1, 2, 31, 15, 0, 328, 18, 16, 0, 2, 12, 2, 33, 125, 0, 80, 921, 103, 110, 18, 195, 2637, 96, 16, 1071, 18, 5, 4026, 582, 8634, 568, 8, 30, 18, 78, 18, 29, 19, 47, 17, 3, 32, 20, 6, 18, 689, 63, 129, 74, 6, 0, 67, 12, 65, 1, 2, 0, 29, 6135, 9, 1237, 43, 8, 8936, 3, 2, 6, 2, 1, 2, 290, 16, 0, 30, 2, 3, 0, 15, 3, 9, 395, 2309, 106, 6, 12, 4, 8, 8, 9, 5991, 84, 2, 70, 2, 1, 3, 0, 3, 1, 3, 3, 2, 11, 2, 0, 2, 6, 2, 64, 2, 3, 3, 7, 2, 6, 2, 27, 2, 3, 2, 4, 2, 0, 4, 6, 2, 339, 3, 24, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 7, 1845, 30, 7, 5, 262, 61, 147, 44, 11, 6, 17, 0, 322, 29, 19, 43, 485, 27, 757, 6, 2, 3, 2, 1, 2, 14, 2, 196, 60, 67, 8, 0, 1205, 3, 2, 26, 2, 1, 2, 0, 3, 0, 2, 9, 2, 3, 2, 0, 2, 0, 7, 0, 5, 0, 2, 0, 2, 0, 2, 2, 2, 1, 2, 0, 3, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 1, 2, 0, 3, 3, 2, 6, 2, 3, 2, 3, 2, 0, 2, 9, 2, 16, 6, 2, 2, 4, 2, 16, 4421, 42719, 33, 4153, 7, 221, 3, 5761, 15, 7472, 3104, 541, 1507, 4938, 6, 4191];
// This file was generated. Do not modify manually!
var nonASCIIidentifierChars = "\u200c\u200d\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u0898-\u089f\u08ca-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u09fe\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0afa-\u0aff\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b55-\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c04\u0c3c\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0cf3\u0d00-\u0d03\u0d3b\u0d3c\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d81-\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0ebc\u0ec8-\u0ece\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1715\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u180f-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1abf-\u1ace\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf4\u1cf7-\u1cf9\u1dc0-\u1dff\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua82c\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua8ff-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f";
// This file was generated. Do not modify manually!
var nonASCIIidentifierStartChars = "\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u0870-\u0887\u0889-\u088e\u08a0-\u08c9\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u09fc\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c5d\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cdd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d04-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e86-\u0e8a\u0e8c-\u0ea3\u0ea5\u0ea7-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u1711\u171f-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4c\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf3\u1cf5\u1cf6\u1cfa\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31bf\u31f0-\u31ff\u3400-\u4dbf\u4e00-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7ca\ua7d0\ua7d1\ua7d3\ua7d5-\ua7d9\ua7f2-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab69\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc";
// These are a run-length and offset encoded representation of the
// >0xffff code points that are a valid part of identifiers. The
// offset starts at 0x10000, and each pair of numbers represents an
// offset to the next range, and then a size of the range.
// Reserved word lists for various dialects of the language
var reservedWords = {
3: "abstract boolean byte char class double enum export extends final float goto implements import int interface long native package private protected public short static super synchronized throws transient volatile",
5: "class enum extends super const export import",
6: "enum",
strict: "implements interface let package private protected public static yield",
strictBind: "eval arguments"
};
// And the keywords
var ecma5AndLessKeywords = "break case catch continue debugger default do else finally for function if return switch throw try var while with null true false instanceof typeof void delete new in this";
var keywords$1 = {
5: ecma5AndLessKeywords,
"5module": ecma5AndLessKeywords + " export import",
6: ecma5AndLessKeywords + " const class extends export import super"
};
var keywordRelationalOperator = /^in(stanceof)?$/;
// ## Character categories
var nonASCIIidentifierStart = new RegExp("[" + nonASCIIidentifierStartChars + "]");
var nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]");
// This has complexity linear in the value of the code point. The
// assumption is that looking up astral identifier characters is
// rare.
function isInAstralSet(code, set) {
var pos = 0x10000;
for (var i = 0; i < set.length; i += 2) {
pos += set[i];
if (pos > code) { return false }
pos += set[i + 1];
if (pos >= code) { return true }
}
return false
}
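// Illustrative sketch, not part of the library: decoding a hypothetical
// run-length/offset set. Given set = [5, 2], the walk starts at pos = 0x10000,
// adds the offset 5 (the range starts at 0x10005), then adds the size 2
// (the range runs through 0x10007 inclusive):
//   isInAstralSet(0x10004, [5, 2]) // false - below the range start
//   isInAstralSet(0x10006, [5, 2]) // true  - inside the range
//   isInAstralSet(0x10008, [5, 2]) // false - past the range end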
// Test whether a given character code starts an identifier.
function isIdentifierStart(code, astral) {
if (code < 65) { return code === 36 }
if (code < 91) { return true }
if (code < 97) { return code === 95 }
if (code < 123) { return true }
if (code <= 0xffff) { return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code)) }
if (astral === false) { return false }
return isInAstralSet(code, astralIdentifierStartCodes)
}
// Test whether a given character is part of an identifier.
function isIdentifierChar(code, astral) {
if (code < 48) { return code === 36 }
if (code < 58) { return true }
if (code < 65) { return false }
if (code < 91) { return true }
if (code < 97) { return code === 95 }
if (code < 123) { return true }
if (code <= 0xffff) { return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code)) }
if (astral === false) { return false }
return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes)
}
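// Illustrative sketch, not part of the library: the ASCII fast paths above mean,
// for example:
//   isIdentifierStart(0x24) === true   // '$'
//   isIdentifierStart(0x5f) === true   // '_'
//   isIdentifierStart(0x31) === false  // '1' cannot start an identifier
//   isIdentifierChar(0x31)  === true   // but digits may appear after the first char
// Code points above 0xffff are looked up in the astral tables unless the second
// argument is explicitly `false`.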
// ## Token types
// The assignment of fine-grained, information-carrying type objects
// allows the tokenizer to store the information it has about a
// token in a way that is very cheap for the parser to look up.
// All token type variables start with an underscore, to make them
// easy to recognize.
// The `beforeExpr` property is used to disambiguate between regular
// expressions and divisions. It is set on all token types that can
// be followed by an expression (thus, a slash after them would be a
// regular expression).
//
// The `startsExpr` property is used to check if the token ends a
// `yield` expression. It is set on all token types that either can
// directly start an expression (like a quotation mark) or can
// continue an expression (like the body of a string).
//
// `isLoop` marks a keyword as starting a loop, which is important
// to know when parsing a label, in order to allow or disallow
// continue jumps to that label.
var TokenType = function TokenType(label, conf) {
if ( conf === void 0 ) conf = {};
this.label = label;
this.keyword = conf.keyword;
this.beforeExpr = !!conf.beforeExpr;
this.startsExpr = !!conf.startsExpr;
this.isLoop = !!conf.isLoop;
this.isAssign = !!conf.isAssign;
this.prefix = !!conf.prefix;
this.postfix = !!conf.postfix;
this.binop = conf.binop || null;
this.updateContext = null;
};
function binop(name, prec) {
return new TokenType(name, {beforeExpr: true, binop: prec})
}
var beforeExpr = {beforeExpr: true}, startsExpr = {startsExpr: true};
// Map keyword names to token types.
var keywords$2 = {};
// Succinct definitions of keyword token types
function kw(name, options) {
if ( options === void 0 ) options = {};
options.keyword = name;
return keywords$2[name] = new TokenType(name, options)
}
var types$1 = {
num: new TokenType("num", startsExpr),
regexp: new TokenType("regexp", startsExpr),
string: new TokenType("string", startsExpr),
name: new TokenType("name", startsExpr),
privateId: new TokenType("privateId", startsExpr),
eof: new TokenType("eof"),
// Punctuation token types.
bracketL: new TokenType("[", {beforeExpr: true, startsExpr: true}),
bracketR: new TokenType("]"),
braceL: new TokenType("{", {beforeExpr: true, startsExpr: true}),
braceR: new TokenType("}"),
parenL: new TokenType("(", {beforeExpr: true, startsExpr: true}),
parenR: new TokenType(")"),
comma: new TokenType(",", beforeExpr),
semi: new TokenType(";", beforeExpr),
colon: new TokenType(":", beforeExpr),
dot: new TokenType("."),
question: new TokenType("?", beforeExpr),
questionDot: new TokenType("?."),
arrow: new TokenType("=>", beforeExpr),
template: new TokenType("template"),
invalidTemplate: new TokenType("invalidTemplate"),
ellipsis: new TokenType("...", beforeExpr),
backQuote: new TokenType("`", startsExpr),
dollarBraceL: new TokenType("${", {beforeExpr: true, startsExpr: true}),
// Operators. These carry several kinds of properties to help the
// parser use them properly (the presence of these properties is
// what categorizes them as operators).
//
// `binop`, when present, specifies that this operator is a binary
// operator, and will refer to its precedence.
//
// `prefix` and `postfix` mark the operator as a prefix or postfix
// unary operator.
//
// `isAssign` marks all of `=`, `+=`, `-=` etcetera, which act as
// binary operators with a very low precedence, that should result
// in AssignmentExpression nodes.
eq: new TokenType("=", {beforeExpr: true, isAssign: true}),
assign: new TokenType("_=", {beforeExpr: true, isAssign: true}),
incDec: new TokenType("++/--", {prefix: true, postfix: true, startsExpr: true}),
prefix: new TokenType("!/~", {beforeExpr: true, prefix: true, startsExpr: true}),
logicalOR: binop("||", 1),
logicalAND: binop("&&", 2),
bitwiseOR: binop("|", 3),
bitwiseXOR: binop("^", 4),
bitwiseAND: binop("&", 5),
equality: binop("==/!=/===/!==", 6),
relational: binop("</>/<=/>=", 7),
bitShift: binop("<</>>/>>>", 8),
plusMin: new TokenType("+/-", {beforeExpr: true, binop: 9, prefix: true, startsExpr: true}),
modulo: binop("%", 10),
star: binop("*", 10),
slash: binop("/", 10),
starstar: new TokenType("**", {beforeExpr: true}),
coalesce: binop("??", 1),
// Keyword token types.
_break: kw("break"),
_case: kw("case", beforeExpr),
_catch: kw("catch"),
_continue: kw("continue"),
_debugger: kw("debugger"),
_default: kw("default", beforeExpr),
_do: kw("do", {isLoop: true, beforeExpr: true}),
_else: kw("else", beforeExpr),
_finally: kw("finally"),
_for: kw("for", {isLoop: true}),
_function: kw("function", startsExpr),
_if: kw("if"),
_return: kw("return", beforeExpr),
_switch: kw("switch"),
_throw: kw("throw", beforeExpr),
_try: kw("try"),
_var: kw("var"),
_const: kw("const"),
_while: kw("while", {isLoop: true}),
_with: kw("with"),
_new: kw("new", {beforeExpr: true, startsExpr: true}),
_this: kw("this", startsExpr),
_super: kw("super", startsExpr),
_class: kw("class", startsExpr),
_extends: kw("extends", beforeExpr),
_export: kw("export"),
_import: kw("import", startsExpr),
_null: kw("null", startsExpr),
_true: kw("true", startsExpr),
_false: kw("false", startsExpr),
_in: kw("in", {beforeExpr: true, binop: 7}),
_instanceof: kw("instanceof", {beforeExpr: true, binop: 7}),
_typeof: kw("typeof", {beforeExpr: true, prefix: true, startsExpr: true}),
_void: kw("void", {beforeExpr: true, prefix: true, startsExpr: true}),
_delete: kw("delete", {beforeExpr: true, prefix: true, startsExpr: true})
};
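// Illustrative sketch, not part of the library: the `binop` numbers above are
// ordinary precedence levels, so in `1 + 2 * 3` the `*` (level 10) binds tighter
// than `+` (plusMin, level 9) and the result groups as `1 + (2 * 3)`. The
// `beforeExpr` flag is how the tokenizer tells regular expressions from division:
// after `=` (which has beforeExpr) a slash starts a regexp, as in `x = /ab/`,
// while after an identifier it is the division operator, as in `a / b`.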
// Matches a whole line break (where CRLF is considered a single
// line break). Used to count lines.
var lineBreak = /\r\n?|\n|\u2028|\u2029/;
var lineBreakG = new RegExp(lineBreak.source, "g");
function isNewLine(code) {
return code === 10 || code === 13 || code === 0x2028 || code === 0x2029
}
function nextLineBreak(code, from, end) {
if ( end === void 0 ) end = code.length;
for (var i = from; i < end; i++) {
var next = code.charCodeAt(i);
if (isNewLine(next))
{ return i < end - 1 && next === 13 && code.charCodeAt(i + 1) === 10 ? i + 2 : i + 1 }
}
return -1
}
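// Illustrative sketch, not part of the library: `nextLineBreak` returns the
// offset just past the break, treating CRLF as a single line break:
//   nextLineBreak("a\r\nb", 0) === 3   // skips both \r and \n
//   nextLineBreak("a\nb", 0)   === 2
//   nextLineBreak("ab", 0)     === -1  // no break before the end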
var nonASCIIwhitespace = /[\u1680\u2000-\u200a\u202f\u205f\u3000\ufeff]/;
var skipWhiteSpace = /(?:\s|\/\/.*|\/\*[^]*?\*\/)*/g;
var ref = Object.prototype;
var hasOwnProperty$1 = ref.hasOwnProperty;
var toString$1 = ref.toString;
var hasOwn = Object.hasOwn || (function (obj, propName) { return (
hasOwnProperty$1.call(obj, propName)
); });
var isArray = Array.isArray || (function (obj) { return (
toString$1.call(obj) === "[object Array]"
); });
function wordsRegexp(words) {
return new RegExp("^(?:" + words.replace(/ /g, "|") + ")$")
}
function codePointToString(code) {
// UTF-16 Decoding
if (code <= 0xFFFF) { return String.fromCharCode(code) }
code -= 0x10000;
return String.fromCharCode((code >> 10) + 0xD800, (code & 1023) + 0xDC00)
}
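// Illustrative sketch, not part of the library: for an astral code point the
// helper builds the UTF-16 surrogate pair by hand. For U+1F600:
//   0x1F600 - 0x10000 = 0xF600
//   high surrogate: 0xD800 + (0xF600 >> 10)   = 0xD83D
//   low surrogate:  0xDC00 + (0xF600 & 0x3FF) = 0xDE00
// so codePointToString(0x1F600) === "\ud83d\ude00", matching String.fromCodePoint.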
var loneSurrogate = /(?:[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])/;
// These are used when `options.locations` is on, for the
// `startLoc` and `endLoc` properties.
var Position = function Position(line, col) {
this.line = line;
this.column = col;
};
Position.prototype.offset = function offset (n) {
return new Position(this.line, this.column + n)
};
var SourceLocation = function SourceLocation(p, start, end) {
this.start = start;
this.end = end;
if (p.sourceFile !== null) { this.source = p.sourceFile; }
};
// The `getLineInfo` function is mostly useful when the
// `locations` option is off (for performance reasons) and you
// want to find the line/column position for a given character
// offset. `input` should be the code string that the offset refers
// into.
function getLineInfo(input, offset) {
for (var line = 1, cur = 0;;) {
var nextBreak = nextLineBreak(input, cur, offset);
if (nextBreak < 0) { return new Position(line, offset - cur) }
++line;
cur = nextBreak;
}
}
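// Illustrative sketch, not part of the library: mapping a character offset back
// to a 1-based line and 0-based column when `locations` is off:
//   getLineInfo("let a;\nlet b;", 8) // -> Position { line: 2, column: 1 }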
// A second argument must be given to configure the parser process.
// These options are recognized (only `ecmaVersion` is required):
var defaultOptions = {
// `ecmaVersion` indicates the ECMAScript version to parse. Must be
// either 3, 5, 6 (or 2015), 7 (2016), 8 (2017), 9 (2018), 10
// (2019), 11 (2020), 12 (2021), 13 (2022), 14 (2023), or `"latest"`
// (the latest version the library supports). This influences
// support for strict mode, the set of reserved words, and support
// for new syntax features.
ecmaVersion: null,
// `sourceType` indicates the mode the code should be parsed in.
// Can be either `"script"` or `"module"`. This influences global
// strict mode and parsing of `import` and `export` declarations.
sourceType: "script",
// `onInsertedSemicolon` can be a callback that will be called
// when a semicolon is automatically inserted. It will be passed
// the position of the inserted semicolon as an offset, and if `locations` is
// enabled, it is given the location as a `{line, column}` object
// as second argument.
onInsertedSemicolon: null,
// `onTrailingComma` is similar to `onInsertedSemicolon`, but for
// trailing commas.
onTrailingComma: null,
// By default, reserved words are only enforced if ecmaVersion >= 5.
// Set `allowReserved` to a boolean value to explicitly turn this on
// and off. When this option has the value "never", reserved words
// and keywords can also not be used as property names.
allowReserved: null,
// When enabled, a return at the top level is not considered an
// error.
allowReturnOutsideFunction: false,
// When enabled, import/export statements are not constrained to
// appearing at the top of the program, and an import.meta expression
// in a script isn't considered an error.
allowImportExportEverywhere: false,
// By default, await identifiers are allowed to appear at the top-level scope only if ecmaVersion >= 2022.
// When enabled, await identifiers are allowed to appear at the top-level scope,
// but they are still not allowed in non-async functions.
allowAwaitOutsideFunction: null,
// When enabled, super identifiers are not constrained to
// appearing in methods and do not raise an error when they appear elsewhere.
allowSuperOutsideMethod: null,
// When enabled, hashbang directive in the beginning of file is
// allowed and treated as a line comment. Enabled by default when
// `ecmaVersion` >= 2023.
allowHashBang: false,
// By default, the parser will verify that private properties are
// only used in places where they are valid and have been declared.
// Set this to false to turn such checks off.
checkPrivateFields: true,
// When `locations` is on, `loc` properties holding objects with
// `start` and `end` properties in `{line, column}` form (with
// line being 1-based and column 0-based) will be attached to the
// nodes.
locations: false,
// A function can be passed as `onToken` option, which will
// cause Acorn to call that function with an object in the same
// format as tokens returned from `tokenizer().getToken()`. Note
// that you are not allowed to call the parser from the
// callback—that will corrupt its internal state.
onToken: null,
// A function can be passed as `onComment` option, which will
// cause Acorn to call that function with `(block, text, start,
// end)` parameters whenever a comment is skipped. `block` is a
// boolean indicating whether this is a block (`/* */`) comment,
// `text` is the content of the comment, and `start` and `end` are
// character offsets that denote the start and end of the comment.
// When the `locations` option is on, two more parameters are
// passed, the full `{line, column}` locations of the start and
// end of the comments. Note that you are not allowed to call the
// parser from the callback—that will corrupt its internal state.
onComment: null,
// Nodes have their start and end characters offsets recorded in
// `start` and `end` properties (directly on the node, rather than
// the `loc` object, which holds line/column data). To also add a
// [semi-standardized][range] `range` property holding a `[start,
// end]` array with the same numbers, set the `ranges` option to
// `true`.
//
// [range]: https://bugzilla.mozilla.org/show_bug.cgi?id=745678
ranges: false,
// It is possible to parse multiple files into a single AST by
// passing the tree produced by parsing the first file as
// `program` option in subsequent parses. This will add the
// toplevel forms of the parsed file to the `Program` (top) node
// of an existing parse tree.
program: null,
// When `locations` is on, you can pass this to record the source
// file in every node's `loc` object.
sourceFile: null,
// This value, if given, is stored in every node, whether
// `locations` is on or off.
directSourceFile: null,
// When enabled, parenthesized expressions are represented by
// (non-standard) ParenthesizedExpression nodes
preserveParens: false
};
// Interpret and default an options object
var warnedAboutEcmaVersion = false;
function getOptions(opts) {
var options = {};
for (var opt in defaultOptions)
{ options[opt] = opts && hasOwn(opts, opt) ? opts[opt] : defaultOptions[opt]; }
if (options.ecmaVersion === "latest") {
options.ecmaVersion = 1e8;
} else if (options.ecmaVersion == null) {
if (!warnedAboutEcmaVersion && typeof console === "object" && console.warn) {
warnedAboutEcmaVersion = true;
console.warn("Since Acorn 8.0.0, options.ecmaVersion is required.\nDefaulting to 2020, but this will stop working in the future.");
}
options.ecmaVersion = 11;
} else if (options.ecmaVersion >= 2015) {
options.ecmaVersion -= 2009;
}
if (options.allowReserved == null)
{ options.allowReserved = options.ecmaVersion < 5; }
if (!opts || opts.allowHashBang == null)
{ options.allowHashBang = options.ecmaVersion >= 14; }
if (isArray(options.onToken)) {
var tokens = options.onToken;
options.onToken = function (token) { return tokens.push(token); };
}
if (isArray(options.onComment))
{ options.onComment = pushComment(options, options.onComment); }
return options
}
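// Illustrative sketch, not part of the library: `getOptions` normalizes year
// numbers to edition numbers, so all of these select the same internal version:
//   getOptions({ecmaVersion: 2015}).ecmaVersion     === 6
//   getOptions({ecmaVersion: 6}).ecmaVersion        === 6
//   getOptions({ecmaVersion: "latest"}).ecmaVersion === 1e8
// It also fills in derived defaults; for example allowHashBang becomes true once
// the normalized ecmaVersion is >= 14 (ES2023) and the caller did not set it.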
function pushComment(options, array) {
return function(block, text, start, end, startLoc, endLoc) {
var comment = {
type: block ? "Block" : "Line",
value: text,
start: start,
end: end
};
if (options.locations)
{ comment.loc = new SourceLocation(this, startLoc, endLoc); }
if (options.ranges)
{ comment.range = [start, end]; }
array.push(comment);
}
}
// Each scope gets a bitset that may contain these flags
var
SCOPE_TOP = 1,
SCOPE_FUNCTION = 2,
SCOPE_ASYNC = 4,
SCOPE_GENERATOR = 8,
SCOPE_ARROW = 16,
SCOPE_SIMPLE_CATCH = 32,
SCOPE_SUPER = 64,
SCOPE_DIRECT_SUPER = 128,
SCOPE_CLASS_STATIC_BLOCK = 256,
SCOPE_VAR = SCOPE_TOP | SCOPE_FUNCTION | SCOPE_CLASS_STATIC_BLOCK;
function functionFlags(async, generator) {
return SCOPE_FUNCTION | (async ? SCOPE_ASYNC : 0) | (generator ? SCOPE_GENERATOR : 0)
}
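// Illustrative sketch, not part of the library: scope flags combine as a bitset.
// The scope entered for an async generator function is
//   functionFlags(true, true) === SCOPE_FUNCTION | SCOPE_ASYNC | SCOPE_GENERATOR // 2 | 4 | 8 = 14
// and membership tests are plain masks, e.g. (flags & SCOPE_VAR) > 0 asks whether
// `var` declarations are hoisted to this scope.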
// Used in checkLVal* and declareName to determine the type of a binding
var
BIND_NONE = 0, // Not a binding
BIND_VAR = 1, // Var-style binding
BIND_LEXICAL = 2, // Let- or const-style binding
BIND_FUNCTION = 3, // Function declaration
BIND_SIMPLE_CATCH = 4, // Simple (identifier pattern) catch binding
BIND_OUTSIDE = 5; // Special case for function names as bound inside the function
var Parser$1 = function Parser(options, input, startPos) {
this.options = options = getOptions(options);
this.sourceFile = options.sourceFile;
this.keywords = wordsRegexp(keywords$1[options.ecmaVersion >= 6 ? 6 : options.sourceType === "module" ? "5module" : 5]);
var reserved = "";
if (options.allowReserved !== true) {
reserved = reservedWords[options.ecmaVersion >= 6 ? 6 : options.ecmaVersion === 5 ? 5 : 3];
if (options.sourceType === "module") { reserved += " await"; }
}
this.reservedWords = wordsRegexp(reserved);
var reservedStrict = (reserved ? reserved + " " : "") + reservedWords.strict;
this.reservedWordsStrict = wordsRegexp(reservedStrict);
this.reservedWordsStrictBind = wordsRegexp(reservedStrict + " " + reservedWords.strictBind);
this.input = String(input);
// Used to signal to callers of `readWord1` whether the word
// contained any escape sequences. This is needed because words with
// escape sequences must not be interpreted as keywords.
this.containsEsc = false;
// Set up token state
// The current position of the tokenizer in the input.
if (startPos) {
this.pos = startPos;
this.lineStart = this.input.lastIndexOf("\n", startPos - 1) + 1;
this.curLine = this.input.slice(0, this.lineStart).split(lineBreak).length;
} else {
this.pos = this.lineStart = 0;
this.curLine = 1;
}
// Properties of the current token:
// Its type
this.type = types$1.eof;
// For tokens that include more information than their type, the value
this.value = null;
// Its start and end offset
this.start = this.end = this.pos;
// And, if locations are used, the {line, column} object
// corresponding to those offsets
this.startLoc = this.endLoc = this.curPosition();
// Position information for the previous token
this.lastTokEndLoc = this.lastTokStartLoc = null;
this.lastTokStart = this.lastTokEnd = this.pos;
// The context stack is used to superficially track syntactic
// context to predict whether a regular expression is allowed in a
// given position.
this.context = this.initialContext();
this.exprAllowed = true;
// Figure out if it's module code.
this.inModule = options.sourceType === "module";
this.strict = this.inModule || this.strictDirective(this.pos);
// Used to signify the start of a potential arrow function
this.potentialArrowAt = -1;
this.potentialArrowInForAwait = false;
// Positions to delayed-check that yield/await does not exist in default parameters.
this.yieldPos = this.awaitPos = this.awaitIdentPos = 0;
// Labels in scope.
this.labels = [];
// Thus-far undefined exports.
this.undefinedExports = Object.create(null);
// If enabled, skip leading hashbang line.
if (this.pos === 0 && options.allowHashBang && this.input.slice(0, 2) === "#!")
{ this.skipLineComment(2); }
// Scope tracking for duplicate variable names (see scope.js)
this.scopeStack = [];
this.enterScope(SCOPE_TOP);
// For RegExp validation
this.regexpState = null;
// The stack of private names.
// Each element has two properties: 'declared' and 'used'.
// When the parser exits the outermost class definition, all used private names must have been declared.
this.privateNameStack = [];
};
var prototypeAccessors = { inFunction: { configurable: true },inGenerator: { configurable: true },inAsync: { configurable: true },canAwait: { configurable: true },allowSuper: { configurable: true },allowDirectSuper: { configurable: true },treatFunctionsAsVar: { configurable: true },allowNewDotTarget: { configurable: true },inClassStaticBlock: { configurable: true } };
Parser$1.prototype.parse = function parse () {
var node = this.options.program || this.startNode();
this.nextToken();
return this.parseTopLevel(node)
};
prototypeAccessors.inFunction.get = function () { return (this.currentVarScope().flags & SCOPE_FUNCTION) > 0 };
prototypeAccessors.inGenerator.get = function () { return (this.currentVarScope().flags & SCOPE_GENERATOR) > 0 && !this.currentVarScope().inClassFieldInit };
prototypeAccessors.inAsync.get = function () { return (this.currentVarScope().flags & SCOPE_ASYNC) > 0 && !this.currentVarScope().inClassFieldInit };
prototypeAccessors.canAwait.get = function () {
for (var i = this.scopeStack.length - 1; i >= 0; i--) {
var scope = this.scopeStack[i];
if (scope.inClassFieldInit || scope.flags & SCOPE_CLASS_STATIC_BLOCK) { return false }
if (scope.flags & SCOPE_FUNCTION) { return (scope.flags & SCOPE_ASYNC) > 0 }
}
return (this.inModule && this.options.ecmaVersion >= 13) || this.options.allowAwaitOutsideFunction
};
prototypeAccessors.allowSuper.get = function () {
var ref = this.currentThisScope();
var flags = ref.flags;
var inClassFieldInit = ref.inClassFieldInit;
return (flags & SCOPE_SUPER) > 0 || inClassFieldInit || this.options.allowSuperOutsideMethod
};
prototypeAccessors.allowDirectSuper.get = function () { return (this.currentThisScope().flags & SCOPE_DIRECT_SUPER) > 0 };
prototypeAccessors.treatFunctionsAsVar.get = function () { return this.treatFunctionsAsVarInScope(this.currentScope()) };
prototypeAccessors.allowNewDotTarget.get = function () {
var ref = this.currentThisScope();
var flags = ref.flags;
var inClassFieldInit = ref.inClassFieldInit;
return (flags & (SCOPE_FUNCTION | SCOPE_CLASS_STATIC_BLOCK)) > 0 || inClassFieldInit
};
prototypeAccessors.inClassStaticBlock.get = function () {
return (this.currentVarScope().flags & SCOPE_CLASS_STATIC_BLOCK) > 0
};
Parser$1.extend = function extend () {
var plugins = [], len = arguments.length;
while ( len-- ) plugins[ len ] = arguments[ len ];
var cls = this;
for (var i = 0; i < plugins.length; i++) { cls = plugins[i](cls); }
return cls
};
Parser$1.parse = function parse (input, options) {
return new this(options, input).parse()
};
Parser$1.parseExpressionAt = function parseExpressionAt (input, pos, options) {
var parser = new this(options, input, pos);
parser.nextToken();
return parser.parseExpression()
};
Parser$1.tokenizer = function tokenizer (input, options) {
return new this(options, input)
};
Object.defineProperties( Parser$1.prototype, prototypeAccessors );
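// Illustrative sketch, not part of the library: the static helpers above are the
// usual entry points. For example,
//   var ast  = Parser$1.parse("const x = 1", {ecmaVersion: 2020, sourceType: "module"});
//   var expr = Parser$1.parseExpressionAt("f(1, 2)", 0, {ecmaVersion: 2020});
// return a Program node and a CallExpression node respectively, while
// Parser$1.extend(plugin) returns a subclass with the plugin's overrides applied.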
var pp$9 = Parser$1.prototype;
// ## Parser utilities
var literal = /^(?:'((?:\\.|[^'\\])*?)'|"((?:\\.|[^"\\])*?)")/;
pp$9.strictDirective = function(start) {
if (this.options.ecmaVersion < 5) { return false }
for (;;) {
// Try to find string literal.
skipWhiteSpace.lastIndex = start;
start += skipWhiteSpace.exec(this.input)[0].length;
var match = literal.exec(this.input.slice(start));
if (!match) { return false }
if ((match[1] || match[2]) === "use strict") {
skipWhiteSpace.lastIndex = start + match[0].length;
var spaceAfter = skipWhiteSpace.exec(this.input), end = spaceAfter.index + spaceAfter[0].length;
var next = this.input.charAt(end);
return next === ";" || next === "}" ||
(lineBreak.test(spaceAfter[0]) &&
!(/[(`.[+\-/*%<>=,?^&]/.test(next) || next === "!" && this.input.charAt(end + 1) === "="))
}
start += match[0].length;
// Skip semicolon, if any.
skipWhiteSpace.lastIndex = start;
start += skipWhiteSpace.exec(this.input)[0].length;
if (this.input[start] === ";")
{ start++; }
}
};
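// Illustrative sketch, not part of the library: only a plain string literal in
// the directive prologue switches strict mode on, e.g.
//   function f() { "use strict"; }      // strict
//   function g() { ("use strict"); }    // not a directive: the statement starts with '('
//   function h() { "x"; "use strict"; } // still strict - earlier directives are skipped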
// Predicate that tests whether the next token is of the given
// type, and if yes, consumes it as a side effect.
pp$9.eat = function(type) {
if (this.type === type) {
this.next();
return true
} else {
return false
}
};
// Tests whether parsed token is a contextual keyword.
pp$9.isContextual = function(name) {
return this.type === types$1.name && this.value === name && !this.containsEsc
};
// Consumes contextual keyword if possible.
pp$9.eatContextual = function(name) {
if (!this.isContextual(name)) { return false }
this.next();
return true
};
// Asserts that following token is given contextual keyword.
pp$9.expectContextual = function(name) {
if (!this.eatContextual(name)) { this.unexpected(); }
};
// Test whether a semicolon can be inserted at the current position.
pp$9.canInsertSemicolon = function() {
return this.type === types$1.eof ||
this.type === types$1.braceR ||
lineBreak.test(this.input.slice(this.lastTokEnd, this.start))
};
pp$9.insertSemicolon = function() {
if (this.canInsertSemicolon()) {
if (this.options.onInsertedSemicolon)
{ this.options.onInsertedSemicolon(this.lastTokEnd, this.lastTokEndLoc); }
return true
}
};
// Consume a semicolon, or, failing that, see if we are allowed to
// pretend that there is a semicolon at this position.
pp$9.semicolon = function() {
if (!this.eat(types$1.semi) && !this.insertSemicolon()) { this.unexpected(); }
};
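// Illustrative sketch, not part of the library: automatic semicolon insertion
// applies when the next token is `}`, end-of-file, or sits on a new line:
//   "let a = 1\nlet b = 2"   // newline between the statements -> semicolon inserted
//   "let a = 1 let b = 2"    // same line, no separator -> "Unexpected token"
// In the first case `onInsertedSemicolon` receives the offset just after `1`.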
pp$9.afterTrailingComma = function(tokType, notNext) {
if (this.type === tokType) {
if (this.options.onTrailingComma)
{ this.options.onTrailingComma(this.lastTokStart, this.lastTokStartLoc); }
if (!notNext)
{ this.next(); }
return true
}
};
// Expect a token of a given type. If found, consume it, otherwise,
// raise an unexpected token error.
pp$9.expect = function(type) {
this.eat(type) || this.unexpected();
};
// Raise an unexpected token error.
pp$9.unexpected = function(pos) {
this.raise(pos != null ? pos : this.start, "Unexpected token");
};
var DestructuringErrors = function DestructuringErrors() {
this.shorthandAssign =
this.trailingComma =
this.parenthesizedAssign =
this.parenthesizedBind =
this.doubleProto =
-1;
};
pp$9.checkPatternErrors = function(refDestructuringErrors, isAssign) {
if (!refDestructuringErrors) { return }
if (refDestructuringErrors.trailingComma > -1)
{ this.raiseRecoverable(refDestructuringErrors.trailingComma, "Comma is not permitted after the rest element"); }
var parens = isAssign ? refDestructuringErrors.parenthesizedAssign : refDestructuringErrors.parenthesizedBind;
if (parens > -1) { this.raiseRecoverable(parens, isAssign ? "Assigning to rvalue" : "Parenthesized pattern"); }
};
pp$9.checkExpressionErrors = function(refDestructuringErrors, andThrow) {
if (!refDestructuringErrors) { return false }
var shorthandAssign = refDestructuringErrors.shorthandAssign;
var doubleProto = refDestructuringErrors.doubleProto;
if (!andThrow) { return shorthandAssign >= 0 || doubleProto >= 0 }
if (shorthandAssign >= 0)
{ this.raise(shorthandAssign, "Shorthand property assignments are valid only in destructuring patterns"); }
if (doubleProto >= 0)
{ this.raiseRecoverable(doubleProto, "Redefinition of __proto__ property"); }
};
pp$9.checkYieldAwaitInDefaultParams = function() {
if (this.yieldPos && (!this.awaitPos || this.yieldPos < this.awaitPos))
{ this.raise(this.yieldPos, "Yield expression cannot be a default value"); }
if (this.awaitPos)
{ this.raise(this.awaitPos, "Await expression cannot be a default value"); }
};
pp$9.isSimpleAssignTarget = function(expr) {
if (expr.type === "ParenthesizedExpression")
{ return this.isSimpleAssignTarget(expr.expression) }
return expr.type === "Identifier" || expr.type === "MemberExpression"
};
var pp$8 = Parser$1.prototype;
// ### Statement parsing
// Parse a program. Initializes the parser, reads any number of
// statements, and wraps them in a Program node. Optionally takes a
// `program` argument. If present, the statements will be appended
// to its body instead of creating a new node.
pp$8.parseTopLevel = function(node) {
var exports = Object.create(null);
if (!node.body) { node.body = []; }
while (this.type !== types$1.eof) {
var stmt = this.parseStatement(null, true, exports);
node.body.push(stmt);
}
if (this.inModule)
{ for (var i = 0, list = Object.keys(this.undefinedExports); i < list.length; i += 1)
{
var name = list[i];
this.raiseRecoverable(this.undefinedExports[name].start, ("Export '" + name + "' is not defined"));
} }
this.adaptDirectivePrologue(node.body);
this.next();
node.sourceType = this.options.sourceType;
return this.finishNode(node, "Program")
};
var loopLabel = {kind: "loop"}, switchLabel = {kind: "switch"};
pp$8.isLet = function(context) {
if (this.options.ecmaVersion < 6 || !this.isContextual("let")) { return false }
skipWhiteSpace.lastIndex = this.pos;
var skip = skipWhiteSpace.exec(this.input);
var next = this.pos + skip[0].length, nextCh = this.input.charCodeAt(next);
// For ambiguous cases, determine if a LexicalDeclaration (or only a
// Statement) is allowed here. If context is not empty then only a Statement
// is allowed. However, `let [` is an explicit negative lookahead for
// ExpressionStatement, so special-case it first.
if (nextCh === 91 || nextCh === 92) { return true } // '[', '\'
if (context) { return false }
if (nextCh === 123 || nextCh > 0xd7ff && nextCh < 0xdc00) { return true } // '{', astral
if (isIdentifierStart(nextCh, true)) {
var pos = next + 1;
while (isIdentifierChar(nextCh = this.input.charCodeAt(pos), true)) { ++pos; }
if (nextCh === 92 || nextCh > 0xd7ff && nextCh < 0xdc00) { return true }
var ident = this.input.slice(next, pos);
if (!keywordRelationalOperator.test(ident)) { return true }
}
return false
};
// check 'async [no LineTerminator here] function'
// - 'async /*foo*/ function' is OK.
// - 'async /*\n*/ function' is invalid.
pp$8.isAsyncFunction = function() {
if (this.options.ecmaVersion < 8 || !this.isContextual("async"))
{ return false }
skipWhiteSpace.lastIndex = this.pos;
var skip = skipWhiteSpace.exec(this.input);
var next = this.pos + skip[0].length, after;
return !lineBreak.test(this.input.slice(this.pos, next)) &&
this.input.slice(next, next + 8) === "function" &&
(next + 8 === this.input.length ||
!(isIdentifierChar(after = this.input.charCodeAt(next + 8)) || after > 0xd7ff && after < 0xdc00))
};
// Parse a single statement.
//
// If expecting a statement and finding a slash operator, parse a
// regular expression literal. This is to handle cases like
// `if (foo) /blah/.exec(foo)`, where looking at the previous token
// does not help.
pp$8.parseStatement = function(context, topLevel, exports) {
var starttype = this.type, node = this.startNode(), kind;
if (this.isLet(context)) {
starttype = types$1._var;
kind = "let";
}
// Most types of statements are recognized by the keyword they
// start with. Many are trivial to parse, some require a bit of
// complexity.
switch (starttype) {
case types$1._break: case types$1._continue: return this.parseBreakContinueStatement(node, starttype.keyword)
case types$1._debugger: return this.parseDebuggerStatement(node)
case types$1._do: return this.parseDoStatement(node)
case types$1._for: return this.parseForStatement(node)
case types$1._function:
// Function as sole body of either an if statement or a labeled statement
// works, but not when it is part of a labeled statement that is the sole
// body of an if statement.
if ((context && (this.strict || context !== "if" && context !== "label")) && this.options.ecmaVersion >= 6) { this.unexpected(); }
return this.parseFunctionStatement(node, false, !context)
case types$1._class:
if (context) { this.unexpected(); }
return this.parseClass(node, true)
case types$1._if: return this.parseIfStatement(node)
case types$1._return: return this.parseReturnStatement(node)
case types$1._switch: return this.parseSwitchStatement(node)
case types$1._throw: return this.parseThrowStatement(node)
case types$1._try: return this.parseTryStatement(node)
case types$1._const: case types$1._var:
kind = kind || this.value;
if (context && kind !== "var") { this.unexpected(); }
return this.parseVarStatement(node, kind)
case types$1._while: return this.parseWhileStatement(node)
case types$1._with: return this.parseWithStatement(node)
case types$1.braceL: return this.parseBlock(true, node)
case types$1.semi: return this.parseEmptyStatement(node)
case types$1._export:
case types$1._import:
if (this.options.ecmaVersion > 10 && starttype === types$1._import) {
skipWhiteSpace.lastIndex = this.pos;
var skip = skipWhiteSpace.exec(this.input);
var next = this.pos + skip[0].length, nextCh = this.input.charCodeAt(next);
if (nextCh === 40 || nextCh === 46) // '(' or '.'
{ return this.parseExpressionStatement(node, this.parseExpression()) }
}
if (!this.options.allowImportExportEverywhere) {
if (!topLevel)
{ this.raise(this.start, "'import' and 'export' may only appear at the top level"); }
if (!this.inModule)
{ this.raise(this.start, "'import' and 'export' may appear only with 'sourceType: module'"); }
}
return starttype === types$1._import ? this.parseImport(node) : this.parseExport(node, exports)
// If the statement does not start with a statement keyword or a
// brace, it's an ExpressionStatement or LabeledStatement. We
// simply start parsing an expression, and afterwards, if the
// next token is a colon and the expression was a simple
// Identifier node, we switch to interpreting it as a label.
default:
if (this.isAsyncFunction()) {
if (context) { this.unexpected(); }
this.next();
return this.parseFunctionStatement(node, true, !context)
}
var maybeName = this.value, expr = this.parseExpression();
if (starttype === types$1.name && expr.type === "Identifier" && this.eat(types$1.colon))
{ return this.parseLabeledStatement(node, maybeName, expr, context) }
else { return this.parseExpressionStatement(node, expr) }
}
};
pp$8.parseBreakContinueStatement = function(node, keyword) {
var isBreak = keyword === "break";
this.next();
if (this.eat(types$1.semi) || this.insertSemicolon()) { node.label = null; }
else if (this.type !== types$1.name) { this.unexpected(); }
else {
node.label = this.parseIdent();
this.semicolon();
}
// Verify that there is an actual destination to break or
// continue to.
var i = 0;
for (; i < this.labels.length; ++i) {
var lab = this.labels[i];
if (node.label == null || lab.name === node.label.name) {
if (lab.kind != null && (isBreak || lab.kind === "loop")) { break }
if (node.label && isBreak) { break }
}
}
if (i === this.labels.length) { this.raise(node.start, "Unsyntactic " + keyword); }
return this.finishNode(node, isBreak ? "BreakStatement" : "ContinueStatement")
};
pp$8.parseDebuggerStatement = function(node) {
this.next();
this.semicolon();
return this.finishNode(node, "DebuggerStatement")
};
pp$8.parseDoStatement = function(node) {
this.next();
this.labels.push(loopLabel);
node.body = this.parseStatement("do");
this.labels.pop();
this.expect(types$1._while);
node.test = this.parseParenExpression();
if (this.options.ecmaVersion >= 6)
{ this.eat(types$1.semi); }
else
{ this.semicolon(); }
return this.finishNode(node, "DoWhileStatement")
};
// Disambiguating between a `for` and a `for`/`in` or `for`/`of`
// loop is non-trivial. Basically, we have to parse the init `var`
// statement or expression, disallowing the `in` operator (see
// the second parameter to `parseExpression`), and then check
// whether the next token is `in` or `of`. When there is no init
// part (semicolon immediately after the opening parenthesis), it
// is a regular `for` loop.
pp$8.parseForStatement = function(node) {
this.next();
var awaitAt = (this.options.ecmaVersion >= 9 && this.canAwait && this.eatContextual("await")) ? this.lastTokStart : -1;
this.labels.push(loopLabel);
this.enterScope(0);
this.expect(types$1.parenL);
if (this.type === types$1.semi) {
if (awaitAt > -1) { this.unexpected(awaitAt); }
return this.parseFor(node, null)
}
var isLet = this.isLet();
if (this.type === types$1._var || this.type === types$1._const || isLet) {
var init$1 = this.startNode(), kind = isLet ? "let" : this.value;
this.next();
this.parseVar(init$1, true, kind);
this.finishNode(init$1, "VariableDeclaration");
if ((this.type === types$1._in || (this.options.ecmaVersion >= 6 && this.isContextual("of"))) && init$1.declarations.length === 1) {
if (this.options.ecmaVersion >= 9) {
if (this.type === types$1._in) {
if (awaitAt > -1) { this.unexpected(awaitAt); }
} else { node.await = awaitAt > -1; }
}
return this.parseForIn(node, init$1)
}
if (awaitAt > -1) { this.unexpected(awaitAt); }
return this.parseFor(node, init$1)
}
var startsWithLet = this.isContextual("let"), isForOf = false;
var refDestructuringErrors = new DestructuringErrors;
var init = this.parseExpression(awaitAt > -1 ? "await" : true, refDestructuringErrors);
if (this.type === types$1._in || (isForOf = this.options.ecmaVersion >= 6 && this.isContextual("of"))) {
if (this.options.ecmaVersion >= 9) {
if (this.type === types$1._in) {
if (awaitAt > -1) { this.unexpected(awaitAt); }
} else { node.await = awaitAt > -1; }
}
if (startsWithLet && isForOf) { this.raise(init.start, "The left-hand side of a for-of loop may not start with 'let'."); }
this.toAssignable(init, false, refDestructuringErrors);
this.checkLValPattern(init);
return this.parseForIn(node, init)
} else {
this.checkExpressionErrors(refDestructuringErrors, true);
}
if (awaitAt > -1) { this.unexpected(awaitAt); }
return this.parseFor(node, init)
};
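// Illustrative sketch, not part of the library: the lookahead above sorts the
// `for` forms apart after parsing only the init part:
//   for (let i = 0; i < n; i++) {}   // plain ForStatement (next token is ';')
//   for (const k in obj) {}          // ForInStatement
//   for await (const x of stream) {} // ForOfStatement with node.await === true (where `await` is allowed)
// An expression left-hand side that starts with `let` is rejected in a for-of loop.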
pp$8.parseFunctionStatement = function(node, isAsync, declarationPosition) {
this.next();
return this.parseFunction(node, FUNC_STATEMENT | (declarationPosition ? 0 : FUNC_HANGING_STATEMENT), false, isAsync)
};
pp$8.parseIfStatement = function(node) {
this.next();
node.test = this.parseParenExpression();
// allow function declarations in branches, but only in non-strict mode
node.consequent = this.parseStatement("if");
node.alternate = this.eat(types$1._else) ? this.parseStatement("if") : null;
return this.finishNode(node, "IfStatement")
};
pp$8.parseReturnStatement = function(node) {
if (!this.inFunction && !this.options.allowReturnOutsideFunction)
{ this.raise(this.start, "'return' outside of function"); }
this.next();
// For `return` (and `break`/`continue`), keywords that take
// optional arguments, we eagerly look for a semicolon or the
// possibility to insert one.
if (this.eat(types$1.semi) || this.insertSemicolon()) { node.argument = null; }
else { node.argument = this.parseExpression(); this.semicolon(); }
return this.finishNode(node, "ReturnStatement")
};
pp$8.parseSwitchStatement = function(node) {
this.next();
node.discriminant = this.parseParenExpression();
node.cases = [];
this.expect(types$1.braceL);
this.labels.push(switchLabel);
this.enterScope(0);
// Statements under the switch must be grouped (by label) in SwitchCase
// nodes. `cur` is used to keep the node that we are currently
// adding statements to.
var cur;
for (var sawDefault = false; this.type !== types$1.braceR;) {
if (this.type === types$1._case || this.type === types$1._default) {
var isCase = this.type === types$1._case;
if (cur) { this.finishNode(cur, "SwitchCase"); }
node.cases.push(cur = this.startNode());
cur.consequent = [];
this.next();
if (isCase) {
cur.test = this.parseExpression();
} else {
if (sawDefault) { this.raiseRecoverable(this.lastTokStart, "Multiple default clauses"); }
sawDefault = true;
cur.test = null;
}
this.expect(types$1.colon);
} else {
if (!cur) { this.unexpected(); }
cur.consequent.push(this.parseStatement(null));
}
}
this.exitScope();
if (cur) { this.finishNode(cur, "SwitchCase"); }
this.next(); // Closing brace
this.labels.pop();
return this.finishNode(node, "SwitchStatement")
};
pp$8.parseThrowStatement = function(node) {
this.next();
if (lineBreak.test(this.input.slice(this.lastTokEnd, this.start)))
{ this.raise(this.lastTokEnd, "Illegal newline after throw"); }
node.argument = this.parseExpression();
this.semicolon();
return this.finishNode(node, "ThrowStatement")
};
// Reused empty array added for node fields that are always empty.
var empty$1 = [];
pp$8.parseCatchClauseParam = function() {
var param = this.parseBindingAtom();
var simple = param.type === "Identifier";
this.enterScope(simple ? SCOPE_SIMPLE_CATCH : 0);
this.checkLValPattern(param, simple ? BIND_SIMPLE_CATCH : BIND_LEXICAL);
this.expect(types$1.parenR);
return param
};
pp$8.parseTryStatement = function(node) {
this.next();
node.block = this.parseBlock();
node.handler = null;
if (this.type === types$1._catch) {
var clause = this.startNode();
this.next();
if (this.eat(types$1.parenL)) {
clause.param = this.parseCatchClauseParam();
} else {
if (this.options.ecmaVersion < 10) { this.unexpected(); }
clause.param = null;
this.enterScope(0);
}
clause.body = this.parseBlock(false);
this.exitScope();
node.handler = this.finishNode(clause, "CatchClause");
}
node.finalizer = this.eat(types$1._finally) ? this.parseBlock() : null;
if (!node.handler && !node.finalizer)
{ this.raise(node.start, "Missing catch or finally clause"); }
return this.finishNode(node, "TryStatement")
};
pp$8.parseVarStatement = function(node, kind, allowMissingInitializer) {
this.next();
this.parseVar(node, false, kind, allowMissingInitializer);
this.semicolon();
return this.finishNode(node, "VariableDeclaration")
};
pp$8.parseWhileStatement = function(node) {
this.next();
node.test = this.parseParenExpression();
this.labels.push(loopLabel);
node.body = this.parseStatement("while");
this.labels.pop();
return this.finishNode(node, "WhileStatement")
};
pp$8.parseWithStatement = function(node) {
if (this.strict) { this.raise(this.start, "'with' in strict mode"); }
this.next();
node.object = this.parseParenExpression();
node.body = this.parseStatement("with");
return this.finishNode(node, "WithStatement")
};
pp$8.parseEmptyStatement = function(node) {
this.next();
return this.finishNode(node, "EmptyStatement")
};
pp$8.parseLabeledStatement = function(node, maybeName, expr, context) {
for (var i$1 = 0, list = this.labels; i$1 < list.length; i$1 += 1)
{
var label = list[i$1];
if (label.name === maybeName)
{ this.raise(expr.start, "Label '" + maybeName + "' is already declared");
} }
var kind = this.type.isLoop ? "loop" : this.type === types$1._switch ? "switch" : null;
for (var i = this.labels.length - 1; i >= 0; i--) {
var label$1 = this.labels[i];
if (label$1.statementStart === node.start) {
// Update information about previous labels on this node
label$1.statementStart = this.start;
label$1.kind = kind;
} else { break }
}
this.labels.push({name: maybeName, kind: kind, statementStart: this.start});
node.body = this.parseStatement(context ? context.indexOf("label") === -1 ? context + "label" : context : "label");
this.labels.pop();
node.label = expr;
return this.finishNode(node, "LabeledStatement")
};
pp$8.parseExpressionStatement = function(node, expr) {
node.expression = expr;
this.semicolon();
return this.finishNode(node, "ExpressionStatement")
};
// Parse a brace-enclosed block of statements, handling `"use
// strict"` declarations when `allowStrict` is true (used for
// function bodies).
pp$8.parseBlock = function(createNewLexicalScope, node, exitStrict) {
if ( createNewLexicalScope === void 0 ) createNewLexicalScope = true;
if ( node === void 0 ) node = this.startNode();
node.body = [];
this.expect(types$1.braceL);
if (createNewLexicalScope) { this.enterScope(0); }
while (this.type !== types$1.braceR) {
var stmt = this.parseStatement(null);
node.body.push(stmt);
}
if (exitStrict) { this.strict = false; }
this.next();
if (createNewLexicalScope) { this.exitScope(); }
return this.finishNode(node, "BlockStatement")
};
// Parse a regular `for` loop. The disambiguation code in
// `parseStatement` will already have parsed the init statement or
// expression.
pp$8.parseFor = function(node, init) {
node.init = init;
this.expect(types$1.semi);
node.test = this.type === types$1.semi ? null : this.parseExpression();
this.expect(types$1.semi);
node.update = this.type === types$1.parenR ? null : this.parseExpression();
this.expect(types$1.parenR);
node.body = this.parseStatement("for");
this.exitScope();
this.labels.pop();
return this.finishNode(node, "ForStatement")
};
// Parse a `for`/`in` and `for`/`of` loop, which are almost the
// same from the parser's perspective.
pp$8.parseForIn = function(node, init) {
var isForIn = this.type === types$1._in;
this.next();
if (
init.type === "VariableDeclaration" &&
init.declarations[0].init != null &&
(
!isForIn ||
this.options.ecmaVersion < 8 ||
this.strict ||
init.kind !== "var" ||
init.declarations[0].id.type !== "Identifier"
)
) {
this.raise(
init.start,
((isForIn ? "for-in" : "for-of") + " loop variable declaration may not have an initializer")
);
}
node.left = init;
node.right = isForIn ? this.parseExpression() : this.parseMaybeAssign();
this.expect(types$1.parenR);
node.body = this.parseStatement("for");
this.exitScope();
this.labels.pop();
return this.finishNode(node, isForIn ? "ForInStatement" : "ForOfStatement")
};
// Parse a list of variable declarations.
pp$8.parseVar = function(node, isFor, kind, allowMissingInitializer) {
node.declarations = [];
node.kind = kind;
for (;;) {
var decl = this.startNode();
this.parseVarId(decl, kind);
if (this.eat(types$1.eq)) {
decl.init = this.parseMaybeAssign(isFor);
} else if (!allowMissingInitializer && kind === "const" && !(this.type === types$1._in || (this.options.ecmaVersion >= 6 && this.isContextual("of")))) {
this.unexpected();
} else if (!allowMissingInitializer && decl.id.type !== "Identifier" && !(isFor && (this.type === types$1._in || this.isContextual("of")))) {
this.raise(this.lastTokEnd, "Complex binding patterns require an initialization value");
} else {
decl.init = null;
}
node.declarations.push(this.finishNode(decl, "VariableDeclarator"));
if (!this.eat(types$1.comma)) { break }
}
return node
};
pp$8.parseVarId = function(decl, kind) {
decl.id = this.parseBindingAtom();
this.checkLValPattern(decl.id, kind === "var" ? BIND_VAR : BIND_LEXICAL, false);
};
var FUNC_STATEMENT = 1, FUNC_HANGING_STATEMENT = 2, FUNC_NULLABLE_ID = 4;
// Parse a function declaration or literal (depending on the
// `statement & FUNC_STATEMENT`).
// Remove `allowExpressionBody` for 7.0.0, as it is only called with false
pp$8.parseFunction = function(node, statement, allowExpressionBody, isAsync, forInit) {
this.initFunction(node);
if (this.options.ecmaVersion >= 9 || this.options.ecmaVersion >= 6 && !isAsync) {
if (this.type === types$1.star && (statement & FUNC_HANGING_STATEMENT))
{ this.unexpected(); }
node.generator = this.eat(types$1.star);
}
if (this.options.ecmaVersion >= 8)
{ node.async = !!isAsync; }
if (statement & FUNC_STATEMENT) {
node.id = (statement & FUNC_NULLABLE_ID) && this.type !== types$1.name ? null : this.parseIdent();
if (node.id && !(statement & FUNC_HANGING_STATEMENT))
// If it is a regular function declaration in sloppy mode, then it is
// subject to Annex B semantics (BIND_FUNCTION). Otherwise, the binding
// mode depends on properties of the current scope (see
// treatFunctionsAsVar).
{ this.checkLValSimple(node.id, (this.strict || node.generator || node.async) ? this.treatFunctionsAsVar ? BIND_VAR : BIND_LEXICAL : BIND_FUNCTION); }
}
var oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, oldAwaitIdentPos = this.awaitIdentPos;
this.yieldPos = 0;
this.awaitPos = 0;
this.awaitIdentPos = 0;
this.enterScope(functionFlags(node.async, node.generator));
if (!(statement & FUNC_STATEMENT))
{ node.id = this.type === types$1.name ? this.parseIdent() : null; }
this.parseFunctionParams(node);
this.parseFunctionBody(node, allowExpressionBody, false, forInit);
this.yieldPos = oldYieldPos;
this.awaitPos = oldAwaitPos;
this.awaitIdentPos = oldAwaitIdentPos;
return this.finishNode(node, (statement & FUNC_STATEMENT) ? "FunctionDeclaration" : "FunctionExpression")
};
pp$8.parseFunctionParams = function(node) {
this.expect(types$1.parenL);
node.params = this.parseBindingList(types$1.parenR, false, this.options.ecmaVersion >= 8);
this.checkYieldAwaitInDefaultParams();
};
// Parse a class declaration or literal (depending on the
// `isStatement` parameter).
pp$8.parseClass = function(node, isStatement) {
this.next();
// ecma-262 14.6 Class Definitions
// A class definition is always strict mode code.
var oldStrict = this.strict;
this.strict = true;
this.parseClassId(node, isStatement);
this.parseClassSuper(node);
var privateNameMap = this.enterClassBody();
var classBody = this.startNode();
var hadConstructor = false;
classBody.body = [];
this.expect(types$1.braceL);
while (this.type !== types$1.braceR) {
var element = this.parseClassElement(node.superClass !== null);
if (element) {
classBody.body.push(element);
if (element.type === "MethodDefinition" && element.kind === "constructor") {
if (hadConstructor) { this.raiseRecoverable(element.start, "Duplicate constructor in the same class"); }
hadConstructor = true;
} else if (element.key && element.key.type === "PrivateIdentifier" && isPrivateNameConflicted(privateNameMap, element)) {
this.raiseRecoverable(element.key.start, ("Identifier '#" + (element.key.name) + "' has already been declared"));
}
}
}
this.strict = oldStrict;
this.next();
node.body = this.finishNode(classBody, "ClassBody");
this.exitClassBody();
return this.finishNode(node, isStatement ? "ClassDeclaration" : "ClassExpression")
};
pp$8.parseClassElement = function(constructorAllowsSuper) {
if (this.eat(types$1.semi)) { return null }
var ecmaVersion = this.options.ecmaVersion;
var node = this.startNode();
var keyName = "";
var isGenerator = false;
var isAsync = false;
var kind = "method";
var isStatic = false;
if (this.eatContextual("static")) {
// Parse static init block
if (ecmaVersion >= 13 && this.eat(types$1.braceL)) {
this.parseClassStaticBlock(node);
return node
}
if (this.isClassElementNameStart() || this.type === types$1.star) {
isStatic = true;
} else {
keyName = "static";
}
}
node.static = isStatic;
if (!keyName && ecmaVersion >= 8 && this.eatContextual("async")) {
if ((this.isClassElementNameStart() || this.type === types$1.star) && !this.canInsertSemicolon()) {
isAsync = true;
} else {
keyName = "async";
}
}
if (!keyName && (ecmaVersion >= 9 || !isAsync) && this.eat(types$1.star)) {
isGenerator = true;
}
if (!keyName && !isAsync && !isGenerator) {
var lastValue = this.value;
if (this.eatContextual("get") || this.eatContextual("set")) {
if (this.isClassElementNameStart()) {
kind = lastValue;
} else {
keyName = lastValue;
}
}
}
// Parse element name
if (keyName) {
// 'async', 'get', 'set', or 'static' was not used as a contextual keyword here.
// The last token is one of those; make it the element name.
node.computed = false;
node.key = this.startNodeAt(this.lastTokStart, this.lastTokStartLoc);
node.key.name = keyName;
this.finishNode(node.key, "Identifier");
} else {
this.parseClassElementName(node);
}
// Parse element value
if (ecmaVersion < 13 || this.type === types$1.parenL || kind !== "method" || isGenerator || isAsync) {
var isConstructor = !node.static && checkKeyName(node, "constructor");
var allowsDirectSuper = isConstructor && constructorAllowsSuper;
// Couldn't move this check into the 'parseClassMethod' method for backward compatibility.
if (isConstructor && kind !== "method") { this.raise(node.key.start, "Constructor can't have get/set modifier"); }
node.kind = isConstructor ? "constructor" : kind;
this.parseClassMethod(node, isGenerator, isAsync, allowsDirectSuper);
} else {
this.parseClassField(node);
}
return node
};
pp$8.isClassElementNameStart = function() {
return (
this.type === types$1.name ||
this.type === types$1.privateId ||
this.type === types$1.num ||
this.type === types$1.string ||
this.type === types$1.bracketL ||
this.type.keyword
)
};
pp$8.parseClassElementName = function(element) {
if (this.type === types$1.privateId) {
if (this.value === "constructor") {
this.raise(this.start, "Classes can't have an element named '#constructor'");
}
element.computed = false;
element.key = this.parsePrivateIdent();
} else {
this.parsePropertyName(element);
}
};
pp$8.parseClassMethod = function(method, isGenerator, isAsync, allowsDirectSuper) {
// Check key and flags
var key = method.key;
if (method.kind === "constructor") {
if (isGenerator) { this.raise(key.start, "Constructor can't be a generator"); }
if (isAsync) { this.raise(key.start, "Constructor can't be an async method"); }
} else if (method.static && checkKeyName(method, "prototype")) {
this.raise(key.start, "Classes may not have a static property named prototype");
}
// Parse value
var value = method.value = this.parseMethod(isGenerator, isAsync, allowsDirectSuper);
// Check value
if (method.kind === "get" && value.params.length !== 0)
{ this.raiseRecoverable(value.start, "getter should have no params"); }
if (method.kind === "set" && value.params.length !== 1)
{ this.raiseRecoverable(value.start, "setter should have exactly one param"); }
if (method.kind === "set" && value.params[0].type === "RestElement")
{ this.raiseRecoverable(value.params[0].start, "Setter cannot use rest params"); }
return this.finishNode(method, "MethodDefinition")
};
pp$8.parseClassField = function(field) {
if (checkKeyName(field, "constructor")) {
this.raise(field.key.start, "Classes can't have a field named 'constructor'");
} else if (field.static && checkKeyName(field, "prototype")) {
this.raise(field.key.start, "Classes can't have a static field named 'prototype'");
}
if (this.eat(types$1.eq)) {
// To raise SyntaxError if 'arguments' exists in the initializer.
var scope = this.currentThisScope();
var inClassFieldInit = scope.inClassFieldInit;
scope.inClassFieldInit = true;
field.value = this.parseMaybeAssign();
scope.inClassFieldInit = inClassFieldInit;
} else {
field.value = null;
}
this.semicolon();
return this.finishNode(field, "PropertyDefinition")
};
pp$8.parseClassStaticBlock = function(node) {
node.body = [];
var oldLabels = this.labels;
this.labels = [];
this.enterScope(SCOPE_CLASS_STATIC_BLOCK | SCOPE_SUPER);
while (this.type !== types$1.braceR) {
var stmt = this.parseStatement(null);
node.body.push(stmt);
}
this.next();
this.exitScope();
this.labels = oldLabels;
return this.finishNode(node, "StaticBlock")
};
pp$8.parseClassId = function(node, isStatement) {
if (this.type === types$1.name) {
node.id = this.parseIdent();
if (isStatement)
{ this.checkLValSimple(node.id, BIND_LEXICAL, false); }
} else {
if (isStatement === true)
{ this.unexpected(); }
node.id = null;
}
};
pp$8.parseClassSuper = function(node) {
node.superClass = this.eat(types$1._extends) ? this.parseExprSubscripts(null, false) : null;
};
pp$8.enterClassBody = function() {
var element = {declared: Object.create(null), used: []};
this.privateNameStack.push(element);
return element.declared
};
pp$8.exitClassBody = function() {
var ref = this.privateNameStack.pop();
var declared = ref.declared;
var used = ref.used;
if (!this.options.checkPrivateFields) { return }
var len = this.privateNameStack.length;
var parent = len === 0 ? null : this.privateNameStack[len - 1];
for (var i = 0; i < used.length; ++i) {
var id = used[i];
if (!hasOwn(declared, id.name)) {
if (parent) {
parent.used.push(id);
} else {
this.raiseRecoverable(id.start, ("Private field '#" + (id.name) + "' must be declared in an enclosing class"));
}
}
}
};
function isPrivateNameConflicted(privateNameMap, element) {
var name = element.key.name;
var curr = privateNameMap[name];
var next = "true";
if (element.type === "MethodDefinition" && (element.kind === "get" || element.kind === "set")) {
next = (element.static ? "s" : "i") + element.kind;
}
// `class { get #a(){}; static set #a(_){} }` is also a conflict.
if (
curr === "iget" && next === "iset" ||
curr === "iset" && next === "iget" ||
curr === "sget" && next === "sset" ||
curr === "sset" && next === "sget"
) {
privateNameMap[name] = "true";
return false
} else if (!curr) {
privateNameMap[name] = next;
return false
} else {
return true
}
}
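// Illustrative sketch, not part of the library: a getter/setter pair may share a
// private name, anything else counts as a redeclaration:
//   class A { get #x() {} set #x(v) {} }         // ok - "iget" then "iset"
//   class B { #x = 1; #x() {} }                   // "Identifier '#x' has already been declared"
//   class C { get #x() {} static set #x(v) {} }   // also a conflict (instance vs. static)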
function checkKeyName(node, name) {
var computed = node.computed;
var key = node.key;
return !computed && (
key.type === "Identifier" && key.name === name ||
key.type === "Literal" && key.value === name
)
}
// Parses module export declaration.
pp$8.parseExportAllDeclaration = function(node, exports) {
if (this.options.ecmaVersion >= 11) {
if (this.eatContextual("as")) {
node.exported = this.parseModuleExportName();
this.checkExport(exports, node.exported, this.lastTokStart);
} else {
node.exported = null;
}
}
this.expectContextual("from");
if (this.type !== types$1.string) { this.unexpected(); }
node.source = this.parseExprAtom();
this.semicolon();
return this.finishNode(node, "ExportAllDeclaration")
};
pp$8.parseExport = function(node, exports) {
this.next();
// export * from '...'
if (this.eat(types$1.star)) {
return this.parseExportAllDeclaration(node, exports)
}
if (this.eat(types$1._default)) { // export default ...
this.checkExport(exports, "default", this.lastTokStart);
node.declaration = this.parseExportDefaultDeclaration();
return this.finishNode(node, "ExportDefaultDeclaration")
}
// export var|const|let|function|class ...
if (this.shouldParseExportStatement()) {
node.declaration = this.parseExportDeclaration(node);
if (node.declaration.type === "VariableDeclaration")
{ this.checkVariableExport(exports, node.declaration.declarations); }
else
{ this.checkExport(exports, node.declaration.id, node.declaration.id.start); }
node.specifiers = [];
node.source = null;
} else { // export { x, y as z } [from '...']
node.declaration = null;
node.specifiers = this.parseExportSpecifiers(exports);
if (this.eatContextual("from")) {
if (this.type !== types$1.string) { this.unexpected(); }
node.source = this.parseExprAtom();
} else {
for (var i = 0, list = node.specifiers; i < list.length; i += 1) {
// check for keywords used as local names
var spec = list[i];
this.checkUnreserved(spec.local);
// check if export is defined
this.checkLocalExport(spec.local);
if (spec.local.type === "Literal") {
this.raise(spec.local.start, "A string literal cannot be used as an exported binding without `from`.");
}
}
node.source = null;
}
this.semicolon();
}
return this.finishNode(node, "ExportNamedDeclaration")
};
pp$8.parseExportDeclaration = function(node) {
return this.parseStatement(null)
};
pp$8.parseExportDefaultDeclaration = function() {
var isAsync;
if (this.type === types$1._function || (isAsync = this.isAsyncFunction())) {
var fNode = this.startNode();
this.next();
if (isAsync) { this.next(); }
return this.parseFunction(fNode, FUNC_STATEMENT | FUNC_NULLABLE_ID, false, isAsync)
} else if (this.type === types$1._class) {
var cNode = this.startNode();
return this.parseClass(cNode, "nullableID")
} else {
var declaration = this.parseMaybeAssign();
this.semicolon();
return declaration
}
};
pp$8.checkExport = function(exports, name, pos) {
if (!exports) { return }
if (typeof name !== "string")
{ name = name.type === "Identifier" ? name.name : name.value; }
if (hasOwn(exports, name))
{ this.raiseRecoverable(pos, "Duplicate export '" + name + "'"); }
exports[name] = true;
};
pp$8.checkPatternExport = function(exports, pat) {
var type = pat.type;
if (type === "Identifier")
{ this.checkExport(exports, pat, pat.start); }
else if (type === "ObjectPattern")
{ for (var i = 0, list = pat.properties; i < list.length; i += 1)
{
var prop = list[i];
this.checkPatternExport(exports, prop);
} }
else if (type === "ArrayPattern")
{ for (var i$1 = 0, list$1 = pat.elements; i$1 < list$1.length; i$1 += 1) {
var elt = list$1[i$1];
if (elt) { this.checkPatternExport(exports, elt); }
} }
else if (type === "Property")
{ this.checkPatternExport(exports, pat.value); }
else if (type === "AssignmentPattern")
{ this.checkPatternExport(exports, pat.left); }
else if (type === "RestElement")
{ this.checkPatternExport(exports, pat.argument); }
else if (type === "ParenthesizedExpression")
{ this.checkPatternExport(exports, pat.expression); }
};
pp$8.checkVariableExport = function(exports, decls) {
if (!exports) { return }
for (var i = 0, list = decls; i < list.length; i += 1)
{
var decl = list[i];
this.checkPatternExport(exports, decl.id);
}
};
pp$8.shouldParseExportStatement = function() {
return this.type.keyword === "var" ||
this.type.keyword === "const" ||
this.type.keyword === "class" ||
this.type.keyword === "function" ||
this.isLet() ||
this.isAsyncFunction()
};
// Parses a comma-separated list of module exports.
pp$8.parseExportSpecifier = function(exports) {
var node = this.startNode();
node.local = this.parseModuleExportName();
node.exported = this.eatContextual("as") ? this.parseModuleExportName() : node.local;
this.checkExport(
exports,
node.exported,
node.exported.start
);
return this.finishNode(node, "ExportSpecifier")
};
pp$8.parseExportSpecifiers = function(exports) {
var nodes = [], first = true;
// export { x, y as z } [from '...']
this.expect(types$1.braceL);
while (!this.eat(types$1.braceR)) {
if (!first) {
this.expect(types$1.comma);
if (this.afterTrailingComma(types$1.braceR)) { break }
} else { first = false; }
nodes.push(this.parseExportSpecifier(exports));
}
return nodes
};
// Parses import declaration.
pp$8.parseImport = function(node) {
this.next();
// import '...'
if (this.type === types$1.string) {
node.specifiers = empty$1;
node.source = this.parseExprAtom();
} else {
node.specifiers = this.parseImportSpecifiers();
this.expectContextual("from");
node.source = this.type === types$1.string ? this.parseExprAtom() : this.unexpected();
}
this.semicolon();
return this.finishNode(node, "ImportDeclaration")
};
// Parses a comma-separated list of module imports.
pp$8.parseImportSpecifier = function() {
var node = this.startNode();
node.imported = this.parseModuleExportName();
if (this.eatContextual("as")) {
node.local = this.parseIdent();
} else {
this.checkUnreserved(node.imported);
node.local = node.imported;
}
this.checkLValSimple(node.local, BIND_LEXICAL);
return this.finishNode(node, "ImportSpecifier")
};
pp$8.parseImportDefaultSpecifier = function() {
// import defaultObj, { x, y as z } from '...'
var node = this.startNode();
node.local = this.parseIdent();
this.checkLValSimple(node.local, BIND_LEXICAL);
return this.finishNode(node, "ImportDefaultSpecifier")
};
pp$8.parseImportNamespaceSpecifier = function() {
var node = this.startNode();
this.next();
this.expectContextual("as");
node.local = this.parseIdent();
this.checkLValSimple(node.local, BIND_LEXICAL);
return this.finishNode(node, "ImportNamespaceSpecifier")
};
pp$8.parseImportSpecifiers = function() {
var nodes = [], first = true;
if (this.type === types$1.name) {
nodes.push(this.parseImportDefaultSpecifier());
if (!this.eat(types$1.comma)) { return nodes }
}
if (this.type === types$1.star) {
nodes.push(this.parseImportNamespaceSpecifier());
return nodes
}
this.expect(types$1.braceL);
while (!this.eat(types$1.braceR)) {
if (!first) {
this.expect(types$1.comma);
if (this.afterTrailingComma(types$1.braceR)) { break }
} else { first = false; }
nodes.push(this.parseImportSpecifier());
}
return nodes
};
pp$8.parseModuleExportName = function() {
if (this.options.ecmaVersion >= 13 && this.type === types$1.string) {
var stringLiteral = this.parseLiteral(this.value);
if (loneSurrogate.test(stringLiteral.value)) {
this.raise(stringLiteral.start, "An export name cannot include a lone surrogate.");
}
return stringLiteral
}
return this.parseIdent(true)
};
// Set `ExpressionStatement#directive` property for directive prologues.
pp$8.adaptDirectivePrologue = function(statements) {
for (var i = 0; i < statements.length && this.isDirectiveCandidate(statements[i]); ++i) {
statements[i].directive = statements[i].expression.raw.slice(1, -1);
}
};
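// For example, in a function body that starts with `"use strict"; foo();`,
// the first ExpressionStatement receives `directive: "use strict"` (the raw
// source with the surrounding quotes sliced off), while `foo();` is not a
// directive candidate, so the loop stops and it gets no `directive` property.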
pp$8.isDirectiveCandidate = function(statement) {
return (
this.options.ecmaVersion >= 5 &&
statement.type === "ExpressionStatement" &&
statement.expression.type === "Literal" &&
typeof statement.expression.value === "string" &&
// Reject parenthesized strings.
(this.input[statement.start] === "\"" || this.input[statement.start] === "'")
)
};
var pp$7 = Parser$1.prototype;
// Convert existing expression atom to assignable pattern
// if possible.
pp$7.toAssignable = function(node, isBinding, refDestructuringErrors) {
if (this.options.ecmaVersion >= 6 && node) {
switch (node.type) {
case "Identifier":
if (this.inAsync && node.name === "await")
{ this.raise(node.start, "Cannot use 'await' as identifier inside an async function"); }
break
case "ObjectPattern":
case "ArrayPattern":
case "AssignmentPattern":
case "RestElement":
break
case "ObjectExpression":
node.type = "ObjectPattern";
if (refDestructuringErrors) { this.checkPatternErrors(refDestructuringErrors, true); }
for (var i = 0, list = node.properties; i < list.length; i += 1) {
var prop = list[i];
this.toAssignable(prop, isBinding);
// Early error:
// AssignmentRestProperty[Yield, Await] :
// `...` DestructuringAssignmentTarget[Yield, Await]
//
// It is a Syntax Error if |DestructuringAssignmentTarget| is an |ArrayLiteral| or an |ObjectLiteral|.
if (
prop.type === "RestElement" &&
(prop.argument.type === "ArrayPattern" || prop.argument.type === "ObjectPattern")
) {
this.raise(prop.argument.start, "Unexpected token");
}
}
break
case "Property":
// AssignmentProperty has type === "Property"
if (node.kind !== "init") { this.raise(node.key.start, "Object pattern can't contain getter or setter"); }
this.toAssignable(node.value, isBinding);
break
case "ArrayExpression":
node.type = "ArrayPattern";
if (refDestructuringErrors) { this.checkPatternErrors(refDestructuringErrors, true); }
this.toAssignableList(node.elements, isBinding);
break
case "SpreadElement":
node.type = "RestElement";
this.toAssignable(node.argument, isBinding);
if (node.argument.type === "AssignmentPattern")
{ this.raise(node.argument.start, "Rest elements cannot have a default value"); }
break
case "AssignmentExpression":
if (node.operator !== "=") { this.raise(node.left.end, "Only '=' operator can be used for specifying default value."); }
node.type = "AssignmentPattern";
delete node.operator;
this.toAssignable(node.left, isBinding);
break
case "ParenthesizedExpression":
this.toAssignable(node.expression, isBinding, refDestructuringErrors);
break
case "ChainExpression":
this.raiseRecoverable(node.start, "Optional chaining cannot appear in left-hand side");
break
case "MemberExpression":
if (!isBinding) { break }
default:
this.raise(node.start, "Assigning to rvalue");
}
} else if (refDestructuringErrors) { this.checkPatternErrors(refDestructuringErrors, true); }
return node
};
// Convert list of expression atoms to binding list.
pp$7.toAssignableList = function(exprList, isBinding) {
var end = exprList.length;
for (var i = 0; i < end; i++) {
var elt = exprList[i];
if (elt) { this.toAssignable(elt, isBinding); }
}
if (end) {
var last = exprList[end - 1];
if (this.options.ecmaVersion === 6 && isBinding && last && last.type === "RestElement" && last.argument.type !== "Identifier")
{ this.unexpected(last.argument.start); }
}
return exprList
};
// Parses spread element.
pp$7.parseSpread = function(refDestructuringErrors) {
var node = this.startNode();
this.next();
node.argument = this.parseMaybeAssign(false, refDestructuringErrors);
return this.finishNode(node, "SpreadElement")
};
pp$7.parseRestBinding = function() {
var node = this.startNode();
this.next();
// RestElement inside of a function parameter must be an identifier
if (this.options.ecmaVersion === 6 && this.type !== types$1.name)
{ this.unexpected(); }
node.argument = this.parseBindingAtom();
return this.finishNode(node, "RestElement")
};
// Parses lvalue (assignable) atom.
pp$7.parseBindingAtom = function() {
if (this.options.ecmaVersion >= 6) {
switch (this.type) {
case types$1.bracketL:
var node = this.startNode();
this.next();
node.elements = this.parseBindingList(types$1.bracketR, true, true);
return this.finishNode(node, "ArrayPattern")
case types$1.braceL:
return this.parseObj(true)
}
}
return this.parseIdent()
};
pp$7.parseBindingList = function(close, allowEmpty, allowTrailingComma, allowModifiers) {
var elts = [], first = true;
while (!this.eat(close)) {
if (first) { first = false; }
else { this.expect(types$1.comma); }
if (allowEmpty && this.type === types$1.comma) {
elts.push(null);
} else if (allowTrailingComma && this.afterTrailingComma(close)) {
break
} else if (this.type === types$1.ellipsis) {
var rest = this.parseRestBinding();
this.parseBindingListItem(rest);
elts.push(rest);
if (this.type === types$1.comma) { this.raiseRecoverable(this.start, "Comma is not permitted after the rest element"); }
this.expect(close);
break
} else {
elts.push(this.parseAssignableListItem(allowModifiers));
}
}
return elts
};
pp$7.parseAssignableListItem = function(allowModifiers) {
var elem = this.parseMaybeDefault(this.start, this.startLoc);
this.parseBindingListItem(elem);
return elem
};
pp$7.parseBindingListItem = function(param) {
return param
};
// Parses assignment pattern around given atom if possible.
pp$7.parseMaybeDefault = function(startPos, startLoc, left) {
left = left || this.parseBindingAtom();
if (this.options.ecmaVersion < 6 || !this.eat(types$1.eq)) { return left }
var node = this.startNodeAt(startPos, startLoc);
node.left = left;
node.right = this.parseMaybeAssign();
return this.finishNode(node, "AssignmentPattern")
};
// The following three functions all verify that a node is an lvalue —
// something that can be bound, or assigned to. In order to do so, they perform
// a variety of checks:
//
// - Check that none of the bound/assigned-to identifiers are reserved words.
// - Record name declarations for bindings in the appropriate scope.
// - Check duplicate argument names, if checkClashes is set.
//
// If a complex binding pattern is encountered (e.g., object and array
// destructuring), the entire pattern is recursively checked.
//
// There are three versions of checkLVal*() appropriate for different
// circumstances:
//
// - checkLValSimple() shall be used if the syntactic construct supports
// nothing other than identifiers and member expressions. Parenthesized
// expressions are also correctly handled. This is generally appropriate for
// constructs for which the spec says
//
// > It is a Syntax Error if AssignmentTargetType of [the production] is not
// > simple.
//
// It is also appropriate for checking if an identifier is valid and not
// defined elsewhere, like import declarations or function/class identifiers.
//
// Examples where this is used include:
// a += …;
// import a from '…';
// where a is the node to be checked.
//
// - checkLValPattern() shall be used if the syntactic construct supports
// anything checkLValSimple() supports, as well as object and array
// destructuring patterns. This is generally appropriate for constructs for
// which the spec says
//
// > It is a Syntax Error if [the production] is neither an ObjectLiteral nor
// > an ArrayLiteral and AssignmentTargetType of [the production] is not
// > simple.
//
// Examples where this is used include:
// (a = …);
// const a = …;
// try { … } catch (a) { … }
// where a is the node to be checked.
//
// - checkLValInnerPattern() shall be used if the syntactic construct supports
// anything checkLValPattern() supports, as well as default assignment
// patterns, rest elements, and other constructs that may appear within an
// object or array destructuring pattern.
//
// As a special case, function parameters also use checkLValInnerPattern(),
// as they also support defaults and rest constructs.
//
// These functions deliberately support both assignment and binding constructs,
// as the logic for both is exceedingly similar. If the node is the target of
// an assignment, then bindingType should be set to BIND_NONE. Otherwise, it
// should be set to the appropriate BIND_* constant, like BIND_VAR or
// BIND_LEXICAL.
//
// If the function is called with a non-BIND_NONE bindingType, then
// additionally a checkClashes object may be specified to allow checking for
// duplicate argument names. checkClashes is ignored if the provided construct
// is an assignment (i.e., bindingType is BIND_NONE).
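// In this file, for instance, import specifiers call
// `checkLValSimple(node.local, BIND_LEXICAL)`, assignment targets in
// `parseMaybeAssign` go through `checkLValPattern(left)` (for `=`) or
// `checkLValSimple(left)` (for compound assignments) with the default
// BIND_NONE, and function parameters are verified via
// `checkLValInnerPattern(param, BIND_VAR, ...)` in `checkParams`.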
pp$7.checkLValSimple = function(expr, bindingType, checkClashes) {
if ( bindingType === void 0 ) bindingType = BIND_NONE;
var isBind = bindingType !== BIND_NONE;
switch (expr.type) {
case "Identifier":
if (this.strict && this.reservedWordsStrictBind.test(expr.name))
{ this.raiseRecoverable(expr.start, (isBind ? "Binding " : "Assigning to ") + expr.name + " in strict mode"); }
if (isBind) {
if (bindingType === BIND_LEXICAL && expr.name === "let")
{ this.raiseRecoverable(expr.start, "let is disallowed as a lexically bound name"); }
if (checkClashes) {
if (hasOwn(checkClashes, expr.name))
{ this.raiseRecoverable(expr.start, "Argument name clash"); }
checkClashes[expr.name] = true;
}
if (bindingType !== BIND_OUTSIDE) { this.declareName(expr.name, bindingType, expr.start); }
}
break
case "ChainExpression":
this.raiseRecoverable(expr.start, "Optional chaining cannot appear in left-hand side");
break
case "MemberExpression":
if (isBind) { this.raiseRecoverable(expr.start, "Binding member expression"); }
break
case "ParenthesizedExpression":
if (isBind) { this.raiseRecoverable(expr.start, "Binding parenthesized expression"); }
return this.checkLValSimple(expr.expression, bindingType, checkClashes)
default:
this.raise(expr.start, (isBind ? "Binding" : "Assigning to") + " rvalue");
}
};
pp$7.checkLValPattern = function(expr, bindingType, checkClashes) {
if ( bindingType === void 0 ) bindingType = BIND_NONE;
switch (expr.type) {
case "ObjectPattern":
for (var i = 0, list = expr.properties; i < list.length; i += 1) {
var prop = list[i];
this.checkLValInnerPattern(prop, bindingType, checkClashes);
}
break
case "ArrayPattern":
for (var i$1 = 0, list$1 = expr.elements; i$1 < list$1.length; i$1 += 1) {
var elem = list$1[i$1];
if (elem) { this.checkLValInnerPattern(elem, bindingType, checkClashes); }
}
break
default:
this.checkLValSimple(expr, bindingType, checkClashes);
}
};
pp$7.checkLValInnerPattern = function(expr, bindingType, checkClashes) {
if ( bindingType === void 0 ) bindingType = BIND_NONE;
switch (expr.type) {
case "Property":
// AssignmentProperty has type === "Property"
this.checkLValInnerPattern(expr.value, bindingType, checkClashes);
break
case "AssignmentPattern":
this.checkLValPattern(expr.left, bindingType, checkClashes);
break
case "RestElement":
this.checkLValPattern(expr.argument, bindingType, checkClashes);
break
default:
this.checkLValPattern(expr, bindingType, checkClashes);
}
};
// The algorithm used to determine whether a regexp can appear at a
// given point in the program is loosely based on sweet.js' approach.
// See https://github.com/mozilla/sweet.js/wiki/design
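// For example, in `return /x/.test(s)` the `/` after `return` starts a regular
// expression literal, while in `a / b` the `/` after an identifier is the
// division operator; the context stack together with `exprAllowed` is what
// lets the tokenizer make that distinction.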
var TokContext = function TokContext(token, isExpr, preserveSpace, override, generator) {
this.token = token;
this.isExpr = !!isExpr;
this.preserveSpace = !!preserveSpace;
this.override = override;
this.generator = !!generator;
};
var types$2 = {
b_stat: new TokContext("{", false),
b_expr: new TokContext("{", true),
b_tmpl: new TokContext("${", false),
p_stat: new TokContext("(", false),
p_expr: new TokContext("(", true),
q_tmpl: new TokContext("`", true, true, function (p) { return p.tryReadTemplateToken(); }),
f_stat: new TokContext("function", false),
f_expr: new TokContext("function", true),
f_expr_gen: new TokContext("function", true, false, null, true),
f_gen: new TokContext("function", false, false, null, true)
};
var pp$6 = Parser$1.prototype;
pp$6.initialContext = function() {
return [types$2.b_stat]
};
pp$6.curContext = function() {
return this.context[this.context.length - 1]
};
pp$6.braceIsBlock = function(prevType) {
var parent = this.curContext();
if (parent === types$2.f_expr || parent === types$2.f_stat)
{ return true }
if (prevType === types$1.colon && (parent === types$2.b_stat || parent === types$2.b_expr))
{ return !parent.isExpr }
// The check for `tt.name && exprAllowed` detects whether we are
// after a `yield` or `of` construct. See the `updateContext` for
// `tt.name`.
if (prevType === types$1._return || prevType === types$1.name && this.exprAllowed)
{ return lineBreak.test(this.input.slice(this.lastTokEnd, this.start)) }
if (prevType === types$1._else || prevType === types$1.semi || prevType === types$1.eof || prevType === types$1.parenR || prevType === types$1.arrow)
{ return true }
if (prevType === types$1.braceL)
{ return parent === types$2.b_stat }
if (prevType === types$1._var || prevType === types$1._const || prevType === types$1.name)
{ return false }
return !this.exprAllowed
};
pp$6.inGeneratorContext = function() {
for (var i = this.context.length - 1; i >= 1; i--) {
var context = this.context[i];
if (context.token === "function")
{ return context.generator }
}
return false
};
pp$6.updateContext = function(prevType) {
var update, type = this.type;
if (type.keyword && prevType === types$1.dot)
{ this.exprAllowed = false; }
else if (update = type.updateContext)
{ update.call(this, prevType); }
else
{ this.exprAllowed = type.beforeExpr; }
};
// Used to handle edge cases when the token context could not be inferred correctly during the tokenization phase
pp$6.overrideContext = function(tokenCtx) {
if (this.curContext() !== tokenCtx) {
this.context[this.context.length - 1] = tokenCtx;
}
};
// Token-specific context update code
types$1.parenR.updateContext = types$1.braceR.updateContext = function() {
if (this.context.length === 1) {
this.exprAllowed = true;
return
}
var out = this.context.pop();
if (out === types$2.b_stat && this.curContext().token === "function") {
out = this.context.pop();
}
this.exprAllowed = !out.isExpr;
};
types$1.braceL.updateContext = function(prevType) {
this.context.push(this.braceIsBlock(prevType) ? types$2.b_stat : types$2.b_expr);
this.exprAllowed = true;
};
types$1.dollarBraceL.updateContext = function() {
this.context.push(types$2.b_tmpl);
this.exprAllowed = true;
};
types$1.parenL.updateContext = function(prevType) {
var statementParens = prevType === types$1._if || prevType === types$1._for || prevType === types$1._with || prevType === types$1._while;
this.context.push(statementParens ? types$2.p_stat : types$2.p_expr);
this.exprAllowed = true;
};
types$1.incDec.updateContext = function() {
// tokExprAllowed stays unchanged
};
types$1._function.updateContext = types$1._class.updateContext = function(prevType) {
if (prevType.beforeExpr && prevType !== types$1._else &&
!(prevType === types$1.semi && this.curContext() !== types$2.p_stat) &&
!(prevType === types$1._return && lineBreak.test(this.input.slice(this.lastTokEnd, this.start))) &&
!((prevType === types$1.colon || prevType === types$1.braceL) && this.curContext() === types$2.b_stat))
{ this.context.push(types$2.f_expr); }
else
{ this.context.push(types$2.f_stat); }
this.exprAllowed = false;
};
types$1.backQuote.updateContext = function() {
if (this.curContext() === types$2.q_tmpl)
{ this.context.pop(); }
else
{ this.context.push(types$2.q_tmpl); }
this.exprAllowed = false;
};
types$1.star.updateContext = function(prevType) {
if (prevType === types$1._function) {
var index = this.context.length - 1;
if (this.context[index] === types$2.f_expr)
{ this.context[index] = types$2.f_expr_gen; }
else
{ this.context[index] = types$2.f_gen; }
}
this.exprAllowed = true;
};
types$1.name.updateContext = function(prevType) {
var allowed = false;
if (this.options.ecmaVersion >= 6 && prevType !== types$1.dot) {
if (this.value === "of" && !this.exprAllowed ||
this.value === "yield" && this.inGeneratorContext())
{ allowed = true; }
}
this.exprAllowed = allowed;
};
// A recursive descent parser operates by defining functions for all
// syntactic elements, and recursively calling those, each function
// advancing the input stream and returning an AST node. Precedence
// of constructs (for example, the fact that `!x[1]` means `!(x[1])`
// instead of `(!x)[1]`) is handled by the fact that the parser
// function that parses unary prefix operators is called first, and
// in turn calls the function that parses `[]` subscripts — that
// way, it'll receive the node for `x[1]` already parsed, and wraps
// *that* in the unary operator node.
//
// Acorn uses an [operator precedence parser][opp] to handle binary
// operator precedence, because it is much more compact than the
// technique outlined above, which would need a separate, nested
// function for each of the ten binary precedence levels that
// JavaScript defines.
//
// [opp]: http://en.wikipedia.org/wiki/Operator-precedence_parser
var pp$5 = Parser$1.prototype;
// Check if a property name clashes with one that has already been added.
// Object/class getters and setters are not allowed to clash —
// either with each other or with an init property — and in
// strict mode, init properties are also not allowed to be repeated.
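// For example, `({ __proto__: 1, __proto__: 2 })` is flagged as a redefinition
// of `__proto__`: the clash is raised immediately, or recorded in
// `refDestructuringErrors.doubleProto` while the braces might still turn out
// to be a destructuring pattern.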
pp$5.checkPropClash = function(prop, propHash, refDestructuringErrors) {
if (this.options.ecmaVersion >= 9 && prop.type === "SpreadElement")
{ return }
if (this.options.ecmaVersion >= 6 && (prop.computed || prop.method || prop.shorthand))
{ return }
var key = prop.key;
var name;
switch (key.type) {
case "Identifier": name = key.name; break
case "Literal": name = String(key.value); break
default: return
}
var kind = prop.kind;
if (this.options.ecmaVersion >= 6) {
if (name === "__proto__" && kind === "init") {
if (propHash.proto) {
if (refDestructuringErrors) {
if (refDestructuringErrors.doubleProto < 0) {
refDestructuringErrors.doubleProto = key.start;
}
} else {
this.raiseRecoverable(key.start, "Redefinition of __proto__ property");
}
}
propHash.proto = true;
}
return
}
name = "$" + name;
var other = propHash[name];
if (other) {
var redefinition;
if (kind === "init") {
redefinition = this.strict && other.init || other.get || other.set;
} else {
redefinition = other.init || other[kind];
}
if (redefinition)
{ this.raiseRecoverable(key.start, "Redefinition of property"); }
} else {
other = propHash[name] = {
init: false,
get: false,
set: false
};
}
other[kind] = true;
};
// ### Expression parsing
// These nest, from the most general expression type at the top to
// 'atomic', nondivisible expression types at the bottom. Most of
// the functions will simply let the function(s) below them parse,
// and, *if* the syntactic construct they handle is present, wrap
// the AST node that the inner parser gave them in another node.
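// Concretely, the descent for a full expression in this file is
// parseExpression -> parseMaybeAssign -> parseMaybeConditional ->
// parseExprOps/parseExprOp -> parseMaybeUnary -> parseExprSubscripts ->
// parseExprAtom, and each level only wraps the node returned by the level
// below it when the construct it handles (comma sequence, assignment, `?:`,
// binary operator, unary operator, call/member access) is actually present.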
// Parse a full expression. The optional arguments are used to
// forbid the `in` operator (in for-loop initialization expressions)
// and to provide a reference for storing the '=' operator of a shorthand
// property assignment in contexts where both an object expression
// and an object pattern might appear (so that a delayed syntax error
// can be raised at the correct position).
pp$5.parseExpression = function(forInit, refDestructuringErrors) {
var startPos = this.start, startLoc = this.startLoc;
var expr = this.parseMaybeAssign(forInit, refDestructuringErrors);
if (this.type === types$1.comma) {
var node = this.startNodeAt(startPos, startLoc);
node.expressions = [expr];
while (this.eat(types$1.comma)) { node.expressions.push(this.parseMaybeAssign(forInit, refDestructuringErrors)); }
return this.finishNode(node, "SequenceExpression")
}
return expr
};
// Parse an assignment expression. This includes applications of
// operators like `+=`.
pp$5.parseMaybeAssign = function(forInit, refDestructuringErrors, afterLeftParse) {
if (this.isContextual("yield")) {
if (this.inGenerator) { return this.parseYield(forInit) }
// The tokenizer will assume an expression is allowed after
// `yield`, but this isn't that kind of yield
else { this.exprAllowed = false; }
}
var ownDestructuringErrors = false, oldParenAssign = -1, oldTrailingComma = -1, oldDoubleProto = -1;
if (refDestructuringErrors) {
oldParenAssign = refDestructuringErrors.parenthesizedAssign;
oldTrailingComma = refDestructuringErrors.trailingComma;
oldDoubleProto = refDestructuringErrors.doubleProto;
refDestructuringErrors.parenthesizedAssign = refDestructuringErrors.trailingComma = -1;
} else {
refDestructuringErrors = new DestructuringErrors;
ownDestructuringErrors = true;
}
var startPos = this.start, startLoc = this.startLoc;
if (this.type === types$1.parenL || this.type === types$1.name) {
this.potentialArrowAt = this.start;
this.potentialArrowInForAwait = forInit === "await";
}
var left = this.parseMaybeConditional(forInit, refDestructuringErrors);
if (afterLeftParse) { left = afterLeftParse.call(this, left, startPos, startLoc); }
if (this.type.isAssign) {
var node = this.startNodeAt(startPos, startLoc);
node.operator = this.value;
if (this.type === types$1.eq)
{ left = this.toAssignable(left, false, refDestructuringErrors); }
if (!ownDestructuringErrors) {
refDestructuringErrors.parenthesizedAssign = refDestructuringErrors.trailingComma = refDestructuringErrors.doubleProto = -1;
}
if (refDestructuringErrors.shorthandAssign >= left.start)
{ refDestructuringErrors.shorthandAssign = -1; } // reset because shorthand default was used correctly
if (this.type === types$1.eq)
{ this.checkLValPattern(left); }
else
{ this.checkLValSimple(left); }
node.left = left;
this.next();
node.right = this.parseMaybeAssign(forInit);
if (oldDoubleProto > -1) { refDestructuringErrors.doubleProto = oldDoubleProto; }
return this.finishNode(node, "AssignmentExpression")
} else {
if (ownDestructuringErrors) { this.checkExpressionErrors(refDestructuringErrors, true); }
}
if (oldParenAssign > -1) { refDestructuringErrors.parenthesizedAssign = oldParenAssign; }
if (oldTrailingComma > -1) { refDestructuringErrors.trailingComma = oldTrailingComma; }
return left
};
// Parse a ternary conditional (`?:`) operator.
pp$5.parseMaybeConditional = function(forInit, refDestructuringErrors) {
var startPos = this.start, startLoc = this.startLoc;
var expr = this.parseExprOps(forInit, refDestructuringErrors);
if (this.checkExpressionErrors(refDestructuringErrors)) { return expr }
if (this.eat(types$1.question)) {
var node = this.startNodeAt(startPos, startLoc);
node.test = expr;
node.consequent = this.parseMaybeAssign();
this.expect(types$1.colon);
node.alternate = this.parseMaybeAssign(forInit);
return this.finishNode(node, "ConditionalExpression")
}
return expr
};
// Start the precedence parser.
pp$5.parseExprOps = function(forInit, refDestructuringErrors) {
var startPos = this.start, startLoc = this.startLoc;
var expr = this.parseMaybeUnary(refDestructuringErrors, false, false, forInit);
if (this.checkExpressionErrors(refDestructuringErrors)) { return expr }
return expr.start === startPos && expr.type === "ArrowFunctionExpression" ? expr : this.parseExprOp(expr, startPos, startLoc, -1, forInit)
};
// Parse binary operators with the operator precedence parsing
// algorithm. `left` is the left-hand side of the operator.
// `minPrec` provides context that allows the function to stop and
// defer further parsing to one of its callers when it encounters an
// operator that has a lower precedence than the one it is parsing.
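// For example, for `1 + 2 * 3` the function first parses `1`, consumes `+`,
// and recurses for the right-hand side with `+`'s precedence as `minPrec`;
// that inner call consumes `2 * 3` (since `*` binds tighter) and returns,
// producing `1 + (2 * 3)` rather than `(1 + 2) * 3`.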
pp$5.parseExprOp = function(left, leftStartPos, leftStartLoc, minPrec, forInit) {
var prec = this.type.binop;
if (prec != null && (!forInit || this.type !== types$1._in)) {
if (prec > minPrec) {
var logical = this.type === types$1.logicalOR || this.type === types$1.logicalAND;
var coalesce = this.type === types$1.coalesce;
if (coalesce) {
// Treat the precedence of `tt.coalesce` as equal to that of the logical operators,
// so that `node.right` cannot itself contain a logical expression; this is what makes
// the mixed-operator error below detectable.
prec = types$1.logicalAND.binop;
}
var op = this.value;
this.next();
var startPos = this.start, startLoc = this.startLoc;
var right = this.parseExprOp(this.parseMaybeUnary(null, false, false, forInit), startPos, startLoc, prec, forInit);
var node = this.buildBinary(leftStartPos, leftStartLoc, left, right, op, logical || coalesce);
if ((logical && this.type === types$1.coalesce) || (coalesce && (this.type === types$1.logicalOR || this.type === types$1.logicalAND))) {
this.raiseRecoverable(this.start, "Logical expressions and coalesce expressions cannot be mixed. Wrap either by parentheses");
}
return this.parseExprOp(node, leftStartPos, leftStartLoc, minPrec, forInit)
}
}
return left
};
pp$5.buildBinary = function(startPos, startLoc, left, right, op, logical) {
if (right.type === "PrivateIdentifier") { this.raise(right.start, "Private identifier can only be left side of binary expression"); }
var node = this.startNodeAt(startPos, startLoc);
node.left = left;
node.operator = op;
node.right = right;
return this.finishNode(node, logical ? "LogicalExpression" : "BinaryExpression")
};
// Parse unary operators, both prefix and postfix.
pp$5.parseMaybeUnary = function(refDestructuringErrors, sawUnary, incDec, forInit) {
var startPos = this.start, startLoc = this.startLoc, expr;
if (this.isContextual("await") && this.canAwait) {
expr = this.parseAwait(forInit);
sawUnary = true;
} else if (this.type.prefix) {
var node = this.startNode(), update = this.type === types$1.incDec;
node.operator = this.value;
node.prefix = true;
this.next();
node.argument = this.parseMaybeUnary(null, true, update, forInit);
this.checkExpressionErrors(refDestructuringErrors, true);
if (update) { this.checkLValSimple(node.argument); }
else if (this.strict && node.operator === "delete" &&
node.argument.type === "Identifier")
{ this.raiseRecoverable(node.start, "Deleting local variable in strict mode"); }
else if (node.operator === "delete" && isPrivateFieldAccess(node.argument))
{ this.raiseRecoverable(node.start, "Private fields can not be deleted"); }
else { sawUnary = true; }
expr = this.finishNode(node, update ? "UpdateExpression" : "UnaryExpression");
} else if (!sawUnary && this.type === types$1.privateId) {
if ((forInit || this.privateNameStack.length === 0) && this.options.checkPrivateFields) { this.unexpected(); }
expr = this.parsePrivateIdent();
// This can only be a private field used as the left operand of 'in', such as `#x in obj`
if (this.type !== types$1._in) { this.unexpected(); }
} else {
expr = this.parseExprSubscripts(refDestructuringErrors, forInit);
if (this.checkExpressionErrors(refDestructuringErrors)) { return expr }
while (this.type.postfix && !this.canInsertSemicolon()) {
var node$1 = this.startNodeAt(startPos, startLoc);
node$1.operator = this.value;
node$1.prefix = false;
node$1.argument = expr;
this.checkLValSimple(expr);
this.next();
expr = this.finishNode(node$1, "UpdateExpression");
}
}
if (!incDec && this.eat(types$1.starstar)) {
if (sawUnary)
{ this.unexpected(this.lastTokStart); }
else
{ return this.buildBinary(startPos, startLoc, expr, this.parseMaybeUnary(null, false, false, forInit), "**", false) }
} else {
return expr
}
};
function isPrivateFieldAccess(node) {
return (
node.type === "MemberExpression" && node.property.type === "PrivateIdentifier" ||
node.type === "ChainExpression" && isPrivateFieldAccess(node.expression)
)
}
// Parse call, dot, and `[]`-subscript expressions.
pp$5.parseExprSubscripts = function(refDestructuringErrors, forInit) {
var startPos = this.start, startLoc = this.startLoc;
var expr = this.parseExprAtom(refDestructuringErrors, forInit);
if (expr.type === "ArrowFunctionExpression" && this.input.slice(this.lastTokStart, this.lastTokEnd) !== ")")
{ return expr }
var result = this.parseSubscripts(expr, startPos, startLoc, false, forInit);
if (refDestructuringErrors && result.type === "MemberExpression") {
if (refDestructuringErrors.parenthesizedAssign >= result.start) { refDestructuringErrors.parenthesizedAssign = -1; }
if (refDestructuringErrors.parenthesizedBind >= result.start) { refDestructuringErrors.parenthesizedBind = -1; }
if (refDestructuringErrors.trailingComma >= result.start) { refDestructuringErrors.trailingComma = -1; }
}
return result
};
pp$5.parseSubscripts = function(base, startPos, startLoc, noCalls, forInit) {
var maybeAsyncArrow = this.options.ecmaVersion >= 8 && base.type === "Identifier" && base.name === "async" &&
this.lastTokEnd === base.end && !this.canInsertSemicolon() && base.end - base.start === 5 &&
this.potentialArrowAt === base.start;
var optionalChained = false;
while (true) {
var element = this.parseSubscript(base, startPos, startLoc, noCalls, maybeAsyncArrow, optionalChained, forInit);
if (element.optional) { optionalChained = true; }
if (element === base || element.type === "ArrowFunctionExpression") {
if (optionalChained) {
var chainNode = this.startNodeAt(startPos, startLoc);
chainNode.expression = element;
element = this.finishNode(chainNode, "ChainExpression");
}
return element
}
base = element;
}
};
pp$5.shouldParseAsyncArrow = function() {
return !this.canInsertSemicolon() && this.eat(types$1.arrow)
};
pp$5.parseSubscriptAsyncArrow = function(startPos, startLoc, exprList, forInit) {
return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), exprList, true, forInit)
};
pp$5.parseSubscript = function(base, startPos, startLoc, noCalls, maybeAsyncArrow, optionalChained, forInit) {
var optionalSupported = this.options.ecmaVersion >= 11;
var optional = optionalSupported && this.eat(types$1.questionDot);
if (noCalls && optional) { this.raise(this.lastTokStart, "Optional chaining cannot appear in the callee of new expressions"); }
var computed = this.eat(types$1.bracketL);
if (computed || (optional && this.type !== types$1.parenL && this.type !== types$1.backQuote) || this.eat(types$1.dot)) {
var node = this.startNodeAt(startPos, startLoc);
node.object = base;
if (computed) {
node.property = this.parseExpression();
this.expect(types$1.bracketR);
} else if (this.type === types$1.privateId && base.type !== "Super") {
node.property = this.parsePrivateIdent();
} else {
node.property = this.parseIdent(this.options.allowReserved !== "never");
}
node.computed = !!computed;
if (optionalSupported) {
node.optional = optional;
}
base = this.finishNode(node, "MemberExpression");
} else if (!noCalls && this.eat(types$1.parenL)) {
var refDestructuringErrors = new DestructuringErrors, oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, oldAwaitIdentPos = this.awaitIdentPos;
this.yieldPos = 0;
this.awaitPos = 0;
this.awaitIdentPos = 0;
var exprList = this.parseExprList(types$1.parenR, this.options.ecmaVersion >= 8, false, refDestructuringErrors);
if (maybeAsyncArrow && !optional && this.shouldParseAsyncArrow()) {
this.checkPatternErrors(refDestructuringErrors, false);
this.checkYieldAwaitInDefaultParams();
if (this.awaitIdentPos > 0)
{ this.raise(this.awaitIdentPos, "Cannot use 'await' as identifier inside an async function"); }
this.yieldPos = oldYieldPos;
this.awaitPos = oldAwaitPos;
this.awaitIdentPos = oldAwaitIdentPos;
return this.parseSubscriptAsyncArrow(startPos, startLoc, exprList, forInit)
}
this.checkExpressionErrors(refDestructuringErrors, true);
this.yieldPos = oldYieldPos || this.yieldPos;
this.awaitPos = oldAwaitPos || this.awaitPos;
this.awaitIdentPos = oldAwaitIdentPos || this.awaitIdentPos;
var node$1 = this.startNodeAt(startPos, startLoc);
node$1.callee = base;
node$1.arguments = exprList;
if (optionalSupported) {
node$1.optional = optional;
}
base = this.finishNode(node$1, "CallExpression");
} else if (this.type === types$1.backQuote) {
if (optional || optionalChained) {
this.raise(this.start, "Optional chaining cannot appear in the tag of tagged template expressions");
}
var node$2 = this.startNodeAt(startPos, startLoc);
node$2.tag = base;
node$2.quasi = this.parseTemplate({isTagged: true});
base = this.finishNode(node$2, "TaggedTemplateExpression");
}
return base
};
// Parse an atomic expression — either a single token that is an
// expression, an expression started by a keyword like `function` or
// `new`, or an expression wrapped in punctuation like `()`, `[]`,
// or `{}`.
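// For example, `this`, `super.x`, literals, template literals, array and
// object literals, `function`/`class` expressions, `new ...`, and
// parenthesized expressions all enter the parser through this function.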
pp$5.parseExprAtom = function(refDestructuringErrors, forInit, forNew) {
// If a division operator appears in an expression position, the
// tokenizer got confused, and we force it to read a regexp instead.
if (this.type === types$1.slash) { this.readRegexp(); }
var node, canBeArrow = this.potentialArrowAt === this.start;
switch (this.type) {
case types$1._super:
if (!this.allowSuper)
{ this.raise(this.start, "'super' keyword outside a method"); }
node = this.startNode();
this.next();
if (this.type === types$1.parenL && !this.allowDirectSuper)
{ this.raise(node.start, "super() call outside constructor of a subclass"); }
// The `super` keyword can only appear in the following positions:
// SuperProperty:
// super [ Expression ]
// super . IdentifierName
// SuperCall:
// super ( Arguments )
if (this.type !== types$1.dot && this.type !== types$1.bracketL && this.type !== types$1.parenL)
{ this.unexpected(); }
return this.finishNode(node, "Super")
case types$1._this:
node = this.startNode();
this.next();
return this.finishNode(node, "ThisExpression")
case types$1.name:
var startPos = this.start, startLoc = this.startLoc, containsEsc = this.containsEsc;
var id = this.parseIdent(false);
if (this.options.ecmaVersion >= 8 && !containsEsc && id.name === "async" && !this.canInsertSemicolon() && this.eat(types$1._function)) {
this.overrideContext(types$2.f_expr);
return this.parseFunction(this.startNodeAt(startPos, startLoc), 0, false, true, forInit)
}
if (canBeArrow && !this.canInsertSemicolon()) {
if (this.eat(types$1.arrow))
{ return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), [id], false, forInit) }
if (this.options.ecmaVersion >= 8 && id.name === "async" && this.type === types$1.name && !containsEsc &&
(!this.potentialArrowInForAwait || this.value !== "of" || this.containsEsc)) {
id = this.parseIdent(false);
if (this.canInsertSemicolon() || !this.eat(types$1.arrow))
{ this.unexpected(); }
return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), [id], true, forInit)
}
}
return id
case types$1.regexp:
var value = this.value;
node = this.parseLiteral(value.value);
node.regex = {pattern: value.pattern, flags: value.flags};
return node
case types$1.num: case types$1.string:
return this.parseLiteral(this.value)
case types$1._null: case types$1._true: case types$1._false:
node = this.startNode();
node.value = this.type === types$1._null ? null : this.type === types$1._true;
node.raw = this.type.keyword;
this.next();
return this.finishNode(node, "Literal")
case types$1.parenL:
var start = this.start, expr = this.parseParenAndDistinguishExpression(canBeArrow, forInit);
if (refDestructuringErrors) {
if (refDestructuringErrors.parenthesizedAssign < 0 && !this.isSimpleAssignTarget(expr))
{ refDestructuringErrors.parenthesizedAssign = start; }
if (refDestructuringErrors.parenthesizedBind < 0)
{ refDestructuringErrors.parenthesizedBind = start; }
}
return expr
case types$1.bracketL:
node = this.startNode();
this.next();
node.elements = this.parseExprList(types$1.bracketR, true, true, refDestructuringErrors);
return this.finishNode(node, "ArrayExpression")
case types$1.braceL:
this.overrideContext(types$2.b_expr);
return this.parseObj(false, refDestructuringErrors)
case types$1._function:
node = this.startNode();
this.next();
return this.parseFunction(node, 0)
case types$1._class:
return this.parseClass(this.startNode(), false)
case types$1._new:
return this.parseNew()
case types$1.backQuote:
return this.parseTemplate()
case types$1._import:
if (this.options.ecmaVersion >= 11) {
return this.parseExprImport(forNew)
} else {
return this.unexpected()
}
default:
return this.parseExprAtomDefault()
}
};
pp$5.parseExprAtomDefault = function() {
this.unexpected();
};
pp$5.parseExprImport = function(forNew) {
var node = this.startNode();
// Consume `import` as an identifier for `import.meta`.
// Because `this.parseIdent(true)` doesn't check for escape sequences, the `this.containsEsc` check is needed here.
if (this.containsEsc) { this.raiseRecoverable(this.start, "Escape sequence in keyword import"); }
var meta = this.parseIdent(true);
if (this.type === types$1.parenL && !forNew) {
return this.parseDynamicImport(node)
} else if (this.type === types$1.dot) {
node.meta = meta;
return this.parseImportMeta(node)
} else {
this.unexpected();
}
};
pp$5.parseDynamicImport = function(node) {
this.next(); // skip `(`
// Parse node.source.
node.source = this.parseMaybeAssign();
// Verify ending.
if (!this.eat(types$1.parenR)) {
var errorPos = this.start;
if (this.eat(types$1.comma) && this.eat(types$1.parenR)) {
this.raiseRecoverable(errorPos, "Trailing comma is not allowed in import()");
} else {
this.unexpected(errorPos);
}
}
return this.finishNode(node, "ImportExpression")
};
pp$5.parseImportMeta = function(node) {
this.next(); // skip `.`
var containsEsc = this.containsEsc;
node.property = this.parseIdent(true);
if (node.property.name !== "meta")
{ this.raiseRecoverable(node.property.start, "The only valid meta property for import is 'import.meta'"); }
if (containsEsc)
{ this.raiseRecoverable(node.start, "'import.meta' must not contain escaped characters"); }
if (this.options.sourceType !== "module" && !this.options.allowImportExportEverywhere)
{ this.raiseRecoverable(node.start, "Cannot use 'import.meta' outside a module"); }
return this.finishNode(node, "MetaProperty")
};
pp$5.parseLiteral = function(value) {
var node = this.startNode();
node.value = value;
node.raw = this.input.slice(this.start, this.end);
if (node.raw.charCodeAt(node.raw.length - 1) === 110) { node.bigint = node.raw.slice(0, -1).replace(/_/g, ""); }
this.next();
return this.finishNode(node, "Literal")
};
pp$5.parseParenExpression = function() {
this.expect(types$1.parenL);
var val = this.parseExpression();
this.expect(types$1.parenR);
return val
};
pp$5.shouldParseArrow = function(exprList) {
return !this.canInsertSemicolon()
};
pp$5.parseParenAndDistinguishExpression = function(canBeArrow, forInit) {
var startPos = this.start, startLoc = this.startLoc, val, allowTrailingComma = this.options.ecmaVersion >= 8;
if (this.options.ecmaVersion >= 6) {
this.next();
var innerStartPos = this.start, innerStartLoc = this.startLoc;
var exprList = [], first = true, lastIsComma = false;
var refDestructuringErrors = new DestructuringErrors, oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, spreadStart;
this.yieldPos = 0;
this.awaitPos = 0;
// Do not save awaitIdentPos to allow checking awaits nested in parameters
while (this.type !== types$1.parenR) {
first ? first = false : this.expect(types$1.comma);
if (allowTrailingComma && this.afterTrailingComma(types$1.parenR, true)) {
lastIsComma = true;
break
} else if (this.type === types$1.ellipsis) {
spreadStart = this.start;
exprList.push(this.parseParenItem(this.parseRestBinding()));
if (this.type === types$1.comma) {
this.raiseRecoverable(
this.start,
"Comma is not permitted after the rest element"
);
}
break
} else {
exprList.push(this.parseMaybeAssign(false, refDestructuringErrors, this.parseParenItem));
}
}
var innerEndPos = this.lastTokEnd, innerEndLoc = this.lastTokEndLoc;
this.expect(types$1.parenR);
if (canBeArrow && this.shouldParseArrow(exprList) && this.eat(types$1.arrow)) {
this.checkPatternErrors(refDestructuringErrors, false);
this.checkYieldAwaitInDefaultParams();
this.yieldPos = oldYieldPos;
this.awaitPos = oldAwaitPos;
return this.parseParenArrowList(startPos, startLoc, exprList, forInit)
}
if (!exprList.length || lastIsComma) { this.unexpected(this.lastTokStart); }
if (spreadStart) { this.unexpected(spreadStart); }
this.checkExpressionErrors(refDestructuringErrors, true);
this.yieldPos = oldYieldPos || this.yieldPos;
this.awaitPos = oldAwaitPos || this.awaitPos;
if (exprList.length > 1) {
val = this.startNodeAt(innerStartPos, innerStartLoc);
val.expressions = exprList;
this.finishNodeAt(val, "SequenceExpression", innerEndPos, innerEndLoc);
} else {
val = exprList[0];
}
} else {
val = this.parseParenExpression();
}
if (this.options.preserveParens) {
var par = this.startNodeAt(startPos, startLoc);
par.expression = val;
return this.finishNode(par, "ParenthesizedExpression")
} else {
return val
}
};
pp$5.parseParenItem = function(item) {
return item
};
pp$5.parseParenArrowList = function(startPos, startLoc, exprList, forInit) {
return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), exprList, false, forInit)
};
// New's precedence is slightly tricky. It must allow its argument to
// be a `[]` or dot subscript expression, but not a call — at least,
// not without wrapping it in parentheses. Thus, it uses the noCalls
// argument to parseSubscripts to prevent it from consuming the
// argument list.
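// For example, `new a.b()` parses `a.b` as the callee and `()` as its (empty)
// argument list, whereas `new (a.b())` uses the result of the call `a.b()` as
// the callee; without the parentheses, `noCalls` keeps `parseSubscripts` from
// swallowing the call.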
var empty = [];
pp$5.parseNew = function() {
if (this.containsEsc) { this.raiseRecoverable(this.start, "Escape sequence in keyword new"); }
var node = this.startNode();
var meta = this.parseIdent(true);
if (this.options.ecmaVersion >= 6 && this.eat(types$1.dot)) {
node.meta = meta;
var containsEsc = this.containsEsc;
node.property = this.parseIdent(true);
if (node.property.name !== "target")
{ this.raiseRecoverable(node.property.start, "The only valid meta property for new is 'new.target'"); }
if (containsEsc)
{ this.raiseRecoverable(node.start, "'new.target' must not contain escaped characters"); }
if (!this.allowNewDotTarget)
{ this.raiseRecoverable(node.start, "'new.target' can only be used in functions and class static block"); }
return this.finishNode(node, "MetaProperty")
}
var startPos = this.start, startLoc = this.startLoc;
node.callee = this.parseSubscripts(this.parseExprAtom(null, false, true), startPos, startLoc, true, false);
if (this.eat(types$1.parenL)) { node.arguments = this.parseExprList(types$1.parenR, this.options.ecmaVersion >= 8, false); }
else { node.arguments = empty; }
return this.finishNode(node, "NewExpression")
};
// Parse template expression.
pp$5.parseTemplateElement = function(ref) {
var isTagged = ref.isTagged;
var elem = this.startNode();
if (this.type === types$1.invalidTemplate) {
if (!isTagged) {
this.raiseRecoverable(this.start, "Bad escape sequence in untagged template literal");
}
elem.value = {
raw: this.value,
cooked: null
};
} else {
elem.value = {
raw: this.input.slice(this.start, this.end).replace(/\r\n?/g, "\n"),
cooked: this.value
};
}
this.next();
elem.tail = this.type === types$1.backQuote;
return this.finishNode(elem, "TemplateElement")
};
pp$5.parseTemplate = function(ref) {
if ( ref === void 0 ) ref = {};
var isTagged = ref.isTagged; if ( isTagged === void 0 ) isTagged = false;
var node = this.startNode();
this.next();
node.expressions = [];
var curElt = this.parseTemplateElement({isTagged: isTagged});
node.quasis = [curElt];
while (!curElt.tail) {
if (this.type === types$1.eof) { this.raise(this.pos, "Unterminated template literal"); }
this.expect(types$1.dollarBraceL);
node.expressions.push(this.parseExpression());
this.expect(types$1.braceR);
node.quasis.push(curElt = this.parseTemplateElement({isTagged: isTagged}));
}
this.next();
return this.finishNode(node, "TemplateLiteral")
};
pp$5.isAsyncProp = function(prop) {
return !prop.computed && prop.key.type === "Identifier" && prop.key.name === "async" &&
(this.type === types$1.name || this.type === types$1.num || this.type === types$1.string || this.type === types$1.bracketL || this.type.keyword || (this.options.ecmaVersion >= 9 && this.type === types$1.star)) &&
!lineBreak.test(this.input.slice(this.lastTokEnd, this.start))
};
// Parse an object literal or binding pattern.
pp$5.parseObj = function(isPattern, refDestructuringErrors) {
var node = this.startNode(), first = true, propHash = {};
node.properties = [];
this.next();
while (!this.eat(types$1.braceR)) {
if (!first) {
this.expect(types$1.comma);
if (this.options.ecmaVersion >= 5 && this.afterTrailingComma(types$1.braceR)) { break }
} else { first = false; }
var prop = this.parseProperty(isPattern, refDestructuringErrors);
if (!isPattern) { this.checkPropClash(prop, propHash, refDestructuringErrors); }
node.properties.push(prop);
}
return this.finishNode(node, isPattern ? "ObjectPattern" : "ObjectExpression")
};
pp$5.parseProperty = function(isPattern, refDestructuringErrors) {
var prop = this.startNode(), isGenerator, isAsync, startPos, startLoc;
if (this.options.ecmaVersion >= 9 && this.eat(types$1.ellipsis)) {
if (isPattern) {
prop.argument = this.parseIdent(false);
if (this.type === types$1.comma) {
this.raiseRecoverable(this.start, "Comma is not permitted after the rest element");
}
return this.finishNode(prop, "RestElement")
}
// Parse argument.
prop.argument = this.parseMaybeAssign(false, refDestructuringErrors);
// Record the position so that a trailing comma can be disallowed via `this.toAssignable()`.
if (this.type === types$1.comma && refDestructuringErrors && refDestructuringErrors.trailingComma < 0) {
refDestructuringErrors.trailingComma = this.start;
}
// Finish
return this.finishNode(prop, "SpreadElement")
}
if (this.options.ecmaVersion >= 6) {
prop.method = false;
prop.shorthand = false;
if (isPattern || refDestructuringErrors) {
startPos = this.start;
startLoc = this.startLoc;
}
if (!isPattern)
{ isGenerator = this.eat(types$1.star); }
}
var containsEsc = this.containsEsc;
this.parsePropertyName(prop);
if (!isPattern && !containsEsc && this.options.ecmaVersion >= 8 && !isGenerator && this.isAsyncProp(prop)) {
isAsync = true;
isGenerator = this.options.ecmaVersion >= 9 && this.eat(types$1.star);
this.parsePropertyName(prop);
} else {
isAsync = false;
}
this.parsePropertyValue(prop, isPattern, isGenerator, isAsync, startPos, startLoc, refDestructuringErrors, containsEsc);
return this.finishNode(prop, "Property")
};
pp$5.parseGetterSetter = function(prop) {
prop.kind = prop.key.name;
this.parsePropertyName(prop);
prop.value = this.parseMethod(false);
var paramCount = prop.kind === "get" ? 0 : 1;
if (prop.value.params.length !== paramCount) {
var start = prop.value.start;
if (prop.kind === "get")
{ this.raiseRecoverable(start, "getter should have no params"); }
else
{ this.raiseRecoverable(start, "setter should have exactly one param"); }
} else {
if (prop.kind === "set" && prop.value.params[0].type === "RestElement")
{ this.raiseRecoverable(prop.value.params[0].start, "Setter cannot use rest params"); }
}
};
pp$5.parsePropertyValue = function(prop, isPattern, isGenerator, isAsync, startPos, startLoc, refDestructuringErrors, containsEsc) {
if ((isGenerator || isAsync) && this.type === types$1.colon)
{ this.unexpected(); }
if (this.eat(types$1.colon)) {
prop.value = isPattern ? this.parseMaybeDefault(this.start, this.startLoc) : this.parseMaybeAssign(false, refDestructuringErrors);
prop.kind = "init";
} else if (this.options.ecmaVersion >= 6 && this.type === types$1.parenL) {
if (isPattern) { this.unexpected(); }
prop.kind = "init";
prop.method = true;
prop.value = this.parseMethod(isGenerator, isAsync);
} else if (!isPattern && !containsEsc &&
this.options.ecmaVersion >= 5 && !prop.computed && prop.key.type === "Identifier" &&
(prop.key.name === "get" || prop.key.name === "set") &&
(this.type !== types$1.comma && this.type !== types$1.braceR && this.type !== types$1.eq)) {
if (isGenerator || isAsync) { this.unexpected(); }
this.parseGetterSetter(prop);
} else if (this.options.ecmaVersion >= 6 && !prop.computed && prop.key.type === "Identifier") {
if (isGenerator || isAsync) { this.unexpected(); }
this.checkUnreserved(prop.key);
if (prop.key.name === "await" && !this.awaitIdentPos)
{ this.awaitIdentPos = startPos; }
prop.kind = "init";
if (isPattern) {
prop.value = this.parseMaybeDefault(startPos, startLoc, this.copyNode(prop.key));
} else if (this.type === types$1.eq && refDestructuringErrors) {
if (refDestructuringErrors.shorthandAssign < 0)
{ refDestructuringErrors.shorthandAssign = this.start; }
prop.value = this.parseMaybeDefault(startPos, startLoc, this.copyNode(prop.key));
} else {
prop.value = this.copyNode(prop.key);
}
prop.shorthand = true;
} else { this.unexpected(); }
};
pp$5.parsePropertyName = function(prop) {
if (this.options.ecmaVersion >= 6) {
if (this.eat(types$1.bracketL)) {
prop.computed = true;
prop.key = this.parseMaybeAssign();
this.expect(types$1.bracketR);
return prop.key
} else {
prop.computed = false;
}
}
return prop.key = this.type === types$1.num || this.type === types$1.string ? this.parseExprAtom() : this.parseIdent(this.options.allowReserved !== "never")
};
// Initialize empty function node.
pp$5.initFunction = function(node) {
node.id = null;
if (this.options.ecmaVersion >= 6) { node.generator = node.expression = false; }
if (this.options.ecmaVersion >= 8) { node.async = false; }
};
// Parse object or class method.
pp$5.parseMethod = function(isGenerator, isAsync, allowDirectSuper) {
var node = this.startNode(), oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, oldAwaitIdentPos = this.awaitIdentPos;
this.initFunction(node);
if (this.options.ecmaVersion >= 6)
{ node.generator = isGenerator; }
if (this.options.ecmaVersion >= 8)
{ node.async = !!isAsync; }
this.yieldPos = 0;
this.awaitPos = 0;
this.awaitIdentPos = 0;
this.enterScope(functionFlags(isAsync, node.generator) | SCOPE_SUPER | (allowDirectSuper ? SCOPE_DIRECT_SUPER : 0));
this.expect(types$1.parenL);
node.params = this.parseBindingList(types$1.parenR, false, this.options.ecmaVersion >= 8);
this.checkYieldAwaitInDefaultParams();
this.parseFunctionBody(node, false, true, false);
this.yieldPos = oldYieldPos;
this.awaitPos = oldAwaitPos;
this.awaitIdentPos = oldAwaitIdentPos;
return this.finishNode(node, "FunctionExpression")
};
// Parse arrow function expression with given parameters.
pp$5.parseArrowExpression = function(node, params, isAsync, forInit) {
var oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, oldAwaitIdentPos = this.awaitIdentPos;
this.enterScope(functionFlags(isAsync, false) | SCOPE_ARROW);
this.initFunction(node);
if (this.options.ecmaVersion >= 8) { node.async = !!isAsync; }
this.yieldPos = 0;
this.awaitPos = 0;
this.awaitIdentPos = 0;
node.params = this.toAssignableList(params, true);
this.parseFunctionBody(node, true, false, forInit);
this.yieldPos = oldYieldPos;
this.awaitPos = oldAwaitPos;
this.awaitIdentPos = oldAwaitIdentPos;
return this.finishNode(node, "ArrowFunctionExpression")
};
// Parse function body and check parameters.
pp$5.parseFunctionBody = function(node, isArrowFunction, isMethod, forInit) {
var isExpression = isArrowFunction && this.type !== types$1.braceL;
var oldStrict = this.strict, useStrict = false;
if (isExpression) {
node.body = this.parseMaybeAssign(forInit);
node.expression = true;
this.checkParams(node, false);
} else {
var nonSimple = this.options.ecmaVersion >= 7 && !this.isSimpleParamList(node.params);
if (!oldStrict || nonSimple) {
useStrict = this.strictDirective(this.end);
// If this is a strict mode function, verify that argument names
// are not repeated, and it does not try to bind the words `eval`
// or `arguments`.
if (useStrict && nonSimple)
{ this.raiseRecoverable(node.start, "Illegal 'use strict' directive in function with non-simple parameter list"); }
}
// Start a new scope with regard to labels and the `inFunction`
// flag (restore them to their old value afterwards).
var oldLabels = this.labels;
this.labels = [];
if (useStrict) { this.strict = true; }
// Add the params to varDeclaredNames to ensure that an error is thrown
// if a let/const declaration in the function clashes with one of the params.
this.checkParams(node, !oldStrict && !useStrict && !isArrowFunction && !isMethod && this.isSimpleParamList(node.params));
// Ensure the function name isn't a forbidden identifier in strict mode, e.g. 'eval'
if (this.strict && node.id) { this.checkLValSimple(node.id, BIND_OUTSIDE); }
node.body = this.parseBlock(false, undefined, useStrict && !oldStrict);
node.expression = false;
this.adaptDirectivePrologue(node.body.body);
this.labels = oldLabels;
}
this.exitScope();
};
pp$5.isSimpleParamList = function(params) {
for (var i = 0, list = params; i < list.length; i += 1)
{
var param = list[i];
if (param.type !== "Identifier") { return false
} }
return true
};
// Checks function params for various disallowed patterns such as using "eval"
// or "arguments" and duplicate parameters.
pp$5.checkParams = function(node, allowDuplicates) {
var nameHash = Object.create(null);
for (var i = 0, list = node.params; i < list.length; i += 1)
{
var param = list[i];
this.checkLValInnerPattern(param, BIND_VAR, allowDuplicates ? null : nameHash);
}
};
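// Illustrative example (not part of acorn): in strict mode checkParams is called with
// allowDuplicates = false, so input such as
//   "use strict"; function f(a, a) {}
// puts both bindings of `a` into the shared nameHash and the second one is reported as a
// recoverable parse error, while a sloppy-mode simple parameter list allows the duplicate.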
// Parses a comma-separated list of expressions, and returns them as
// an array. `close` is the token type that ends the list, and
// `allowEmpty` can be turned on to allow subsequent commas with
// nothing in between them to be parsed as `null` (which is needed
// for array literals).
pp$5.parseExprList = function(close, allowTrailingComma, allowEmpty, refDestructuringErrors) {
var elts = [], first = true;
while (!this.eat(close)) {
if (!first) {
this.expect(types$1.comma);
if (allowTrailingComma && this.afterTrailingComma(close)) { break }
} else { first = false; }
var elt = (void 0);
if (allowEmpty && this.type === types$1.comma)
{ elt = null; }
else if (this.type === types$1.ellipsis) {
elt = this.parseSpread(refDestructuringErrors);
if (refDestructuringErrors && this.type === types$1.comma && refDestructuringErrors.trailingComma < 0)
{ refDestructuringErrors.trailingComma = this.start; }
} else {
elt = this.parseMaybeAssign(false, refDestructuringErrors);
}
elts.push(elt);
}
return elts
};
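// Illustrative example (not part of acorn): when this is used for array literals with
// `allowEmpty` on, an elision such as `[1, , 3]` yields a `null` element for the hole,
// and `allowTrailingComma` is what lets `[1, 2,]` close without adding an extra entry.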
pp$5.checkUnreserved = function(ref) {
var start = ref.start;
var end = ref.end;
var name = ref.name;
if (this.inGenerator && name === "yield")
{ this.raiseRecoverable(start, "Cannot use 'yield' as identifier inside a generator"); }
if (this.inAsync && name === "await")
{ this.raiseRecoverable(start, "Cannot use 'await' as identifier inside an async function"); }
if (this.currentThisScope().inClassFieldInit && name === "arguments")
{ this.raiseRecoverable(start, "Cannot use 'arguments' in class field initializer"); }
if (this.inClassStaticBlock && (name === "arguments" || name === "await"))
{ this.raise(start, ("Cannot use " + name + " in class static initialization block")); }
if (this.keywords.test(name))
{ this.raise(start, ("Unexpected keyword '" + name + "'")); }
if (this.options.ecmaVersion < 6 &&
this.input.slice(start, end).indexOf("\\") !== -1) { return }
var re = this.strict ? this.reservedWordsStrict : this.reservedWords;
if (re.test(name)) {
if (!this.inAsync && name === "await")
{ this.raiseRecoverable(start, "Cannot use keyword 'await' outside an async function"); }
this.raiseRecoverable(start, ("The keyword '" + name + "' is reserved"));
}
};
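// Illustrative example (not part of acorn): `function* g() { var yield; }` and
// `async function f() { var await; }` are both rejected here with the corresponding
// recoverable "Cannot use ... as identifier" errors raised above.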
// Parse the next token as an identifier. If `liberal` is true (used
// when parsing properties), it will also convert keywords into
// identifiers.
pp$5.parseIdent = function(liberal) {
var node = this.parseIdentNode();
this.next(!!liberal);
this.finishNode(node, "Identifier");
if (!liberal) {
this.checkUnreserved(node);
if (node.name === "await" && !this.awaitIdentPos)
{ this.awaitIdentPos = node.start; }
}
return node
};
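// Illustrative example (not part of acorn): in property-name position parseIdent is called
// with `liberal` set, so `({ if: 1, class: 2 })` parses; the keyword tokens are converted to
// identifiers and checkUnreserved is skipped, whereas in ordinary expression position the
// same words would be rejected.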
pp$5.parseIdentNode = function() {
var node = this.startNode();
if (this.type === types$1.name) {
node.name = this.value;
} else if (this.type.keyword) {
node.name = this.type.keyword;
// To fix https://github.com/acornjs/acorn/issues/575
// `class` and `function` keywords push new context into this.context.
// But there is no chance to pop the context if the keyword is consumed as an identifier such as a property name.
// If the previous token is a dot, this does not apply because the context-managing code already ignored the keyword
if ((node.name === "class" || node.name === "function") &&
(this.lastTokEnd !== this.lastTokStart + 1 || this.input.charCodeAt(this.lastTokStart) !== 46)) {
this.context.pop();
}
} else {
this.unexpected();
}
return node
};
pp$5.parsePrivateIdent = function() {
var node = this.startNode();
if (this.type === types$1.privateId) {
node.name = this.value;
} else {
this.unexpected();
}
this.next();
this.finishNode(node, "PrivateIdentifier");
// For validating that the private name is declared in an enclosing class
if (this.options.checkPrivateFields) {
if (this.privateNameStack.length === 0) {
this.raise(node.start, ("Private field '#" + (node.name) + "' must be declared in an enclosing class"));
} else {
this.privateNameStack[this.privateNameStack.length - 1].used.push(node);
}
}
return node
};
// Parses yield expression inside generator.
pp$5.parseYield = function(forInit) {
if (!this.yieldPos) { this.yieldPos = this.start; }
var node = this.startNode();
this.next();
if (this.type === types$1.semi || this.canInsertSemicolon() || (this.type !== types$1.star && !this.type.startsExpr)) {
node.delegate = false;
node.argument = null;
} else {
node.delegate = this.eat(types$1.star);
node.argument = this.parseMaybeAssign(forInit);
}
return this.finishNode(node, "YieldExpression")
};
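// Illustrative example (not part of acorn): inside a generator, a bare `yield` followed by a
// newline or `}` takes the first branch (delegate = false, argument = null), while
// `yield* other()` sets delegate = true with the call expression as the argument.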
pp$5.parseAwait = function(forInit) {
if (!this.awaitPos) { this.awaitPos = this.start; }
var node = this.startNode();
this.next();
node.argument = this.parseMaybeUnary(null, true, false, forInit);
return this.finishNode(node, "AwaitExpression")
};
var pp$4 = Parser$1.prototype;
// This function is used to raise exceptions on parse errors. It
// takes an offset integer (into the current `input`) to indicate
// the location of the error, attaches the position to the end
// of the error message, and then raises a `SyntaxError` with that
// message.
pp$4.raise = function(pos, message) {
var loc = getLineInfo(this.input, pos);
message += " (" + loc.line + ":" + loc.column + ")";
var err = new SyntaxError(message);
err.pos = pos; err.loc = loc; err.raisedAt = this.pos;
throw err
};
pp$4.raiseRecoverable = pp$4.raise;
pp$4.curPosition = function() {
if (this.options.locations) {
return new Position(this.curLine, this.pos - this.lineStart)
}
};
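// Illustrative example (not part of acorn): errors raised this way carry the position in the
// message, e.g. something of the form "Unexpected token (3:14)", plus `pos`, `loc` and
// `raisedAt` properties on the SyntaxError object for callers that want structured locations.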
var pp$3 = Parser$1.prototype;
var Scope = function Scope(flags) {
this.flags = flags;
// A list of var-declared names in the current lexical scope
this.var = [];
// A list of lexically-declared names in the current lexical scope
this.lexical = [];
// A list of lexically-declared FunctionDeclaration names in the current lexical scope
this.functions = [];
// A switch to disallow the identifier reference 'arguments'
this.inClassFieldInit = false;
};
// The functions in this module keep track of declared variables in the current scope in order to detect duplicate variable names.
pp$3.enterScope = function(flags) {
this.scopeStack.push(new Scope(flags));
};
pp$3.exitScope = function() {
this.scopeStack.pop();
};
// The spec says:
// > At the top level of a function, or script, function declarations are
// > treated like var declarations rather than like lexical declarations.
pp$3.treatFunctionsAsVarInScope = function(scope) {
return (scope.flags & SCOPE_FUNCTION) || !this.inModule && (scope.flags & SCOPE_TOP)
};
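// Illustrative example (not part of acorn): at the top level of a sloppy-mode script,
// `function f() {} var f;` is not a redeclaration because the function is treated like a
// var declaration, while inside a block `{ function f() {} let f; }` the same pair is
// rejected by declareName below.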
pp$3.declareName = function(name, bindingType, pos) {
var redeclared = false;
if (bindingType === BIND_LEXICAL) {
var scope = this.currentScope();
redeclared = scope.lexical.indexOf(name) > -1 || scope.functions.indexOf(name) > -1 || scope.var.indexOf(name) > -1;
scope.lexical.push(name);
if (this.inModule && (scope.flags & SCOPE_TOP))
{ delete this.undefinedExports[name]; }
} else if (bindingType === BIND_SIMPLE_CATCH) {
var scope$1 = this.currentScope();
scope$1.lexical.push(name);
} else if (bindingType === BIND_FUNCTION) {
var scope$2 = this.currentScope();
if (this.treatFunctionsAsVar)
{ redeclared = scope$2.lexical.indexOf(name) > -1; }
else
{ redeclared = scope$2.lexical.indexOf(name) > -1 || scope$2.var.indexOf(name) > -1; }
scope$2.functions.push(name);
} else {
for (var i = this.scopeStack.length - 1; i >= 0; --i) {
var scope$3 = this.scopeStack[i];
if (scope$3.lexical.indexOf(name) > -1 && !((scope$3.flags & SCOPE_SIMPLE_CATCH) && scope$3.lexical[0] === name) ||
!this.treatFunctionsAsVarInScope(scope$3) && scope$3.functions.indexOf(name) > -1) {
redeclared = true;
break
}
scope$3.var.push(name);
if (this.inModule && (scope$3.flags & SCOPE_TOP))
{ delete this.undefinedExports[name]; }
if (scope$3.flags & SCOPE_VAR) { break }
}
}
if (redeclared) { this.raiseRecoverable(pos, ("Identifier '" + name + "' has already been declared")); }
};
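// Illustrative example (not part of acorn): `let x; var x;` ends up here, since the later
// var declaration walks the scope stack, finds `x` already in scope.lexical and raises
// "Identifier 'x' has already been declared".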
pp$3.checkLocalExport = function(id) {
// scope.functions must be empty as Module code is always strict.
if (this.scopeStack[0].lexical.indexOf(id.name) === -1 &&
this.scopeStack[0].var.indexOf(id.name) === -1) {
this.undefinedExports[id.name] = id;
}
};
pp$3.currentScope = function() {
return this.scopeStack[this.scopeStack.length - 1]
};
pp$3.currentVarScope = function() {
for (var i = this.scopeStack.length - 1;; i--) {
var scope = this.scopeStack[i];
if (scope.flags & SCOPE_VAR) { return scope }
}
};
// Could be useful for `this`, `new.target`, `super()`, `super.property`, and `super[property]`.
pp$3.currentThisScope = function() {
for (var i = this.scopeStack.length - 1;; i--) {
var scope = this.scopeStack[i];
if (scope.flags & SCOPE_VAR && !(scope.flags & SCOPE_ARROW)) { return scope }
}
};
var Node = function Node(parser, pos, loc) {
this.type = "";
this.start = pos;
this.end = 0;
if (parser.options.locations)
{ this.loc = new SourceLocation(parser, loc); }
if (parser.options.directSourceFile)
{ this.sourceFile = parser.options.directSourceFile; }
if (parser.options.ranges)
{ this.range = [pos, 0]; }
};
// Start an AST node, attaching a start offset.
var pp$2 = Parser$1.prototype;
pp$2.startNode = function() {
return new Node(this, this.start, this.startLoc)
};
pp$2.startNodeAt = function(pos, loc) {
return new Node(this, pos, loc)
};
// Finish an AST node, adding `type` and `end` properties.
function finishNodeAt(node, type, pos, loc) {
node.type = type;
node.end = pos;
if (this.options.locations)
{ node.loc.end = loc; }
if (this.options.ranges)
{ node.range[1] = pos; }
return node
}
pp$2.finishNode = function(node, type) {
return finishNodeAt.call(this, node, type, this.lastTokEnd, this.lastTokEndLoc)
};
// Finish node at given position
pp$2.finishNodeAt = function(node, type, pos, loc) {
return finishNodeAt.call(this, node, type, pos, loc)
};
pp$2.copyNode = function(node) {
var newNode = new Node(this, node.start, this.startLoc);
for (var prop in node) { newNode[prop] = node[prop]; }
return newNode
};
// This file contains Unicode properties extracted from the ECMAScript specification.
// The lists are extracted like so:
// $$('#table-binary-unicode-properties > figure > table > tbody > tr > td:nth-child(1) code').map(el => el.innerText)
// #table-binary-unicode-properties
var ecma9BinaryProperties = "ASCII ASCII_Hex_Digit AHex Alphabetic Alpha Any Assigned Bidi_Control Bidi_C Bidi_Mirrored Bidi_M Case_Ignorable CI Cased Changes_When_Casefolded CWCF Changes_When_Casemapped CWCM Changes_When_Lowercased CWL Changes_When_NFKC_Casefolded CWKCF Changes_When_Titlecased CWT Changes_When_Uppercased CWU Dash Default_Ignorable_Code_Point DI Deprecated Dep Diacritic Dia Emoji Emoji_Component Emoji_Modifier Emoji_Modifier_Base Emoji_Presentation Extender Ext Grapheme_Base Gr_Base Grapheme_Extend Gr_Ext Hex_Digit Hex IDS_Binary_Operator IDSB IDS_Trinary_Operator IDST ID_Continue IDC ID_Start IDS Ideographic Ideo Join_Control Join_C Logical_Order_Exception LOE Lowercase Lower Math Noncharacter_Code_Point NChar Pattern_Syntax Pat_Syn Pattern_White_Space Pat_WS Quotation_Mark QMark Radical Regional_Indicator RI Sentence_Terminal STerm Soft_Dotted SD Terminal_Punctuation Term Unified_Ideograph UIdeo Uppercase Upper Variation_Selector VS White_Space space XID_Continue XIDC XID_Start XIDS";
var ecma10BinaryProperties = ecma9BinaryProperties + " Extended_Pictographic";
var ecma11BinaryProperties = ecma10BinaryProperties;
var ecma12BinaryProperties = ecma11BinaryProperties + " EBase EComp EMod EPres ExtPict";
var ecma13BinaryProperties = ecma12BinaryProperties;
var ecma14BinaryProperties = ecma13BinaryProperties;
var unicodeBinaryProperties = {
9: ecma9BinaryProperties,
10: ecma10BinaryProperties,
11: ecma11BinaryProperties,
12: ecma12BinaryProperties,
13: ecma13BinaryProperties,
14: ecma14BinaryProperties
};
// #table-binary-unicode-properties-of-strings
var ecma14BinaryPropertiesOfStrings = "Basic_Emoji Emoji_Keycap_Sequence RGI_Emoji_Modifier_Sequence RGI_Emoji_Flag_Sequence RGI_Emoji_Tag_Sequence RGI_Emoji_ZWJ_Sequence RGI_Emoji";
var unicodeBinaryPropertiesOfStrings = {
9: "",
10: "",
11: "",
12: "",
13: "",
14: ecma14BinaryPropertiesOfStrings
};
// #table-unicode-general-category-values
var unicodeGeneralCategoryValues = "Cased_Letter LC Close_Punctuation Pe Connector_Punctuation Pc Control Cc cntrl Currency_Symbol Sc Dash_Punctuation Pd Decimal_Number Nd digit Enclosing_Mark Me Final_Punctuation Pf Format Cf Initial_Punctuation Pi Letter L Letter_Number Nl Line_Separator Zl Lowercase_Letter Ll Mark M Combining_Mark Math_Symbol Sm Modifier_Letter Lm Modifier_Symbol Sk Nonspacing_Mark Mn Number N Open_Punctuation Ps Other C Other_Letter Lo Other_Number No Other_Punctuation Po Other_Symbol So Paragraph_Separator Zp Private_Use Co Punctuation P punct Separator Z Space_Separator Zs Spacing_Mark Mc Surrogate Cs Symbol S Titlecase_Letter Lt Unassigned Cn Uppercase_Letter Lu";
// #table-unicode-script-values
var ecma9ScriptValues = "Adlam Adlm Ahom Anatolian_Hieroglyphs Hluw Arabic Arab Armenian Armn Avestan Avst Balinese Bali Bamum Bamu Bassa_Vah Bass Batak Batk Bengali Beng Bhaiksuki Bhks Bopomofo Bopo Brahmi Brah Braille Brai Buginese Bugi Buhid Buhd Canadian_Aboriginal Cans Carian Cari Caucasian_Albanian Aghb Chakma Cakm Cham Cham Cherokee Cher Common Zyyy Coptic Copt Qaac Cuneiform Xsux Cypriot Cprt Cyrillic Cyrl Deseret Dsrt Devanagari Deva Duployan Dupl Egyptian_Hieroglyphs Egyp Elbasan Elba Ethiopic Ethi Georgian Geor Glagolitic Glag Gothic Goth Grantha Gran Greek Grek Gujarati Gujr Gurmukhi Guru Han Hani Hangul Hang Hanunoo Hano Hatran Hatr Hebrew Hebr Hiragana Hira Imperial_Aramaic Armi Inherited Zinh Qaai Inscriptional_Pahlavi Phli Inscriptional_Parthian Prti Javanese Java Kaithi Kthi Kannada Knda Katakana Kana Kayah_Li Kali Kharoshthi Khar Khmer Khmr Khojki Khoj Khudawadi Sind Lao Laoo Latin Latn Lepcha Lepc Limbu Limb Linear_A Lina Linear_B Linb Lisu Lisu Lycian Lyci Lydian Lydi Mahajani Mahj Malayalam Mlym Mandaic Mand Manichaean Mani Marchen Marc Masaram_Gondi Gonm Meetei_Mayek Mtei Mende_Kikakui Mend Meroitic_Cursive Merc Meroitic_Hieroglyphs Mero Miao Plrd Modi Mongolian Mong Mro Mroo Multani Mult Myanmar Mymr Nabataean Nbat New_Tai_Lue Talu Newa Newa Nko Nkoo Nushu Nshu Ogham Ogam Ol_Chiki Olck Old_Hungarian Hung Old_Italic Ital Old_North_Arabian Narb Old_Permic Perm Old_Persian Xpeo Old_South_Arabian Sarb Old_Turkic Orkh Oriya Orya Osage Osge Osmanya Osma Pahawh_Hmong Hmng Palmyrene Palm Pau_Cin_Hau Pauc Phags_Pa Phag Phoenician Phnx Psalter_Pahlavi Phlp Rejang Rjng Runic Runr Samaritan Samr Saurashtra Saur Sharada Shrd Shavian Shaw Siddham Sidd SignWriting Sgnw Sinhala Sinh Sora_Sompeng Sora Soyombo Soyo Sundanese Sund Syloti_Nagri Sylo Syriac Syrc Tagalog Tglg Tagbanwa Tagb Tai_Le Tale Tai_Tham Lana Tai_Viet Tavt Takri Takr Tamil Taml Tangut Tang Telugu Telu Thaana Thaa Thai Thai Tibetan Tibt Tifinagh Tfng Tirhuta Tirh Ugaritic Ugar Vai Vaii Warang_Citi Wara Yi Yiii Zanabazar_Square Zanb";
var ecma10ScriptValues = ecma9ScriptValues + " Dogra Dogr Gunjala_Gondi Gong Hanifi_Rohingya Rohg Makasar Maka Medefaidrin Medf Old_Sogdian Sogo Sogdian Sogd";
var ecma11ScriptValues = ecma10ScriptValues + " Elymaic Elym Nandinagari Nand Nyiakeng_Puachue_Hmong Hmnp Wancho Wcho";
var ecma12ScriptValues = ecma11ScriptValues + " Chorasmian Chrs Diak Dives_Akuru Khitan_Small_Script Kits Yezi Yezidi";
var ecma13ScriptValues = ecma12ScriptValues + " Cypro_Minoan Cpmn Old_Uyghur Ougr Tangsa Tnsa Toto Vithkuqi Vith";
var ecma14ScriptValues = ecma13ScriptValues + " Hrkt Katakana_Or_Hiragana Kawi Nag_Mundari Nagm Unknown Zzzz";
var unicodeScriptValues = {
9: ecma9ScriptValues,
10: ecma10ScriptValues,
11: ecma11ScriptValues,
12: ecma12ScriptValues,
13: ecma13ScriptValues,
14: ecma14ScriptValues
};
var data = {};
function buildUnicodeData(ecmaVersion) {
var d = data[ecmaVersion] = {
binary: wordsRegexp(unicodeBinaryProperties[ecmaVersion] + " " + unicodeGeneralCategoryValues),
binaryOfStrings: wordsRegexp(unicodeBinaryPropertiesOfStrings[ecmaVersion]),
nonBinary: {
General_Category: wordsRegexp(unicodeGeneralCategoryValues),
Script: wordsRegexp(unicodeScriptValues[ecmaVersion])
}
};
d.nonBinary.Script_Extensions = d.nonBinary.Script;
d.nonBinary.gc = d.nonBinary.General_Category;
d.nonBinary.sc = d.nonBinary.Script;
d.nonBinary.scx = d.nonBinary.Script_Extensions;
}
for (var i$1 = 0, list = [9, 10, 11, 12, 13, 14]; i$1 < list.length; i$1 += 1) {
var ecmaVersion = list[i$1];
buildUnicodeData(ecmaVersion);
}
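// Illustrative example (not part of acorn): the per-version tables built above are what let
// the /u and /v validators accept `/\p{Script=Greek}/u` and `/\p{Letter}/u` while rejecting
// a misspelled name such as `/\p{Scrpt=Greek}/u` with "Invalid property name".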
var pp$1 = Parser$1.prototype;
var RegExpValidationState = function RegExpValidationState(parser) {
this.parser = parser;
this.validFlags = "gim" + (parser.options.ecmaVersion >= 6 ? "uy" : "") + (parser.options.ecmaVersion >= 9 ? "s" : "") + (parser.options.ecmaVersion >= 13 ? "d" : "") + (parser.options.ecmaVersion >= 15 ? "v" : "");
this.unicodeProperties = data[parser.options.ecmaVersion >= 14 ? 14 : parser.options.ecmaVersion];
this.source = "";
this.flags = "";
this.start = 0;
this.switchU = false;
this.switchV = false;
this.switchN = false;
this.pos = 0;
this.lastIntValue = 0;
this.lastStringValue = "";
this.lastAssertionIsQuantifiable = false;
this.numCapturingParens = 0;
this.maxBackReference = 0;
this.groupNames = [];
this.backReferenceNames = [];
};
RegExpValidationState.prototype.reset = function reset (start, pattern, flags) {
var unicodeSets = flags.indexOf("v") !== -1;
var unicode = flags.indexOf("u") !== -1;
this.start = start | 0;
this.source = pattern + "";
this.flags = flags;
if (unicodeSets && this.parser.options.ecmaVersion >= 15) {
this.switchU = true;
this.switchV = true;
this.switchN = true;
} else {
this.switchU = unicode && this.parser.options.ecmaVersion >= 6;
this.switchV = false;
this.switchN = unicode && this.parser.options.ecmaVersion >= 9;
}
};
RegExpValidationState.prototype.raise = function raise (message) {
this.parser.raiseRecoverable(this.start, ("Invalid regular expression: /" + (this.source) + "/: " + message));
};
// If the u flag is given, this returns the code point at the index (combining a surrogate pair).
// Otherwise, this returns the code unit at the index (which can be part of a surrogate pair).
RegExpValidationState.prototype.at = function at (i, forceU) {
if ( forceU === void 0 ) forceU = false;
var s = this.source;
var l = s.length;
if (i >= l) {
return -1
}
var c = s.charCodeAt(i);
if (!(forceU || this.switchU) || c <= 0xD7FF || c >= 0xE000 || i + 1 >= l) {
return c
}
var next = s.charCodeAt(i + 1);
return next >= 0xDC00 && next <= 0xDFFF ? (c << 10) + next - 0x35FDC00 : c
};
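// Illustrative arithmetic (not part of acorn): for a lead surrogate L and trail surrogate T,
// (L << 10) + T - 0x35FDC00 equals (L - 0xD800) * 0x400 + (T - 0xDC00) + 0x10000, i.e. the
// combined astral code point; for example L = 0xD83D, T = 0xDE00 gives 0x1F600.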
RegExpValidationState.prototype.nextIndex = function nextIndex (i, forceU) {
if ( forceU === void 0 ) forceU = false;
var s = this.source;
var l = s.length;
if (i >= l) {
return l
}
var c = s.charCodeAt(i), next;
if (!(forceU || this.switchU) || c <= 0xD7FF || c >= 0xE000 || i + 1 >= l ||
(next = s.charCodeAt(i + 1)) < 0xDC00 || next > 0xDFFF) {
return i + 1
}
return i + 2
};
RegExpValidationState.prototype.current = function current (forceU) {
if ( forceU === void 0 ) forceU = false;
return this.at(this.pos, forceU)
};
RegExpValidationState.prototype.lookahead = function lookahead (forceU) {
if ( forceU === void 0 ) forceU = false;
return this.at(this.nextIndex(this.pos, forceU), forceU)
};
RegExpValidationState.prototype.advance = function advance (forceU) {
if ( forceU === void 0 ) forceU = false;
this.pos = this.nextIndex(this.pos, forceU);
};
RegExpValidationState.prototype.eat = function eat (ch, forceU) {
if ( forceU === void 0 ) forceU = false;
if (this.current(forceU) === ch) {
this.advance(forceU);
return true
}
return false
};
RegExpValidationState.prototype.eatChars = function eatChars (chs, forceU) {
if ( forceU === void 0 ) forceU = false;
var pos = this.pos;
for (var i = 0, list = chs; i < list.length; i += 1) {
var ch = list[i];
var current = this.at(pos, forceU);
if (current === -1 || current !== ch) {
return false
}
pos = this.nextIndex(pos, forceU);
}
this.pos = pos;
return true
};
/**
* Validate the flags part of a given RegExpLiteral.
*
* @param {RegExpValidationState} state The state to validate RegExp.
* @returns {void}
*/
pp$1.validateRegExpFlags = function(state) {
var validFlags = state.validFlags;
var flags = state.flags;
var u = false;
var v = false;
for (var i = 0; i < flags.length; i++) {
var flag = flags.charAt(i);
if (validFlags.indexOf(flag) === -1) {
this.raise(state.start, "Invalid regular expression flag");
}
if (flags.indexOf(flag, i + 1) > -1) {
this.raise(state.start, "Duplicate regular expression flag");
}
if (flag === "u") { u = true; }
if (flag === "v") { v = true; }
}
if (this.options.ecmaVersion >= 15 && u && v) {
this.raise(state.start, "Invalid regular expression flag");
}
};
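// Illustrative example (not part of acorn): `/a/gg` is rejected as a duplicate flag, `/a/x`
// as an invalid flag, and for ecmaVersion >= 15 the combination `/a/uv` is rejected because
// the `u` and `v` flags are mutually exclusive, as checked above.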
/**
* Validate the pattern part of a given RegExpLiteral.
*
* @param {RegExpValidationState} state The state to validate RegExp.
* @returns {void}
*/
pp$1.validateRegExpPattern = function(state) {
this.regexp_pattern(state);
// The goal symbol for the parse is |Pattern[~U, ~N]|. If the result of
// parsing contains a |GroupName|, reparse with the goal symbol
// |Pattern[~U, +N]| and use this result instead. Throw a *SyntaxError*
// exception if _P_ did not conform to the grammar, if any elements of _P_
// were not matched by the parse, or if any Early Error conditions exist.
if (!state.switchN && this.options.ecmaVersion >= 9 && state.groupNames.length > 0) {
state.switchN = true;
this.regexp_pattern(state);
}
};
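// Illustrative example (not part of acorn): a non-`u` pattern such as `(?<year>\d{4})-\k<year>`
// is first parsed with named-group semantics off; because a GroupName was seen, it is
// reparsed with switchN enabled so `\k<year>` is validated as a named back reference rather
// than treated as a literal escape.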
// https://www.ecma-international.org/ecma-262/8.0/#prod-Pattern
pp$1.regexp_pattern = function(state) {
state.pos = 0;
state.lastIntValue = 0;
state.lastStringValue = "";
state.lastAssertionIsQuantifiable = false;
state.numCapturingParens = 0;
state.maxBackReference = 0;
state.groupNames.length = 0;
state.backReferenceNames.length = 0;
this.regexp_disjunction(state);
if (state.pos !== state.source.length) {
// Make the same messages as V8.
if (state.eat(0x29 /* ) */)) {
state.raise("Unmatched ')'");
}
if (state.eat(0x5D /* ] */) || state.eat(0x7D /* } */)) {
state.raise("Lone quantifier brackets");
}
}
if (state.maxBackReference > state.numCapturingParens) {
state.raise("Invalid escape");
}
for (var i = 0, list = state.backReferenceNames; i < list.length; i += 1) {
var name = list[i];
if (state.groupNames.indexOf(name) === -1) {
state.raise("Invalid named capture referenced");
}
}
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-Disjunction
pp$1.regexp_disjunction = function(state) {
this.regexp_alternative(state);
while (state.eat(0x7C /* | */)) {
this.regexp_alternative(state);
}
// Make the same message as V8.
if (this.regexp_eatQuantifier(state, true)) {
state.raise("Nothing to repeat");
}
if (state.eat(0x7B /* { */)) {
state.raise("Lone quantifier brackets");
}
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-Alternative
pp$1.regexp_alternative = function(state) {
while (state.pos < state.source.length && this.regexp_eatTerm(state))
{ }
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-Term
pp$1.regexp_eatTerm = function(state) {
if (this.regexp_eatAssertion(state)) {
// Handle `QuantifiableAssertion Quantifier` alternative.
// `state.lastAssertionIsQuantifiable` is true if the last eaten Assertion
// is a QuantifiableAssertion.
if (state.lastAssertionIsQuantifiable && this.regexp_eatQuantifier(state)) {
// Make the same message as V8.
if (state.switchU) {
state.raise("Invalid quantifier");
}
}
return true
}
if (state.switchU ? this.regexp_eatAtom(state) : this.regexp_eatExtendedAtom(state)) {
this.regexp_eatQuantifier(state);
return true
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-Assertion
pp$1.regexp_eatAssertion = function(state) {
var start = state.pos;
state.lastAssertionIsQuantifiable = false;
// ^, $
if (state.eat(0x5E /* ^ */) || state.eat(0x24 /* $ */)) {
return true
}
// \b \B
if (state.eat(0x5C /* \ */)) {
if (state.eat(0x42 /* B */) || state.eat(0x62 /* b */)) {
return true
}
state.pos = start;
}
// Lookahead / Lookbehind
if (state.eat(0x28 /* ( */) && state.eat(0x3F /* ? */)) {
var lookbehind = false;
if (this.options.ecmaVersion >= 9) {
lookbehind = state.eat(0x3C /* < */);
}
if (state.eat(0x3D /* = */) || state.eat(0x21 /* ! */)) {
this.regexp_disjunction(state);
if (!state.eat(0x29 /* ) */)) {
state.raise("Unterminated group");
}
state.lastAssertionIsQuantifiable = !lookbehind;
return true
}
}
state.pos = start;
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-Quantifier
pp$1.regexp_eatQuantifier = function(state, noError) {
if ( noError === void 0 ) noError = false;
if (this.regexp_eatQuantifierPrefix(state, noError)) {
state.eat(0x3F /* ? */);
return true
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-QuantifierPrefix
pp$1.regexp_eatQuantifierPrefix = function(state, noError) {
return (
state.eat(0x2A /* * */) ||
state.eat(0x2B /* + */) ||
state.eat(0x3F /* ? */) ||
this.regexp_eatBracedQuantifier(state, noError)
)
};
pp$1.regexp_eatBracedQuantifier = function(state, noError) {
var start = state.pos;
if (state.eat(0x7B /* { */)) {
var min = 0, max = -1;
if (this.regexp_eatDecimalDigits(state)) {
min = state.lastIntValue;
if (state.eat(0x2C /* , */) && this.regexp_eatDecimalDigits(state)) {
max = state.lastIntValue;
}
if (state.eat(0x7D /* } */)) {
// SyntaxError in https://www.ecma-international.org/ecma-262/8.0/#sec-term
if (max !== -1 && max < min && !noError) {
state.raise("numbers out of order in {} quantifier");
}
return true
}
}
if (state.switchU && !noError) {
state.raise("Incomplete quantifier");
}
state.pos = start;
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-Atom
pp$1.regexp_eatAtom = function(state) {
return (
this.regexp_eatPatternCharacters(state) ||
state.eat(0x2E /* . */) ||
this.regexp_eatReverseSolidusAtomEscape(state) ||
this.regexp_eatCharacterClass(state) ||
this.regexp_eatUncapturingGroup(state) ||
this.regexp_eatCapturingGroup(state)
)
};
pp$1.regexp_eatReverseSolidusAtomEscape = function(state) {
var start = state.pos;
if (state.eat(0x5C /* \ */)) {
if (this.regexp_eatAtomEscape(state)) {
return true
}
state.pos = start;
}
return false
};
pp$1.regexp_eatUncapturingGroup = function(state) {
var start = state.pos;
if (state.eat(0x28 /* ( */)) {
if (state.eat(0x3F /* ? */) && state.eat(0x3A /* : */)) {
this.regexp_disjunction(state);
if (state.eat(0x29 /* ) */)) {
return true
}
state.raise("Unterminated group");
}
state.pos = start;
}
return false
};
pp$1.regexp_eatCapturingGroup = function(state) {
if (state.eat(0x28 /* ( */)) {
if (this.options.ecmaVersion >= 9) {
this.regexp_groupSpecifier(state);
} else if (state.current() === 0x3F /* ? */) {
state.raise("Invalid group");
}
this.regexp_disjunction(state);
if (state.eat(0x29 /* ) */)) {
state.numCapturingParens += 1;
return true
}
state.raise("Unterminated group");
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-ExtendedAtom
pp$1.regexp_eatExtendedAtom = function(state) {
return (
state.eat(0x2E /* . */) ||
this.regexp_eatReverseSolidusAtomEscape(state) ||
this.regexp_eatCharacterClass(state) ||
this.regexp_eatUncapturingGroup(state) ||
this.regexp_eatCapturingGroup(state) ||
this.regexp_eatInvalidBracedQuantifier(state) ||
this.regexp_eatExtendedPatternCharacter(state)
)
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-InvalidBracedQuantifier
pp$1.regexp_eatInvalidBracedQuantifier = function(state) {
if (this.regexp_eatBracedQuantifier(state, true)) {
state.raise("Nothing to repeat");
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-SyntaxCharacter
pp$1.regexp_eatSyntaxCharacter = function(state) {
var ch = state.current();
if (isSyntaxCharacter(ch)) {
state.lastIntValue = ch;
state.advance();
return true
}
return false
};
function isSyntaxCharacter(ch) {
return (
ch === 0x24 /* $ */ ||
ch >= 0x28 /* ( */ && ch <= 0x2B /* + */ ||
ch === 0x2E /* . */ ||
ch === 0x3F /* ? */ ||
ch >= 0x5B /* [ */ && ch <= 0x5E /* ^ */ ||
ch >= 0x7B /* { */ && ch <= 0x7D /* } */
)
}
// https://www.ecma-international.org/ecma-262/8.0/#prod-PatternCharacter
// But eats eagerly (consumes as many pattern characters as possible).
pp$1.regexp_eatPatternCharacters = function(state) {
var start = state.pos;
var ch = 0;
while ((ch = state.current()) !== -1 && !isSyntaxCharacter(ch)) {
state.advance();
}
return state.pos !== start
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-ExtendedPatternCharacter
pp$1.regexp_eatExtendedPatternCharacter = function(state) {
var ch = state.current();
if (
ch !== -1 &&
ch !== 0x24 /* $ */ &&
!(ch >= 0x28 /* ( */ && ch <= 0x2B /* + */) &&
ch !== 0x2E /* . */ &&
ch !== 0x3F /* ? */ &&
ch !== 0x5B /* [ */ &&
ch !== 0x5E /* ^ */ &&
ch !== 0x7C /* | */
) {
state.advance();
return true
}
return false
};
// GroupSpecifier ::
// [empty]
// `?` GroupName
pp$1.regexp_groupSpecifier = function(state) {
if (state.eat(0x3F /* ? */)) {
if (this.regexp_eatGroupName(state)) {
if (state.groupNames.indexOf(state.lastStringValue) !== -1) {
state.raise("Duplicate capture group name");
}
state.groupNames.push(state.lastStringValue);
return
}
state.raise("Invalid group");
}
};
// GroupName ::
// `<` RegExpIdentifierName `>`
// Note: this updates `state.lastStringValue` property with the eaten name.
pp$1.regexp_eatGroupName = function(state) {
state.lastStringValue = "";
if (state.eat(0x3C /* < */)) {
if (this.regexp_eatRegExpIdentifierName(state) && state.eat(0x3E /* > */)) {
return true
}
state.raise("Invalid capture group name");
}
return false
};
// RegExpIdentifierName ::
// RegExpIdentifierStart
// RegExpIdentifierName RegExpIdentifierPart
// Note: this updates `state.lastStringValue` property with the eaten name.
pp$1.regexp_eatRegExpIdentifierName = function(state) {
state.lastStringValue = "";
if (this.regexp_eatRegExpIdentifierStart(state)) {
state.lastStringValue += codePointToString(state.lastIntValue);
while (this.regexp_eatRegExpIdentifierPart(state)) {
state.lastStringValue += codePointToString(state.lastIntValue);
}
return true
}
return false
};
// RegExpIdentifierStart ::
// UnicodeIDStart
// `$`
// `_`
// `\` RegExpUnicodeEscapeSequence[+U]
pp$1.regexp_eatRegExpIdentifierStart = function(state) {
var start = state.pos;
var forceU = this.options.ecmaVersion >= 11;
var ch = state.current(forceU);
state.advance(forceU);
if (ch === 0x5C /* \ */ && this.regexp_eatRegExpUnicodeEscapeSequence(state, forceU)) {
ch = state.lastIntValue;
}
if (isRegExpIdentifierStart(ch)) {
state.lastIntValue = ch;
return true
}
state.pos = start;
return false
};
function isRegExpIdentifierStart(ch) {
return isIdentifierStart(ch, true) || ch === 0x24 /* $ */ || ch === 0x5F /* _ */
}
// RegExpIdentifierPart ::
// UnicodeIDContinue
// `$`
// `_`
// `\` RegExpUnicodeEscapeSequence[+U]
// <ZWNJ>
// <ZWJ>
pp$1.regexp_eatRegExpIdentifierPart = function(state) {
var start = state.pos;
var forceU = this.options.ecmaVersion >= 11;
var ch = state.current(forceU);
state.advance(forceU);
if (ch === 0x5C /* \ */ && this.regexp_eatRegExpUnicodeEscapeSequence(state, forceU)) {
ch = state.lastIntValue;
}
if (isRegExpIdentifierPart(ch)) {
state.lastIntValue = ch;
return true
}
state.pos = start;
return false
};
function isRegExpIdentifierPart(ch) {
return isIdentifierChar(ch, true) || ch === 0x24 /* $ */ || ch === 0x5F /* _ */ || ch === 0x200C /* <ZWNJ> */ || ch === 0x200D /* <ZWJ> */
}
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-AtomEscape
pp$1.regexp_eatAtomEscape = function(state) {
if (
this.regexp_eatBackReference(state) ||
this.regexp_eatCharacterClassEscape(state) ||
this.regexp_eatCharacterEscape(state) ||
(state.switchN && this.regexp_eatKGroupName(state))
) {
return true
}
if (state.switchU) {
// Make the same message as V8.
if (state.current() === 0x63 /* c */) {
state.raise("Invalid unicode escape");
}
state.raise("Invalid escape");
}
return false
};
pp$1.regexp_eatBackReference = function(state) {
var start = state.pos;
if (this.regexp_eatDecimalEscape(state)) {
var n = state.lastIntValue;
if (state.switchU) {
// For SyntaxError in https://www.ecma-international.org/ecma-262/8.0/#sec-atomescape
if (n > state.maxBackReference) {
state.maxBackReference = n;
}
return true
}
if (n <= state.numCapturingParens) {
return true
}
state.pos = start;
}
return false
};
pp$1.regexp_eatKGroupName = function(state) {
if (state.eat(0x6B /* k */)) {
if (this.regexp_eatGroupName(state)) {
state.backReferenceNames.push(state.lastStringValue);
return true
}
state.raise("Invalid named reference");
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-CharacterEscape
pp$1.regexp_eatCharacterEscape = function(state) {
return (
this.regexp_eatControlEscape(state) ||
this.regexp_eatCControlLetter(state) ||
this.regexp_eatZero(state) ||
this.regexp_eatHexEscapeSequence(state) ||
this.regexp_eatRegExpUnicodeEscapeSequence(state, false) ||
(!state.switchU && this.regexp_eatLegacyOctalEscapeSequence(state)) ||
this.regexp_eatIdentityEscape(state)
)
};
pp$1.regexp_eatCControlLetter = function(state) {
var start = state.pos;
if (state.eat(0x63 /* c */)) {
if (this.regexp_eatControlLetter(state)) {
return true
}
state.pos = start;
}
return false
};
pp$1.regexp_eatZero = function(state) {
if (state.current() === 0x30 /* 0 */ && !isDecimalDigit(state.lookahead())) {
state.lastIntValue = 0;
state.advance();
return true
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-ControlEscape
pp$1.regexp_eatControlEscape = function(state) {
var ch = state.current();
if (ch === 0x74 /* t */) {
state.lastIntValue = 0x09; /* \t */
state.advance();
return true
}
if (ch === 0x6E /* n */) {
state.lastIntValue = 0x0A; /* \n */
state.advance();
return true
}
if (ch === 0x76 /* v */) {
state.lastIntValue = 0x0B; /* \v */
state.advance();
return true
}
if (ch === 0x66 /* f */) {
state.lastIntValue = 0x0C; /* \f */
state.advance();
return true
}
if (ch === 0x72 /* r */) {
state.lastIntValue = 0x0D; /* \r */
state.advance();
return true
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-ControlLetter
pp$1.regexp_eatControlLetter = function(state) {
var ch = state.current();
if (isControlLetter(ch)) {
state.lastIntValue = ch % 0x20;
state.advance();
return true
}
return false
};
function isControlLetter(ch) {
return (
(ch >= 0x41 /* A */ && ch <= 0x5A /* Z */) ||
(ch >= 0x61 /* a */ && ch <= 0x7A /* z */)
)
}
// https://www.ecma-international.org/ecma-262/8.0/#prod-RegExpUnicodeEscapeSequence
pp$1.regexp_eatRegExpUnicodeEscapeSequence = function(state, forceU) {
if ( forceU === void 0 ) forceU = false;
var start = state.pos;
var switchU = forceU || state.switchU;
if (state.eat(0x75 /* u */)) {
if (this.regexp_eatFixedHexDigits(state, 4)) {
var lead = state.lastIntValue;
if (switchU && lead >= 0xD800 && lead <= 0xDBFF) {
var leadSurrogateEnd = state.pos;
if (state.eat(0x5C /* \ */) && state.eat(0x75 /* u */) && this.regexp_eatFixedHexDigits(state, 4)) {
var trail = state.lastIntValue;
if (trail >= 0xDC00 && trail <= 0xDFFF) {
state.lastIntValue = (lead - 0xD800) * 0x400 + (trail - 0xDC00) + 0x10000;
return true
}
}
state.pos = leadSurrogateEnd;
state.lastIntValue = lead;
}
return true
}
if (
switchU &&
state.eat(0x7B /* { */) &&
this.regexp_eatHexDigits(state) &&
state.eat(0x7D /* } */) &&
isValidUnicode(state.lastIntValue)
) {
return true
}
if (switchU) {
state.raise("Invalid unicode escape");
}
state.pos = start;
}
return false
};
function isValidUnicode(ch) {
return ch >= 0 && ch <= 0x10FFFF
}
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-IdentityEscape
pp$1.regexp_eatIdentityEscape = function(state) {
if (state.switchU) {
if (this.regexp_eatSyntaxCharacter(state)) {
return true
}
if (state.eat(0x2F /* / */)) {
state.lastIntValue = 0x2F; /* / */
return true
}
return false
}
var ch = state.current();
if (ch !== 0x63 /* c */ && (!state.switchN || ch !== 0x6B /* k */)) {
state.lastIntValue = ch;
state.advance();
return true
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-DecimalEscape
pp$1.regexp_eatDecimalEscape = function(state) {
state.lastIntValue = 0;
var ch = state.current();
if (ch >= 0x31 /* 1 */ && ch <= 0x39 /* 9 */) {
do {
state.lastIntValue = 10 * state.lastIntValue + (ch - 0x30 /* 0 */);
state.advance();
} while ((ch = state.current()) >= 0x30 /* 0 */ && ch <= 0x39 /* 9 */)
return true
}
return false
};
// Return values used by character set parsing methods, needed to
// forbid negation of sets that can match strings.
var CharSetNone = 0; // Nothing parsed
var CharSetOk = 1; // Construct parsed, cannot contain strings
var CharSetString = 2; // Construct parsed, can contain strings
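// Illustrative example (not part of acorn): with the `v` flag, `\p{RGI_Emoji}` is a property
// of strings and yields CharSetString, so a negated class like `[^\p{RGI_Emoji}]` is rejected
// with "Negated character class may contain strings", while `\p{Letter}` yields CharSetOk
// and may be negated freely.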
// https://www.ecma-international.org/ecma-262/8.0/#prod-CharacterClassEscape
pp$1.regexp_eatCharacterClassEscape = function(state) {
var ch = state.current();
if (isCharacterClassEscape(ch)) {
state.lastIntValue = -1;
state.advance();
return CharSetOk
}
var negate = false;
if (
state.switchU &&
this.options.ecmaVersion >= 9 &&
((negate = ch === 0x50 /* P */) || ch === 0x70 /* p */)
) {
state.lastIntValue = -1;
state.advance();
var result;
if (
state.eat(0x7B /* { */) &&
(result = this.regexp_eatUnicodePropertyValueExpression(state)) &&
state.eat(0x7D /* } */)
) {
if (negate && result === CharSetString) { state.raise("Invalid property name"); }
return result
}
state.raise("Invalid property name");
}
return CharSetNone
};
function isCharacterClassEscape(ch) {
return (
ch === 0x64 /* d */ ||
ch === 0x44 /* D */ ||
ch === 0x73 /* s */ ||
ch === 0x53 /* S */ ||
ch === 0x77 /* w */ ||
ch === 0x57 /* W */
)
}
// UnicodePropertyValueExpression ::
// UnicodePropertyName `=` UnicodePropertyValue
// LoneUnicodePropertyNameOrValue
pp$1.regexp_eatUnicodePropertyValueExpression = function(state) {
var start = state.pos;
// UnicodePropertyName `=` UnicodePropertyValue
if (this.regexp_eatUnicodePropertyName(state) && state.eat(0x3D /* = */)) {
var name = state.lastStringValue;
if (this.regexp_eatUnicodePropertyValue(state)) {
var value = state.lastStringValue;
this.regexp_validateUnicodePropertyNameAndValue(state, name, value);
return CharSetOk
}
}
state.pos = start;
// LoneUnicodePropertyNameOrValue
if (this.regexp_eatLoneUnicodePropertyNameOrValue(state)) {
var nameOrValue = state.lastStringValue;
return this.regexp_validateUnicodePropertyNameOrValue(state, nameOrValue)
}
return CharSetNone
};
pp$1.regexp_validateUnicodePropertyNameAndValue = function(state, name, value) {
if (!hasOwn(state.unicodeProperties.nonBinary, name))
{ state.raise("Invalid property name"); }
if (!state.unicodeProperties.nonBinary[name].test(value))
{ state.raise("Invalid property value"); }
};
pp$1.regexp_validateUnicodePropertyNameOrValue = function(state, nameOrValue) {
if (state.unicodeProperties.binary.test(nameOrValue)) { return CharSetOk }
if (state.switchV && state.unicodeProperties.binaryOfStrings.test(nameOrValue)) { return CharSetString }
state.raise("Invalid property name");
};
// UnicodePropertyName ::
// UnicodePropertyNameCharacters
pp$1.regexp_eatUnicodePropertyName = function(state) {
var ch = 0;
state.lastStringValue = "";
while (isUnicodePropertyNameCharacter(ch = state.current())) {
state.lastStringValue += codePointToString(ch);
state.advance();
}
return state.lastStringValue !== ""
};
function isUnicodePropertyNameCharacter(ch) {
return isControlLetter(ch) || ch === 0x5F /* _ */
}
// UnicodePropertyValue ::
// UnicodePropertyValueCharacters
pp$1.regexp_eatUnicodePropertyValue = function(state) {
var ch = 0;
state.lastStringValue = "";
while (isUnicodePropertyValueCharacter(ch = state.current())) {
state.lastStringValue += codePointToString(ch);
state.advance();
}
return state.lastStringValue !== ""
};
function isUnicodePropertyValueCharacter(ch) {
return isUnicodePropertyNameCharacter(ch) || isDecimalDigit(ch)
}
// LoneUnicodePropertyNameOrValue ::
// UnicodePropertyValueCharacters
pp$1.regexp_eatLoneUnicodePropertyNameOrValue = function(state) {
return this.regexp_eatUnicodePropertyValue(state)
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-CharacterClass
pp$1.regexp_eatCharacterClass = function(state) {
if (state.eat(0x5B /* [ */)) {
var negate = state.eat(0x5E /* ^ */);
var result = this.regexp_classContents(state);
if (!state.eat(0x5D /* ] */))
{ state.raise("Unterminated character class"); }
if (negate && result === CharSetString)
{ state.raise("Negated character class may contain strings"); }
return true
}
return false
};
// https://tc39.es/ecma262/#prod-ClassContents
// https://www.ecma-international.org/ecma-262/8.0/#prod-ClassRanges
pp$1.regexp_classContents = function(state) {
if (state.current() === 0x5D /* ] */) { return CharSetOk }
if (state.switchV) { return this.regexp_classSetExpression(state) }
this.regexp_nonEmptyClassRanges(state);
return CharSetOk
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-NonemptyClassRanges
// https://www.ecma-international.org/ecma-262/8.0/#prod-NonemptyClassRangesNoDash
pp$1.regexp_nonEmptyClassRanges = function(state) {
while (this.regexp_eatClassAtom(state)) {
var left = state.lastIntValue;
if (state.eat(0x2D /* - */) && this.regexp_eatClassAtom(state)) {
var right = state.lastIntValue;
if (state.switchU && (left === -1 || right === -1)) {
state.raise("Invalid character class");
}
if (left !== -1 && right !== -1 && left > right) {
state.raise("Range out of order in character class");
}
}
}
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-ClassAtom
// https://www.ecma-international.org/ecma-262/8.0/#prod-ClassAtomNoDash
pp$1.regexp_eatClassAtom = function(state) {
var start = state.pos;
if (state.eat(0x5C /* \ */)) {
if (this.regexp_eatClassEscape(state)) {
return true
}
if (state.switchU) {
// Make the same message as V8.
var ch$1 = state.current();
if (ch$1 === 0x63 /* c */ || isOctalDigit(ch$1)) {
state.raise("Invalid class escape");
}
state.raise("Invalid escape");
}
state.pos = start;
}
var ch = state.current();
if (ch !== 0x5D /* ] */) {
state.lastIntValue = ch;
state.advance();
return true
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-ClassEscape
pp$1.regexp_eatClassEscape = function(state) {
var start = state.pos;
if (state.eat(0x62 /* b */)) {
state.lastIntValue = 0x08; /* <BS> */
return true
}
if (state.switchU && state.eat(0x2D /* - */)) {
state.lastIntValue = 0x2D; /* - */
return true
}
if (!state.switchU && state.eat(0x63 /* c */)) {
if (this.regexp_eatClassControlLetter(state)) {
return true
}
state.pos = start;
}
return (
this.regexp_eatCharacterClassEscape(state) ||
this.regexp_eatCharacterEscape(state)
)
};
// https://tc39.es/ecma262/#prod-ClassSetExpression
// https://tc39.es/ecma262/#prod-ClassUnion
// https://tc39.es/ecma262/#prod-ClassIntersection
// https://tc39.es/ecma262/#prod-ClassSubtraction
pp$1.regexp_classSetExpression = function(state) {
var result = CharSetOk, subResult;
if (this.regexp_eatClassSetRange(state)) ; else if (subResult = this.regexp_eatClassSetOperand(state)) {
if (subResult === CharSetString) { result = CharSetString; }
// https://tc39.es/ecma262/#prod-ClassIntersection
var start = state.pos;
while (state.eatChars([0x26, 0x26] /* && */)) {
if (
state.current() !== 0x26 /* & */ &&
(subResult = this.regexp_eatClassSetOperand(state))
) {
if (subResult !== CharSetString) { result = CharSetOk; }
continue
}
state.raise("Invalid character in character class");
}
if (start !== state.pos) { return result }
// https://tc39.es/ecma262/#prod-ClassSubtraction
while (state.eatChars([0x2D, 0x2D] /* -- */)) {
if (this.regexp_eatClassSetOperand(state)) { continue }
state.raise("Invalid character in character class");
}
if (start !== state.pos) { return result }
} else {
state.raise("Invalid character in character class");
}
// https://tc39.es/ecma262/#prod-ClassUnion
for (;;) {
if (this.regexp_eatClassSetRange(state)) { continue }
subResult = this.regexp_eatClassSetOperand(state);
if (!subResult) { return result }
if (subResult === CharSetString) { result = CharSetString; }
}
};
// https://tc39.es/ecma262/#prod-ClassSetRange
pp$1.regexp_eatClassSetRange = function(state) {
var start = state.pos;
if (this.regexp_eatClassSetCharacter(state)) {
var left = state.lastIntValue;
if (state.eat(0x2D /* - */) && this.regexp_eatClassSetCharacter(state)) {
var right = state.lastIntValue;
if (left !== -1 && right !== -1 && left > right) {
state.raise("Range out of order in character class");
}
return true
}
state.pos = start;
}
return false
};
// https://tc39.es/ecma262/#prod-ClassSetOperand
pp$1.regexp_eatClassSetOperand = function(state) {
if (this.regexp_eatClassSetCharacter(state)) { return CharSetOk }
return this.regexp_eatClassStringDisjunction(state) || this.regexp_eatNestedClass(state)
};
// https://tc39.es/ecma262/#prod-NestedClass
pp$1.regexp_eatNestedClass = function(state) {
var start = state.pos;
if (state.eat(0x5B /* [ */)) {
var negate = state.eat(0x5E /* ^ */);
var result = this.regexp_classContents(state);
if (state.eat(0x5D /* ] */)) {
if (negate && result === CharSetString) {
state.raise("Negated character class may contain strings");
}
return result
}
state.pos = start;
}
if (state.eat(0x5C /* \ */)) {
var result$1 = this.regexp_eatCharacterClassEscape(state);
if (result$1) {
return result$1
}
state.pos = start;
}
return null
};
// https://tc39.es/ecma262/#prod-ClassStringDisjunction
pp$1.regexp_eatClassStringDisjunction = function(state) {
var start = state.pos;
if (state.eatChars([0x5C, 0x71] /* \q */)) {
if (state.eat(0x7B /* { */)) {
var result = this.regexp_classStringDisjunctionContents(state);
if (state.eat(0x7D /* } */)) {
return result
}
} else {
// Make the same message as V8.
state.raise("Invalid escape");
}
state.pos = start;
}
return null
};
// https://tc39.es/ecma262/#prod-ClassStringDisjunctionContents
pp$1.regexp_classStringDisjunctionContents = function(state) {
var result = this.regexp_classString(state);
while (state.eat(0x7C /* | */)) {
if (this.regexp_classString(state) === CharSetString) { result = CharSetString; }
}
return result
};
// https://tc39.es/ecma262/#prod-ClassString
// https://tc39.es/ecma262/#prod-NonEmptyClassString
pp$1.regexp_classString = function(state) {
var count = 0;
while (this.regexp_eatClassSetCharacter(state)) { count++; }
return count === 1 ? CharSetOk : CharSetString
};
// https://tc39.es/ecma262/#prod-ClassSetCharacter
pp$1.regexp_eatClassSetCharacter = function(state) {
var start = state.pos;
if (state.eat(0x5C /* \ */)) {
if (
this.regexp_eatCharacterEscape(state) ||
this.regexp_eatClassSetReservedPunctuator(state)
) {
return true
}
if (state.eat(0x62 /* b */)) {
state.lastIntValue = 0x08; /* <BS> */
return true
}
state.pos = start;
return false
}
var ch = state.current();
if (ch < 0 || ch === state.lookahead() && isClassSetReservedDoublePunctuatorCharacter(ch)) { return false }
if (isClassSetSyntaxCharacter(ch)) { return false }
state.advance();
state.lastIntValue = ch;
return true
};
// https://tc39.es/ecma262/#prod-ClassSetReservedDoublePunctuator
function isClassSetReservedDoublePunctuatorCharacter(ch) {
return (
ch === 0x21 /* ! */ ||
ch >= 0x23 /* # */ && ch <= 0x26 /* & */ ||
ch >= 0x2A /* * */ && ch <= 0x2C /* , */ ||
ch === 0x2E /* . */ ||
ch >= 0x3A /* : */ && ch <= 0x40 /* @ */ ||
ch === 0x5E /* ^ */ ||
ch === 0x60 /* ` */ ||
ch === 0x7E /* ~ */
)
}
// https://tc39.es/ecma262/#prod-ClassSetSyntaxCharacter
function isClassSetSyntaxCharacter(ch) {
return (
ch === 0x28 /* ( */ ||
ch === 0x29 /* ) */ ||
ch === 0x2D /* - */ ||
ch === 0x2F /* / */ ||
ch >= 0x5B /* [ */ && ch <= 0x5D /* ] */ ||
ch >= 0x7B /* { */ && ch <= 0x7D /* } */
)
}
// https://tc39.es/ecma262/#prod-ClassSetReservedPunctuator
pp$1.regexp_eatClassSetReservedPunctuator = function(state) {
var ch = state.current();
if (isClassSetReservedPunctuator(ch)) {
state.lastIntValue = ch;
state.advance();
return true
}
return false
};
// https://tc39.es/ecma262/#prod-ClassSetReservedPunctuator
function isClassSetReservedPunctuator(ch) {
return (
ch === 0x21 /* ! */ ||
ch === 0x23 /* # */ ||
ch === 0x25 /* % */ ||
ch === 0x26 /* & */ ||
ch === 0x2C /* , */ ||
ch === 0x2D /* - */ ||
ch >= 0x3A /* : */ && ch <= 0x3E /* > */ ||
ch === 0x40 /* @ */ ||
ch === 0x60 /* ` */ ||
ch === 0x7E /* ~ */
)
}
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-ClassControlLetter
pp$1.regexp_eatClassControlLetter = function(state) {
var ch = state.current();
if (isDecimalDigit(ch) || ch === 0x5F /* _ */) {
state.lastIntValue = ch % 0x20;
state.advance();
return true
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-HexEscapeSequence
pp$1.regexp_eatHexEscapeSequence = function(state) {
var start = state.pos;
if (state.eat(0x78 /* x */)) {
if (this.regexp_eatFixedHexDigits(state, 2)) {
return true
}
if (state.switchU) {
state.raise("Invalid escape");
}
state.pos = start;
}
return false
};
// https://www.ecma-international.org/ecma-262/8.0/#prod-DecimalDigits
pp$1.regexp_eatDecimalDigits = function(state) {
var start = state.pos;
var ch = 0;
state.lastIntValue = 0;
while (isDecimalDigit(ch = state.current())) {
state.lastIntValue = 10 * state.lastIntValue + (ch - 0x30 /* 0 */);
state.advance();
}
return state.pos !== start
};
function isDecimalDigit(ch) {
return ch >= 0x30 /* 0 */ && ch <= 0x39 /* 9 */
}
// https://www.ecma-international.org/ecma-262/8.0/#prod-HexDigits
pp$1.regexp_eatHexDigits = function(state) {
var start = state.pos;
var ch = 0;
state.lastIntValue = 0;
while (isHexDigit(ch = state.current())) {
state.lastIntValue = 16 * state.lastIntValue + hexToInt(ch);
state.advance();
}
return state.pos !== start
};
function isHexDigit(ch) {
return (
(ch >= 0x30 /* 0 */ && ch <= 0x39 /* 9 */) ||
(ch >= 0x41 /* A */ && ch <= 0x46 /* F */) ||
(ch >= 0x61 /* a */ && ch <= 0x66 /* f */)
)
}
function hexToInt(ch) {
if (ch >= 0x41 /* A */ && ch <= 0x46 /* F */) {
return 10 + (ch - 0x41 /* A */)
}
if (ch >= 0x61 /* a */ && ch <= 0x66 /* f */) {
return 10 + (ch - 0x61 /* a */)
}
return ch - 0x30 /* 0 */
}
// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-LegacyOctalEscapeSequence
// Allows only 0-377 (octal), i.e. 0-255 (decimal).
pp$1.regexp_eatLegacyOctalEscapeSequence = function(state) {
if (this.regexp_eatOctalDigit(state)) {
var n1 = state.lastIntValue;
if (this.regexp_eatOctalDigit(state)) {
var n2 = state.lastIntValue;
if (n1 <= 3 && this.regexp_eatOctalDigit(state)) {
state.lastIntValue = n1 * 64 + n2 * 8 + state.lastIntValue;
} else {
state.lastIntValue = n1 * 8 + n2;
}
} else {
state.lastIntValue = n1;
}
return true
}
return false
};
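// Illustrative example (not part of acorn): in an Annex B (non-`u`) pattern, `\101` is read
// as the octal escape for 0x41 ("A"), while `\477` stops after two digits (4 * 8 + 7 = 39)
// because a first digit above 3 cannot start a three-digit value within the 0-255 range.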
// https://www.ecma-international.org/ecma-262/8.0/#prod-OctalDigit
pp$1.regexp_eatOctalDigit = function(state) {
var ch = state.current();
if (isOctalDigit(ch)) {
state.lastIntValue = ch - 0x30; /* 0 */
state.advance();
return true
}
state.lastIntValue = 0;
return false
};
function isOctalDigit(ch) {
return ch >= 0x30 /* 0 */ && ch <= 0x37 /* 7 */
}
// https://www.ecma-international.org/ecma-262/8.0/#prod-Hex4Digits
// https://www.ecma-international.org/ecma-262/8.0/#prod-HexDigit
// And HexDigit HexDigit in https://www.ecma-international.org/ecma-262/8.0/#prod-HexEscapeSequence
pp$1.regexp_eatFixedHexDigits = function(state, length) {
var start = state.pos;
state.lastIntValue = 0;
for (var i = 0; i < length; ++i) {
var ch = state.current();
if (!isHexDigit(ch)) {
state.pos = start;
return false
}
state.lastIntValue = 16 * state.lastIntValue + hexToInt(ch);
state.advance();
}
return true
};
// Object type used to represent tokens. Note that normally, tokens
// simply exist as properties on the parser object. This is only
// used for the onToken callback and the external tokenizer.
var Token = function Token(p) {
this.type = p.type;
this.value = p.value;
this.start = p.start;
this.end = p.end;
if (p.options.locations)
{ this.loc = new SourceLocation(p, p.startLoc, p.endLoc); }
if (p.options.ranges)
{ this.range = [p.start, p.end]; }
};
// ## Tokenizer
var pp = Parser$1.prototype;
// Move to the next token
pp.next = function(ignoreEscapeSequenceInKeyword) {
if (!ignoreEscapeSequenceInKeyword && this.type.keyword && this.containsEsc)
{ this.raiseRecoverable(this.start, "Escape sequence in keyword " + this.type.keyword); }
if (this.options.onToken)
{ this.options.onToken(new Token(this)); }
this.lastTokEnd = this.end;
this.lastTokStart = this.start;
this.lastTokEndLoc = this.endLoc;
this.lastTokStartLoc = this.startLoc;
this.nextToken();
};
pp.getToken = function() {
this.next();
return new Token(this)
};
// If we're in an ES6 environment, make parsers iterable
if (typeof Symbol !== "undefined")
{ pp[Symbol.iterator] = function() {
var this$1$1 = this;
return {
next: function () {
var token = this$1$1.getToken();
return {
done: token.type === types$1.eof,
value: token
}
}
}
}; }
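// Illustrative usage sketch (assumes the standalone acorn package rather than this bundled
// copy; `tokenizer` and the iterator protocol above are what make the for..of loop work):
//
//   import { tokenizer } from "acorn";
//   for (const token of tokenizer("let x = 1", { ecmaVersion: 2020 })) {
//     // each `token` is a Token instance: { type, value, start, end, ... }
//   }
//
// Iteration stops once a token whose type is `eof` is produced, per the `done` flag above.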
// Read a single token, updating the parser object's token-related
// properties.
pp.nextToken = function() {
var curContext = this.curContext();
if (!curContext || !curContext.preserveSpace) { this.skipSpace(); }
this.start = this.pos;
if (this.options.locations) { this.startLoc = this.curPosition(); }
if (this.pos >= this.input.length) { return this.finishToken(types$1.eof) }
if (curContext.override) { return curContext.override(this) }
else { this.readToken(this.fullCharCodeAtPos()); }
};
pp.readToken = function(code) {
// Identifier or keyword. '\uXXXX' sequences are allowed in
// identifiers, so '\' also dispatches to that.
if (isIdentifierStart(code, this.options.ecmaVersion >= 6) || code === 92 /* '\' */)
{ return this.readWord() }
return this.getTokenFromCode(code)
};
pp.fullCharCodeAtPos = function() {
var code = this.input.charCodeAt(this.pos);
if (code <= 0xd7ff || code >= 0xdc00) { return code }
var next = this.input.charCodeAt(this.pos + 1);
return next <= 0xdbff || next >= 0xe000 ? code : (code << 10) + next - 0x35fdc00
};
pp.skipBlockComment = function() {
var startLoc = this.options.onComment && this.curPosition();
var start = this.pos, end = this.input.indexOf("*/", this.pos += 2);
if (end === -1) { this.raise(this.pos - 2, "Unterminated comment"); }
this.pos = end + 2;
if (this.options.locations) {
for (var nextBreak = (void 0), pos = start; (nextBreak = nextLineBreak(this.input, pos, this.pos)) > -1;) {
++this.curLine;
pos = this.lineStart = nextBreak;
}
}
if (this.options.onComment)
{ this.options.onComment(true, this.input.slice(start + 2, end), start, this.pos,
startLoc, this.curPosition()); }
};
pp.skipLineComment = function(startSkip) {
var start = this.pos;
var startLoc = this.options.onComment && this.curPosition();
var ch = this.input.charCodeAt(this.pos += startSkip);
while (this.pos < this.input.length && !isNewLine(ch)) {
ch = this.input.charCodeAt(++this.pos);
}
if (this.options.onComment)
{ this.options.onComment(false, this.input.slice(start + startSkip, this.pos), start, this.pos,
startLoc, this.curPosition()); }
};
// Called at the start of the parse and after every token. Skips
// whitespace and comments.
pp.skipSpace = function() {
loop: while (this.pos < this.input.length) {
var ch = this.input.charCodeAt(this.pos);
switch (ch) {
case 32: case 160: // ' '
++this.pos;
break
case 13:
if (this.input.charCodeAt(this.pos + 1) === 10) {
++this.pos;
}
case 10: case 8232: case 8233:
++this.pos;
if (this.options.locations) {
++this.curLine;
this.lineStart = this.pos;
}
break
case 47: // '/'
switch (this.input.charCodeAt(this.pos + 1)) {
case 42: // '*'
this.skipBlockComment();
break
case 47:
this.skipLineComment(2);
break
default:
break loop
}
break
default:
if (ch > 8 && ch < 14 || ch >= 5760 && nonASCIIwhitespace.test(String.fromCharCode(ch))) {
++this.pos;
} else {
break loop
}
}
}
};
// Called at the end of every token. Sets `end`, `val`, and
// maintains `context` and `exprAllowed`, and skips the space after
// the token, so that the next one's `start` will point at the
// right position.
pp.finishToken = function(type, val) {
this.end = this.pos;
if (this.options.locations) { this.endLoc = this.curPosition(); }
var prevType = this.type;
this.type = type;
this.value = val;
this.updateContext(prevType);
};
// ### Token reading
// This is the function that is called to fetch the next token. It
// is somewhat obscure, because it works in character codes rather
// than characters, and because operator parsing has been inlined
// into it.
//
// All in the name of speed.
//
pp.readToken_dot = function() {
var next = this.input.charCodeAt(this.pos + 1);
if (next >= 48 && next <= 57) { return this.readNumber(true) }
var next2 = this.input.charCodeAt(this.pos + 2);
if (this.options.ecmaVersion >= 6 && next === 46 && next2 === 46) { // 46 = dot '.'
this.pos += 3;
return this.finishToken(types$1.ellipsis)
} else {
++this.pos;
return this.finishToken(types$1.dot)
}
};
pp.readToken_slash = function() { // '/'
var next = this.input.charCodeAt(this.pos + 1);
if (this.exprAllowed) { ++this.pos; return this.readRegexp() }
if (next === 61) { return this.finishOp(types$1.assign, 2) }
return this.finishOp(types$1.slash, 1)
};
pp.readToken_mult_modulo_exp = function(code) { // '%*'
var next = this.input.charCodeAt(this.pos + 1);
var size = 1;
var tokentype = code === 42 ? types$1.star : types$1.modulo;
// exponentiation operator ** and **=
if (this.options.ecmaVersion >= 7 && code === 42 && next === 42) {
++size;
tokentype = types$1.starstar;
next = this.input.charCodeAt(this.pos + 2);
}
if (next === 61) { return this.finishOp(types$1.assign, size + 1) }
return this.finishOp(tokentype, size)
};
pp.readToken_pipe_amp = function(code) { // '|&'
var next = this.input.charCodeAt(this.pos + 1);
if (next === code) {
if (this.options.ecmaVersion >= 12) {
var next2 = this.input.charCodeAt(this.pos + 2);
if (next2 === 61) { return this.finishOp(types$1.assign, 3) }
}
return this.finishOp(code === 124 ? types$1.logicalOR : types$1.logicalAND, 2)
}
if (next === 61) { return this.finishOp(types$1.assign, 2) }
return this.finishOp(code === 124 ? types$1.bitwiseOR : types$1.bitwiseAND, 1)
};
pp.readToken_caret = function() { // '^'
var next = this.input.charCodeAt(this.pos + 1);
if (next === 61) { return this.finishOp(types$1.assign, 2) }
return this.finishOp(types$1.bitwiseXOR, 1)
};
pp.readToken_plus_min = function(code) { // '+-'
var next = this.input.charCodeAt(this.pos + 1);
if (next === code) {
if (next === 45 && !this.inModule && this.input.charCodeAt(this.pos + 2) === 62 &&
(this.lastTokEnd === 0 || lineBreak.test(this.input.slice(this.lastTokEnd, this.pos)))) {
// A `-->` line comment
this.skipLineComment(3);
this.skipSpace();
return this.nextToken()
}
return this.finishOp(types$1.incDec, 2)
}
if (next === 61) { return this.finishOp(types$1.assign, 2) }
return this.finishOp(types$1.plusMin, 1)
};
pp.readToken_lt_gt = function(code) { // '<>'
var next = this.input.charCodeAt(this.pos + 1);
var size = 1;
if (next === code) {
size = code === 62 && this.input.charCodeAt(this.pos + 2) === 62 ? 3 : 2;
if (this.input.charCodeAt(this.pos + size) === 61) { return this.finishOp(types$1.assign, size + 1) }
return this.finishOp(types$1.bitShift, size)
}
if (next === 33 && code === 60 && !this.inModule && this.input.charCodeAt(this.pos + 2) === 45 &&
this.input.charCodeAt(this.pos + 3) === 45) {
// `<!--`, an XML-style comment that should be interpreted as a line comment
this.skipLineComment(4);
this.skipSpace();
return this.nextToken()
}
if (next === 61) { size = 2; }
return this.finishOp(types$1.relational, size)
};
pp.readToken_eq_excl = function(code) { // '=!'
var next = this.input.charCodeAt(this.pos + 1);
if (next === 61) { return this.finishOp(types$1.equality, this.input.charCodeAt(this.pos + 2) === 61 ? 3 : 2) }
if (code === 61 && next === 62 && this.options.ecmaVersion >= 6) { // '=>'
this.pos += 2;
return this.finishToken(types$1.arrow)
}
return this.finishOp(code === 61 ? types$1.eq : types$1.prefix, 1)
};
pp.readToken_question = function() { // '?'
var ecmaVersion = this.options.ecmaVersion;
if (ecmaVersion >= 11) {
var next = this.input.charCodeAt(this.pos + 1);
if (next === 46) {
var next2 = this.input.charCodeAt(this.pos + 2);
if (next2 < 48 || next2 > 57) { return this.finishOp(types$1.questionDot, 2) }
}
if (next === 63) {
if (ecmaVersion >= 12) {
var next2$1 = this.input.charCodeAt(this.pos + 2);
if (next2$1 === 61) { return this.finishOp(types$1.assign, 3) }
}
return this.finishOp(types$1.coalesce, 2)
}
}
return this.finishOp(types$1.question, 1)
};
pp.readToken_numberSign = function() { // '#'
var ecmaVersion = this.options.ecmaVersion;
var code = 35; // '#'
if (ecmaVersion >= 13) {
++this.pos;
code = this.fullCharCodeAtPos();
if (isIdentifierStart(code, true) || code === 92 /* '\' */) {
return this.finishToken(types$1.privateId, this.readWord1())
}
}
this.raise(this.pos, "Unexpected character '" + codePointToString(code) + "'");
};
pp.getTokenFromCode = function(code) {
switch (code) {
// The interpretation of a dot depends on whether it is followed
// by a digit or another two dots.
case 46: // '.'
return this.readToken_dot()
// Punctuation tokens.
case 40: ++this.pos; return this.finishToken(types$1.parenL)
case 41: ++this.pos; return this.finishToken(types$1.parenR)
case 59: ++this.pos; return this.finishToken(types$1.semi)
case 44: ++this.pos; return this.finishToken(types$1.comma)
case 91: ++this.pos; return this.finishToken(types$1.bracketL)
case 93: ++this.pos; return this.finishToken(types$1.bracketR)
case 123: ++this.pos; return this.finishToken(types$1.braceL)
case 125: ++this.pos; return this.finishToken(types$1.braceR)
case 58: ++this.pos; return this.finishToken(types$1.colon)
case 96: // '`'
if (this.options.ecmaVersion < 6) { break }
++this.pos;
return this.finishToken(types$1.backQuote)
case 48: // '0'
var next = this.input.charCodeAt(this.pos + 1);
if (next === 120 || next === 88) { return this.readRadixNumber(16) } // '0x', '0X' - hex number
if (this.options.ecmaVersion >= 6) {
if (next === 111 || next === 79) { return this.readRadixNumber(8) } // '0o', '0O' - octal number
if (next === 98 || next === 66) { return this.readRadixNumber(2) } // '0b', '0B' - binary number
}
// Anything else beginning with a digit is an integer, octal
// number, or float.
case 49: case 50: case 51: case 52: case 53: case 54: case 55: case 56: case 57: // 1-9
return this.readNumber(false)
// Quotes produce strings.
case 34: case 39: // '"', "'"
return this.readString(code)
// Operators are parsed inline in tiny state machines. '=' (61) is
// often referred to. `finishOp` simply skips the number of
// characters it is given as second argument, and returns a token
// of the type given by its first argument.
case 47: // '/'
return this.readToken_slash()
case 37: case 42: // '%*'
return this.readToken_mult_modulo_exp(code)
case 124: case 38: // '|&'
return this.readToken_pipe_amp(code)
case 94: // '^'
return this.readToken_caret()
case 43: case 45: // '+-'
return this.readToken_plus_min(code)
case 60: case 62: // '<>'
return this.readToken_lt_gt(code)
case 61: case 33: // '=!'
return this.readToken_eq_excl(code)
case 63: // '?'
return this.readToken_question()
case 126: // '~'
return this.finishOp(types$1.prefix, 1)
case 35: // '#'
return this.readToken_numberSign()
}
this.raise(this.pos, "Unexpected character '" + codePointToString(code) + "'");
};
pp.finishOp = function(type, size) {
var str = this.input.slice(this.pos, this.pos + size);
this.pos += size;
return this.finishToken(type, str)
};
pp.readRegexp = function() {
var escaped, inClass, start = this.pos;
for (;;) {
if (this.pos >= this.input.length) { this.raise(start, "Unterminated regular expression"); }
var ch = this.input.charAt(this.pos);
if (lineBreak.test(ch)) { this.raise(start, "Unterminated regular expression"); }
if (!escaped) {
if (ch === "[") { inClass = true; }
else if (ch === "]" && inClass) { inClass = false; }
else if (ch === "/" && !inClass) { break }
escaped = ch === "\\";
} else { escaped = false; }
++this.pos;
}
var pattern = this.input.slice(start, this.pos);
++this.pos;
var flagsStart = this.pos;
var flags = this.readWord1();
if (this.containsEsc) { this.unexpected(flagsStart); }
// Validate pattern
var state = this.regexpState || (this.regexpState = new RegExpValidationState(this));
state.reset(start, pattern, flags);
this.validateRegExpFlags(state);
this.validateRegExpPattern(state);
// Create Literal#value property value.
var value = null;
try {
value = new RegExp(pattern, flags);
} catch (e) {
// ESTree requires null if it failed to instantiate RegExp object.
// https://github.com/estree/estree/blob/a27003adf4fd7bfad44de9cef372a2eacd527b1c/es5.md#regexpliteral
}
return this.finishToken(types$1.regexp, {pattern: pattern, flags: flags, value: value})
};
// Read an integer in the given radix. Return null if zero digits
// were read, the integer value otherwise. When `len` is given, this
// will return `null` unless the integer has exactly `len` digits.
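// For example, `readInt(16, 4)` is what `\uXXXX` escapes go through (via
// `readHexChar`), returning null unless exactly four hex digits follow,
// while `readInt(10)` with no `len` accepts numeric separators such as
// `1_000` when ecmaVersion >= 12.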
pp.readInt = function(radix, len, maybeLegacyOctalNumericLiteral) {
// `len` is used for character escape sequences. In that case, disallow separators.
var allowSeparators = this.options.ecmaVersion >= 12 && len === undefined;
// `maybeLegacyOctalNumericLiteral` is true if the literal has no prefix (0x,0o,0b)
// and isn't a fraction or exponent part. In that case, if the first digit
// is zero then disallow separators.
var isLegacyOctalNumericLiteral = maybeLegacyOctalNumericLiteral && this.input.charCodeAt(this.pos) === 48;
var start = this.pos, total = 0, lastCode = 0;
for (var i = 0, e = len == null ? Infinity : len; i < e; ++i, ++this.pos) {
var code = this.input.charCodeAt(this.pos), val = (void 0);
if (allowSeparators && code === 95) {
if (isLegacyOctalNumericLiteral) { this.raiseRecoverable(this.pos, "Numeric separator is not allowed in legacy octal numeric literals"); }
if (lastCode === 95) { this.raiseRecoverable(this.pos, "Numeric separator must be exactly one underscore"); }
if (i === 0) { this.raiseRecoverable(this.pos, "Numeric separator is not allowed at the first of digits"); }
lastCode = code;
continue
}
if (code >= 97) { val = code - 97 + 10; } // a
else if (code >= 65) { val = code - 65 + 10; } // A
else if (code >= 48 && code <= 57) { val = code - 48; } // 0-9
else { val = Infinity; }
if (val >= radix) { break }
lastCode = code;
total = total * radix + val;
}
if (allowSeparators && lastCode === 95) { this.raiseRecoverable(this.pos - 1, "Numeric separator is not allowed at the last of digits"); }
if (this.pos === start || len != null && this.pos - start !== len) { return null }
return total
};
function stringToNumber(str, isLegacyOctalNumericLiteral) {
if (isLegacyOctalNumericLiteral) {
return parseInt(str, 8)
}
// `parseFloat(value)` stops parsing at the first numeric separator then returns a wrong value.
return parseFloat(str.replace(/_/g, ""))
}
function stringToBigInt(str) {
if (typeof BigInt !== "function") {
return null
}
// `BigInt(value)` throws syntax error if the string contains numeric separators.
return BigInt(str.replace(/_/g, ""))
}
pp.readRadixNumber = function(radix) {
var start = this.pos;
this.pos += 2; // 0x
var val = this.readInt(radix);
if (val == null) { this.raise(this.start + 2, "Expected number in radix " + radix); }
if (this.options.ecmaVersion >= 11 && this.input.charCodeAt(this.pos) === 110) {
val = stringToBigInt(this.input.slice(start, this.pos));
++this.pos;
} else if (isIdentifierStart(this.fullCharCodeAtPos())) { this.raise(this.pos, "Identifier directly after number"); }
return this.finishToken(types$1.num, val)
};
// Read an integer, octal integer, or floating-point number.
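// For example, `0755` is read as the legacy octal value 493 in sloppy mode
// but raises "Invalid number" in strict mode; `0789` contains an 8/9 digit,
// so (in sloppy mode) the octal flag is dropped and it parses as decimal 789;
// separators like `1_000_000` are stripped before conversion (see stringToNumber).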
pp.readNumber = function(startsWithDot) {
var start = this.pos;
if (!startsWithDot && this.readInt(10, undefined, true) === null) { this.raise(start, "Invalid number"); }
var octal = this.pos - start >= 2 && this.input.charCodeAt(start) === 48;
if (octal && this.strict) { this.raise(start, "Invalid number"); }
var next = this.input.charCodeAt(this.pos);
if (!octal && !startsWithDot && this.options.ecmaVersion >= 11 && next === 110) {
var val$1 = stringToBigInt(this.input.slice(start, this.pos));
++this.pos;
if (isIdentifierStart(this.fullCharCodeAtPos())) { this.raise(this.pos, "Identifier directly after number"); }
return this.finishToken(types$1.num, val$1)
}
if (octal && /[89]/.test(this.input.slice(start, this.pos))) { octal = false; }
if (next === 46 && !octal) { // '.'
++this.pos;
this.readInt(10);
next = this.input.charCodeAt(this.pos);
}
if ((next === 69 || next === 101) && !octal) { // 'eE'
next = this.input.charCodeAt(++this.pos);
if (next === 43 || next === 45) { ++this.pos; } // '+-'
if (this.readInt(10) === null) { this.raise(start, "Invalid number"); }
}
if (isIdentifierStart(this.fullCharCodeAtPos())) { this.raise(this.pos, "Identifier directly after number"); }
var val = stringToNumber(this.input.slice(start, this.pos), octal);
return this.finishToken(types$1.num, val)
};
// Read a string value, interpreting backslash-escapes.
pp.readCodePoint = function() {
var ch = this.input.charCodeAt(this.pos), code;
if (ch === 123) { // '{'
if (this.options.ecmaVersion < 6) { this.unexpected(); }
var codePos = ++this.pos;
code = this.readHexChar(this.input.indexOf("}", this.pos) - this.pos);
++this.pos;
if (code > 0x10FFFF) { this.invalidStringToken(codePos, "Code point out of bounds"); }
} else {
code = this.readHexChar(4);
}
return code
};
pp.readString = function(quote) {
var out = "", chunkStart = ++this.pos;
for (;;) {
if (this.pos >= this.input.length) { this.raise(this.start, "Unterminated string constant"); }
var ch = this.input.charCodeAt(this.pos);
if (ch === quote) { break }
if (ch === 92) { // '\'
out += this.input.slice(chunkStart, this.pos);
out += this.readEscapedChar(false);
chunkStart = this.pos;
} else if (ch === 0x2028 || ch === 0x2029) {
if (this.options.ecmaVersion < 10) { this.raise(this.start, "Unterminated string constant"); }
++this.pos;
if (this.options.locations) {
this.curLine++;
this.lineStart = this.pos;
}
} else {
if (isNewLine(ch)) { this.raise(this.start, "Unterminated string constant"); }
++this.pos;
}
}
out += this.input.slice(chunkStart, this.pos++);
return this.finishToken(types$1.string, out)
};
// Reads template string tokens.
var INVALID_TEMPLATE_ESCAPE_ERROR = {};
pp.tryReadTemplateToken = function() {
this.inTemplateElement = true;
try {
this.readTmplToken();
} catch (err) {
if (err === INVALID_TEMPLATE_ESCAPE_ERROR) {
this.readInvalidTemplateToken();
} else {
throw err
}
}
this.inTemplateElement = false;
};
pp.invalidStringToken = function(position, message) {
if (this.inTemplateElement && this.options.ecmaVersion >= 9) {
throw INVALID_TEMPLATE_ESCAPE_ERROR
} else {
this.raise(position, message);
}
};
pp.readTmplToken = function() {
var out = "", chunkStart = this.pos;
for (;;) {
if (this.pos >= this.input.length) { this.raise(this.start, "Unterminated template"); }
var ch = this.input.charCodeAt(this.pos);
if (ch === 96 || ch === 36 && this.input.charCodeAt(this.pos + 1) === 123) { // '`', '${'
if (this.pos === this.start && (this.type === types$1.template || this.type === types$1.invalidTemplate)) {
if (ch === 36) {
this.pos += 2;
return this.finishToken(types$1.dollarBraceL)
} else {
++this.pos;
return this.finishToken(types$1.backQuote)
}
}
out += this.input.slice(chunkStart, this.pos);
return this.finishToken(types$1.template, out)
}
if (ch === 92) { // '\'
out += this.input.slice(chunkStart, this.pos);
out += this.readEscapedChar(true);
chunkStart = this.pos;
} else if (isNewLine(ch)) {
out += this.input.slice(chunkStart, this.pos);
++this.pos;
switch (ch) {
case 13:
if (this.input.charCodeAt(this.pos) === 10) { ++this.pos; }
case 10:
out += "\n";
break
default:
out += String.fromCharCode(ch);
break
}
if (this.options.locations) {
++this.curLine;
this.lineStart = this.pos;
}
chunkStart = this.pos;
} else {
++this.pos;
}
}
};
// Reads a template token to search for the end, without validating any escape sequences
pp.readInvalidTemplateToken = function() {
for (; this.pos < this.input.length; this.pos++) {
switch (this.input[this.pos]) {
case "\\":
++this.pos;
break
case "$":
if (this.input[this.pos + 1] !== "{") {
break
}
// falls through
case "`":
return this.finishToken(types$1.invalidTemplate, this.input.slice(this.start, this.pos))
// no default
}
}
this.raise(this.start, "Unterminated template");
};
// Used to read escaped characters
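// For example, `\x41` yields "A", `\u{1F600}` yields an astral code point
// via readCodePoint, and a legacy octal escape such as `\101` ("A") is
// accepted in sloppy-mode strings but rejected in strict mode and in
// template literals.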
pp.readEscapedChar = function(inTemplate) {
var ch = this.input.charCodeAt(++this.pos);
++this.pos;
switch (ch) {
case 110: return "\n" // 'n' -> '\n'
case 114: return "\r" // 'r' -> '\r'
case 120: return String.fromCharCode(this.readHexChar(2)) // 'x'
case 117: return codePointToString(this.readCodePoint()) // 'u'
case 116: return "\t" // 't' -> '\t'
case 98: return "\b" // 'b' -> '\b'
case 118: return "\u000b" // 'v' -> '\u000b'
case 102: return "\f" // 'f' -> '\f'
case 13: if (this.input.charCodeAt(this.pos) === 10) { ++this.pos; } // '\r\n'
case 10: // '\n'
if (this.options.locations) { this.lineStart = this.pos; ++this.curLine; }
return ""
case 56:
case 57:
if (this.strict) {
this.invalidStringToken(
this.pos - 1,
"Invalid escape sequence"
);
}
if (inTemplate) {
var codePos = this.pos - 1;
this.invalidStringToken(
codePos,
"Invalid escape sequence in template string"
);
}
default:
if (ch >= 48 && ch <= 55) {
var octalStr = this.input.substr(this.pos - 1, 3).match(/^[0-7]+/)[0];
var octal = parseInt(octalStr, 8);
if (octal > 255) {
octalStr = octalStr.slice(0, -1);
octal = parseInt(octalStr, 8);
}
this.pos += octalStr.length - 1;
ch = this.input.charCodeAt(this.pos);
if ((octalStr !== "0" || ch === 56 || ch === 57) && (this.strict || inTemplate)) {
this.invalidStringToken(
this.pos - 1 - octalStr.length,
inTemplate
? "Octal literal in template string"
: "Octal literal in strict mode"
);
}
return String.fromCharCode(octal)
}
if (isNewLine(ch)) {
// Unicode new line characters after \ get removed from output in both
// template literals and strings
return ""
}
return String.fromCharCode(ch)
}
};
// Used to read character escape sequences ('\x', '\u', '\U').
pp.readHexChar = function(len) {
var codePos = this.pos;
var n = this.readInt(16, len);
if (n === null) { this.invalidStringToken(codePos, "Bad character escape sequence"); }
return n
};
// Read an identifier, and return it as a string. Sets `this.containsEsc`
// to whether the word contained a '\u' escape.
//
// Incrementally adds only escaped chars, adding other chunks as-is
// as a micro-optimization.
pp.readWord1 = function() {
this.containsEsc = false;
var word = "", first = true, chunkStart = this.pos;
var astral = this.options.ecmaVersion >= 6;
while (this.pos < this.input.length) {
var ch = this.fullCharCodeAtPos();
if (isIdentifierChar(ch, astral)) {
this.pos += ch <= 0xffff ? 1 : 2;
} else if (ch === 92) { // "\"
this.containsEsc = true;
word += this.input.slice(chunkStart, this.pos);
var escStart = this.pos;
if (this.input.charCodeAt(++this.pos) !== 117) // "u"
{ this.invalidStringToken(this.pos, "Expecting Unicode escape sequence \\uXXXX"); }
++this.pos;
var esc = this.readCodePoint();
if (!(first ? isIdentifierStart : isIdentifierChar)(esc, astral))
{ this.invalidStringToken(escStart, "Invalid Unicode escape"); }
word += codePointToString(esc);
chunkStart = this.pos;
} else {
break
}
first = false;
}
return word + this.input.slice(chunkStart, this.pos)
};
// Read an identifier or keyword token. Will check for reserved
// words when necessary.
pp.readWord = function() {
var word = this.readWord1();
var type = types$1.name;
if (this.keywords.test(word)) {
type = keywords$2[word];
}
return this.finishToken(type, word)
};
// Acorn is a tiny, fast JavaScript parser written in JavaScript.
//
// Acorn was written by Marijn Haverbeke, Ingvar Stepanyan, and
// various contributors and released under an MIT license.
//
// Git repositories for Acorn are available at
//
// http://marijnhaverbeke.nl/git/acorn
// https://github.com/acornjs/acorn.git
//
// Please use the [github bug tracker][ghbt] to report issues.
//
// [ghbt]: https://github.com/acornjs/acorn/issues
//
// [walk]: util/walk.js
var version$2 = "8.10.0";
Parser$1.acorn = {
Parser: Parser$1,
version: version$2,
defaultOptions: defaultOptions,
Position: Position,
SourceLocation: SourceLocation,
getLineInfo: getLineInfo,
Node: Node,
TokenType: TokenType,
tokTypes: types$1,
keywordTypes: keywords$2,
TokContext: TokContext,
tokContexts: types$2,
isIdentifierChar: isIdentifierChar,
isIdentifierStart: isIdentifierStart,
Token: Token,
isNewLine: isNewLine,
lineBreak: lineBreak,
lineBreakG: lineBreakG,
nonASCIIwhitespace: nonASCIIwhitespace
};
// The main exported interface (under `self.acorn` when in the
// browser) is a `parse` function that takes a code string and
// returns an abstract syntax tree as specified by [Mozilla parser
// API][api].
//
// [api]: https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API
function parse$b(input, options) {
return Parser$1.parse(input, options)
}
// This function tries to parse a single expression at a given
// offset in a string. Useful for parsing mixed-language formats
// that embed JavaScript expressions.
function parseExpressionAt(input, pos, options) {
return Parser$1.parseExpressionAt(input, pos, options)
}
// Acorn is organized as a tokenizer and a recursive-descent parser.
// The `tokenizer` export provides an interface to the tokenizer.
function tokenizer(input, options) {
return Parser$1.tokenizer(input, options)
}
var acorn = {
__proto__: null,
Node: Node,
Parser: Parser$1,
Position: Position,
SourceLocation: SourceLocation,
TokContext: TokContext,
Token: Token,
TokenType: TokenType,
defaultOptions: defaultOptions,
getLineInfo: getLineInfo,
isIdentifierChar: isIdentifierChar,
isIdentifierStart: isIdentifierStart,
isNewLine: isNewLine,
keywordTypes: keywords$2,
lineBreak: lineBreak,
lineBreakG: lineBreakG,
nonASCIIwhitespace: nonASCIIwhitespace,
parse: parse$b,
parseExpressionAt: parseExpressionAt,
tokContexts: types$2,
tokTypes: types$1,
tokenizer: tokenizer,
version: version$2
};
const HASH_RE = /#/g;
const AMPERSAND_RE = /&/g;
const EQUAL_RE = /=/g;
const PLUS_RE = /\+/g;
const ENC_CARET_RE = /%5e/gi;
const ENC_BACKTICK_RE = /%60/gi;
const ENC_PIPE_RE = /%7c/gi;
const ENC_SPACE_RE = /%20/gi;
function encode(text) {
return encodeURI("" + text).replace(ENC_PIPE_RE, "|");
}
function encodeQueryValue(input) {
return encode(typeof input === "string" ? input : JSON.stringify(input)).replace(PLUS_RE, "%2B").replace(ENC_SPACE_RE, "+").replace(HASH_RE, "%23").replace(AMPERSAND_RE, "%26").replace(ENC_BACKTICK_RE, "`").replace(ENC_CARET_RE, "^");
}
function encodeQueryKey(text) {
return encodeQueryValue(text).replace(EQUAL_RE, "%3D");
}
function encodeQueryItem(key, value) {
if (typeof value === "number" || typeof value === "boolean") {
value = String(value);
}
if (!value) {
return encodeQueryKey(key);
}
if (Array.isArray(value)) {
return value.map((_value) => `${encodeQueryKey(key)}=${encodeQueryValue(_value)}`).join("&");
}
return `${encodeQueryKey(key)}=${encodeQueryValue(value)}`;
}
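// For illustration, roughly:
//   stringifyQuery({ q: "a b", tags: ["x", "y"], skip: undefined })
//   // -> "q=a+b&tags=x&tags=y"
// undefined values are dropped, spaces become "+", and array values repeat
// the key once per entry.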
function stringifyQuery(query) {
return Object.keys(query).filter((k) => query[k] !== void 0).map((k) => encodeQueryItem(k, query[k])).filter(Boolean).join("&");
}
new Set(builtinModules);
function matchAll(regex, string, addition) {
const matches = [];
for (const match of string.matchAll(regex)) {
matches.push({
...addition,
...match.groups,
code: match[0],
start: match.index,
end: match.index + match[0].length
});
}
return matches;
}
function clearImports(imports) {
return (imports || "").replace(/(\/\/[^\n]*\n|\/\*.*\*\/)/g, "").replace(/\s+/g, " ");
}
function getImportNames(cleanedImports) {
const topLevelImports = cleanedImports.replace(/{([^}]*)}/, "");
const namespacedImport = topLevelImports.match(/\* as \s*(\S*)/)?.[1];
const defaultImport = topLevelImports.split(",").find((index) => !/[*{}]/.test(index))?.trim() || void 0;
return {
namespacedImport,
defaultImport
};
}
/**
* @typedef ErrnoExceptionFields
* @property {number | undefined} [errnode]
* @property {string | undefined} [code]
* @property {string | undefined} [path]
* @property {string | undefined} [syscall]
* @property {string | undefined} [url]
*
* @typedef {Error & ErrnoExceptionFields} ErrnoException
*/
const isWindows$3 = process$1.platform === 'win32';
const own$1 = {}.hasOwnProperty;
const classRegExp = /^([A-Z][a-z\d]*)+$/;
// Sorted by a rough estimate on most frequently used entries.
const kTypes = new Set([
'string',
'function',
'number',
'object',
// Accept 'Function' and 'Object' as alternative to the lower cased version.
'Function',
'Object',
'boolean',
'bigint',
'symbol'
]);
/**
* Create a list string in the form like 'A and B' or 'A, B, ..., and Z'.
* We cannot use Intl.ListFormat because it's not available in
* --without-intl builds.
*
* @param {Array<string>} array
* An array of strings.
* @param {string} [type]
* The list type to be inserted before the last element.
* @returns {string}
*/
function formatList(array, type = 'and') {
return array.length < 3
? array.join(` ${type} `)
: `${array.slice(0, -1).join(', ')}, ${type} ${array[array.length - 1]}`
}
/** @type {Map<string, MessageFunction | string>} */
const messages = new Map();
const nodeInternalPrefix = '__node_internal_';
/** @type {number} */
let userStackTraceLimit;
createError(
'ERR_INVALID_ARG_TYPE',
/**
* @param {string} name
* @param {Array<string> | string} expected
* @param {unknown} actual
*/
(name, expected, actual) => {
assert$1(typeof name === 'string', "'name' must be a string");
if (!Array.isArray(expected)) {
expected = [expected];
}
let message = 'The ';
if (name.endsWith(' argument')) {
// For cases like 'first argument'
message += `${name} `;
} else {
const type = name.includes('.') ? 'property' : 'argument';
message += `"${name}" ${type} `;
}
message += 'must be ';
/** @type {Array<string>} */
const types = [];
/** @type {Array<string>} */
const instances = [];
/** @type {Array<string>} */
const other = [];
for (const value of expected) {
assert$1(
typeof value === 'string',
'All expected entries have to be of type string'
);
if (kTypes.has(value)) {
types.push(value.toLowerCase());
} else if (classRegExp.exec(value) === null) {
assert$1(
value !== 'object',
'The value "object" should be written as "Object"'
);
other.push(value);
} else {
instances.push(value);
}
}
// Handle `object` specially when other instance types are allowed, to
// spell out the differences between them.
if (instances.length > 0) {
const pos = types.indexOf('object');
if (pos !== -1) {
types.splice(pos, 1); // remove 'object'; it is reported as an 'Object' instance below
instances.push('Object');
}
}
if (types.length > 0) {
message += `${types.length > 1 ? 'one of type' : 'of type'} ${formatList(
types,
'or'
)}`;
if (instances.length > 0 || other.length > 0) message += ' or ';
}
if (instances.length > 0) {
message += `an instance of ${formatList(instances, 'or')}`;
if (other.length > 0) message += ' or ';
}
if (other.length > 0) {
if (other.length > 1) {
message += `one of ${formatList(other, 'or')}`;
} else {
if (other[0].toLowerCase() !== other[0]) message += 'an ';
message += `${other[0]}`;
}
}
message += `. Received ${determineSpecificType(actual)}`;
return message
},
TypeError
);
createError(
'ERR_INVALID_MODULE_SPECIFIER',
/**
* @param {string} request
* @param {string} reason
* @param {string} [base]
*/
(request, reason, base = undefined) => {
return `Invalid module "${request}" ${reason}${
base ? ` imported from ${base}` : ''
}`
},
TypeError
);
createError(
'ERR_INVALID_PACKAGE_CONFIG',
/**
* @param {string} path
* @param {string} [base]
* @param {string} [message]
*/
(path, base, message) => {
return `Invalid package config ${path}${
base ? ` while importing ${base}` : ''
}${message ? `. ${message}` : ''}`
},
Error
);
createError(
'ERR_INVALID_PACKAGE_TARGET',
/**
* @param {string} pkgPath
* @param {string} key
* @param {unknown} target
* @param {boolean} [isImport=false]
* @param {string} [base]
*/
(pkgPath, key, target, isImport = false, base = undefined) => {
const relError =
typeof target === 'string' &&
!isImport &&
target.length > 0 &&
!target.startsWith('./');
if (key === '.') {
assert$1(isImport === false);
return (
`Invalid "exports" main target ${JSON.stringify(target)} defined ` +
`in the package config ${pkgPath}package.json${
base ? ` imported from ${base}` : ''
}${relError ? '; targets must start with "./"' : ''}`
)
}
return `Invalid "${
isImport ? 'imports' : 'exports'
}" target ${JSON.stringify(
target
)} defined for '${key}' in the package config ${pkgPath}package.json${
base ? ` imported from ${base}` : ''
}${relError ? '; targets must start with "./"' : ''}`
},
Error
);
createError(
'ERR_MODULE_NOT_FOUND',
/**
* @param {string} path
* @param {string} base
* @param {string} [type]
*/
(path, base, type = 'package') => {
return `Cannot find ${type} '${path}' imported from ${base}`
},
Error
);
createError(
'ERR_NETWORK_IMPORT_DISALLOWED',
"import of '%s' by %s is not supported: %s",
Error
);
createError(
'ERR_PACKAGE_IMPORT_NOT_DEFINED',
/**
* @param {string} specifier
* @param {string} packagePath
* @param {string} base
*/
(specifier, packagePath, base) => {
return `Package import specifier "${specifier}" is not defined${
packagePath ? ` in package ${packagePath}package.json` : ''
} imported from ${base}`
},
TypeError
);
createError(
'ERR_PACKAGE_PATH_NOT_EXPORTED',
/**
* @param {string} pkgPath
* @param {string} subpath
* @param {string} [base]
*/
(pkgPath, subpath, base = undefined) => {
if (subpath === '.')
return `No "exports" main defined in ${pkgPath}package.json${
base ? ` imported from ${base}` : ''
}`
return `Package subpath '${subpath}' is not defined by "exports" in ${pkgPath}package.json${
base ? ` imported from ${base}` : ''
}`
},
Error
);
createError(
'ERR_UNSUPPORTED_DIR_IMPORT',
"Directory import '%s' is not supported " +
'resolving ES modules imported from %s',
Error
);
createError(
'ERR_UNKNOWN_FILE_EXTENSION',
/**
* @param {string} ext
* @param {string} path
*/
(ext, path) => {
return `Unknown file extension "${ext}" for ${path}`
},
TypeError
);
createError(
'ERR_INVALID_ARG_VALUE',
/**
* @param {string} name
* @param {unknown} value
* @param {string} [reason='is invalid']
*/
(name, value, reason = 'is invalid') => {
let inspected = inspect(value);
if (inspected.length > 128) {
inspected = `${inspected.slice(0, 128)}...`;
}
const type = name.includes('.') ? 'property' : 'argument';
return `The ${type} '${name}' ${reason}. Received ${inspected}`
},
TypeError
// Note: extra classes have been shaken out.
// , RangeError
);
createError(
'ERR_UNSUPPORTED_ESM_URL_SCHEME',
/**
* @param {URL} url
* @param {Array<string>} supported
*/
(url, supported) => {
let message = `Only URLs with a scheme in: ${formatList(
supported
)} are supported by the default ESM loader`;
if (isWindows$3 && url.protocol.length === 2) {
message += '. On Windows, absolute paths must be valid file:// URLs';
}
message += `. Received protocol '${url.protocol}'`;
return message
},
Error
);
/**
* Utility function for registering the error codes. Only used here. Exported
* *only* to allow for testing.
* @param {string} sym
* @param {MessageFunction | string} value
* @param {ErrorConstructor} def
* @returns {new (...args: Array<any>) => Error}
*/
function createError(sym, value, def) {
// Special case for SystemError that formats the error message differently
// The SystemErrors only have SystemError as their base classes.
messages.set(sym, value);
return makeNodeErrorWithCode(def, sym)
}
/**
* @param {ErrorConstructor} Base
* @param {string} key
* @returns {ErrorConstructor}
*/
function makeNodeErrorWithCode(Base, key) {
// @ts-expect-error Its a Node error.
return NodeError
/**
* @param {Array<unknown>} args
*/
function NodeError(...args) {
const limit = Error.stackTraceLimit;
if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0;
const error = new Base();
// Reset the limit and set the name property.
if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = limit;
const message = getMessage(key, args, error);
Object.defineProperties(error, {
// Note: no need to implement `kIsNodeError` symbol, would be hard,
// probably.
message: {
value: message,
enumerable: false,
writable: true,
configurable: true
},
toString: {
/** @this {Error} */
value() {
return `${this.name} [${key}]: ${this.message}`
},
enumerable: false,
writable: true,
configurable: true
}
});
captureLargerStackTrace(error);
// @ts-expect-error Its a Node error.
error.code = key;
return error
}
}
/**
* @returns {boolean}
*/
function isErrorStackTraceLimitWritable() {
// Do not touch Error.stackTraceLimit as V8 would attempt to install
// it again during deserialization.
try {
// @ts-expect-error: not in types?
if (v8.startupSnapshot.isBuildingSnapshot()) {
return false
}
} catch {}
const desc = Object.getOwnPropertyDescriptor(Error, 'stackTraceLimit');
if (desc === undefined) {
return Object.isExtensible(Error)
}
return own$1.call(desc, 'writable') && desc.writable !== undefined
? desc.writable
: desc.set !== undefined
}
/**
* This function removes unnecessary frames from Node.js core errors.
* @template {(...args: unknown[]) => unknown} T
* @param {T} fn
* @returns {T}
*/
function hideStackFrames(fn) {
// We rename the functions that will be hidden to cut off the stacktrace
// at the outermost one
const hidden = nodeInternalPrefix + fn.name;
Object.defineProperty(fn, 'name', {value: hidden});
return fn
}
const captureLargerStackTrace = hideStackFrames(
/**
* @param {Error} error
* @returns {Error}
*/
// @ts-expect-error: fine
function (error) {
const stackTraceLimitIsWritable = isErrorStackTraceLimitWritable();
if (stackTraceLimitIsWritable) {
userStackTraceLimit = Error.stackTraceLimit;
Error.stackTraceLimit = Number.POSITIVE_INFINITY;
}
Error.captureStackTrace(error);
// Reset the limit
if (stackTraceLimitIsWritable) Error.stackTraceLimit = userStackTraceLimit;
return error
}
);
/**
* @param {string} key
* @param {Array<unknown>} args
* @param {Error} self
* @returns {string}
*/
function getMessage(key, args, self) {
const message = messages.get(key);
assert$1(message !== undefined, 'expected `message` to be found');
if (typeof message === 'function') {
assert$1(
message.length <= args.length, // Default options do not count.
`Code: ${key}; The provided arguments length (${args.length}) does not ` +
`match the required ones (${message.length}).`
);
return Reflect.apply(message, self, args)
}
const regex = /%[dfijoOs]/g;
let expectedLength = 0;
while (regex.exec(message) !== null) expectedLength++;
assert$1(
expectedLength === args.length,
`Code: ${key}; The provided arguments length (${args.length}) does not ` +
`match the required ones (${expectedLength}).`
);
if (args.length === 0) return message
args.unshift(message);
return Reflect.apply(format$2, null, args)
}
/**
* Determine the specific type of a value for type-mismatch errors.
* @param {unknown} value
* @returns {string}
*/
function determineSpecificType(value) {
if (value === null || value === undefined) {
return String(value)
}
if (typeof value === 'function' && value.name) {
return `function ${value.name}`
}
if (typeof value === 'object') {
if (value.constructor && value.constructor.name) {
return `an instance of ${value.constructor.name}`
}
return `${inspect(value, {depth: -1})}`
}
let inspected = inspect(value, {colors: false});
if (inspected.length > 28) {
inspected = `${inspected.slice(0, 25)}...`;
}
return `type ${typeof value} (${inspected})`
}
pathToFileURL(process.cwd());
const ESM_STATIC_IMPORT_RE = /(?<=\s|^|;)import\s*([\s"']*(?<imports>[\p{L}\p{M}\w\t\n\r $*,/{}@.]+)from\s*)?["']\s*(?<specifier>(?<="\s*)[^"]*[^\s"](?=\s*")|(?<='\s*)[^']*[^\s'](?=\s*'))\s*["'][\s;]*/gmu;
const TYPE_RE = /^\s*?type\s/;
function findStaticImports(code) {
return matchAll(ESM_STATIC_IMPORT_RE, code, { type: "static" });
}
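// For illustration, given the match produced by findStaticImports for
//   import foo, { bar as baz } from "mod"
// parseStaticImport returns roughly:
//   { specifier: "mod", defaultImport: "foo", namedImports: { bar: "baz" },
//     namespacedImport: undefined, ... }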
function parseStaticImport(matched) {
const cleanedImports = clearImports(matched.imports);
const namedImports = {};
for (const namedImport of cleanedImports.match(/{([^}]*)}/)?.[1]?.split(",") || []) {
const [, source = namedImport.trim(), importName = source] = namedImport.match(/^\s*(\S*) as (\S*)\s*$/) || [];
if (source && !TYPE_RE.test(source)) {
namedImports[source] = importName;
}
}
const { namespacedImport, defaultImport } = getImportNames(cleanedImports);
return {
...matched,
defaultImport,
namespacedImport,
namedImports
};
}
const ESM_RE = /([\s;]|^)(import[\s\w*,{}]*from|import\s*["'*{]|export\b\s*(?:[*{]|default|class|type|function|const|var|let|async function)|import\.meta\b)/m;
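// For example, hasESMSyntax('import x from "y"') and
// hasESMSyntax("export default 1") are true, while
// hasESMSyntax("module.exports = {}") is false: the "export" inside
// "exports" fails the `export\b` word boundary and is preceded by ".",
// not whitespace, ";", or the start of the input.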
function hasESMSyntax(code) {
return ESM_RE.test(code);
}
const normalizedClientEntry$1 = normalizePath$3(CLIENT_ENTRY);
const normalizedEnvEntry$1 = normalizePath$3(ENV_ENTRY);
// special id for paths marked with browser: false
// https://github.com/defunctzombie/package-browser-field-spec#ignore-a-module
const browserExternalId = '__vite-browser-external';
// special id for packages that are optional peer deps
const optionalPeerDepId = '__vite-optional-peer-dep';
const subpathImportsPrefix = '#';
const startsWithWordCharRE = /^\w/;
const debug$d = createDebugger('vite:resolve-details', {
onlyWhenFocused: true,
});
function resolvePlugin(resolveOptions) {
const { root, isProduction, asSrc, ssrConfig, preferRelative = false, } = resolveOptions;
const { target: ssrTarget, noExternal: ssrNoExternal } = ssrConfig ?? {};
// On Unix systems, absolute paths inside root first need to be checked as an
// absolute URL (/root/root/path-to-file) resulting in failed checks before falling
// back to checking the path as absolute. If /root/root isn't a valid path, we can
// avoid these checks. Absolute paths inside root are common in user code as many
// paths are resolved by the user. For example for an alias.
const rootInRoot = tryStatSync(path$o.join(root, root))?.isDirectory() ?? false;
return {
name: 'vite:resolve',
async resolveId(id, importer, resolveOpts) {
if (id[0] === '\0' ||
id.startsWith('virtual:') ||
// When injected directly in html/client code
id.startsWith('/virtual:')) {
return;
}
const ssr = resolveOpts?.ssr === true;
// We need to delay depsOptimizer until here instead of passing it as an option
// to the resolvePlugin, because the optimizer is created on server listen during dev
const depsOptimizer = resolveOptions.getDepsOptimizer?.(ssr);
if (id.startsWith(browserExternalId)) {
return id;
}
const targetWeb = !ssr || ssrTarget === 'webworker';
// this is passed by @rollup/plugin-commonjs
const isRequire = resolveOpts?.custom?.['node-resolve']?.isRequire ?? false;
// end user can configure different conditions for ssr and client.
// falls back to client conditions if no ssr conditions supplied
const ssrConditions = resolveOptions.ssrConfig?.resolve?.conditions ||
resolveOptions.conditions;
const options = {
isRequire,
...resolveOptions,
scan: resolveOpts?.scan ?? resolveOptions.scan,
conditions: ssr ? ssrConditions : resolveOptions.conditions,
};
const resolvedImports = resolveSubpathImports(id, importer, options, targetWeb);
if (resolvedImports) {
id = resolvedImports;
if (resolveOpts.custom?.['vite:import-glob']?.isSubImportsPattern) {
return id;
}
}
if (importer) {
if (isTsRequest(importer) ||
resolveOpts.custom?.depScan?.loader?.startsWith('ts')) {
options.isFromTsImporter = true;
}
else {
const moduleLang = this.getModuleInfo(importer)?.meta?.vite?.lang;
options.isFromTsImporter = moduleLang && isTsRequest(`.${moduleLang}`);
}
}
let res;
// resolve pre-bundled deps requests; these could be resolved by
// tryFileResolve or /fs/ resolution, but these files may not yet
// exist if we are in the middle of a deps re-processing
if (asSrc && depsOptimizer?.isOptimizedDepUrl(id)) {
const optimizedPath = id.startsWith(FS_PREFIX)
? fsPathFromId(id)
: normalizePath$3(path$o.resolve(root, id.slice(1)));
return optimizedPath;
}
// explicit fs paths that start with /@fs/*
if (asSrc && id.startsWith(FS_PREFIX)) {
res = fsPathFromId(id);
// We don't need to resolve these paths since they are already resolved.
// Always return here even if res doesn't exist, since /@fs/ is explicit:
// if the file doesn't exist it should be a 404.
debug$d?.(`[@fs] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
return ensureVersionQuery(res, id, options, depsOptimizer);
}
// URL
// /foo -> /fs-root/foo
if (asSrc &&
id[0] === '/' &&
(rootInRoot || !id.startsWith(withTrailingSlash(root)))) {
const fsPath = path$o.resolve(root, id.slice(1));
if ((res = tryFsResolve(fsPath, options))) {
debug$d?.(`[url] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
return ensureVersionQuery(res, id, options, depsOptimizer);
}
}
// relative
if (id[0] === '.' ||
((preferRelative || importer?.endsWith('.html')) &&
startsWithWordCharRE.test(id))) {
const basedir = importer ? path$o.dirname(importer) : process.cwd();
const fsPath = path$o.resolve(basedir, id);
// handle browser field mapping for relative imports
const normalizedFsPath = normalizePath$3(fsPath);
if (depsOptimizer?.isOptimizedDepFile(normalizedFsPath)) {
// Optimized files may not yet exist on disk, so resolve to the full path
// Inject the current browserHash version if the path doesn't have one
if (!resolveOptions.isBuild &&
!normalizedFsPath.match(DEP_VERSION_RE)) {
const browserHash = optimizedDepInfoFromFile(depsOptimizer.metadata, normalizedFsPath)?.browserHash;
if (browserHash) {
return injectQuery(normalizedFsPath, `v=${browserHash}`);
}
}
return normalizedFsPath;
}
if (targetWeb &&
options.browserField &&
(res = tryResolveBrowserMapping(fsPath, importer, options, true))) {
return res;
}
if ((res = tryFsResolve(fsPath, options))) {
res = ensureVersionQuery(res, id, options, depsOptimizer);
debug$d?.(`[relative] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
// If this isn't a script imported from a .html file, include side-effects
// hints so the unused code is properly tree-shaken during build time.
if (!options.idOnly &&
!options.scan &&
options.isBuild &&
!importer?.endsWith('.html')) {
const resPkg = findNearestPackageData(path$o.dirname(res), options.packageCache);
if (resPkg) {
return {
id: res,
moduleSideEffects: resPkg.hasSideEffects(res),
};
}
}
return res;
}
}
// drive relative fs paths (only windows)
if (isWindows$4 && id[0] === '/') {
const basedir = importer ? path$o.dirname(importer) : process.cwd();
const fsPath = path$o.resolve(basedir, id);
if ((res = tryFsResolve(fsPath, options))) {
debug$d?.(`[drive-relative] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
return ensureVersionQuery(res, id, options, depsOptimizer);
}
}
// absolute fs paths
if (isNonDriveRelativeAbsolutePath(id) &&
(res = tryFsResolve(id, options))) {
debug$d?.(`[fs] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
return ensureVersionQuery(res, id, options, depsOptimizer);
}
// external
if (isExternalUrl(id)) {
return options.idOnly ? id : { id, external: true };
}
// data uri: pass through (this only happens during build and will be
// handled by dedicated plugin)
if (isDataUrl(id)) {
return null;
}
// bare package imports, perform node resolve
if (bareImportRE.test(id)) {
const external = options.shouldExternalize?.(id, importer);
if (!external &&
asSrc &&
depsOptimizer &&
!options.scan &&
(res = await tryOptimizedResolve(depsOptimizer, id, importer, options.preserveSymlinks, options.packageCache))) {
return res;
}
if (targetWeb &&
options.browserField &&
(res = tryResolveBrowserMapping(id, importer, options, false, external))) {
return res;
}
if ((res = tryNodeResolve(id, importer, options, targetWeb, depsOptimizer, ssr, external))) {
return res;
}
// node built-ins.
// externalize if building for SSR, otherwise redirect to empty module
if (isBuiltin(id)) {
if (ssr) {
if (ssrNoExternal === true) {
let message = `Cannot bundle Node.js built-in "${id}"`;
if (importer) {
message += ` imported from "${path$o.relative(process.cwd(), importer)}"`;
}
message += `. Consider disabling ssr.noExternal or remove the built-in dependency.`;
this.error(message);
}
return options.idOnly ? id : { id, external: true };
}
else {
if (!asSrc) {
debug$d?.(`externalized node built-in "${id}" to empty module. ` +
`(imported by: ${colors$1.white(colors$1.dim(importer))})`);
}
else if (isProduction) {
this.warn(`Module "${id}" has been externalized for browser compatibility, imported by "${importer}". ` +
`See http://vitejs.dev/guide/troubleshooting.html#module-externalized-for-browser-compatibility for more details.`);
}
return isProduction
? browserExternalId
: `${browserExternalId}:${id}`;
}
}
}
debug$d?.(`[fallthrough] ${colors$1.dim(id)}`);
},
load(id) {
if (id.startsWith(browserExternalId)) {
if (isProduction) {
return `export default {}`;
}
else {
id = id.slice(browserExternalId.length + 1);
return `\
export default new Proxy({}, {
get(_, key) {
throw new Error(\`Module "${id}" has been externalized for browser compatibility. Cannot access "${id}.\${key}" in client code. See http://vitejs.dev/guide/troubleshooting.html#module-externalized-for-browser-compatibility for more details.\`)
}
})`;
}
}
if (id.startsWith(optionalPeerDepId)) {
if (isProduction) {
return `export default {}`;
}
else {
const [, peerDep, parentDep] = id.split(':');
return `throw new Error(\`Could not resolve "${peerDep}" imported by "${parentDep}". Is it installed?\`)`;
}
}
},
};
}
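// Illustrative sketch (assuming a package.json "imports" field such as
// { "#utils": "./src/utils.js" }): an import of "#utils" from a file in
// that package resolves through resolveExportsOrImports below and is
// rewritten to a "./"-prefixed path relative to the importer's directory.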
function resolveSubpathImports(id, importer, options, targetWeb) {
if (!importer || !id.startsWith(subpathImportsPrefix))
return;
const basedir = path$o.dirname(importer);
const pkgData = findNearestPackageData(basedir, options.packageCache);
if (!pkgData)
return;
let importsPath = resolveExportsOrImports(pkgData.data, id, options, targetWeb, 'imports');
if (importsPath?.[0] === '.') {
importsPath = path$o.relative(basedir, path$o.join(pkgData.dir, importsPath));
if (importsPath[0] !== '.') {
importsPath = `./${importsPath}`;
}
}
return importsPath;
}
function ensureVersionQuery(resolved, id, options, depsOptimizer) {
if (!options.isBuild &&
!options.scan &&
depsOptimizer &&
!(resolved === normalizedClientEntry$1 || resolved === normalizedEnvEntry$1)) {
// Ensure that direct imports of node_modules have the same version query
// as if they had been imported through a bare import
// Use the original id to do the check as the resolved id may be the real
// file path after symlinks resolution
const isNodeModule = isInNodeModules(id) || isInNodeModules(resolved);
if (isNodeModule && !resolved.match(DEP_VERSION_RE)) {
const versionHash = depsOptimizer.metadata.browserHash;
if (versionHash && isOptimizable(resolved, depsOptimizer.options)) {
resolved = injectQuery(resolved, `v=${versionHash}`);
}
}
}
return resolved;
}
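// For example, assuming cleanUrl strips the query and hash (as it does
// elsewhere in this bundle), splitFileAndPostfix("/src/a.js?v=123") returns
// { file: "/src/a.js", postfix: "?v=123" }.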
function splitFileAndPostfix(path) {
const file = cleanUrl(path);
return { file, postfix: path.slice(file.length) };
}
function tryFsResolve(fsPath, options, tryIndex = true, targetWeb = true, skipPackageJson = false) {
// Dependencies like es5-ext use `#` in their paths. We don't support `#` in user
// source code so we only need to perform the check for dependencies.
// We don't support `?` in node_modules paths, so we only need to check in this branch.
const hashIndex = fsPath.indexOf('#');
if (hashIndex >= 0 && isInNodeModules(fsPath)) {
const queryIndex = fsPath.indexOf('?');
// We only need to check foo#bar?baz and foo#bar, ignore foo?bar#baz
if (queryIndex < 0 || queryIndex > hashIndex) {
const file = queryIndex > hashIndex ? fsPath.slice(0, queryIndex) : fsPath;
const res = tryCleanFsResolve(file, options, tryIndex, targetWeb, skipPackageJson);
if (res)
return res + fsPath.slice(file.length);
}
}
const { file, postfix } = splitFileAndPostfix(fsPath);
const res = tryCleanFsResolve(file, options, tryIndex, targetWeb, skipPackageJson);
if (res)
return res + postfix;
}
const knownTsOutputRE = /\.(?:js|mjs|cjs|jsx)$/;
const isPossibleTsOutput = (url) => knownTsOutputRE.test(url);
function tryCleanFsResolve(file, options, tryIndex = true, targetWeb = true, skipPackageJson = false) {
const { tryPrefix, extensions, preserveSymlinks } = options;
const fileStat = tryStatSync(file);
// Try direct match first
if (fileStat?.isFile())
return getRealPath(file, options.preserveSymlinks);
let res;
// If path.dirname is a valid directory, try extensions and ts resolution logic
const possibleJsToTs = options.isFromTsImporter && isPossibleTsOutput(file);
if (possibleJsToTs || extensions.length || tryPrefix) {
const dirPath = path$o.dirname(file);
const dirStat = tryStatSync(dirPath);
if (dirStat?.isDirectory()) {
if (possibleJsToTs) {
// try resolve .js, .mjs, .cjs or .jsx import to typescript file
const fileExt = path$o.extname(file);
const fileName = file.slice(0, -fileExt.length);
if ((res = tryResolveRealFile(fileName + fileExt.replace('js', 'ts'), preserveSymlinks)))
return res;
// for .js, also try .tsx
if (fileExt === '.js' &&
(res = tryResolveRealFile(fileName + '.tsx', preserveSymlinks)))
return res;
}
if ((res = tryResolveRealFileWithExtensions(file, extensions, preserveSymlinks)))
return res;
if (tryPrefix) {
const prefixed = `${dirPath}/${options.tryPrefix}${path$o.basename(file)}`;
if ((res = tryResolveRealFile(prefixed, preserveSymlinks)))
return res;
if ((res = tryResolveRealFileWithExtensions(prefixed, extensions, preserveSymlinks)))
return res;
}
}
}
if (tryIndex && fileStat) {
// Path points to a directory; check for package.json, its entry point, and an /index file
const dirPath = file;
if (!skipPackageJson) {
let pkgPath = `${dirPath}/package.json`;
try {
if (fs$l.existsSync(pkgPath)) {
if (!options.preserveSymlinks) {
pkgPath = safeRealpathSync(pkgPath);
}
// path points to a node package
const pkg = loadPackageData(pkgPath);
return resolvePackageEntry(dirPath, pkg, targetWeb, options);
}
}
catch (e) {
if (e.code !== 'ENOENT')
throw e;
}
}
if ((res = tryResolveRealFileWithExtensions(`${dirPath}/index`, extensions, preserveSymlinks)))
return res;
if (tryPrefix) {
if ((res = tryResolveRealFileWithExtensions(`${dirPath}/${options.tryPrefix}index`, extensions, preserveSymlinks)))
return res;
}
}
}
function tryResolveRealFile(file, preserveSymlinks) {
const stat = tryStatSync(file);
if (stat?.isFile())
return getRealPath(file, preserveSymlinks);
}
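// For example, with extensions [".js", ".ts"], a call for "/src/app" probes
// "/src/app.js" and then "/src/app.ts", returning the first real file found.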
function tryResolveRealFileWithExtensions(filePath, extensions, preserveSymlinks) {
for (const ext of extensions) {
const res = tryResolveRealFile(filePath + ext, preserveSymlinks);
if (res)
return res;
}
}
function tryNodeResolve(id, importer, options, targetWeb, depsOptimizer, ssr = false, externalize, allowLinkedExternal = true) {
const { root, dedupe, isBuild, preserveSymlinks, packageCache } = options;
// check for deep import, e.g. "my-lib/foo"
const deepMatch = id.match(deepImportRE);
const pkgId = deepMatch ? deepMatch[1] || deepMatch[2] : id;
let basedir;
if (dedupe?.includes(pkgId)) {
basedir = root;
}
else if (importer &&
path$o.isAbsolute(importer) &&
// css processing appends `*` for importer
(importer[importer.length - 1] === '*' || fs$l.existsSync(cleanUrl(importer)))) {
basedir = path$o.dirname(importer);
}
else {
basedir = root;
}
const pkg = resolvePackageData(pkgId, basedir, preserveSymlinks, packageCache);
if (!pkg) {
// if import can't be found, check if it's an optional peer dep.
// if so, we can resolve to a special id that errors only when imported.
if (basedir !== root && // root has no peer dep
!isBuiltin(id) &&
!id.includes('\0') &&
bareImportRE.test(id)) {
const mainPkg = findNearestMainPackageData(basedir, packageCache)?.data;
if (mainPkg) {
const pkgName = getNpmPackageName(id);
if (pkgName != null &&
mainPkg.peerDependencies?.[pkgName] &&
mainPkg.peerDependenciesMeta?.[pkgName]?.optional) {
return {
id: `${optionalPeerDepId}:${id}:${mainPkg.name}`,
};
}
}
}
return;
}
const resolveId = deepMatch ? resolveDeepImport : resolvePackageEntry;
const unresolvedId = deepMatch ? '.' + id.slice(pkgId.length) : pkgId;
let resolved;
try {
resolved = resolveId(unresolvedId, pkg, targetWeb, options);
}
catch (err) {
if (!options.tryEsmOnly) {
throw err;
}
}
if (!resolved && options.tryEsmOnly) {
resolved = resolveId(unresolvedId, pkg, targetWeb, {
...options,
isRequire: false,
mainFields: DEFAULT_MAIN_FIELDS,
extensions: DEFAULT_EXTENSIONS$1,
});
}
if (!resolved) {
return;
}
const processResult = (resolved) => {
if (!externalize) {
return resolved;
}
// don't externalize linked packages (resolved outside node_modules)
if (!allowLinkedExternal && !isInNodeModules(resolved.id)) {
return resolved;
}
const resolvedExt = path$o.extname(resolved.id);
// don't externalize non-js imports
if (resolvedExt &&
resolvedExt !== '.js' &&
resolvedExt !== '.mjs' &&
resolvedExt !== '.cjs') {
return resolved;
}
let resolvedId = id;
if (deepMatch && !pkg?.data.exports && path$o.extname(id) !== resolvedExt) {
// id date-fns/locale
// resolve.id ...date-fns/esm/locale/index.js
const index = resolved.id.indexOf(id);
if (index > -1) {
resolvedId = resolved.id.slice(index);
debug$d?.(`[processResult] ${colors$1.cyan(id)} -> ${colors$1.dim(resolvedId)}`);
}
}
return { ...resolved, id: resolvedId, external: true };
};
if (!options.idOnly &&
((!options.scan && isBuild && !depsOptimizer) || externalize)) {
// Resolve package side effects for build so that rollup can better
// perform tree-shaking
return processResult({
id: resolved,
moduleSideEffects: pkg.hasSideEffects(resolved),
});
}
const ext = path$o.extname(resolved);
if (!options.ssrOptimizeCheck &&
(!isInNodeModules(resolved) || // linked
!depsOptimizer || // resolving before listening to the server
options.scan) // initial esbuild scan phase
) {
return { id: resolved };
}
// if we reach here, it's a valid dep import that hasn't been optimized.
const isJsType = depsOptimizer
? isOptimizable(resolved, depsOptimizer.options)
: OPTIMIZABLE_ENTRY_RE.test(resolved);
let exclude = depsOptimizer?.options.exclude;
let include = depsOptimizer?.options.include;
if (options.ssrOptimizeCheck) {
// we don't have the depsOptimizer
exclude = options.ssrConfig?.optimizeDeps?.exclude;
include = options.ssrConfig?.optimizeDeps?.include;
}
const skipOptimization = depsOptimizer?.options.noDiscovery ||
!isJsType ||
(importer && isInNodeModules(importer)) ||
exclude?.includes(pkgId) ||
exclude?.includes(id) ||
SPECIAL_QUERY_RE.test(resolved) ||
// During dev SSR, we don't have a way to reload the module graph if
// a non-optimized dep is found. So we need to skip optimization here.
// The only optimized deps are the ones explicitly listed in the config.
(!options.ssrOptimizeCheck && !isBuild && ssr) ||
// Only optimize non-external CJS deps during SSR by default
(ssr &&
!(ext === '.cjs' ||
(ext === '.js' &&
findNearestPackageData(path$o.dirname(resolved), options.packageCache)
?.data.type !== 'module')) &&
!(include?.includes(pkgId) || include?.includes(id)));
if (options.ssrOptimizeCheck) {
return {
id: skipOptimization
? injectQuery(resolved, `__vite_skip_optimization`)
: resolved,
};
}
if (skipOptimization) {
// excluded from optimization
// Inject a version query to npm deps so that the browser
// can cache it without re-validation, but only do so for known js types.
// otherwise we may introduce duplicated modules for externalized files
// from pre-bundled deps.
if (!isBuild) {
const versionHash = depsOptimizer.metadata.browserHash;
if (versionHash && isJsType) {
resolved = injectQuery(resolved, `v=${versionHash}`);
}
}
}
else {
// this is a missing import: queue an optimize-deps re-run and
// get the resolved id from its optimized info
const optimizedInfo = depsOptimizer.registerMissingImport(id, resolved);
resolved = depsOptimizer.getOptimizedDepId(optimizedInfo);
}
if (!options.idOnly && !options.scan && isBuild) {
// Resolve package side effects for build so that rollup can better
// perform tree-shaking
return {
id: resolved,
moduleSideEffects: pkg.hasSideEffects(resolved),
};
}
else {
return { id: resolved };
}
}
async function tryOptimizedResolve(depsOptimizer, id, importer, preserveSymlinks, packageCache) {
// TODO: we need to wait until scanning is done here as this function
// is used in the preAliasPlugin to decide if an aliased dep is optimized,
// and avoid replacing the bare import with the resolved path.
// We should be able to remove this in the future
await depsOptimizer.scanProcessing;
const metadata = depsOptimizer.metadata;
const depInfo = optimizedDepInfoFromId(metadata, id);
if (depInfo) {
return depsOptimizer.getOptimizedDepId(depInfo);
}
if (!importer)
return;
// further check if id is imported by nested dependency
let idPkgDir;
const nestedIdMatch = `> ${id}`;
for (const optimizedData of metadata.depInfoList) {
if (!optimizedData.src)
continue; // Ignore chunks
// check where "foo" is nested in "my-lib > foo"
if (!optimizedData.id.endsWith(nestedIdMatch))
continue;
// lazily initialize idPkgDir
if (idPkgDir == null) {
const pkgName = getNpmPackageName(id);
if (!pkgName)
break;
idPkgDir = resolvePackageData(pkgName, importer, preserveSymlinks, packageCache)?.dir;
// if still null, it likely means that this id isn't a dep for importer.
// break to bail early
if (idPkgDir == null)
break;
idPkgDir = normalizePath$3(idPkgDir);
}
// match by src to correctly identify if id belongs to nested dependency
if (optimizedData.src.startsWith(withTrailingSlash(idPkgDir))) {
return depsOptimizer.getOptimizedDepId(optimizedData);
}
}
}
function resolvePackageEntry(id, { dir, data, setResolvedCache, getResolvedCache }, targetWeb, options) {
const cached = getResolvedCache('.', targetWeb);
if (cached) {
return cached;
}
try {
let entryPoint;
// resolve exports field with highest priority
// using https://github.com/lukeed/resolve.exports
if (data.exports) {
entryPoint = resolveExportsOrImports(data, '.', options, targetWeb, 'exports');
}
const resolvedFromExports = !!entryPoint;
// if exports resolved to .mjs, still resolve other fields.
// This is because .mjs files can technically import .cjs files which would
// make them invalid for pure ESM environments - so if other module/browser
// fields are present, prioritize those instead.
if (targetWeb &&
options.browserField &&
(!entryPoint || entryPoint.endsWith('.mjs'))) {
// check browser field
// https://github.com/defunctzombie/package-browser-field-spec
const browserEntry = typeof data.browser === 'string'
? data.browser
: isObject$2(data.browser) && data.browser['.'];
if (browserEntry) {
// check if the package also has a "module" field.
if (!options.isRequire &&
options.mainFields.includes('module') &&
typeof data.module === 'string' &&
data.module !== browserEntry) {
                    // if both are present, we may have a problem: some packages point both
                    // fields to ESM, with "module" targeting Node.js, while others point
                    // "module" to browser ESM and "browser" to UMD/IIFE.
                    // the heuristic here is to read the browser entry when possible and
                    // check for hints of ESM. If it is not ESM, prefer "module" instead;
                    // otherwise, assume it's ESM and use it.
const resolvedBrowserEntry = tryFsResolve(path$o.join(dir, browserEntry), options);
if (resolvedBrowserEntry) {
const content = fs$l.readFileSync(resolvedBrowserEntry, 'utf-8');
if (hasESMSyntax(content)) {
// likely ESM, prefer browser
entryPoint = browserEntry;
}
else {
// non-ESM, UMD or IIFE or CJS(!!! e.g. firebase 7.x), prefer module
entryPoint = data.module;
}
}
}
else {
entryPoint = browserEntry;
}
}
}
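        // Illustrative example (hypothetical package data): given
        //   { "module": "dist/index.esm.js", "browser": "dist/index.umd.js" }
        // the browser entry is read from disk; if it shows no ESM syntax (a UMD/IIFE
        // bundle), "module" wins, whereas a browser entry containing import/export
        // statements would be used as-is.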
// fallback to mainFields if still not resolved
// TODO: review if `.mjs` check is still needed
if (!resolvedFromExports && (!entryPoint || entryPoint.endsWith('.mjs'))) {
for (const field of options.mainFields) {
if (field === 'browser')
continue; // already checked above
if (typeof data[field] === 'string') {
entryPoint = data[field];
break;
}
}
}
entryPoint || (entryPoint = data.main);
        // try the default entries when no entry is defined
// https://nodejs.org/api/modules.html#all-together
const entryPoints = entryPoint
? [entryPoint]
: ['index.js', 'index.json', 'index.node'];
for (let entry of entryPoints) {
// make sure we don't get scripts when looking for sass
let skipPackageJson = false;
if (options.mainFields[0] === 'sass' &&
!options.extensions.includes(path$o.extname(entry))) {
entry = '';
skipPackageJson = true;
}
else {
// resolve object browser field in package.json
const { browser: browserField } = data;
if (targetWeb && options.browserField && isObject$2(browserField)) {
entry = mapWithBrowserField(entry, browserField) || entry;
}
}
const entryPointPath = path$o.join(dir, entry);
const resolvedEntryPoint = tryFsResolve(entryPointPath, options, true, true, skipPackageJson);
if (resolvedEntryPoint) {
debug$d?.(`[package entry] ${colors$1.cyan(id)} -> ${colors$1.dim(resolvedEntryPoint)}`);
setResolvedCache('.', resolvedEntryPoint, targetWeb);
return resolvedEntryPoint;
}
}
}
catch (e) {
packageEntryFailure(id, e.message);
}
packageEntryFailure(id);
}
function packageEntryFailure(id, details) {
throw new Error(`Failed to resolve entry for package "${id}". ` +
`The package may have incorrect main/module/exports specified in its package.json` +
(details ? ': ' + details : '.'));
}
function resolveExportsOrImports(pkg, key, options, targetWeb, type) {
const additionalConditions = new Set(options.overrideConditions || [
'production',
'development',
'module',
...options.conditions,
]);
const conditions = [...additionalConditions].filter((condition) => {
switch (condition) {
case 'production':
return options.isProduction;
case 'development':
return !options.isProduction;
}
return true;
});
const fn = type === 'imports' ? f : o;
const result = fn(pkg, key, {
browser: targetWeb && !additionalConditions.has('node'),
require: options.isRequire && !additionalConditions.has('import'),
conditions,
});
return result ? result[0] : undefined;
}
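// Illustrative example of the resolution above (hypothetical package.json):
//   { "exports": { ".": { "import": "./index.mjs", "require": "./index.cjs" },
//                  "./utils": "./utils/index.js" } }
// resolveExportsOrImports(pkg.data, '.', options, targetWeb, 'exports') yields
// './index.mjs' for an import-style resolution and './index.cjs' when
// options.isRequire is set; the './utils' subpath resolves to './utils/index.js'.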
function resolveDeepImport(id, { webResolvedImports, setResolvedCache, getResolvedCache, dir, data, }, targetWeb, options) {
const cache = getResolvedCache(id, targetWeb);
if (cache) {
return cache;
}
let relativeId = id;
const { exports: exportsField, browser: browserField } = data;
// map relative based on exports data
if (exportsField) {
if (isObject$2(exportsField) && !Array.isArray(exportsField)) {
// resolve without postfix (see #7098)
const { file, postfix } = splitFileAndPostfix(relativeId);
const exportsId = resolveExportsOrImports(data, file, options, targetWeb, 'exports');
if (exportsId !== undefined) {
relativeId = exportsId + postfix;
}
else {
relativeId = undefined;
}
}
else {
// not exposed
relativeId = undefined;
}
if (!relativeId) {
throw new Error(`Package subpath '${relativeId}' is not defined by "exports" in ` +
`${path$o.join(dir, 'package.json')}.`);
}
}
else if (targetWeb && options.browserField && isObject$2(browserField)) {
// resolve without postfix (see #7098)
const { file, postfix } = splitFileAndPostfix(relativeId);
const mapped = mapWithBrowserField(file, browserField);
if (mapped) {
relativeId = mapped + postfix;
}
else if (mapped === false) {
return (webResolvedImports[id] = browserExternalId);
}
}
if (relativeId) {
const resolved = tryFsResolve(path$o.join(dir, relativeId), options, !exportsField, // try index only if no exports field
targetWeb);
if (resolved) {
debug$d?.(`[node/deep-import] ${colors$1.cyan(id)} -> ${colors$1.dim(resolved)}`);
setResolvedCache(id, resolved, targetWeb);
return resolved;
}
}
}
function tryResolveBrowserMapping(id, importer, options, isFilePath, externalize) {
let res;
const pkg = importer &&
findNearestPackageData(path$o.dirname(importer), options.packageCache);
if (pkg && isObject$2(pkg.data.browser)) {
const mapId = isFilePath ? './' + slash$1(path$o.relative(pkg.dir, id)) : id;
const browserMappedPath = mapWithBrowserField(mapId, pkg.data.browser);
if (browserMappedPath) {
if ((res = bareImportRE.test(browserMappedPath)
? tryNodeResolve(browserMappedPath, importer, options, true)?.id
: tryFsResolve(path$o.join(pkg.dir, browserMappedPath), options))) {
debug$d?.(`[browser mapped] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
let result = { id: res };
if (options.idOnly) {
return result;
}
if (!options.scan && options.isBuild) {
const resPkg = findNearestPackageData(path$o.dirname(res), options.packageCache);
if (resPkg) {
result = {
id: res,
moduleSideEffects: resPkg.hasSideEffects(res),
};
}
}
return externalize ? { ...result, external: true } : result;
}
}
else if (browserMappedPath === false) {
return browserExternalId;
}
}
}
/**
* given a relative path in pkg dir,
* return a relative path in pkg dir,
* mapped with the "map" object
*
* - Returning `undefined` means there is no browser mapping for this id
* - Returning `false` means this id is explicitly externalized for browser
*/
function mapWithBrowserField(relativePathInPkgDir, map) {
const normalizedPath = path$o.posix.normalize(relativePathInPkgDir);
for (const key in map) {
const normalizedKey = path$o.posix.normalize(key);
if (normalizedPath === normalizedKey ||
equalWithoutSuffix(normalizedPath, normalizedKey, '.js') ||
equalWithoutSuffix(normalizedPath, normalizedKey, '/index.js')) {
return map[key];
}
}
}
function equalWithoutSuffix(path, key, suffix) {
return key.endsWith(suffix) && key.slice(0, -suffix.length) === path;
}
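// Minimal usage sketch (never called by the bundle; the "browser" map below is hypothetical):
function exampleMapWithBrowserField() {
    const browser = { './server.js': './client.js', './fs.js': false };
    const swapped = mapWithBrowserField('./server.js', browser); // -> './client.js'
    const external = mapWithBrowserField('./fs.js', browser); // -> false (externalized for the browser)
    const unmapped = mapWithBrowserField('./other.js', browser); // -> undefined (no mapping)
    return { swapped, external, unmapped };
}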
function getRealPath(resolved, preserveSymlinks) {
if (!preserveSymlinks && browserExternalId !== resolved) {
resolved = safeRealpathSync(resolved);
}
return normalizePath$3(resolved);
}
var dist = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.lilconfigSync = exports.lilconfig = exports.defaultLoaders = void 0;
const path = require$$0$4;
const fs = require$$0__default;
const os = require$$2;
const fsReadFileAsync = fs.promises.readFile;
function getDefaultSearchPlaces(name) {
return [
'package.json',
`.${name}rc.json`,
`.${name}rc.js`,
`${name}.config.js`,
`.${name}rc.cjs`,
`${name}.config.cjs`,
];
}
function getSearchPaths(startDir, stopDir) {
return startDir
.split(path.sep)
.reduceRight((acc, _, ind, arr) => {
const currentPath = arr.slice(0, ind + 1).join(path.sep);
if (!acc.passedStopDir)
acc.searchPlaces.push(currentPath || path.sep);
if (currentPath === stopDir)
acc.passedStopDir = true;
return acc;
}, { searchPlaces: [], passedStopDir: false }).searchPlaces;
}
exports.defaultLoaders = Object.freeze({
'.js': __require,
'.json': __require,
'.cjs': __require,
noExt(_, content) {
return JSON.parse(content);
},
});
function getExtDesc(ext) {
return ext === 'noExt' ? 'files without extensions' : `extension "${ext}"`;
}
function getOptions(name, options = {}) {
const conf = {
stopDir: os.homedir(),
searchPlaces: getDefaultSearchPlaces(name),
ignoreEmptySearchPlaces: true,
transform: (x) => x,
packageProp: [name],
...options,
loaders: { ...exports.defaultLoaders, ...options.loaders },
};
conf.searchPlaces.forEach(place => {
const key = path.extname(place) || 'noExt';
const loader = conf.loaders[key];
if (!loader) {
throw new Error(`No loader specified for ${getExtDesc(key)}, so searchPlaces item "${place}" is invalid`);
}
if (typeof loader !== 'function') {
throw new Error(`loader for ${getExtDesc(key)} is not a function (type provided: "${typeof loader}"), so searchPlaces item "${place}" is invalid`);
}
});
return conf;
}
function getPackageProp(props, obj) {
if (typeof props === 'string' && props in obj)
return obj[props];
return ((Array.isArray(props) ? props : props.split('.')).reduce((acc, prop) => (acc === undefined ? acc : acc[prop]), obj) || null);
}
function getSearchItems(searchPlaces, searchPaths) {
return searchPaths.reduce((acc, searchPath) => {
searchPlaces.forEach(fileName => acc.push({
fileName,
filepath: path.join(searchPath, fileName),
loaderKey: path.extname(fileName) || 'noExt',
}));
return acc;
}, []);
}
function validateFilePath(filepath) {
if (!filepath)
throw new Error('load must pass a non-empty string');
}
function validateLoader(loader, ext) {
if (!loader)
throw new Error(`No loader specified for extension "${ext}"`);
if (typeof loader !== 'function')
throw new Error('loader is not a function');
}
function lilconfig(name, options) {
const { ignoreEmptySearchPlaces, loaders, packageProp, searchPlaces, stopDir, transform, } = getOptions(name, options);
return {
async search(searchFrom = process.cwd()) {
const searchPaths = getSearchPaths(searchFrom, stopDir);
const result = {
config: null,
filepath: '',
};
const searchItems = getSearchItems(searchPlaces, searchPaths);
for (const { fileName, filepath, loaderKey } of searchItems) {
try {
await fs.promises.access(filepath);
}
catch (_a) {
continue;
}
const content = String(await fsReadFileAsync(filepath));
const loader = loaders[loaderKey];
if (fileName === 'package.json') {
const pkg = await loader(filepath, content);
const maybeConfig = getPackageProp(packageProp, pkg);
if (maybeConfig != null) {
result.config = maybeConfig;
result.filepath = filepath;
break;
}
continue;
}
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces)
continue;
if (isEmpty) {
result.isEmpty = true;
result.config = undefined;
}
else {
validateLoader(loader, loaderKey);
result.config = await loader(filepath, content);
}
result.filepath = filepath;
break;
}
if (result.filepath === '' && result.config === null)
return transform(null);
return transform(result);
},
async load(filepath) {
validateFilePath(filepath);
const absPath = path.resolve(process.cwd(), filepath);
const { base, ext } = path.parse(absPath);
const loaderKey = ext || 'noExt';
const loader = loaders[loaderKey];
validateLoader(loader, loaderKey);
const content = String(await fsReadFileAsync(absPath));
if (base === 'package.json') {
const pkg = await loader(absPath, content);
return transform({
config: getPackageProp(packageProp, pkg),
filepath: absPath,
});
}
const result = {
config: null,
filepath: absPath,
};
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces)
return transform({
config: undefined,
filepath: absPath,
isEmpty: true,
});
result.config = isEmpty
? undefined
: await loader(absPath, content);
return transform(isEmpty ? { ...result, isEmpty, config: undefined } : result);
},
};
}
exports.lilconfig = lilconfig;
function lilconfigSync(name, options) {
const { ignoreEmptySearchPlaces, loaders, packageProp, searchPlaces, stopDir, transform, } = getOptions(name, options);
return {
search(searchFrom = process.cwd()) {
const searchPaths = getSearchPaths(searchFrom, stopDir);
const result = {
config: null,
filepath: '',
};
const searchItems = getSearchItems(searchPlaces, searchPaths);
for (const { fileName, filepath, loaderKey } of searchItems) {
try {
fs.accessSync(filepath);
}
catch (_a) {
continue;
}
const loader = loaders[loaderKey];
const content = String(fs.readFileSync(filepath));
if (fileName === 'package.json') {
const pkg = loader(filepath, content);
const maybeConfig = getPackageProp(packageProp, pkg);
if (maybeConfig != null) {
result.config = maybeConfig;
result.filepath = filepath;
break;
}
continue;
}
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces)
continue;
if (isEmpty) {
result.isEmpty = true;
result.config = undefined;
}
else {
validateLoader(loader, loaderKey);
result.config = loader(filepath, content);
}
result.filepath = filepath;
break;
}
if (result.filepath === '' && result.config === null)
return transform(null);
return transform(result);
},
load(filepath) {
validateFilePath(filepath);
const absPath = path.resolve(process.cwd(), filepath);
const { base, ext } = path.parse(absPath);
const loaderKey = ext || 'noExt';
const loader = loaders[loaderKey];
validateLoader(loader, loaderKey);
const content = String(fs.readFileSync(absPath));
if (base === 'package.json') {
const pkg = loader(absPath, content);
return transform({
config: getPackageProp(packageProp, pkg),
filepath: absPath,
});
}
const result = {
config: null,
filepath: absPath,
};
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces)
return transform({
filepath: absPath,
config: undefined,
isEmpty: true,
});
result.config = isEmpty ? undefined : loader(absPath, content);
return transform(isEmpty ? { ...result, isEmpty, config: undefined } : result);
},
};
}
exports.lilconfigSync = lilconfigSync;
} (dist));
const ALIAS = Symbol.for('yaml.alias');
const DOC = Symbol.for('yaml.document');
const MAP = Symbol.for('yaml.map');
const PAIR = Symbol.for('yaml.pair');
const SCALAR$1 = Symbol.for('yaml.scalar');
const SEQ = Symbol.for('yaml.seq');
const NODE_TYPE = Symbol.for('yaml.node.type');
const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;
const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;
const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;
const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;
const isScalar$1 = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR$1;
const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;
function isCollection$1(node) {
if (node && typeof node === 'object')
switch (node[NODE_TYPE]) {
case MAP:
case SEQ:
return true;
}
return false;
}
function isNode$1(node) {
if (node && typeof node === 'object')
switch (node[NODE_TYPE]) {
case ALIAS:
case MAP:
case SCALAR$1:
case SEQ:
return true;
}
return false;
}
const hasAnchor = (node) => (isScalar$1(node) || isCollection$1(node)) && !!node.anchor;
class NodeBase {
constructor(type) {
Object.defineProperty(this, NODE_TYPE, { value: type });
}
/** Create a copy of this node. */
clone() {
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
if (this.range)
copy.range = this.range.slice();
return copy;
}
}
const BREAK$1 = Symbol('break visit');
const SKIP$1 = Symbol('skip children');
const REMOVE$1 = Symbol('remove node');
/**
* Apply a visitor to an AST node or document.
*
* Walks through the tree (depth-first) starting from `node`, calling a
* `visitor` function with three arguments:
* - `key`: For sequence values and map `Pair`, the node's index in the
* collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
* `null` for the root node.
* - `node`: The current node.
* - `path`: The ancestry of the current node.
*
* The return value of the visitor may be used to control the traversal:
* - `undefined` (default): Do nothing and continue
* - `visit.SKIP`: Do not visit the children of this node, continue with next
* sibling
* - `visit.BREAK`: Terminate traversal completely
* - `visit.REMOVE`: Remove the current node, then continue with the next one
* - `Node`: Replace the current node, then continue by visiting it
* - `number`: While iterating the items of a sequence or map, set the index
* of the next step. This is useful especially if the index of the current
* node has changed.
*
* If `visitor` is a single function, it will be called with all values
* encountered in the tree, including e.g. `null` values. Alternatively,
* separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
* `Alias` and `Scalar` node. To define the same visitor function for more than
* one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
* and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
* specific defined one will be used for each node.
*/
function visit$1(node, visitor) {
const visitor_ = initVisitor(visitor);
if (isDocument(node)) {
const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));
if (cd === REMOVE$1)
node.contents = null;
}
else
visit_(null, node, visitor_, Object.freeze([]));
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visit$1.BREAK = BREAK$1;
/** Do not visit the children of the current node */
visit$1.SKIP = SKIP$1;
/** Remove the current node */
visit$1.REMOVE = REMOVE$1;
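// Illustrative usage (not executed here; `doc` stands for a parsed YAML Document):
//   visit$1(doc, {
//       Pair(_, pair) {
//           if (isScalar$1(pair.key) && pair.key.value === 'legacy')
//               return visit$1.REMOVE;
//       },
//       Scalar(_, node) {
//           if (typeof node.value === 'string')
//               node.value = node.value.trim();
//       }
//   });
// removes every pair keyed 'legacy' and trims all string scalars in place.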
function visit_(key, node, visitor, path) {
const ctrl = callVisitor(key, node, visitor, path);
if (isNode$1(ctrl) || isPair(ctrl)) {
replaceNode(key, path, ctrl);
return visit_(key, ctrl, visitor, path);
}
if (typeof ctrl !== 'symbol') {
if (isCollection$1(node)) {
path = Object.freeze(path.concat(node));
for (let i = 0; i < node.items.length; ++i) {
const ci = visit_(i, node.items[i], visitor, path);
if (typeof ci === 'number')
i = ci - 1;
else if (ci === BREAK$1)
return BREAK$1;
else if (ci === REMOVE$1) {
node.items.splice(i, 1);
i -= 1;
}
}
}
else if (isPair(node)) {
path = Object.freeze(path.concat(node));
const ck = visit_('key', node.key, visitor, path);
if (ck === BREAK$1)
return BREAK$1;
else if (ck === REMOVE$1)
node.key = null;
const cv = visit_('value', node.value, visitor, path);
if (cv === BREAK$1)
return BREAK$1;
else if (cv === REMOVE$1)
node.value = null;
}
}
return ctrl;
}
/**
* Apply an async visitor to an AST node or document.
*
* Walks through the tree (depth-first) starting from `node`, calling a
* `visitor` function with three arguments:
* - `key`: For sequence values and map `Pair`, the node's index in the
* collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
* `null` for the root node.
* - `node`: The current node.
* - `path`: The ancestry of the current node.
*
* The return value of the visitor may be used to control the traversal:
* - `Promise`: Must resolve to one of the following values
* - `undefined` (default): Do nothing and continue
* - `visit.SKIP`: Do not visit the children of this node, continue with next
* sibling
* - `visit.BREAK`: Terminate traversal completely
* - `visit.REMOVE`: Remove the current node, then continue with the next one
* - `Node`: Replace the current node, then continue by visiting it
* - `number`: While iterating the items of a sequence or map, set the index
* of the next step. This is useful especially if the index of the current
* node has changed.
*
* If `visitor` is a single function, it will be called with all values
* encountered in the tree, including e.g. `null` values. Alternatively,
* separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
* `Alias` and `Scalar` node. To define the same visitor function for more than
* one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
* and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
* specific defined one will be used for each node.
*/
async function visitAsync(node, visitor) {
const visitor_ = initVisitor(visitor);
if (isDocument(node)) {
const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));
if (cd === REMOVE$1)
node.contents = null;
}
else
await visitAsync_(null, node, visitor_, Object.freeze([]));
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visitAsync.BREAK = BREAK$1;
/** Do not visit the children of the current node */
visitAsync.SKIP = SKIP$1;
/** Remove the current node */
visitAsync.REMOVE = REMOVE$1;
async function visitAsync_(key, node, visitor, path) {
const ctrl = await callVisitor(key, node, visitor, path);
if (isNode$1(ctrl) || isPair(ctrl)) {
replaceNode(key, path, ctrl);
return visitAsync_(key, ctrl, visitor, path);
}
if (typeof ctrl !== 'symbol') {
if (isCollection$1(node)) {
path = Object.freeze(path.concat(node));
for (let i = 0; i < node.items.length; ++i) {
const ci = await visitAsync_(i, node.items[i], visitor, path);
if (typeof ci === 'number')
i = ci - 1;
else if (ci === BREAK$1)
return BREAK$1;
else if (ci === REMOVE$1) {
node.items.splice(i, 1);
i -= 1;
}
}
}
else if (isPair(node)) {
path = Object.freeze(path.concat(node));
const ck = await visitAsync_('key', node.key, visitor, path);
if (ck === BREAK$1)
return BREAK$1;
else if (ck === REMOVE$1)
node.key = null;
const cv = await visitAsync_('value', node.value, visitor, path);
if (cv === BREAK$1)
return BREAK$1;
else if (cv === REMOVE$1)
node.value = null;
}
}
return ctrl;
}
function initVisitor(visitor) {
if (typeof visitor === 'object' &&
(visitor.Collection || visitor.Node || visitor.Value)) {
return Object.assign({
Alias: visitor.Node,
Map: visitor.Node,
Scalar: visitor.Node,
Seq: visitor.Node
}, visitor.Value && {
Map: visitor.Value,
Scalar: visitor.Value,
Seq: visitor.Value
}, visitor.Collection && {
Map: visitor.Collection,
Seq: visitor.Collection
}, visitor);
}
return visitor;
}
function callVisitor(key, node, visitor, path) {
if (typeof visitor === 'function')
return visitor(key, node, path);
if (isMap(node))
return visitor.Map?.(key, node, path);
if (isSeq(node))
return visitor.Seq?.(key, node, path);
if (isPair(node))
return visitor.Pair?.(key, node, path);
if (isScalar$1(node))
return visitor.Scalar?.(key, node, path);
if (isAlias(node))
return visitor.Alias?.(key, node, path);
return undefined;
}
function replaceNode(key, path, node) {
const parent = path[path.length - 1];
if (isCollection$1(parent)) {
parent.items[key] = node;
}
else if (isPair(parent)) {
if (key === 'key')
parent.key = node;
else
parent.value = node;
}
else if (isDocument(parent)) {
parent.contents = node;
}
else {
const pt = isAlias(parent) ? 'alias' : 'scalar';
throw new Error(`Cannot replace node with ${pt} parent`);
}
}
const escapeChars = {
'!': '%21',
',': '%2C',
'[': '%5B',
']': '%5D',
'{': '%7B',
'}': '%7D'
};
const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]);
class Directives {
constructor(yaml, tags) {
/**
* The directives-end/doc-start marker `---`. If `null`, a marker may still be
* included in the document's stringified representation.
*/
this.docStart = null;
/** The doc-end marker `...`. */
this.docEnd = false;
this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
this.tags = Object.assign({}, Directives.defaultTags, tags);
}
clone() {
const copy = new Directives(this.yaml, this.tags);
copy.docStart = this.docStart;
return copy;
}
/**
* During parsing, get a Directives instance for the current document and
* update the stream state according to the current version's spec.
*/
atDocument() {
const res = new Directives(this.yaml, this.tags);
switch (this.yaml.version) {
case '1.1':
this.atNextDocument = true;
break;
case '1.2':
this.atNextDocument = false;
this.yaml = {
explicit: Directives.defaultYaml.explicit,
version: '1.2'
};
this.tags = Object.assign({}, Directives.defaultTags);
break;
}
return res;
}
/**
* @param onError - May be called even if the action was successful
* @returns `true` on success
*/
add(line, onError) {
if (this.atNextDocument) {
this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
this.tags = Object.assign({}, Directives.defaultTags);
this.atNextDocument = false;
}
const parts = line.trim().split(/[ \t]+/);
const name = parts.shift();
switch (name) {
case '%TAG': {
if (parts.length !== 2) {
onError(0, '%TAG directive should contain exactly two parts');
if (parts.length < 2)
return false;
}
const [handle, prefix] = parts;
this.tags[handle] = prefix;
return true;
}
case '%YAML': {
this.yaml.explicit = true;
if (parts.length !== 1) {
onError(0, '%YAML directive should contain exactly one part');
return false;
}
const [version] = parts;
if (version === '1.1' || version === '1.2') {
this.yaml.version = version;
return true;
}
else {
const isValid = /^\d+\.\d+$/.test(version);
onError(6, `Unsupported YAML version ${version}`, isValid);
return false;
}
}
default:
onError(0, `Unknown directive ${name}`, true);
return false;
}
}
/**
* Resolves a tag, matching handles to those defined in %TAG directives.
*
* @returns Resolved tag, which may also be the non-specific tag `'!'` or a
* `'!local'` tag, or `null` if unresolvable.
*/
tagName(source, onError) {
if (source === '!')
return '!'; // non-specific tag
if (source[0] !== '!') {
onError(`Not a valid tag: ${source}`);
return null;
}
if (source[1] === '<') {
const verbatim = source.slice(2, -1);
if (verbatim === '!' || verbatim === '!!') {
onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
return null;
}
if (source[source.length - 1] !== '>')
onError('Verbatim tags must end with a >');
return verbatim;
}
const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);
if (!suffix)
onError(`The ${source} tag has no suffix`);
const prefix = this.tags[handle];
if (prefix)
return prefix + decodeURIComponent(suffix);
if (handle === '!')
return source; // local tag
onError(`Could not resolve tag: ${source}`);
return null;
}
/**
* Given a fully resolved tag, returns its printable string form,
* taking into account current tag prefixes and defaults.
*/
tagString(tag) {
for (const [handle, prefix] of Object.entries(this.tags)) {
if (tag.startsWith(prefix))
return handle + escapeTagName(tag.substring(prefix.length));
}
return tag[0] === '!' ? tag : `!<${tag}>`;
}
toString(doc) {
const lines = this.yaml.explicit
? [`%YAML ${this.yaml.version || '1.2'}`]
: [];
const tagEntries = Object.entries(this.tags);
let tagNames;
if (doc && tagEntries.length > 0 && isNode$1(doc.contents)) {
const tags = {};
visit$1(doc.contents, (_key, node) => {
if (isNode$1(node) && node.tag)
tags[node.tag] = true;
});
tagNames = Object.keys(tags);
}
else
tagNames = [];
for (const [handle, prefix] of tagEntries) {
if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
continue;
if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
lines.push(`%TAG ${handle} ${prefix}`);
}
return lines.join('\n');
}
}
Directives.defaultYaml = { explicit: false, version: '1.2' };
Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
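// Minimal usage sketch (never called by the bundle): resolving and printing tags with
// the default '!!' handle defined above.
function exampleDirectivesTagRoundTrip() {
    const directives = new Directives();
    const onError = () => { };
    const resolved = directives.tagName('!!str', onError); // -> 'tag:yaml.org,2002:str'
    const printable = directives.tagString('tag:yaml.org,2002:foo,bar'); // -> '!!foo%2Cbar'
    return { resolved, printable };
}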
/**
* Verify that the input string is a valid anchor.
*
* Will throw on errors.
*/
function anchorIsValid(anchor) {
if (/[\x00-\x19\s,[\]{}]/.test(anchor)) {
const sa = JSON.stringify(anchor);
const msg = `Anchor must not contain whitespace or control characters: ${sa}`;
throw new Error(msg);
}
return true;
}
function anchorNames(root) {
const anchors = new Set();
visit$1(root, {
Value(_key, node) {
if (node.anchor)
anchors.add(node.anchor);
}
});
return anchors;
}
/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
function findNewAnchor(prefix, exclude) {
for (let i = 1; true; ++i) {
const name = `${prefix}${i}`;
if (!exclude.has(name))
return name;
}
}
function createNodeAnchors(doc, prefix) {
const aliasObjects = [];
const sourceObjects = new Map();
let prevAnchors = null;
return {
onAnchor: (source) => {
aliasObjects.push(source);
if (!prevAnchors)
prevAnchors = anchorNames(doc);
const anchor = findNewAnchor(prefix, prevAnchors);
prevAnchors.add(anchor);
return anchor;
},
/**
* With circular references, the source node is only resolved after all
* of its child nodes are. This is why anchors are set only after all of
* the nodes have been created.
*/
setAnchors: () => {
for (const source of aliasObjects) {
const ref = sourceObjects.get(source);
if (typeof ref === 'object' &&
ref.anchor &&
(isScalar$1(ref.node) || isCollection$1(ref.node))) {
ref.node.anchor = ref.anchor;
}
else {
const error = new Error('Failed to resolve repeated object (this should not happen)');
error.source = source;
throw error;
}
}
},
sourceObjects
};
}
class Alias extends NodeBase {
constructor(source) {
super(ALIAS);
this.source = source;
Object.defineProperty(this, 'tag', {
set() {
throw new Error('Alias nodes cannot have tags');
}
});
}
/**
* Resolve the value of this alias within `doc`, finding the last
* instance of the `source` anchor before this node.
*/
resolve(doc) {
let found = undefined;
visit$1(doc, {
Node: (_key, node) => {
if (node === this)
return visit$1.BREAK;
if (node.anchor === this.source)
found = node;
}
});
return found;
}
toJSON(_arg, ctx) {
if (!ctx)
return { source: this.source };
const { anchors, doc, maxAliasCount } = ctx;
const source = this.resolve(doc);
if (!source) {
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
throw new ReferenceError(msg);
}
const data = anchors.get(source);
/* istanbul ignore if */
if (!data || data.res === undefined) {
const msg = 'This should not happen: Alias anchor was not resolved?';
throw new ReferenceError(msg);
}
if (maxAliasCount >= 0) {
data.count += 1;
if (data.aliasCount === 0)
data.aliasCount = getAliasCount(doc, source, anchors);
if (data.count * data.aliasCount > maxAliasCount) {
const msg = 'Excessive alias count indicates a resource exhaustion attack';
throw new ReferenceError(msg);
}
}
return data.res;
}
toString(ctx, _onComment, _onChompKeep) {
const src = `*${this.source}`;
if (ctx) {
anchorIsValid(this.source);
if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
throw new Error(msg);
}
if (ctx.implicitKey)
return `${src} `;
}
return src;
}
}
function getAliasCount(doc, node, anchors) {
if (isAlias(node)) {
const source = node.resolve(doc);
const anchor = anchors && source && anchors.get(source);
return anchor ? anchor.count * anchor.aliasCount : 0;
}
else if (isCollection$1(node)) {
let count = 0;
for (const item of node.items) {
const c = getAliasCount(doc, item, anchors);
if (c > count)
count = c;
}
return count;
}
else if (isPair(node)) {
const kc = getAliasCount(doc, node.key, anchors);
const vc = getAliasCount(doc, node.value, anchors);
return Math.max(kc, vc);
}
return 1;
}
/**
* Recursively convert any node or its contents to native JavaScript
*
* @param value - The input value
* @param arg - If `value` defines a `toJSON()` method, use this
* as its first argument
* @param ctx - Conversion context, originally set in Document#toJS(). If
* `{ keep: true }` is not set, output should be suitable for JSON
* stringification.
*/
function toJS(value, arg, ctx) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
if (Array.isArray(value))
return value.map((v, i) => toJS(v, String(i), ctx));
if (value && typeof value.toJSON === 'function') {
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
if (!ctx || !hasAnchor(value))
return value.toJSON(arg, ctx);
const data = { aliasCount: 0, count: 1, res: undefined };
ctx.anchors.set(value, data);
ctx.onCreate = res => {
data.res = res;
delete ctx.onCreate;
};
const res = value.toJSON(arg, ctx);
if (ctx.onCreate)
ctx.onCreate(res);
return res;
}
if (typeof value === 'bigint' && !ctx?.keep)
return Number(value);
return value;
}
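// Minimal usage sketch (never called by the bundle): without a ctx, toJS recurses into
// arrays and converts bigint values to plain numbers.
function exampleToJS() {
    return toJS([10n, ['a', 'b']], '', undefined); // -> [10, ['a', 'b']]
}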
const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');
class Scalar extends NodeBase {
constructor(value) {
super(SCALAR$1);
this.value = value;
}
toJSON(arg, ctx) {
return ctx?.keep ? this.value : toJS(this.value, arg, ctx);
}
toString() {
return String(this.value);
}
}
Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
Scalar.PLAIN = 'PLAIN';
Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
const defaultTagPrefix = 'tag:yaml.org,2002:';
function findTagObject(value, tagName, tags) {
if (tagName) {
const match = tags.filter(t => t.tag === tagName);
const tagObj = match.find(t => !t.format) ?? match[0];
if (!tagObj)
throw new Error(`Tag ${tagName} not found`);
return tagObj;
}
return tags.find(t => t.identify?.(value) && !t.format);
}
function createNode(value, tagName, ctx) {
if (isDocument(value))
value = value.contents;
if (isNode$1(value))
return value;
if (isPair(value)) {
const map = ctx.schema[MAP].createNode?.(ctx.schema, null, ctx);
map.items.push(value);
return map;
}
if (value instanceof String ||
value instanceof Number ||
value instanceof Boolean ||
(typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere
) {
// https://tc39.es/ecma262/#sec-serializejsonproperty
value = value.valueOf();
}
const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;
    // Detect duplicate references to the same object & use Alias nodes for all
    // but the first. The `ref` wrapper allows for circular references to resolve.
let ref = undefined;
if (aliasDuplicateObjects && value && typeof value === 'object') {
ref = sourceObjects.get(value);
if (ref) {
if (!ref.anchor)
ref.anchor = onAnchor(value);
return new Alias(ref.anchor);
}
else {
ref = { anchor: null, node: null };
sourceObjects.set(value, ref);
}
}
if (tagName?.startsWith('!!'))
tagName = defaultTagPrefix + tagName.slice(2);
let tagObj = findTagObject(value, tagName, schema.tags);
if (!tagObj) {
if (value && typeof value.toJSON === 'function') {
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
value = value.toJSON();
}
if (!value || typeof value !== 'object') {
const node = new Scalar(value);
if (ref)
ref.node = node;
return node;
}
tagObj =
value instanceof Map
? schema[MAP]
: Symbol.iterator in Object(value)
? schema[SEQ]
: schema[MAP];
}
if (onTagObj) {
onTagObj(tagObj);
delete ctx.onTagObj;
}
const node = tagObj?.createNode
? tagObj.createNode(ctx.schema, value, ctx)
: new Scalar(value);
if (tagName)
node.tag = tagName;
if (ref)
ref.node = node;
return node;
}
function collectionFromPath(schema, path, value) {
let v = value;
for (let i = path.length - 1; i >= 0; --i) {
const k = path[i];
if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
const a = [];
a[k] = v;
v = a;
}
else {
v = new Map([[k, v]]);
}
}
return createNode(v, undefined, {
aliasDuplicateObjects: false,
keepUndefined: false,
onAnchor: () => {
throw new Error('This should not happen, please report a bug.');
},
schema,
sourceObjects: new Map()
});
}
// Type guard is intentionally a little wrong so as to be more useful,
// as it does not cover untypable empty non-string iterables (e.g. []).
const isEmptyPath = (path) => path == null ||
(typeof path === 'object' && !!path[Symbol.iterator]().next().done);
class Collection extends NodeBase {
constructor(type, schema) {
super(type);
Object.defineProperty(this, 'schema', {
value: schema,
configurable: true,
enumerable: false,
writable: true
});
}
/**
* Create a copy of this collection.
*
* @param schema - If defined, overwrites the original's schema
*/
clone(schema) {
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
if (schema)
copy.schema = schema;
copy.items = copy.items.map(it => isNode$1(it) || isPair(it) ? it.clone(schema) : it);
if (this.range)
copy.range = this.range.slice();
return copy;
}
/**
* Adds a value to the collection. For `!!map` and `!!omap` the value must
* be a Pair instance or a `{ key, value }` object, which may not have a key
* that already exists in the map.
*/
addIn(path, value) {
if (isEmptyPath(path))
this.add(value);
else {
const [key, ...rest] = path;
const node = this.get(key, true);
if (isCollection$1(node))
node.addIn(rest, value);
else if (node === undefined && this.schema)
this.set(key, collectionFromPath(this.schema, rest, value));
else
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
}
}
/**
* Removes a value from the collection.
* @returns `true` if the item was found and removed.
*/
deleteIn(path) {
const [key, ...rest] = path;
if (rest.length === 0)
return this.delete(key);
const node = this.get(key, true);
if (isCollection$1(node))
return node.deleteIn(rest);
else
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
}
/**
* Returns item at `key`, or `undefined` if not found. By default unwraps
* scalar values from their surrounding node; to disable set `keepScalar` to
* `true` (collections are always returned intact).
*/
getIn(path, keepScalar) {
const [key, ...rest] = path;
const node = this.get(key, true);
if (rest.length === 0)
return !keepScalar && isScalar$1(node) ? node.value : node;
else
return isCollection$1(node) ? node.getIn(rest, keepScalar) : undefined;
}
hasAllNullValues(allowScalar) {
return this.items.every(node => {
if (!isPair(node))
return false;
const n = node.value;
return (n == null ||
(allowScalar &&
isScalar$1(n) &&
n.value == null &&
!n.commentBefore &&
!n.comment &&
!n.tag));
});
}
/**
* Checks if the collection includes a value with the key `key`.
*/
hasIn(path) {
const [key, ...rest] = path;
if (rest.length === 0)
return this.has(key);
const node = this.get(key, true);
return isCollection$1(node) ? node.hasIn(rest) : false;
}
/**
* Sets a value in this collection. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*/
setIn(path, value) {
const [key, ...rest] = path;
if (rest.length === 0) {
this.set(key, value);
}
else {
const node = this.get(key, true);
if (isCollection$1(node))
node.setIn(rest, value);
else if (node === undefined && this.schema)
this.set(key, collectionFromPath(this.schema, rest, value));
else
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
}
}
}
Collection.maxFlowStringSingleLineLength = 60;
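// Illustrative usage of the *In() path helpers above (not executed here; `map` stands
// for a concrete collection built from { a: { b: [1, 2] } }):
//   map.getIn(['a', 'b', 1])    // -> 2 (scalars are unwrapped unless keepScalar is true)
//   map.setIn(['a', 'b', 2], 3) // sets index 2 of the nested sequence to 3
//   map.hasIn(['a', 'c'])       // -> false
//   map.deleteIn(['a', 'b'])    // -> true (the nested sequence is removed)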
/**
* Stringifies a comment.
*
* Empty comment lines are left empty,
* lines consisting of a single space are replaced by `#`,
* and all other lines are prefixed with a `#`.
*/
const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');
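// Minimal usage sketch (never called by the bundle): '#' is prefixed to non-empty lines,
// a lone-space line collapses to '#', and empty lines stay empty.
function exampleStringifyComment() {
    return stringifyComment('first line\n\n \nsecond'); // -> '#first line\n\n#\n#second'
}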
function indentComment(comment, indent) {
if (/^\n+$/.test(comment))
return comment.substring(1);
return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;
}
const lineComment = (str, indent, comment) => str.endsWith('\n')
? indentComment(comment, indent)
: comment.includes('\n')
? '\n' + indentComment(comment, indent)
: (str.endsWith(' ') ? '' : ' ') + comment;
const FOLD_FLOW = 'flow';
const FOLD_BLOCK = 'block';
const FOLD_QUOTED = 'quoted';
/**
* Tries to keep input at up to `lineWidth` characters, splitting only on spaces
* not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
* terminated with `\n` and started with `indent`.
*/
function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
if (!lineWidth || lineWidth < 0)
return text;
const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
if (text.length <= endStep)
return text;
const folds = [];
const escapedFolds = {};
let end = lineWidth - indent.length;
if (typeof indentAtStart === 'number') {
if (indentAtStart > lineWidth - Math.max(2, minContentWidth))
folds.push(0);
else
end = lineWidth - indentAtStart;
}
let split = undefined;
let prev = undefined;
let overflow = false;
let i = -1;
let escStart = -1;
let escEnd = -1;
if (mode === FOLD_BLOCK) {
i = consumeMoreIndentedLines(text, i);
if (i !== -1)
end = i + endStep;
}
for (let ch; (ch = text[(i += 1)]);) {
if (mode === FOLD_QUOTED && ch === '\\') {
escStart = i;
switch (text[i + 1]) {
case 'x':
i += 3;
break;
case 'u':
i += 5;
break;
case 'U':
i += 9;
break;
default:
i += 1;
}
escEnd = i;
}
if (ch === '\n') {
if (mode === FOLD_BLOCK)
i = consumeMoreIndentedLines(text, i);
end = i + endStep;
split = undefined;
}
else {
if (ch === ' ' &&
prev &&
prev !== ' ' &&
prev !== '\n' &&
prev !== '\t') {
// space surrounded by non-space can be replaced with newline + indent
const next = text[i + 1];
if (next && next !== ' ' && next !== '\n' && next !== '\t')
split = i;
}
if (i >= end) {
if (split) {
folds.push(split);
end = split + endStep;
split = undefined;
}
else if (mode === FOLD_QUOTED) {
// white-space collected at end may stretch past lineWidth
while (prev === ' ' || prev === '\t') {
prev = ch;
ch = text[(i += 1)];
overflow = true;
}
// Account for newline escape, but don't break preceding escape
const j = i > escEnd + 1 ? i - 2 : escStart - 1;
// Bail out if lineWidth & minContentWidth are shorter than an escape string
if (escapedFolds[j])
return text;
folds.push(j);
escapedFolds[j] = true;
end = j + endStep;
split = undefined;
}
else {
overflow = true;
}
}
}
prev = ch;
}
if (overflow && onOverflow)
onOverflow();
if (folds.length === 0)
return text;
if (onFold)
onFold();
let res = text.slice(0, folds[0]);
for (let i = 0; i < folds.length; ++i) {
const fold = folds[i];
const end = folds[i + 1] || text.length;
if (fold === 0)
res = `\n${indent}${text.slice(0, end)}`;
else {
if (mode === FOLD_QUOTED && escapedFolds[fold])
res += `${text[fold]}\\`;
res += `\n${indent}${text.slice(fold + 1, end)}`;
}
}
return res;
}
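// Minimal usage sketch (never called by the bundle): folding a 43-character line at a
// 20-column width splits it on the spaces closest to each limit.
function exampleFoldFlowLines() {
    const text = 'the quick brown fox jumps over the lazy dog';
    return foldFlowLines(text, '', 'flow', { lineWidth: 20, minContentWidth: 5 });
    // -> 'the quick brown fox\njumps over the lazy\ndog'
}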
/**
* Presumes `i + 1` is at the start of a line
* @returns index of last newline in more-indented block
*/
function consumeMoreIndentedLines(text, i) {
let ch = text[i + 1];
while (ch === ' ' || ch === '\t') {
do {
ch = text[(i += 1)];
} while (ch && ch !== '\n');
ch = text[i + 1];
}
return i;
}
const getFoldOptions = (ctx) => ({
indentAtStart: ctx.indentAtStart,
lineWidth: ctx.options.lineWidth,
minContentWidth: ctx.options.minContentWidth
});
// Also checks for lines starting with %, as parsing the output as YAML 1.1 will
// presume that's starting a new document.
const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str);
function lineLengthOverLimit(str, lineWidth, indentLength) {
if (!lineWidth || lineWidth < 0)
return false;
const limit = lineWidth - indentLength;
const strLen = str.length;
if (strLen <= limit)
return false;
for (let i = 0, start = 0; i < strLen; ++i) {
if (str[i] === '\n') {
if (i - start > limit)
return true;
start = i + 1;
if (strLen - start <= limit)
return false;
}
}
return true;
}
function doubleQuotedString(value, ctx) {
const json = JSON.stringify(value);
if (ctx.options.doubleQuotedAsJSON)
return json;
const { implicitKey } = ctx;
const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;
const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
let str = '';
let start = 0;
for (let i = 0, ch = json[i]; ch; ch = json[++i]) {
if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
// space before newline needs to be escaped to not be folded
str += json.slice(start, i) + '\\ ';
i += 1;
start = i;
ch = '\\';
}
if (ch === '\\')
switch (json[i + 1]) {
case 'u':
{
str += json.slice(start, i);
const code = json.substr(i + 2, 4);
switch (code) {
case '0000':
str += '\\0';
break;
case '0007':
str += '\\a';
break;
case '000b':
str += '\\v';
break;
case '001b':
str += '\\e';
break;
case '0085':
str += '\\N';
break;
case '00a0':
str += '\\_';
break;
case '2028':
str += '\\L';
break;
case '2029':
str += '\\P';
break;
default:
if (code.substr(0, 2) === '00')
str += '\\x' + code.substr(2);
else
str += json.substr(i, 6);
}
i += 5;
start = i + 1;
}
break;
case 'n':
if (implicitKey ||
json[i + 2] === '"' ||
json.length < minMultiLineLength) {
i += 1;
}
else {
// folding will eat first newline
str += json.slice(start, i) + '\n\n';
while (json[i + 2] === '\\' &&
json[i + 3] === 'n' &&
json[i + 4] !== '"') {
str += '\n';
i += 2;
}
str += indent;
// space after newline needs to be escaped to not be folded
if (json[i + 2] === ' ')
str += '\\';
i += 1;
start = i + 1;
}
break;
default:
i += 1;
}
}
str = start ? str + json.slice(start) : json;
return implicitKey
? str
: foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx));
}
function singleQuotedString(value, ctx) {
if (ctx.options.singleQuote === false ||
(ctx.implicitKey && value.includes('\n')) ||
/[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline
)
return doubleQuotedString(value, ctx);
const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'";
return ctx.implicitKey
? res
: foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx));
}
function quotedString(value, ctx) {
const { singleQuote } = ctx.options;
let qs;
if (singleQuote === false)
qs = doubleQuotedString;
else {
const hasDouble = value.includes('"');
const hasSingle = value.includes("'");
if (hasDouble && !hasSingle)
qs = singleQuotedString;
else if (hasSingle && !hasDouble)
qs = doubleQuotedString;
else
qs = singleQuote ? singleQuotedString : doubleQuotedString;
}
return qs(value, ctx);
}
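// Illustrative behaviour of quotedString (ctx as produced by createStringifyContext
// below, with default options):
//   quotedString('say "hi"', ctx)  -> single-quoted, since the value only contains `"`
//   quotedString("it's fine", ctx) -> double-quoted, since the value only contains `'`
//   with neither or both quote kinds present, options.singleQuote (default null)
//   decides, and double quotes win.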
function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
const { blockQuote, commentString, lineWidth } = ctx.options;
// 1. Block can't end in whitespace unless the last line is non-empty.
// 2. Strings consisting of only whitespace are best rendered explicitly.
if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
return quotedString(value, ctx);
}
const indent = ctx.indent ||
(ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');
const literal = blockQuote === 'literal'
? true
: blockQuote === 'folded' || type === Scalar.BLOCK_FOLDED
? false
: type === Scalar.BLOCK_LITERAL
? true
: !lineLengthOverLimit(value, lineWidth, indent.length);
if (!value)
return literal ? '|\n' : '>\n';
// determine chomping from whitespace at value end
let chomp;
let endStart;
for (endStart = value.length; endStart > 0; --endStart) {
const ch = value[endStart - 1];
if (ch !== '\n' && ch !== '\t' && ch !== ' ')
break;
}
let end = value.substring(endStart);
const endNlPos = end.indexOf('\n');
if (endNlPos === -1) {
chomp = '-'; // strip
}
else if (value === end || endNlPos !== end.length - 1) {
chomp = '+'; // keep
if (onChompKeep)
onChompKeep();
}
else {
chomp = ''; // clip
}
if (end) {
value = value.slice(0, -end.length);
if (end[end.length - 1] === '\n')
end = end.slice(0, -1);
end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`);
}
// determine indent indicator from whitespace at value start
let startWithSpace = false;
let startEnd;
let startNlPos = -1;
for (startEnd = 0; startEnd < value.length; ++startEnd) {
const ch = value[startEnd];
if (ch === ' ')
startWithSpace = true;
else if (ch === '\n')
startNlPos = startEnd;
else
break;
}
let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);
if (start) {
value = value.substring(start.length);
start = start.replace(/\n+/g, `$&${indent}`);
}
const indentSize = indent ? '2' : '1'; // root is at -1
let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
if (comment) {
header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
if (onComment)
onComment();
}
if (literal) {
value = value.replace(/\n+/g, `$&${indent}`);
return `${header}\n${indent}${start}${value}${end}`;
}
value = value
.replace(/\n+/g, '\n$&')
.replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
// ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
.replace(/\n+/g, `$&${indent}`);
const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx));
return `${header}\n${indent}${body}`;
}
function plainString(item, ctx, onComment, onChompKeep) {
const { type, value } = item;
const { actualString, implicitKey, indent, inFlow } = ctx;
if ((implicitKey && /[\n[\]{},]/.test(value)) ||
(inFlow && /[[\]{},]/.test(value))) {
return quotedString(value, ctx);
}
if (!value ||
/^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
// not allowed:
// - empty string, '-' or '?'
// - start with an indicator character (except [?:-]) or /[?-] /
// - '\n ', ': ' or ' \n' anywhere
// - '#' not preceded by a non-space char
// - end with ' ' or ':'
return implicitKey || inFlow || !value.includes('\n')
? quotedString(value, ctx)
: blockString(item, ctx, onComment, onChompKeep);
}
if (!implicitKey &&
!inFlow &&
type !== Scalar.PLAIN &&
value.includes('\n')) {
// Where allowed & type not set explicitly, prefer block style for multiline strings
return blockString(item, ctx, onComment, onChompKeep);
}
if (indent === '' && containsDocumentMarker(value)) {
ctx.forceBlockIndent = true;
return blockString(item, ctx, onComment, onChompKeep);
}
const str = value.replace(/\n+/g, `$&\n${indent}`);
// Verify that output will be parsed as a string, as e.g. plain numbers and
// booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
// and others in v1.1.
if (actualString) {
const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);
const { compat, tags } = ctx.doc.schema;
if (tags.some(test) || compat?.some(test))
return quotedString(value, ctx);
}
return implicitKey
? str
: foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx));
}
function stringifyString(item, ctx, onComment, onChompKeep) {
const { implicitKey, inFlow } = ctx;
const ss = typeof item.value === 'string'
? item
: Object.assign({}, item, { value: String(item.value) });
let { type } = item;
if (type !== Scalar.QUOTE_DOUBLE) {
// force double quotes on control characters & unpaired surrogates
if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value))
type = Scalar.QUOTE_DOUBLE;
}
const _stringify = (_type) => {
switch (_type) {
case Scalar.BLOCK_FOLDED:
case Scalar.BLOCK_LITERAL:
return implicitKey || inFlow
? quotedString(ss.value, ctx) // blocks are not valid inside flow containers
: blockString(ss, ctx, onComment, onChompKeep);
case Scalar.QUOTE_DOUBLE:
return doubleQuotedString(ss.value, ctx);
case Scalar.QUOTE_SINGLE:
return singleQuotedString(ss.value, ctx);
case Scalar.PLAIN:
return plainString(ss, ctx, onComment, onChompKeep);
default:
return null;
}
};
let res = _stringify(type);
if (res === null) {
const { defaultKeyType, defaultStringType } = ctx.options;
const t = (implicitKey && defaultKeyType) || defaultStringType;
res = _stringify(t);
if (res === null)
throw new Error(`Unsupported default string type ${t}`);
}
return res;
}
function createStringifyContext(doc, options) {
const opt = Object.assign({
blockQuote: true,
commentString: stringifyComment,
defaultKeyType: null,
defaultStringType: 'PLAIN',
directives: null,
doubleQuotedAsJSON: false,
doubleQuotedMinMultiLineLength: 40,
falseStr: 'false',
indentSeq: true,
lineWidth: 80,
minContentWidth: 20,
nullStr: 'null',
simpleKeys: false,
singleQuote: null,
trueStr: 'true',
verifyAliasOrder: true
}, doc.schema.toStringOptions, options);
let inFlow;
switch (opt.collectionStyle) {
case 'block':
inFlow = false;
break;
case 'flow':
inFlow = true;
break;
default:
inFlow = null;
}
return {
anchors: new Set(),
doc,
indent: '',
indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',
inFlow,
options: opt
};
}
function getTagObject(tags, item) {
if (item.tag) {
const match = tags.filter(t => t.tag === item.tag);
if (match.length > 0)
return match.find(t => t.format === item.format) ?? match[0];
}
let tagObj = undefined;
let obj;
if (isScalar$1(item)) {
obj = item.value;
const match = tags.filter(t => t.identify?.(obj));
tagObj =
match.find(t => t.format === item.format) ?? match.find(t => !t.format);
}
else {
obj = item;
tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
}
if (!tagObj) {
const name = obj?.constructor?.name ?? typeof obj;
throw new Error(`Tag not resolved for ${name} value`);
}
return tagObj;
}
// needs to be called before value stringifier to allow for circular anchor refs
function stringifyProps(node, tagObj, { anchors, doc }) {
if (!doc.directives)
return '';
const props = [];
const anchor = (isScalar$1(node) || isCollection$1(node)) && node.anchor;
if (anchor && anchorIsValid(anchor)) {
anchors.add(anchor);
props.push(`&${anchor}`);
}
const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
if (tag)
props.push(doc.directives.tagString(tag));
return props.join(' ');
}
function stringify$2(item, ctx, onComment, onChompKeep) {
if (isPair(item))
return item.toString(ctx, onComment, onChompKeep);
if (isAlias(item)) {
if (ctx.doc.directives)
return item.toString(ctx);
if (ctx.resolvedAliases?.has(item)) {
throw new TypeError(`Cannot stringify circular structure without alias nodes`);
}
else {
if (ctx.resolvedAliases)
ctx.resolvedAliases.add(item);
else
ctx.resolvedAliases = new Set([item]);
item = item.resolve(ctx.doc);
}
}
let tagObj = undefined;
const node = isNode$1(item)
? item
: ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
if (!tagObj)
tagObj = getTagObject(ctx.doc.schema.tags, node);
const props = stringifyProps(node, tagObj, ctx);
if (props.length > 0)
ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;
const str = typeof tagObj.stringify === 'function'
? tagObj.stringify(node, ctx, onComment, onChompKeep)
: isScalar$1(node)
? stringifyString(node, ctx, onComment, onChompKeep)
: node.toString(ctx, onComment, onChompKeep);
if (!props)
return str;
return isScalar$1(node) || str[0] === '{' || str[0] === '['
? `${props} ${str}`
: `${props}\n${ctx.indent}${str}`;
}
function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;
let keyComment = (isNode$1(key) && key.comment) || null;
if (simpleKeys) {
if (keyComment) {
throw new Error('With simple keys, key nodes cannot have comments');
}
if (isCollection$1(key)) {
const msg = 'With simple keys, collection cannot be used as a key value';
throw new Error(msg);
}
}
let explicitKey = !simpleKeys &&
(!key ||
(keyComment && value == null && !ctx.inFlow) ||
isCollection$1(key) ||
(isScalar$1(key)
? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL
: typeof key === 'object'));
ctx = Object.assign({}, ctx, {
allNullValues: false,
implicitKey: !explicitKey && (simpleKeys || !allNullValues),
indent: indent + indentStep
});
let keyCommentDone = false;
let chompKeep = false;
let str = stringify$2(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));
if (!explicitKey && !ctx.inFlow && str.length > 1024) {
if (simpleKeys)
throw new Error('With simple keys, single line scalar must not span more than 1024 characters');
explicitKey = true;
}
if (ctx.inFlow) {
if (allNullValues || value == null) {
if (keyCommentDone && onComment)
onComment();
return str === '' ? '?' : explicitKey ? `? ${str}` : str;
}
}
else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {
str = `? ${str}`;
if (keyComment && !keyCommentDone) {
str += lineComment(str, ctx.indent, commentString(keyComment));
}
else if (chompKeep && onChompKeep)
onChompKeep();
return str;
}
if (keyCommentDone)
keyComment = null;
if (explicitKey) {
if (keyComment)
str += lineComment(str, ctx.indent, commentString(keyComment));
str = `? ${str}\n${indent}:`;
}
else {
str = `${str}:`;
if (keyComment)
str += lineComment(str, ctx.indent, commentString(keyComment));
}
let vcb = '';
let valueComment = null;
if (isNode$1(value)) {
if (value.spaceBefore)
vcb = '\n';
if (value.commentBefore) {
const cs = commentString(value.commentBefore);
vcb += `\n${indentComment(cs, ctx.indent)}`;
}
valueComment = value.comment;
}
else if (value && typeof value === 'object') {
value = doc.createNode(value);
}
ctx.implicitKey = false;
if (!explicitKey && !keyComment && isScalar$1(value))
ctx.indentAtStart = str.length + 1;
chompKeep = false;
if (!indentSeq &&
indentStep.length >= 2 &&
!ctx.inFlow &&
!explicitKey &&
isSeq(value) &&
!value.flow &&
!value.tag &&
!value.anchor) {
// If indentSeq === false, consider '- ' as part of indentation where possible
ctx.indent = ctx.indent.substr(2);
}
let valueCommentDone = false;
const valueStr = stringify$2(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));
let ws = ' ';
if (vcb || keyComment) {
if (valueStr === '' && !ctx.inFlow)
ws = vcb === '\n' ? '\n\n' : vcb;
else
ws = `${vcb}\n${ctx.indent}`;
}
else if (!explicitKey && isCollection$1(value)) {
const flow = valueStr[0] === '[' || valueStr[0] === '{';
if (!flow || valueStr.includes('\n'))
ws = `\n${ctx.indent}`;
}
else if (valueStr === '' || valueStr[0] === '\n')
ws = '';
str += ws + valueStr;
if (ctx.inFlow) {
if (valueCommentDone && onComment)
onComment();
}
else if (valueComment && !valueCommentDone) {
str += lineComment(str, ctx.indent, commentString(valueComment));
}
else if (chompKeep && onChompKeep) {
onChompKeep();
}
return str;
}
function warn(logLevel, warning) {
if (logLevel === 'debug' || logLevel === 'warn') {
if (typeof process !== 'undefined' && process.emitWarning)
process.emitWarning(warning);
else
console.warn(warning);
}
}
const MERGE_KEY = '<<';
function addPairToJSMap(ctx, map, { key, value }) {
if (ctx?.doc.schema.merge && isMergeKey(key)) {
value = isAlias(value) ? value.resolve(ctx.doc) : value;
if (isSeq(value))
for (const it of value.items)
mergeToJSMap(ctx, map, it);
else if (Array.isArray(value))
for (const it of value)
mergeToJSMap(ctx, map, it);
else
mergeToJSMap(ctx, map, value);
}
else {
const jsKey = toJS(key, '', ctx);
if (map instanceof Map) {
map.set(jsKey, toJS(value, jsKey, ctx));
}
else if (map instanceof Set) {
map.add(jsKey);
}
else {
const stringKey = stringifyKey(key, jsKey, ctx);
const jsValue = toJS(value, stringKey, ctx);
if (stringKey in map)
Object.defineProperty(map, stringKey, {
value: jsValue,
writable: true,
enumerable: true,
configurable: true
});
else
map[stringKey] = jsValue;
}
}
return map;
}
const isMergeKey = (key) => key === MERGE_KEY ||
(isScalar$1(key) &&
key.value === MERGE_KEY &&
(!key.type || key.type === Scalar.PLAIN));
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
function mergeToJSMap(ctx, map, value) {
const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
if (!isMap(source))
throw new Error('Merge sources must be maps or map aliases');
const srcMap = source.toJSON(null, ctx, Map);
for (const [key, value] of srcMap) {
if (map instanceof Map) {
if (!map.has(key))
map.set(key, value);
}
else if (map instanceof Set) {
map.add(key);
}
else if (!Object.prototype.hasOwnProperty.call(map, key)) {
Object.defineProperty(map, key, {
value,
writable: true,
enumerable: true,
configurable: true
});
}
}
return map;
}
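// Usage sketch (comment only; assumes the public `yaml` package API). With a YAML 1.1
// schema (where `merge` is enabled), the two functions above implement `<<` merge keys:
//   import { parseDocument } from 'yaml';
//   const src = 'base: &b { x: 1, y: 2 }\nchild:\n  <<: *b\n  y: 3\n';
//   parseDocument(src, { version: '1.1' }).toJS().child;
//   // => { x: 1, y: 3 }  (explicit keys win over merged ones)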
function stringifyKey(key, jsKey, ctx) {
if (jsKey === null)
return '';
if (typeof jsKey !== 'object')
return String(jsKey);
if (isNode$1(key) && ctx && ctx.doc) {
const strCtx = createStringifyContext(ctx.doc, {});
strCtx.anchors = new Set();
for (const node of ctx.anchors.keys())
strCtx.anchors.add(node.anchor);
strCtx.inFlow = true;
strCtx.inStringifyKey = true;
const strKey = key.toString(strCtx);
if (!ctx.mapKeyWarned) {
let jsonStr = JSON.stringify(strKey);
if (jsonStr.length > 40)
jsonStr = jsonStr.substring(0, 36) + '..."';
warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);
ctx.mapKeyWarned = true;
}
return strKey;
}
return JSON.stringify(jsKey);
}
function createPair(key, value, ctx) {
const k = createNode(key, undefined, ctx);
const v = createNode(value, undefined, ctx);
return new Pair(k, v);
}
class Pair {
constructor(key, value = null) {
Object.defineProperty(this, NODE_TYPE, { value: PAIR });
this.key = key;
this.value = value;
}
clone(schema) {
let { key, value } = this;
if (isNode$1(key))
key = key.clone(schema);
if (isNode$1(value))
value = value.clone(schema);
return new Pair(key, value);
}
toJSON(_, ctx) {
const pair = ctx?.mapAsMap ? new Map() : {};
return addPairToJSMap(ctx, pair, this);
}
toString(ctx, onComment, onChompKeep) {
return ctx?.doc
? stringifyPair(this, ctx, onComment, onChompKeep)
: JSON.stringify(this);
}
}
function stringifyCollection(collection, ctx, options) {
const flow = ctx.inFlow ?? collection.flow;
const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;
return stringify(collection, ctx, options);
}
function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {
const { indent, options: { commentString } } = ctx;
const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });
let chompKeep = false; // flag for the preceding node's status
const lines = [];
for (let i = 0; i < items.length; ++i) {
const item = items[i];
let comment = null;
if (isNode$1(item)) {
if (!chompKeep && item.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, item.commentBefore, chompKeep);
if (item.comment)
comment = item.comment;
}
else if (isPair(item)) {
const ik = isNode$1(item.key) ? item.key : null;
if (ik) {
if (!chompKeep && ik.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);
}
}
chompKeep = false;
let str = stringify$2(item, itemCtx, () => (comment = null), () => (chompKeep = true));
if (comment)
str += lineComment(str, itemIndent, commentString(comment));
if (chompKeep && comment)
chompKeep = false;
lines.push(blockItemPrefix + str);
}
let str;
if (lines.length === 0) {
str = flowChars.start + flowChars.end;
}
else {
str = lines[0];
for (let i = 1; i < lines.length; ++i) {
const line = lines[i];
str += line ? `\n${indent}${line}` : '\n';
}
}
if (comment) {
str += '\n' + indentComment(commentString(comment), indent);
if (onComment)
onComment();
}
else if (chompKeep && onChompKeep)
onChompKeep();
return str;
}
function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {
const { indent, indentStep, options: { commentString } } = ctx;
itemIndent += indentStep;
const itemCtx = Object.assign({}, ctx, {
indent: itemIndent,
inFlow: true,
type: null
});
let reqNewline = false;
let linesAtValue = 0;
const lines = [];
for (let i = 0; i < items.length; ++i) {
const item = items[i];
let comment = null;
if (isNode$1(item)) {
if (item.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, item.commentBefore, false);
if (item.comment)
comment = item.comment;
}
else if (isPair(item)) {
const ik = isNode$1(item.key) ? item.key : null;
if (ik) {
if (ik.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, ik.commentBefore, false);
if (ik.comment)
reqNewline = true;
}
const iv = isNode$1(item.value) ? item.value : null;
if (iv) {
if (iv.comment)
comment = iv.comment;
if (iv.commentBefore)
reqNewline = true;
}
else if (item.value == null && ik && ik.comment) {
comment = ik.comment;
}
}
if (comment)
reqNewline = true;
let str = stringify$2(item, itemCtx, () => (comment = null));
if (i < items.length - 1)
str += ',';
if (comment)
str += lineComment(str, itemIndent, commentString(comment));
if (!reqNewline && (lines.length > linesAtValue || str.includes('\n')))
reqNewline = true;
lines.push(str);
linesAtValue = lines.length;
}
let str;
const { start, end } = flowChars;
if (lines.length === 0) {
str = start + end;
}
else {
if (!reqNewline) {
const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
reqNewline = len > Collection.maxFlowStringSingleLineLength;
}
if (reqNewline) {
str = start;
for (const line of lines)
str += line ? `\n${indentStep}${indent}${line}` : '\n';
str += `\n${indent}${end}`;
}
else {
str = `${start} ${lines.join(' ')} ${end}`;
}
}
if (comment) {
        str += lineComment(str, indent, commentString(comment));
if (onComment)
onComment();
}
return str;
}
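// Usage sketch (comment only; assumes the public `yaml` package API). Whether a
// collection goes through the block or flow stringifier above is controlled per node
// via its `flow` flag (or globally via the `collectionStyle` toString option), e.g.:
//   import { parseDocument } from 'yaml';
//   const doc = parseDocument('items:\n  - 1\n  - 2\n');
//   doc.get('items').flow = true;
//   doc.toString();  // => 'items: [ 1, 2 ]\n'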
function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
if (comment && chompKeep)
comment = comment.replace(/^\n+/, '');
if (comment) {
const ic = indentComment(commentString(comment), indent);
lines.push(ic.trimStart()); // Avoid double indent on first line
}
}
function findPair(items, key) {
const k = isScalar$1(key) ? key.value : key;
for (const it of items) {
if (isPair(it)) {
if (it.key === key || it.key === k)
return it;
if (isScalar$1(it.key) && it.key.value === k)
return it;
}
}
return undefined;
}
class YAMLMap extends Collection {
constructor(schema) {
super(MAP, schema);
this.items = [];
}
static get tagName() {
return 'tag:yaml.org,2002:map';
}
/**
* Adds a value to the collection.
*
     * @param overwrite - If not set to `true`, adding a key that is already in
     *   the collection will throw. Otherwise, the previous value is overwritten.
*/
add(pair, overwrite) {
let _pair;
if (isPair(pair))
_pair = pair;
else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
// In TypeScript, this never happens.
_pair = new Pair(pair, pair?.value);
}
else
_pair = new Pair(pair.key, pair.value);
const prev = findPair(this.items, _pair.key);
const sortEntries = this.schema?.sortMapEntries;
if (prev) {
if (!overwrite)
throw new Error(`Key ${_pair.key} already set`);
// For scalars, keep the old node & its comments and anchors
if (isScalar$1(prev.value) && isScalarValue(_pair.value))
prev.value.value = _pair.value;
else
prev.value = _pair.value;
}
else if (sortEntries) {
const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
if (i === -1)
this.items.push(_pair);
else
this.items.splice(i, 0, _pair);
}
else {
this.items.push(_pair);
}
}
delete(key) {
const it = findPair(this.items, key);
if (!it)
return false;
const del = this.items.splice(this.items.indexOf(it), 1);
return del.length > 0;
}
get(key, keepScalar) {
const it = findPair(this.items, key);
const node = it?.value;
return (!keepScalar && isScalar$1(node) ? node.value : node) ?? undefined;
}
has(key) {
return !!findPair(this.items, key);
}
set(key, value) {
this.add(new Pair(key, value), true);
}
/**
* @param ctx - Conversion context, originally set in Document#toJS()
* @param {Class} Type - If set, forces the returned collection type
* @returns Instance of Type, Map, or Object
*/
toJSON(_, ctx, Type) {
const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};
if (ctx?.onCreate)
ctx.onCreate(map);
for (const item of this.items)
addPairToJSMap(ctx, map, item);
return map;
}
toString(ctx, onComment, onChompKeep) {
if (!ctx)
return JSON.stringify(this);
for (const item of this.items) {
if (!isPair(item))
throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
}
if (!ctx.allNullValues && this.hasAllNullValues(false))
ctx = Object.assign({}, ctx, { allNullValues: true });
return stringifyCollection(this, ctx, {
blockItemPrefix: '',
flowChars: { start: '{', end: '}' },
itemIndent: ctx.indent || '',
onChompKeep,
onComment
});
}
}
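// Usage sketch (comment only; assumes the public `yaml` package API). YAMLMap is the
// node type behind block and flow mappings, and can be queried and mutated directly:
//   import { Document } from 'yaml';
//   const doc = new Document({ a: 1 });  // doc.contents is a YAMLMap
//   doc.contents.set('b', 2);
//   doc.contents.get('a');               // => 1 (scalar value unwrapped)
//   doc.toString();                      // => 'a: 1\nb: 2\n'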
function createMap(schema, obj, ctx) {
const { keepUndefined, replacer } = ctx;
const map = new YAMLMap(schema);
const add = (key, value) => {
if (typeof replacer === 'function')
value = replacer.call(obj, key, value);
else if (Array.isArray(replacer) && !replacer.includes(key))
return;
if (value !== undefined || keepUndefined)
map.items.push(createPair(key, value, ctx));
};
if (obj instanceof Map) {
for (const [key, value] of obj)
add(key, value);
}
else if (obj && typeof obj === 'object') {
for (const key of Object.keys(obj))
add(key, obj[key]);
}
if (typeof schema.sortMapEntries === 'function') {
map.items.sort(schema.sortMapEntries);
}
return map;
}
const map$1 = {
collection: 'map',
createNode: createMap,
default: true,
nodeClass: YAMLMap,
tag: 'tag:yaml.org,2002:map',
resolve(map, onError) {
if (!isMap(map))
onError('Expected a mapping for this tag');
return map;
}
};
class YAMLSeq extends Collection {
constructor(schema) {
super(SEQ, schema);
this.items = [];
}
static get tagName() {
return 'tag:yaml.org,2002:seq';
}
add(value) {
this.items.push(value);
}
/**
* Removes a value from the collection.
*
* `key` must contain a representation of an integer for this to succeed.
* It may be wrapped in a `Scalar`.
*
* @returns `true` if the item was found and removed.
*/
delete(key) {
const idx = asItemIndex(key);
if (typeof idx !== 'number')
return false;
const del = this.items.splice(idx, 1);
return del.length > 0;
}
get(key, keepScalar) {
const idx = asItemIndex(key);
if (typeof idx !== 'number')
return undefined;
const it = this.items[idx];
return !keepScalar && isScalar$1(it) ? it.value : it;
}
/**
* Checks if the collection includes a value with the key `key`.
*
* `key` must contain a representation of an integer for this to succeed.
* It may be wrapped in a `Scalar`.
*/
has(key) {
const idx = asItemIndex(key);
return typeof idx === 'number' && idx < this.items.length;
}
/**
* Sets a value in this collection. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*
* If `key` does not contain a representation of an integer, this will throw.
* It may be wrapped in a `Scalar`.
*/
set(key, value) {
const idx = asItemIndex(key);
if (typeof idx !== 'number')
throw new Error(`Expected a valid index, not ${key}.`);
const prev = this.items[idx];
if (isScalar$1(prev) && isScalarValue(value))
prev.value = value;
else
this.items[idx] = value;
}
toJSON(_, ctx) {
const seq = [];
if (ctx?.onCreate)
ctx.onCreate(seq);
let i = 0;
for (const item of this.items)
seq.push(toJS(item, String(i++), ctx));
return seq;
}
toString(ctx, onComment, onChompKeep) {
if (!ctx)
return JSON.stringify(this);
return stringifyCollection(this, ctx, {
blockItemPrefix: '- ',
flowChars: { start: '[', end: ']' },
            itemIndent: (ctx.indent || '') + '  ',
onChompKeep,
onComment
});
}
}
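// Usage sketch (comment only; assumes the public `yaml` package API). YAMLSeq keys are
// non-negative integer indices, optionally wrapped in a Scalar:
//   import { Document } from 'yaml';
//   const doc = new Document([1, 2, 3]);  // doc.contents is a YAMLSeq
//   doc.contents.get(1);                  // => 2
//   doc.contents.set(1, 5);
//   doc.contents.has(7);                  // => false
//   doc.toString();                       // => '- 1\n- 5\n- 3\n'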
function asItemIndex(key) {
let idx = isScalar$1(key) ? key.value : key;
if (idx && typeof idx === 'string')
idx = Number(idx);
return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0
? idx
: null;
}
function createSeq(schema, obj, ctx) {
const { replacer } = ctx;
const seq = new YAMLSeq(schema);
if (obj && Symbol.iterator in Object(obj)) {
let i = 0;
for (let it of obj) {
if (typeof replacer === 'function') {
const key = obj instanceof Set ? it : String(i++);
it = replacer.call(obj, key, it);
}
seq.items.push(createNode(it, undefined, ctx));
}
}
return seq;
}
const seq = {
collection: 'seq',
createNode: createSeq,
default: true,
nodeClass: YAMLSeq,
tag: 'tag:yaml.org,2002:seq',
resolve(seq, onError) {
if (!isSeq(seq))
onError('Expected a sequence for this tag');
return seq;
}
};
const string = {
identify: value => typeof value === 'string',
default: true,
tag: 'tag:yaml.org,2002:str',
resolve: str => str,
stringify(item, ctx, onComment, onChompKeep) {
ctx = Object.assign({ actualString: true }, ctx);
return stringifyString(item, ctx, onComment, onChompKeep);
}
};
const nullTag = {
identify: value => value == null,
createNode: () => new Scalar(null),
default: true,
tag: 'tag:yaml.org,2002:null',
test: /^(?:~|[Nn]ull|NULL)?$/,
resolve: () => new Scalar(null),
stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)
? source
: ctx.options.nullStr
};
const boolTag = {
identify: value => typeof value === 'boolean',
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'),
stringify({ source, value }, ctx) {
if (source && boolTag.test.test(source)) {
const sv = source[0] === 't' || source[0] === 'T';
if (value === sv)
return source;
}
return value ? ctx.options.trueStr : ctx.options.falseStr;
}
};
function stringifyNumber({ format, minFractionDigits, tag, value }) {
if (typeof value === 'bigint')
return String(value);
const num = typeof value === 'number' ? value : Number(value);
if (!isFinite(num))
return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf';
let n = JSON.stringify(value);
if (!format &&
minFractionDigits &&
(!tag || tag === 'tag:yaml.org,2002:float') &&
/^\d/.test(n)) {
let i = n.indexOf('.');
if (i < 0) {
i = n.length;
n += '.';
}
let d = minFractionDigits - (n.length - i - 1);
while (d-- > 0)
n += '0';
}
return n;
}
const floatNaN$1 = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
    test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/,
resolve: str => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'
? Number.NEGATIVE_INFINITY
: Number.POSITIVE_INFINITY,
stringify: stringifyNumber
};
const floatExp$1 = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
format: 'EXP',
test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
resolve: str => parseFloat(str),
stringify(node) {
const num = Number(node.value);
return isFinite(num) ? num.toExponential() : stringifyNumber(node);
}
};
const float$1 = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/,
resolve(str) {
const node = new Scalar(parseFloat(str));
const dot = str.indexOf('.');
if (dot !== -1 && str[str.length - 1] === '0')
node.minFractionDigits = str.length - dot - 1;
return node;
},
stringify: stringifyNumber
};
const intIdentify$2 = (value) => typeof value === 'bigint' || Number.isInteger(value);
const intResolve$1 = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix));
function intStringify$1(node, radix, prefix) {
const { value } = node;
if (intIdentify$2(value) && value >= 0)
return prefix + value.toString(radix);
return stringifyNumber(node);
}
const intOct$1 = {
identify: value => intIdentify$2(value) && value >= 0,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'OCT',
test: /^0o[0-7]+$/,
resolve: (str, _onError, opt) => intResolve$1(str, 2, 8, opt),
stringify: node => intStringify$1(node, 8, '0o')
};
const int$1 = {
identify: intIdentify$2,
default: true,
tag: 'tag:yaml.org,2002:int',
test: /^[-+]?[0-9]+$/,
resolve: (str, _onError, opt) => intResolve$1(str, 0, 10, opt),
stringify: stringifyNumber
};
const intHex$1 = {
identify: value => intIdentify$2(value) && value >= 0,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'HEX',
test: /^0x[0-9a-fA-F]+$/,
resolve: (str, _onError, opt) => intResolve$1(str, 2, 16, opt),
stringify: node => intStringify$1(node, 16, '0x')
};
const schema$2 = [
map$1,
seq,
string,
nullTag,
boolTag,
intOct$1,
int$1,
intHex$1,
floatNaN$1,
floatExp$1,
float$1
];
function intIdentify$1(value) {
return typeof value === 'bigint' || Number.isInteger(value);
}
const stringifyJSON = ({ value }) => JSON.stringify(value);
const jsonScalars = [
{
identify: value => typeof value === 'string',
default: true,
tag: 'tag:yaml.org,2002:str',
resolve: str => str,
stringify: stringifyJSON
},
{
identify: value => value == null,
createNode: () => new Scalar(null),
default: true,
tag: 'tag:yaml.org,2002:null',
test: /^null$/,
resolve: () => null,
stringify: stringifyJSON
},
{
identify: value => typeof value === 'boolean',
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^true|false$/,
resolve: str => str === 'true',
stringify: stringifyJSON
},
{
identify: intIdentify$1,
default: true,
tag: 'tag:yaml.org,2002:int',
test: /^-?(?:0|[1-9][0-9]*)$/,
resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),
stringify: ({ value }) => intIdentify$1(value) ? value.toString() : JSON.stringify(value)
},
{
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
resolve: str => parseFloat(str),
stringify: stringifyJSON
}
];
const jsonError = {
default: true,
tag: '',
test: /^/,
resolve(str, onError) {
onError(`Unresolved plain scalar ${JSON.stringify(str)}`);
return str;
}
};
const schema$1 = [map$1, seq].concat(jsonScalars, jsonError);
const binary = {
identify: value => value instanceof Uint8Array,
default: false,
tag: 'tag:yaml.org,2002:binary',
/**
     * Returns a Buffer in Node.js and a Uint8Array in browsers
*
* To use the resulting buffer as an image, you'll want to do something like:
*
* const blob = new Blob([buffer], { type: 'image/jpeg' })
* document.querySelector('#photo').src = URL.createObjectURL(blob)
*/
resolve(src, onError) {
if (typeof Buffer === 'function') {
return Buffer.from(src, 'base64');
}
else if (typeof atob === 'function') {
// On IE 11, atob() can't handle newlines
const str = atob(src.replace(/[\n\r]/g, ''));
const buffer = new Uint8Array(str.length);
for (let i = 0; i < str.length; ++i)
buffer[i] = str.charCodeAt(i);
return buffer;
}
else {
onError('This environment does not support reading binary tags; either Buffer or atob is required');
return src;
}
},
stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
const buf = value; // checked earlier by binary.identify()
let str;
if (typeof Buffer === 'function') {
str =
buf instanceof Buffer
? buf.toString('base64')
: Buffer.from(buf.buffer).toString('base64');
}
else if (typeof btoa === 'function') {
let s = '';
for (let i = 0; i < buf.length; ++i)
s += String.fromCharCode(buf[i]);
str = btoa(s);
}
else {
throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
}
if (!type)
type = Scalar.BLOCK_LITERAL;
if (type !== Scalar.QUOTE_DOUBLE) {
const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
const n = Math.ceil(str.length / lineWidth);
const lines = new Array(n);
for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
lines[i] = str.substr(o, lineWidth);
}
str = lines.join(type === Scalar.BLOCK_LITERAL ? '\n' : ' ');
}
return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);
}
};
function resolvePairs(seq, onError) {
if (isSeq(seq)) {
for (let i = 0; i < seq.items.length; ++i) {
let item = seq.items[i];
if (isPair(item))
continue;
else if (isMap(item)) {
if (item.items.length > 1)
onError('Each pair must have its own sequence indicator');
const pair = item.items[0] || new Pair(new Scalar(null));
if (item.commentBefore)
pair.key.commentBefore = pair.key.commentBefore
? `${item.commentBefore}\n${pair.key.commentBefore}`
: item.commentBefore;
if (item.comment) {
const cn = pair.value ?? pair.key;
cn.comment = cn.comment
? `${item.comment}\n${cn.comment}`
: item.comment;
}
item = pair;
}
seq.items[i] = isPair(item) ? item : new Pair(item);
}
}
else
onError('Expected a sequence for this tag');
return seq;
}
function createPairs(schema, iterable, ctx) {
const { replacer } = ctx;
const pairs = new YAMLSeq(schema);
pairs.tag = 'tag:yaml.org,2002:pairs';
let i = 0;
if (iterable && Symbol.iterator in Object(iterable))
for (let it of iterable) {
if (typeof replacer === 'function')
it = replacer.call(iterable, String(i++), it);
let key, value;
if (Array.isArray(it)) {
if (it.length === 2) {
key = it[0];
value = it[1];
}
else
throw new TypeError(`Expected [key, value] tuple: ${it}`);
}
else if (it && it instanceof Object) {
const keys = Object.keys(it);
if (keys.length === 1) {
key = keys[0];
value = it[key];
}
else
throw new TypeError(`Expected { key: value } tuple: ${it}`);
}
else {
key = it;
}
pairs.items.push(createPair(key, value, ctx));
}
return pairs;
}
const pairs = {
collection: 'seq',
default: false,
tag: 'tag:yaml.org,2002:pairs',
resolve: resolvePairs,
createNode: createPairs
};
class YAMLOMap extends YAMLSeq {
constructor() {
super();
this.add = YAMLMap.prototype.add.bind(this);
this.delete = YAMLMap.prototype.delete.bind(this);
this.get = YAMLMap.prototype.get.bind(this);
this.has = YAMLMap.prototype.has.bind(this);
this.set = YAMLMap.prototype.set.bind(this);
this.tag = YAMLOMap.tag;
}
/**
* If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
* but TypeScript won't allow widening the signature of a child method.
*/
toJSON(_, ctx) {
if (!ctx)
return super.toJSON(_);
const map = new Map();
if (ctx?.onCreate)
ctx.onCreate(map);
for (const pair of this.items) {
let key, value;
if (isPair(pair)) {
key = toJS(pair.key, '', ctx);
value = toJS(pair.value, key, ctx);
}
else {
key = toJS(pair, '', ctx);
}
if (map.has(key))
throw new Error('Ordered maps must not include duplicate keys');
map.set(key, value);
}
return map;
}
}
YAMLOMap.tag = 'tag:yaml.org,2002:omap';
const omap = {
collection: 'seq',
identify: value => value instanceof Map,
nodeClass: YAMLOMap,
default: false,
tag: 'tag:yaml.org,2002:omap',
resolve(seq, onError) {
const pairs = resolvePairs(seq, onError);
const seenKeys = [];
for (const { key } of pairs.items) {
if (isScalar$1(key)) {
if (seenKeys.includes(key.value)) {
onError(`Ordered maps must not include duplicate keys: ${key.value}`);
}
else {
seenKeys.push(key.value);
}
}
}
return Object.assign(new YAMLOMap(), pairs);
},
createNode(schema, iterable, ctx) {
const pairs = createPairs(schema, iterable, ctx);
const omap = new YAMLOMap();
omap.items = pairs.items;
return omap;
}
};
function boolStringify({ value, source }, ctx) {
const boolObj = value ? trueTag : falseTag;
if (source && boolObj.test.test(source))
return source;
return value ? ctx.options.trueStr : ctx.options.falseStr;
}
const trueTag = {
identify: value => value === true,
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
resolve: () => new Scalar(true),
stringify: boolStringify
};
const falseTag = {
identify: value => value === false,
default: true,
tag: 'tag:yaml.org,2002:bool',
    test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/,
resolve: () => new Scalar(false),
stringify: boolStringify
};
const floatNaN = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/,
resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'
? Number.NEGATIVE_INFINITY
: Number.POSITIVE_INFINITY,
stringify: stringifyNumber
};
const floatExp = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
format: 'EXP',
test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/,
resolve: (str) => parseFloat(str.replace(/_/g, '')),
stringify(node) {
const num = Number(node.value);
return isFinite(num) ? num.toExponential() : stringifyNumber(node);
}
};
const float = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/,
resolve(str) {
const node = new Scalar(parseFloat(str.replace(/_/g, '')));
const dot = str.indexOf('.');
if (dot !== -1) {
const f = str.substring(dot + 1).replace(/_/g, '');
if (f[f.length - 1] === '0')
node.minFractionDigits = f.length;
}
return node;
},
stringify: stringifyNumber
};
const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
function intResolve(str, offset, radix, { intAsBigInt }) {
const sign = str[0];
if (sign === '-' || sign === '+')
offset += 1;
str = str.substring(offset).replace(/_/g, '');
if (intAsBigInt) {
switch (radix) {
case 2:
str = `0b${str}`;
break;
case 8:
str = `0o${str}`;
break;
case 16:
str = `0x${str}`;
break;
}
const n = BigInt(str);
return sign === '-' ? BigInt(-1) * n : n;
}
const n = parseInt(str, radix);
return sign === '-' ? -1 * n : n;
}
function intStringify(node, radix, prefix) {
const { value } = node;
if (intIdentify(value)) {
const str = value.toString(radix);
return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;
}
return stringifyNumber(node);
}
const intBin = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'BIN',
test: /^[-+]?0b[0-1_]+$/,
resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),
stringify: node => intStringify(node, 2, '0b')
};
const intOct = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'OCT',
test: /^[-+]?0[0-7_]+$/,
resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),
stringify: node => intStringify(node, 8, '0')
};
const int = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
test: /^[-+]?[0-9][0-9_]*$/,
resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
stringify: stringifyNumber
};
const intHex = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'HEX',
test: /^[-+]?0x[0-9a-fA-F_]+$/,
resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
stringify: node => intStringify(node, 16, '0x')
};
class YAMLSet extends YAMLMap {
constructor(schema) {
super(schema);
this.tag = YAMLSet.tag;
}
add(key) {
let pair;
if (isPair(key))
pair = key;
else if (typeof key === 'object' &&
'key' in key &&
'value' in key &&
key.value === null)
pair = new Pair(key.key, null);
else
pair = new Pair(key, null);
const prev = findPair(this.items, pair.key);
if (!prev)
this.items.push(pair);
}
/**
* If `keepPair` is `true`, returns the Pair matching `key`.
* Otherwise, returns the value of that Pair's key.
*/
get(key, keepPair) {
const pair = findPair(this.items, key);
return !keepPair && isPair(pair)
? isScalar$1(pair.key)
? pair.key.value
: pair.key
: pair;
}
set(key, value) {
if (typeof value !== 'boolean')
throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
const prev = findPair(this.items, key);
if (prev && !value) {
this.items.splice(this.items.indexOf(prev), 1);
}
else if (!prev && value) {
this.items.push(new Pair(key));
}
}
toJSON(_, ctx) {
return super.toJSON(_, ctx, Set);
}
toString(ctx, onComment, onChompKeep) {
if (!ctx)
return JSON.stringify(this);
if (this.hasAllNullValues(true))
return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);
else
throw new Error('Set items must all have null values');
}
}
YAMLSet.tag = 'tag:yaml.org,2002:set';
const set = {
collection: 'map',
identify: value => value instanceof Set,
nodeClass: YAMLSet,
default: false,
tag: 'tag:yaml.org,2002:set',
resolve(map, onError) {
if (isMap(map)) {
if (map.hasAllNullValues(true))
return Object.assign(new YAMLSet(), map);
else
onError('Set items must all have null values');
}
else
onError('Expected a mapping for this tag');
return map;
},
createNode(schema, iterable, ctx) {
const { replacer } = ctx;
const set = new YAMLSet(schema);
if (iterable && Symbol.iterator in Object(iterable))
for (let value of iterable) {
if (typeof replacer === 'function')
value = replacer.call(iterable, value, value);
set.items.push(createPair(value, null, ctx));
}
return set;
}
};
/** Internal types handle bigint as number, because TS can't figure it out. */
function parseSexagesimal(str, asBigInt) {
const sign = str[0];
const parts = sign === '-' || sign === '+' ? str.substring(1) : str;
const num = (n) => asBigInt ? BigInt(n) : Number(n);
const res = parts
.replace(/_/g, '')
.split(':')
.reduce((res, p) => res * num(60) + num(p), num(0));
return (sign === '-' ? num(-1) * res : res);
}
/**
* hhhh:mm:ss.sss
*
* Internal types handle bigint as number, because TS can't figure it out.
*/
function stringifySexagesimal(node) {
let { value } = node;
let num = (n) => n;
if (typeof value === 'bigint')
num = n => BigInt(n);
else if (isNaN(value) || !isFinite(value))
return stringifyNumber(node);
let sign = '';
if (value < 0) {
sign = '-';
value *= num(-1);
}
const _60 = num(60);
const parts = [value % _60]; // seconds, including ms
if (value < 60) {
parts.unshift(0); // at least one : is required
}
else {
value = (value - parts[0]) / _60;
parts.unshift(value % _60); // minutes
if (value >= 60) {
value = (value - parts[0]) / _60;
parts.unshift(value); // hours
}
}
return (sign +
parts
.map(n => (n < 10 ? '0' + String(n) : String(n)))
.join(':')
.replace(/000000\d*$/, '') // % 60 may introduce error
);
}
const intTime = {
identify: value => typeof value === 'bigint' || Number.isInteger(value),
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'TIME',
test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,
resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),
stringify: stringifySexagesimal
};
const floatTime = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
format: 'TIME',
test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/,
resolve: str => parseSexagesimal(str, false),
stringify: stringifySexagesimal
};
const timestamp = {
identify: value => value instanceof Date,
default: true,
tag: 'tag:yaml.org,2002:timestamp',
// If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
// may be omitted altogether, resulting in a date format. In such a case, the time part is
// assumed to be 00:00:00Z (start of day, UTC).
test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
'(?:' + // time is optional
'(?:t|T|[ \\t]+)' + // t | T | whitespace
'([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
'(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
')?$'),
resolve(str) {
const match = str.match(timestamp.test);
if (!match)
throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');
const [, year, month, day, hour, minute, second] = match.map(Number);
const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;
let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);
const tz = match[8];
if (tz && tz !== 'Z') {
let d = parseSexagesimal(tz, false);
if (Math.abs(d) < 30)
d *= 60;
date -= 60000 * d;
}
return new Date(date);
},
stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
};
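// Usage sketch (comment only; assumes the public `yaml` package API). The YAML 1.1
// tags above resolve sexagesimal numbers and timestamps, e.g.:
//   import { parse } from 'yaml';
//   parse('12:34:56', { version: '1.1' });    // => 45296 (12*3600 + 34*60 + 56)
//   parse('2001-12-14', { version: '1.1' });  // => Date for 2001-12-14T00:00:00Z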
const schema = [
map$1,
seq,
string,
nullTag,
trueTag,
falseTag,
intBin,
intOct,
int,
intHex,
floatNaN,
floatExp,
float,
binary,
omap,
pairs,
set,
intTime,
floatTime,
timestamp
];
const schemas = new Map([
['core', schema$2],
['failsafe', [map$1, seq, string]],
['json', schema$1],
['yaml11', schema],
['yaml-1.1', schema]
]);
const tagsByName = {
binary,
bool: boolTag,
float: float$1,
floatExp: floatExp$1,
floatNaN: floatNaN$1,
floatTime,
int: int$1,
intHex: intHex$1,
intOct: intOct$1,
intTime,
map: map$1,
null: nullTag,
omap,
pairs,
seq,
set,
timestamp
};
const coreKnownTags = {
'tag:yaml.org,2002:binary': binary,
'tag:yaml.org,2002:omap': omap,
'tag:yaml.org,2002:pairs': pairs,
'tag:yaml.org,2002:set': set,
'tag:yaml.org,2002:timestamp': timestamp
};
function getTags(customTags, schemaName) {
let tags = schemas.get(schemaName);
if (!tags) {
if (Array.isArray(customTags))
tags = [];
else {
const keys = Array.from(schemas.keys())
.filter(key => key !== 'yaml11')
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`);
}
}
if (Array.isArray(customTags)) {
for (const tag of customTags)
tags = tags.concat(tag);
}
else if (typeof customTags === 'function') {
tags = customTags(tags.slice());
}
return tags.map(tag => {
if (typeof tag !== 'string')
return tag;
const tagObj = tagsByName[tag];
if (tagObj)
return tagObj;
const keys = Object.keys(tagsByName)
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
});
}
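// Usage sketch (comment only; assumes the public `yaml` package API). getTags() accepts
// custom tags either as tag objects or by the names listed in tagsByName above, e.g.:
//   import { parse } from 'yaml';
//   parse('t: 2001-12-14', { customTags: ['timestamp'] });
//   // adds the !!timestamp tag to the core schema, so `t` resolves to a Date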
const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
class Schema {
constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {
this.compat = Array.isArray(compat)
? getTags(compat, 'compat')
: compat
? getTags(null, compat)
: null;
this.merge = !!merge;
this.name = (typeof schema === 'string' && schema) || 'core';
this.knownTags = resolveKnownTags ? coreKnownTags : {};
this.tags = getTags(customTags, this.name);
this.toStringOptions = toStringDefaults ?? null;
Object.defineProperty(this, MAP, { value: map$1 });
Object.defineProperty(this, SCALAR$1, { value: string });
Object.defineProperty(this, SEQ, { value: seq });
// Used by createMap()
this.sortMapEntries =
typeof sortMapEntries === 'function'
? sortMapEntries
: sortMapEntries === true
? sortMapEntriesByKey
: null;
}
clone() {
const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));
copy.tags = this.tags.slice();
return copy;
}
}
function stringifyDocument(doc, options) {
const lines = [];
let hasDirectives = options.directives === true;
if (options.directives !== false && doc.directives) {
const dir = doc.directives.toString(doc);
if (dir) {
lines.push(dir);
hasDirectives = true;
}
else if (doc.directives.docStart)
hasDirectives = true;
}
if (hasDirectives)
lines.push('---');
const ctx = createStringifyContext(doc, options);
const { commentString } = ctx.options;
if (doc.commentBefore) {
if (lines.length !== 1)
lines.unshift('');
const cs = commentString(doc.commentBefore);
lines.unshift(indentComment(cs, ''));
}
let chompKeep = false;
let contentComment = null;
if (doc.contents) {
if (isNode$1(doc.contents)) {
if (doc.contents.spaceBefore && hasDirectives)
lines.push('');
if (doc.contents.commentBefore) {
const cs = commentString(doc.contents.commentBefore);
lines.push(indentComment(cs, ''));
}
// top-level block scalars need to be indented if followed by a comment
ctx.forceBlockIndent = !!doc.comment;
contentComment = doc.contents.comment;
}
const onChompKeep = contentComment ? undefined : () => (chompKeep = true);
let body = stringify$2(doc.contents, ctx, () => (contentComment = null), onChompKeep);
if (contentComment)
body += lineComment(body, '', commentString(contentComment));
if ((body[0] === '|' || body[0] === '>') &&
lines[lines.length - 1] === '---') {
// Top-level block scalars with a preceding doc marker ought to use the
// same line for their header.
lines[lines.length - 1] = `--- ${body}`;
}
else
lines.push(body);
}
else {
lines.push(stringify$2(doc.contents, ctx));
}
if (doc.directives?.docEnd) {
if (doc.comment) {
const cs = commentString(doc.comment);
if (cs.includes('\n')) {
lines.push('...');
lines.push(indentComment(cs, ''));
}
else {
lines.push(`... ${cs}`);
}
}
else {
lines.push('...');
}
}
else {
let dc = doc.comment;
if (dc && chompKeep)
dc = dc.replace(/^\n+/, '');
if (dc) {
if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')
lines.push('');
lines.push(indentComment(commentString(dc), ''));
}
}
return lines.join('\n') + '\n';
}
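// Usage sketch (comment only; assumes the public `yaml` package API). stringifyDocument()
// only emits the '---' marker when directives are present or explicitly requested:
//   import { parseDocument } from 'yaml';
//   const doc = parseDocument('a: 1');
//   doc.toString();                      // => 'a: 1\n'
//   doc.toString({ directives: true });  // => '---\na: 1\n'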
/**
* Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
* in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
* 2021 edition: https://tc39.es/ecma262/#sec-json.parse
*
* Includes extensions for handling Map and Set objects.
*/
function applyReviver(reviver, obj, key, val) {
if (val && typeof val === 'object') {
if (Array.isArray(val)) {
for (let i = 0, len = val.length; i < len; ++i) {
const v0 = val[i];
const v1 = applyReviver(reviver, val, String(i), v0);
if (v1 === undefined)
delete val[i];
else if (v1 !== v0)
val[i] = v1;
}
}
else if (val instanceof Map) {
for (const k of Array.from(val.keys())) {
const v0 = val.get(k);
const v1 = applyReviver(reviver, val, k, v0);
if (v1 === undefined)
val.delete(k);
else if (v1 !== v0)
val.set(k, v1);
}
}
else if (val instanceof Set) {
for (const v0 of Array.from(val)) {
const v1 = applyReviver(reviver, val, v0, v0);
if (v1 === undefined)
val.delete(v0);
else if (v1 !== v0) {
val.delete(v0);
val.add(v1);
}
}
}
else {
for (const [k, v0] of Object.entries(val)) {
const v1 = applyReviver(reviver, val, k, v0);
if (v1 === undefined)
delete val[k];
else if (v1 !== v0)
val[k] = v1;
}
}
}
return reviver.call(obj, key, val);
}
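// Usage sketch (comment only; assumes the public `yaml` package API). applyReviver()
// mirrors JSON.parse's reviver and is applied by parse()/Document.toJS() when a
// reviver is given, e.g.:
//   import { parse } from 'yaml';
//   parse('a: 1\nb: 2\n', (key, value) => (typeof value === 'number' ? value * 10 : value));
//   // => { a: 10, b: 20 }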
class Document {
constructor(value, replacer, options) {
/** A comment before this Document */
this.commentBefore = null;
/** A comment immediately after this Document */
this.comment = null;
/** Errors encountered during parsing. */
this.errors = [];
/** Warnings encountered during parsing. */
this.warnings = [];
Object.defineProperty(this, NODE_TYPE, { value: DOC });
let _replacer = null;
if (typeof replacer === 'function' || Array.isArray(replacer)) {
_replacer = replacer;
}
else if (options === undefined && replacer) {
options = replacer;
replacer = undefined;
}
const opt = Object.assign({
intAsBigInt: false,
keepSourceTokens: false,
logLevel: 'warn',
prettyErrors: true,
strict: true,
uniqueKeys: true,
version: '1.2'
}, options);
this.options = opt;
let { version } = opt;
if (options?._directives) {
this.directives = options._directives.atDocument();
if (this.directives.yaml.explicit)
version = this.directives.yaml.version;
}
else
this.directives = new Directives({ version });
this.setSchema(version, options);
if (value === undefined)
this.contents = null;
else {
this.contents = this.createNode(value, _replacer, options);
}
}
/**
* Create a deep copy of this Document and its contents.
*
* Custom Node values that inherit from `Object` still refer to their original instances.
*/
clone() {
const copy = Object.create(Document.prototype, {
[NODE_TYPE]: { value: DOC }
});
copy.commentBefore = this.commentBefore;
copy.comment = this.comment;
copy.errors = this.errors.slice();
copy.warnings = this.warnings.slice();
copy.options = Object.assign({}, this.options);
if (this.directives)
copy.directives = this.directives.clone();
copy.schema = this.schema.clone();
copy.contents = isNode$1(this.contents)
? this.contents.clone(copy.schema)
: this.contents;
if (this.range)
copy.range = this.range.slice();
return copy;
}
/** Adds a value to the document. */
add(value) {
if (assertCollection(this.contents))
this.contents.add(value);
}
/** Adds a value to the document. */
addIn(path, value) {
if (assertCollection(this.contents))
this.contents.addIn(path, value);
}
/**
* Create a new `Alias` node, ensuring that the target `node` has the required anchor.
*
* If `node` already has an anchor, `name` is ignored.
* Otherwise, the `node.anchor` value will be set to `name`,
* or if an anchor with that name is already present in the document,
* `name` will be used as a prefix for a new unique anchor.
* If `name` is undefined, the generated anchor will use 'a' as a prefix.
*/
createAlias(node, name) {
if (!node.anchor) {
const prev = anchorNames(this);
node.anchor =
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
!name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name;
}
return new Alias(node.anchor);
}
createNode(value, replacer, options) {
let _replacer = undefined;
if (typeof replacer === 'function') {
value = replacer.call({ '': value }, '', value);
_replacer = replacer;
}
else if (Array.isArray(replacer)) {
const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
const asStr = replacer.filter(keyToStr).map(String);
if (asStr.length > 0)
replacer = replacer.concat(asStr);
_replacer = replacer;
}
else if (options === undefined && replacer) {
options = replacer;
replacer = undefined;
}
const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};
const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors(this,
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
anchorPrefix || 'a');
const ctx = {
aliasDuplicateObjects: aliasDuplicateObjects ?? true,
keepUndefined: keepUndefined ?? false,
onAnchor,
onTagObj,
replacer: _replacer,
schema: this.schema,
sourceObjects
};
const node = createNode(value, tag, ctx);
if (flow && isCollection$1(node))
node.flow = true;
setAnchors();
return node;
}
/**
* Convert a key and a value into a `Pair` using the current schema,
* recursively wrapping all values as `Scalar` or `Collection` nodes.
*/
createPair(key, value, options = {}) {
const k = this.createNode(key, null, options);
const v = this.createNode(value, null, options);
return new Pair(k, v);
}
/**
* Removes a value from the document.
* @returns `true` if the item was found and removed.
*/
delete(key) {
return assertCollection(this.contents) ? this.contents.delete(key) : false;
}
/**
* Removes a value from the document.
* @returns `true` if the item was found and removed.
*/
deleteIn(path) {
if (isEmptyPath(path)) {
if (this.contents == null)
return false;
this.contents = null;
return true;
}
return assertCollection(this.contents)
? this.contents.deleteIn(path)
: false;
}
/**
* Returns item at `key`, or `undefined` if not found. By default unwraps
* scalar values from their surrounding node; to disable set `keepScalar` to
* `true` (collections are always returned intact).
*/
get(key, keepScalar) {
return isCollection$1(this.contents)
? this.contents.get(key, keepScalar)
: undefined;
}
/**
* Returns item at `path`, or `undefined` if not found. By default unwraps
* scalar values from their surrounding node; to disable set `keepScalar` to
* `true` (collections are always returned intact).
*/
getIn(path, keepScalar) {
if (isEmptyPath(path))
return !keepScalar && isScalar$1(this.contents)
? this.contents.value
: this.contents;
return isCollection$1(this.contents)
? this.contents.getIn(path, keepScalar)
: undefined;
}
/**
* Checks if the document includes a value with the key `key`.
*/
has(key) {
return isCollection$1(this.contents) ? this.contents.has(key) : false;
}
/**
* Checks if the document includes a value at `path`.
*/
hasIn(path) {
if (isEmptyPath(path))
return this.contents !== undefined;
return isCollection$1(this.contents) ? this.contents.hasIn(path) : false;
}
/**
* Sets a value in this document. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*/
set(key, value) {
if (this.contents == null) {
this.contents = collectionFromPath(this.schema, [key], value);
}
else if (assertCollection(this.contents)) {
this.contents.set(key, value);
}
}
/**
* Sets a value in this document. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*/
setIn(path, value) {
if (isEmptyPath(path))
this.contents = value;
else if (this.contents == null) {
this.contents = collectionFromPath(this.schema, Array.from(path), value);
}
else if (assertCollection(this.contents)) {
this.contents.setIn(path, value);
}
}
/**
* Change the YAML version and schema used by the document.
* A `null` version disables support for directives, explicit tags, anchors, and aliases.
* It also requires the `schema` option to be given as a `Schema` instance value.
*
* Overrides all previously set schema options.
*/
setSchema(version, options = {}) {
if (typeof version === 'number')
version = String(version);
let opt;
switch (version) {
case '1.1':
if (this.directives)
this.directives.yaml.version = '1.1';
else
this.directives = new Directives({ version: '1.1' });
opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
break;
case '1.2':
case 'next':
if (this.directives)
this.directives.yaml.version = version;
else
this.directives = new Directives({ version });
opt = { merge: false, resolveKnownTags: true, schema: 'core' };
break;
case null:
if (this.directives)
delete this.directives;
opt = null;
break;
default: {
const sv = JSON.stringify(version);
throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);
}
}
// Not using `instanceof Schema` to allow for duck typing
if (options.schema instanceof Object)
this.schema = options.schema;
else if (opt)
this.schema = new Schema(Object.assign(opt, options));
else
throw new Error(`With a null YAML version, the { schema: Schema } option is required`);
}
// json & jsonArg are only used from toJSON()
toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
const ctx = {
anchors: new Map(),
doc: this,
keep: !json,
mapAsMap: mapAsMap === true,
mapKeyWarned: false,
maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,
stringify: stringify$2
};
const res = toJS(this.contents, jsonArg ?? '', ctx);
if (typeof onAnchor === 'function')
for (const { count, res } of ctx.anchors.values())
onAnchor(res, count);
return typeof reviver === 'function'
? applyReviver(reviver, { '': res }, '', res)
: res;
}
/**
* A JSON representation of the document `contents`.
*
* @param jsonArg Used by `JSON.stringify` to indicate the array index or
* property name.
*/
toJSON(jsonArg, onAnchor) {
return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
}
/** A YAML representation of the document. */
toString(options = {}) {
if (this.errors.length > 0)
throw new Error('Document with errors cannot be stringified');
if ('indent' in options &&
(!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
const s = JSON.stringify(options.indent);
throw new Error(`"indent" option must be a positive integer, not ${s}`);
}
return stringifyDocument(this, options);
}
}
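// Usage sketch (comment only; assumes the public `yaml` package API). A typical
// Document round-trip:
//   import { parseDocument } from 'yaml';
//   const doc = parseDocument('a: 1\nb:\n  c: 2\n');
//   doc.getIn(['b', 'c']);     // => 2
//   doc.setIn(['b', 'd'], 3);
//   doc.toJS();                // => { a: 1, b: { c: 2, d: 3 } }
//   doc.toString();            // => 'a: 1\nb:\n  c: 2\n  d: 3\n'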
function assertCollection(contents) {
if (isCollection$1(contents))
return true;
throw new Error('Expected a YAML collection as document contents');
}
class YAMLError extends Error {
constructor(name, pos, code, message) {
super();
this.name = name;
this.code = code;
this.message = message;
this.pos = pos;
}
}
class YAMLParseError extends YAMLError {
constructor(pos, code, message) {
super('YAMLParseError', pos, code, message);
}
}
class YAMLWarning extends YAMLError {
constructor(pos, code, message) {
super('YAMLWarning', pos, code, message);
}
}
const prettifyError = (src, lc) => (error) => {
if (error.pos[0] === -1)
return;
error.linePos = error.pos.map(pos => lc.linePos(pos));
const { line, col } = error.linePos[0];
error.message += ` at line ${line}, column ${col}`;
let ci = col - 1;
let lineStr = src
.substring(lc.lineStarts[line - 1], lc.lineStarts[line])
.replace(/[\n\r]+$/, '');
// Trim to max 80 chars, keeping col position near the middle
if (ci >= 60 && lineStr.length > 80) {
const trimStart = Math.min(ci - 39, lineStr.length - 79);
lineStr = '…' + lineStr.substring(trimStart);
ci -= trimStart - 1;
}
if (lineStr.length > 80)
lineStr = lineStr.substring(0, 79) + '…';
// Include previous line in context if pointing at line start
if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {
// Regexp won't match if start is trimmed
let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
if (prev.length > 80)
prev = prev.substring(0, 79) + '…\n';
lineStr = prev + lineStr;
}
if (/[^ ]/.test(lineStr)) {
let count = 1;
const end = error.linePos[1];
if (end && end.line === line && end.col > col) {
count = Math.min(end.col - col, 80 - ci);
}
const pointer = ' '.repeat(ci) + '^'.repeat(count);
error.message += `:\n\n${lineStr}\n${pointer}\n`;
}
};
function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {
let spaceBefore = false;
let atNewline = startOnNewline;
let hasSpace = startOnNewline;
let comment = '';
let commentSep = '';
let hasNewline = false;
let hasNewlineAfterProp = false;
let reqSpace = false;
let anchor = null;
let tag = null;
let comma = null;
let found = null;
let start = null;
for (const token of tokens) {
if (reqSpace) {
if (token.type !== 'space' &&
token.type !== 'newline' &&
token.type !== 'comma')
onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
reqSpace = false;
}
switch (token.type) {
case 'space':
// At the doc level, tabs at line start may be parsed
// as leading white space rather than indentation.
// In a flow collection, only the parser handles indent.
if (!flow &&
atNewline &&
indicator !== 'doc-start' &&
token.source[0] === '\t')
onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
hasSpace = true;
break;
case 'comment': {
if (!hasSpace)
onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
const cb = token.source.substring(1) || ' ';
if (!comment)
comment = cb;
else
comment += commentSep + cb;
commentSep = '';
atNewline = false;
break;
}
case 'newline':
if (atNewline) {
if (comment)
comment += token.source;
else
spaceBefore = true;
}
else
commentSep += token.source;
atNewline = true;
hasNewline = true;
if (anchor || tag)
hasNewlineAfterProp = true;
hasSpace = true;
break;
case 'anchor':
if (anchor)
onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');
if (token.source.endsWith(':'))
onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
anchor = token;
if (start === null)
start = token.offset;
atNewline = false;
hasSpace = false;
reqSpace = true;
break;
case 'tag': {
if (tag)
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
tag = token;
if (start === null)
start = token.offset;
atNewline = false;
hasSpace = false;
reqSpace = true;
break;
}
case indicator:
// Could here handle preceding comments differently
if (anchor || tag)
onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);
if (found)
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);
found = token;
atNewline = false;
hasSpace = false;
break;
case 'comma':
if (flow) {
if (comma)
onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);
comma = token;
atNewline = false;
hasSpace = false;
break;
}
// else fallthrough
default:
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);
atNewline = false;
hasSpace = false;
}
}
const last = tokens[tokens.length - 1];
const end = last ? last.offset + last.source.length : offset;
if (reqSpace &&
next &&
next.type !== 'space' &&
next.type !== 'newline' &&
next.type !== 'comma' &&
(next.type !== 'scalar' || next.source !== ''))
onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
return {
comma,
found,
spaceBefore,
comment,
hasNewline,
hasNewlineAfterProp,
anchor,
tag,
end,
start: start ?? end
};
}
function containsNewline(key) {
if (!key)
return null;
switch (key.type) {
case 'alias':
case 'scalar':
case 'double-quoted-scalar':
case 'single-quoted-scalar':
if (key.source.includes('\n'))
return true;
if (key.end)
for (const st of key.end)
if (st.type === 'newline')
return true;
return false;
case 'flow-collection':
for (const it of key.items) {
for (const st of it.start)
if (st.type === 'newline')
return true;
if (it.sep)
for (const st of it.sep)
if (st.type === 'newline')
return true;
if (containsNewline(it.key) || containsNewline(it.value))
return true;
}
return false;
default:
return true;
}
}
function flowIndentCheck(indent, fc, onError) {
if (fc?.type === 'flow-collection') {
const end = fc.end[0];
if (end.indent === indent &&
(end.source === ']' || end.source === '}') &&
containsNewline(fc)) {
const msg = 'Flow end indicator should be more indented than parent';
onError(end, 'BAD_INDENT', msg, true);
}
}
}
function mapIncludes(ctx, items, search) {
const { uniqueKeys } = ctx.options;
if (uniqueKeys === false)
return false;
const isEqual = typeof uniqueKeys === 'function'
? uniqueKeys
: (a, b) => a === b ||
(isScalar$1(a) &&
isScalar$1(b) &&
a.value === b.value &&
!(a.value === '<<' && ctx.schema.merge));
return items.some(pair => isEqual(pair.key, search));
}
const startColMsg = 'All mapping items must start at the same column';
function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {
const map = new YAMLMap(ctx.schema);
if (ctx.atRoot)
ctx.atRoot = false;
let offset = bm.offset;
for (const collItem of bm.items) {
const { start, key, sep, value } = collItem;
// key properties
const keyProps = resolveProps(start, {
indicator: 'explicit-key-ind',
next: key ?? sep?.[0],
offset,
onError,
startOnNewline: true
});
const implicitKey = !keyProps.found;
if (implicitKey) {
if (key) {
if (key.type === 'block-seq')
onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');
else if ('indent' in key && key.indent !== bm.indent)
onError(offset, 'BAD_INDENT', startColMsg);
}
if (!keyProps.anchor && !keyProps.tag && !sep) {
// TODO: assert being at last item?
if (keyProps.comment) {
if (map.comment)
map.comment += '\n' + keyProps.comment;
else
map.comment = keyProps.comment;
}
continue;
}
if (keyProps.hasNewlineAfterProp || containsNewline(key)) {
onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
}
}
else if (keyProps.found?.indent !== bm.indent) {
onError(offset, 'BAD_INDENT', startColMsg);
}
// key value
const keyStart = keyProps.end;
const keyNode = key
? composeNode(ctx, key, keyProps, onError)
: composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
if (ctx.schema.compat)
flowIndentCheck(bm.indent, key, onError);
if (mapIncludes(ctx, map.items, keyNode))
onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
// value properties
const valueProps = resolveProps(sep ?? [], {
indicator: 'map-value-ind',
next: value,
offset: keyNode.range[2],
onError,
startOnNewline: !key || key.type === 'block-scalar'
});
offset = valueProps.end;
if (valueProps.found) {
if (implicitKey) {
if (value?.type === 'block-map' && !valueProps.hasNewline)
onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');
if (ctx.options.strict &&
keyProps.start < valueProps.found.offset - 1024)
onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
}
// value value
const valueNode = value
? composeNode(ctx, value, valueProps, onError)
: composeEmptyNode(ctx, offset, sep, null, valueProps, onError);
if (ctx.schema.compat)
flowIndentCheck(bm.indent, value, onError);
offset = valueNode.range[2];
const pair = new Pair(keyNode, valueNode);
if (ctx.options.keepSourceTokens)
pair.srcToken = collItem;
map.items.push(pair);
}
else {
// key with no value
if (implicitKey)
onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');
if (valueProps.comment) {
if (keyNode.comment)
keyNode.comment += '\n' + valueProps.comment;
else
keyNode.comment = valueProps.comment;
}
const pair = new Pair(keyNode);
if (ctx.options.keepSourceTokens)
pair.srcToken = collItem;
map.items.push(pair);
}
}
map.range = [bm.offset, offset, offset];
return map;
}
function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {
const seq = new YAMLSeq(ctx.schema);
if (ctx.atRoot)
ctx.atRoot = false;
let offset = bs.offset;
for (const { start, value } of bs.items) {
const props = resolveProps(start, {
indicator: 'seq-item-ind',
next: value,
offset,
onError,
startOnNewline: true
});
offset = props.end;
if (!props.found) {
if (props.anchor || props.tag || value) {
if (value && value.type === 'block-seq')
onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column');
else
onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');
}
else {
// TODO: assert being at last item?
if (props.comment)
seq.comment = props.comment;
continue;
}
}
const node = value
? composeNode(ctx, value, props, onError)
: composeEmptyNode(ctx, offset, start, null, props, onError);
if (ctx.schema.compat)
flowIndentCheck(bs.indent, value, onError);
offset = node.range[2];
seq.items.push(node);
}
seq.range = [bs.offset, offset, offset];
return seq;
}
function resolveEnd(end, offset, reqSpace, onError) {
let comment = '';
if (end) {
let hasSpace = false;
let sep = '';
for (const token of end) {
const { source, type } = token;
switch (type) {
case 'space':
hasSpace = true;
break;
case 'comment': {
if (reqSpace && !hasSpace)
onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
const cb = source.substring(1) || ' ';
if (!comment)
comment = cb;
else
comment += sep + cb;
sep = '';
break;
}
case 'newline':
if (comment)
sep += source;
hasSpace = true;
break;
default:
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);
}
offset += source.length;
}
}
return { comment, offset };
}
const blockMsg = 'Block collections are not allowed within flow collections';
const isBlock$1 = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');
function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {
const isMap = fc.start.source === '{';
const fcName = isMap ? 'flow map' : 'flow sequence';
const coll = isMap
? new YAMLMap(ctx.schema)
: new YAMLSeq(ctx.schema);
coll.flow = true;
const atRoot = ctx.atRoot;
if (atRoot)
ctx.atRoot = false;
let offset = fc.offset + fc.start.source.length;
for (let i = 0; i < fc.items.length; ++i) {
const collItem = fc.items[i];
const { start, key, sep, value } = collItem;
const props = resolveProps(start, {
flow: fcName,
indicator: 'explicit-key-ind',
next: key ?? sep?.[0],
offset,
onError,
startOnNewline: false
});
if (!props.found) {
if (!props.anchor && !props.tag && !sep && !value) {
if (i === 0 && props.comma)
onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
else if (i < fc.items.length - 1)
onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);
if (props.comment) {
if (coll.comment)
coll.comment += '\n' + props.comment;
else
coll.comment = props.comment;
}
offset = props.end;
continue;
}
if (!isMap && ctx.options.strict && containsNewline(key))
onError(key, // checked by containsNewline()
'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
}
if (i === 0) {
if (props.comma)
onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
}
else {
if (!props.comma)
onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);
if (props.comment) {
let prevItemComment = '';
loop: for (const st of start) {
switch (st.type) {
case 'comma':
case 'space':
break;
case 'comment':
prevItemComment = st.source.substring(1);
break loop;
default:
break loop;
}
}
if (prevItemComment) {
let prev = coll.items[coll.items.length - 1];
if (isPair(prev))
prev = prev.value ?? prev.key;
if (prev.comment)
prev.comment += '\n' + prevItemComment;
else
prev.comment = prevItemComment;
props.comment = props.comment.substring(prevItemComment.length + 1);
}
}
}
if (!isMap && !sep && !props.found) {
// item is a value in a seq
// → key & sep are empty, start does not include ? or :
const valueNode = value
? composeNode(ctx, value, props, onError)
: composeEmptyNode(ctx, props.end, sep, null, props, onError);
coll.items.push(valueNode);
offset = valueNode.range[2];
if (isBlock$1(value))
onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
}
else {
// item is a key+value pair
// key value
const keyStart = props.end;
const keyNode = key
? composeNode(ctx, key, props, onError)
: composeEmptyNode(ctx, keyStart, start, null, props, onError);
if (isBlock$1(key))
onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
// value properties
const valueProps = resolveProps(sep ?? [], {
flow: fcName,
indicator: 'map-value-ind',
next: value,
offset: keyNode.range[2],
onError,
startOnNewline: false
});
if (valueProps.found) {
if (!isMap && !props.found && ctx.options.strict) {
if (sep)
for (const st of sep) {
if (st === valueProps.found)
break;
if (st.type === 'newline') {
onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
break;
}
}
if (props.start < valueProps.found.offset - 1024)
onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
}
}
else if (value) {
if ('source' in value && value.source && value.source[0] === ':')
onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);
else
onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);
}
// value value
const valueNode = value
? composeNode(ctx, value, valueProps, onError)
: valueProps.found
? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)
: null;
if (valueNode) {
if (isBlock$1(value))
onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
}
else if (valueProps.comment) {
if (keyNode.comment)
keyNode.comment += '\n' + valueProps.comment;
else
keyNode.comment = valueProps.comment;
}
const pair = new Pair(keyNode, valueNode);
if (ctx.options.keepSourceTokens)
pair.srcToken = collItem;
if (isMap) {
const map = coll;
if (mapIncludes(ctx, map.items, keyNode))
onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
map.items.push(pair);
}
else {
const map = new YAMLMap(ctx.schema);
map.flow = true;
map.items.push(pair);
coll.items.push(map);
}
offset = valueNode ? valueNode.range[2] : valueProps.end;
}
}
const expectedEnd = isMap ? '}' : ']';
const [ce, ...ee] = fc.end;
let cePos = offset;
if (ce && ce.source === expectedEnd)
cePos = ce.offset + ce.source.length;
else {
const name = fcName[0].toUpperCase() + fcName.substring(1);
const msg = atRoot
? `${name} must end with a ${expectedEnd}`
: `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;
onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);
if (ce && ce.source.length !== 1)
ee.unshift(ce);
}
if (ee.length > 0) {
const end = resolveEnd(ee, cePos, ctx.options.strict, onError);
if (end.comment) {
if (coll.comment)
coll.comment += '\n' + end.comment;
else
coll.comment = end.comment;
}
coll.range = [fc.offset, cePos, end.offset];
}
else {
coll.range = [fc.offset, cePos, cePos];
}
return coll;
}
function composeCollection(CN, ctx, token, tagToken, onError) {
let coll;
switch (token.type) {
case 'block-map': {
coll = resolveBlockMap(CN, ctx, token, onError);
break;
}
case 'block-seq': {
coll = resolveBlockSeq(CN, ctx, token, onError);
break;
}
case 'flow-collection': {
coll = resolveFlowCollection(CN, ctx, token, onError);
break;
}
}
if (!tagToken)
return coll;
const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
if (!tagName)
return coll;
// Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841
const Coll = coll.constructor;
if (tagName === '!' || tagName === Coll.tagName) {
coll.tag = Coll.tagName;
return coll;
}
const expType = isMap(coll) ? 'map' : 'seq';
let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);
if (!tag) {
const kt = ctx.schema.knownTags[tagName];
if (kt && kt.collection === expType) {
ctx.schema.tags.push(Object.assign({}, kt, { default: false }));
tag = kt;
}
else {
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
coll.tag = tagName;
return coll;
}
}
const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);
const node = isNode$1(res)
? res
: new Scalar(res);
node.range = coll.range;
node.tag = tagName;
if (tag?.format)
node.format = tag.format;
return node;
}
function resolveBlockScalar(scalar, strict, onError) {
const start = scalar.offset;
const header = parseBlockScalarHeader(scalar, strict, onError);
if (!header)
return { value: '', type: null, comment: '', range: [start, start, start] };
const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL;
const lines = scalar.source ? splitLines(scalar.source) : [];
// determine the end of content & start of chomping
let chompStart = lines.length;
for (let i = lines.length - 1; i >= 0; --i) {
const content = lines[i][1];
if (content === '' || content === '\r')
chompStart = i;
else
break;
}
// shortcut for empty contents
if (chompStart === 0) {
const value = header.chomp === '+' && lines.length > 0
? '\n'.repeat(Math.max(1, lines.length - 1))
: '';
let end = start + header.length;
if (scalar.source)
end += scalar.source.length;
return { value, type, comment: header.comment, range: [start, end, end] };
}
// find the indentation level to trim from start
let trimIndent = scalar.indent + header.indent;
let offset = scalar.offset + header.length;
let contentStart = 0;
for (let i = 0; i < chompStart; ++i) {
const [indent, content] = lines[i];
if (content === '' || content === '\r') {
if (header.indent === 0 && indent.length > trimIndent)
trimIndent = indent.length;
}
else {
if (indent.length < trimIndent) {
const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
onError(offset + indent.length, 'MISSING_CHAR', message);
}
if (header.indent === 0)
trimIndent = indent.length;
contentStart = i;
break;
}
offset += indent.length + content.length + 1;
}
// include trailing more-indented empty lines in content
for (let i = lines.length - 1; i >= chompStart; --i) {
if (lines[i][0].length > trimIndent)
chompStart = i + 1;
}
let value = '';
let sep = '';
let prevMoreIndented = false;
// leading whitespace is kept intact
for (let i = 0; i < contentStart; ++i)
value += lines[i][0].slice(trimIndent) + '\n';
for (let i = contentStart; i < chompStart; ++i) {
let [indent, content] = lines[i];
offset += indent.length + content.length + 1;
const crlf = content[content.length - 1] === '\r';
if (crlf)
content = content.slice(0, -1);
/* istanbul ignore if already caught in lexer */
if (content && indent.length < trimIndent) {
const src = header.indent
? 'explicit indentation indicator'
: 'first line';
const message = `Block scalar lines must not be less indented than their ${src}`;
onError(offset - content.length - (crlf ? 2 : 1), 'BAD_INDENT', message);
indent = '';
}
if (type === Scalar.BLOCK_LITERAL) {
value += sep + indent.slice(trimIndent) + content;
sep = '\n';
}
else if (indent.length > trimIndent || content[0] === '\t') {
// more-indented content within a folded block
if (sep === ' ')
sep = '\n';
else if (!prevMoreIndented && sep === '\n')
sep = '\n\n';
value += sep + indent.slice(trimIndent) + content;
sep = '\n';
prevMoreIndented = true;
}
else if (content === '') {
// empty line
if (sep === '\n')
value += '\n';
else
sep = '\n';
}
else {
value += sep + content;
sep = ' ';
prevMoreIndented = false;
}
}
switch (header.chomp) {
case '-':
break;
case '+':
for (let i = chompStart; i < lines.length; ++i)
value += '\n' + lines[i][0].slice(trimIndent);
if (value[value.length - 1] !== '\n')
value += '\n';
break;
default:
value += '\n';
}
const end = start + header.length + scalar.source.length;
return { value, type, comment: header.comment, range: [start, end, end] };
}
function parseBlockScalarHeader({ offset, props }, strict, onError) {
/* istanbul ignore if should not happen */
if (props[0].type !== 'block-scalar-header') {
onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');
return null;
}
const { source } = props[0];
const mode = source[0];
let indent = 0;
let chomp = '';
let error = -1;
for (let i = 1; i < source.length; ++i) {
const ch = source[i];
if (!chomp && (ch === '-' || ch === '+'))
chomp = ch;
else {
const n = Number(ch);
if (!indent && n)
indent = n;
else if (error === -1)
error = offset + i;
}
}
if (error !== -1)
onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);
let hasSpace = false;
let comment = '';
let length = source.length;
for (let i = 1; i < props.length; ++i) {
const token = props[i];
switch (token.type) {
case 'space':
hasSpace = true;
// fallthrough
case 'newline':
length += token.source.length;
break;
case 'comment':
if (strict && !hasSpace) {
const message = 'Comments must be separated from other tokens by white space characters';
onError(token, 'MISSING_CHAR', message);
}
length += token.source.length;
comment = token.source.substring(1);
break;
case 'error':
onError(token, 'UNEXPECTED_TOKEN', token.message);
length += token.source.length;
break;
/* istanbul ignore next should not happen */
default: {
const message = `Unexpected token in block scalar header: ${token.type}`;
onError(token, 'UNEXPECTED_TOKEN', message);
const ts = token.source;
if (ts && typeof ts === 'string')
length += ts.length;
}
}
}
return { mode, indent, chomp, comment, length };
}
/** @returns Array of lines split up as `[indent, content]` */
function splitLines(source) {
const split = source.split(/\n( *)/);
const first = split[0];
const m = first.match(/^( *)/);
const line0 = m?.[1]
? [m[1], first.slice(m[1].length)]
: ['', first];
const lines = [line0];
for (let i = 1; i < split.length; i += 2)
lines.push([split[i], split[i + 1]]);
return lines;
}
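/*
 * Editor's note: an illustrative sketch of splitLines behaviour (not part of
 * the library source). Each line is returned as an `[indent, content]` pair,
 * with a trailing empty entry when the source ends in a newline:
 *
 * ```js
 * splitLines('foo\n  bar\n')
 * // → [['', 'foo'], ['  ', 'bar'], ['', '']]
 * ```
 */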
function resolveFlowScalar(scalar, strict, onError) {
const { offset, type, source, end } = scalar;
let _type;
let value;
const _onError = (rel, code, msg) => onError(offset + rel, code, msg);
switch (type) {
case 'scalar':
_type = Scalar.PLAIN;
value = plainValue(source, _onError);
break;
case 'single-quoted-scalar':
_type = Scalar.QUOTE_SINGLE;
value = singleQuotedValue(source, _onError);
break;
case 'double-quoted-scalar':
_type = Scalar.QUOTE_DOUBLE;
value = doubleQuotedValue(source, _onError);
break;
/* istanbul ignore next should not happen */
default:
onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);
return {
value: '',
type: null,
comment: '',
range: [offset, offset + source.length, offset + source.length]
};
}
const valueEnd = offset + source.length;
const re = resolveEnd(end, valueEnd, strict, onError);
return {
value,
type: _type,
comment: re.comment,
range: [offset, valueEnd, re.offset]
};
}
function plainValue(source, onError) {
let badChar = '';
switch (source[0]) {
/* istanbul ignore next should not happen */
case '\t':
badChar = 'a tab character';
break;
case ',':
badChar = 'flow indicator character ,';
break;
case '%':
badChar = 'directive indicator character %';
break;
case '|':
case '>': {
badChar = `block scalar indicator ${source[0]}`;
break;
}
case '@':
case '`': {
badChar = `reserved character ${source[0]}`;
break;
}
}
if (badChar)
onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);
return foldLines(source);
}
function singleQuotedValue(source, onError) {
if (source[source.length - 1] !== "'" || source.length === 1)
onError(source.length, 'MISSING_CHAR', "Missing closing 'quote");
return foldLines(source.slice(1, -1)).replace(/''/g, "'");
}
function foldLines(source) {
/**
* The negative lookbehind here and in the `re` RegExp is to
* prevent causing a polynomial search time in certain cases.
*
* The try-catch is for Safari, which doesn't support this yet:
* https://caniuse.com/js-regexp-lookbehind
*/
let first, line;
try {
first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
}
catch (_) {
first = /(.*?)[ \t]*\r?\n/sy;
line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
}
let match = first.exec(source);
if (!match)
return source;
let res = match[1];
let sep = ' ';
let pos = first.lastIndex;
line.lastIndex = pos;
while ((match = line.exec(source))) {
if (match[1] === '') {
if (sep === '\n')
res += sep;
else
sep = '\n';
}
else {
res += sep + match[1];
sep = ' ';
}
pos = line.lastIndex;
}
const last = /[ \t]*(.*)/sy;
last.lastIndex = pos;
match = last.exec(source);
return res + sep + (match?.[1] ?? '');
}
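/*
 * Editor's note: a hedged sketch of plain/single-quoted line folding (not in
 * the library source). A single line break folds to a space, while an empty
 * line is preserved as a newline:
 *
 * ```js
 * foldLines('a\nb')   // → 'a b'
 * foldLines('a\n\nb') // → 'a\nb'
 * ```
 */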
function doubleQuotedValue(source, onError) {
let res = '';
for (let i = 1; i < source.length - 1; ++i) {
const ch = source[i];
if (ch === '\r' && source[i + 1] === '\n')
continue;
if (ch === '\n') {
const { fold, offset } = foldNewline(source, i);
res += fold;
i = offset;
}
else if (ch === '\\') {
let next = source[++i];
const cc = escapeCodes[next];
if (cc)
res += cc;
else if (next === '\n') {
// skip escaped newlines, but still trim the following line
next = source[i + 1];
while (next === ' ' || next === '\t')
next = source[++i + 1];
}
else if (next === '\r' && source[i + 1] === '\n') {
// skip escaped CRLF newlines, but still trim the following line
next = source[++i + 1];
while (next === ' ' || next === '\t')
next = source[++i + 1];
}
else if (next === 'x' || next === 'u' || next === 'U') {
const length = { x: 2, u: 4, U: 8 }[next];
res += parseCharCode(source, i + 1, length, onError);
i += length;
}
else {
const raw = source.substr(i - 1, 2);
onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
res += raw;
}
}
else if (ch === ' ' || ch === '\t') {
// trim trailing whitespace
const wsStart = i;
let next = source[i + 1];
while (next === ' ' || next === '\t')
next = source[++i + 1];
if (next !== '\n' && !(next === '\r' && source[i + 2] === '\n'))
res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
}
else {
res += ch;
}
}
if (source[source.length - 1] !== '"' || source.length === 1)
onError(source.length, 'MISSING_CHAR', 'Missing closing "quote');
return res;
}
/**
* Fold a single newline into a space, multiple newlines to N - 1 newlines.
* Presumes `source[offset] === '\n'`
*/
function foldNewline(source, offset) {
let fold = '';
let ch = source[offset + 1];
while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') {
if (ch === '\r' && source[offset + 2] !== '\n')
break;
if (ch === '\n')
fold += '\n';
offset += 1;
ch = source[offset + 1];
}
if (!fold)
fold = ' ';
return { fold, offset };
}
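/*
 * Editor's note: illustrative return values for foldNewline, inferred from the
 * code above rather than from the library docs:
 *
 * ```js
 * foldNewline('a\nb', 1)   // → { fold: ' ', offset: 1 }
 * foldNewline('a\n\nb', 1) // → { fold: '\n', offset: 2 }
 * ```
 */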
const escapeCodes = {
'0': '\0',
a: '\x07',
b: '\b',
e: '\x1b',
f: '\f',
n: '\n',
r: '\r',
t: '\t',
v: '\v',
N: '\u0085',
_: '\u00a0',
L: '\u2028',
P: '\u2029',
' ': ' ',
'"': '"',
'/': '/',
'\\': '\\',
'\t': '\t'
};
function parseCharCode(source, offset, length, onError) {
const cc = source.substr(offset, length);
const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);
const code = ok ? parseInt(cc, 16) : NaN;
if (isNaN(code)) {
const raw = source.substr(offset - 2, length + 2);
onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
return raw;
}
return String.fromCodePoint(code);
}
function composeScalar(ctx, token, tagToken, onError) {
const { value, type, comment, range } = token.type === 'block-scalar'
? resolveBlockScalar(token, ctx.options.strict, onError)
: resolveFlowScalar(token, ctx.options.strict, onError);
const tagName = tagToken
? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
: null;
const tag = tagToken && tagName
? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
: token.type === 'scalar'
? findScalarTagByTest(ctx, value, token, onError)
: ctx.schema[SCALAR$1];
let scalar;
try {
const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
scalar = isScalar$1(res) ? res : new Scalar(res);
}
catch (error) {
const msg = error instanceof Error ? error.message : String(error);
onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg);
scalar = new Scalar(value);
}
scalar.range = range;
scalar.source = value;
if (type)
scalar.type = type;
if (tagName)
scalar.tag = tagName;
if (tag.format)
scalar.format = tag.format;
if (comment)
scalar.comment = comment;
return scalar;
}
function findScalarTagByName(schema, value, tagName, tagToken, onError) {
if (tagName === '!')
return schema[SCALAR$1]; // non-specific tag
const matchWithTest = [];
for (const tag of schema.tags) {
if (!tag.collection && tag.tag === tagName) {
if (tag.default && tag.test)
matchWithTest.push(tag);
else
return tag;
}
}
for (const tag of matchWithTest)
if (tag.test?.test(value))
return tag;
const kt = schema.knownTags[tagName];
if (kt && !kt.collection) {
// Ensure that the known tag is available for stringifying,
// but that it is not used by default.
schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
return kt;
}
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
return schema[SCALAR$1];
}
function findScalarTagByTest({ directives, schema }, value, token, onError) {
const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[SCALAR$1];
if (schema.compat) {
const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??
schema[SCALAR$1];
if (tag.tag !== compat.tag) {
const ts = directives.tagString(tag.tag);
const cs = directives.tagString(compat.tag);
const msg = `Value may be parsed as either ${ts} or ${cs}`;
onError(token, 'TAG_RESOLVE_FAILED', msg, true);
}
}
return tag;
}
function emptyScalarPosition(offset, before, pos) {
if (before) {
if (pos === null)
pos = before.length;
for (let i = pos - 1; i >= 0; --i) {
let st = before[i];
switch (st.type) {
case 'space':
case 'comment':
case 'newline':
offset -= st.source.length;
continue;
}
// Technically, an empty scalar is immediately after the last non-empty
// node, but it's more useful to place it after any whitespace.
st = before[++i];
while (st?.type === 'space') {
offset += st.source.length;
st = before[++i];
}
break;
}
}
return offset;
}
const CN = { composeNode, composeEmptyNode };
function composeNode(ctx, token, props, onError) {
const { spaceBefore, comment, anchor, tag } = props;
let node;
let isSrcToken = true;
switch (token.type) {
case 'alias':
node = composeAlias(ctx, token, onError);
if (anchor || tag)
onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');
break;
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
case 'block-scalar':
node = composeScalar(ctx, token, tag, onError);
if (anchor)
node.anchor = anchor.source.substring(1);
break;
case 'block-map':
case 'block-seq':
case 'flow-collection':
node = composeCollection(CN, ctx, token, tag, onError);
if (anchor)
node.anchor = anchor.source.substring(1);
break;
default: {
const message = token.type === 'error'
? token.message
: `Unsupported token (type: ${token.type})`;
onError(token, 'UNEXPECTED_TOKEN', message);
node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);
isSrcToken = false;
}
}
if (anchor && node.anchor === '')
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
if (spaceBefore)
node.spaceBefore = true;
if (comment) {
if (token.type === 'scalar' && token.source === '')
node.comment = comment;
else
node.commentBefore = comment;
}
// @ts-expect-error Type checking misses meaning of isSrcToken
if (ctx.options.keepSourceTokens && isSrcToken)
node.srcToken = token;
return node;
}
function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) {
const token = {
type: 'scalar',
offset: emptyScalarPosition(offset, before, pos),
indent: -1,
source: ''
};
const node = composeScalar(ctx, token, tag, onError);
if (anchor) {
node.anchor = anchor.source.substring(1);
if (node.anchor === '')
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
}
if (spaceBefore)
node.spaceBefore = true;
if (comment)
node.comment = comment;
return node;
}
function composeAlias({ options }, { offset, source, end }, onError) {
const alias = new Alias(source.substring(1));
if (alias.source === '')
onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');
if (alias.source.endsWith(':'))
onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);
const valueEnd = offset + source.length;
const re = resolveEnd(end, valueEnd, options.strict, onError);
alias.range = [offset, valueEnd, re.offset];
if (re.comment)
alias.comment = re.comment;
return alias;
}
function composeDoc(options, directives, { offset, start, value, end }, onError) {
const opts = Object.assign({ _directives: directives }, options);
const doc = new Document(undefined, opts);
const ctx = {
atRoot: true,
directives: doc.directives,
options: doc.options,
schema: doc.schema
};
const props = resolveProps(start, {
indicator: 'doc-start',
next: value ?? end?.[0],
offset,
onError,
startOnNewline: true
});
if (props.found) {
doc.directives.docStart = true;
if (value &&
(value.type === 'block-map' || value.type === 'block-seq') &&
!props.hasNewline)
onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');
}
doc.contents = value
? composeNode(ctx, value, props, onError)
: composeEmptyNode(ctx, props.end, start, null, props, onError);
const contentEnd = doc.contents.range[2];
const re = resolveEnd(end, contentEnd, false, onError);
if (re.comment)
doc.comment = re.comment;
doc.range = [offset, contentEnd, re.offset];
return doc;
}
function getErrorPos(src) {
if (typeof src === 'number')
return [src, src + 1];
if (Array.isArray(src))
return src.length === 2 ? src : [src[0], src[1]];
const { offset, source } = src;
return [offset, offset + (typeof source === 'string' ? source.length : 1)];
}
function parsePrelude(prelude) {
let comment = '';
let atComment = false;
let afterEmptyLine = false;
for (let i = 0; i < prelude.length; ++i) {
const source = prelude[i];
switch (source[0]) {
case '#':
comment +=
(comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') +
(source.substring(1) || ' ');
atComment = true;
afterEmptyLine = false;
break;
case '%':
if (prelude[i + 1]?.[0] !== '#')
i += 1;
atComment = false;
break;
default:
// This may be wrong after doc-end, but in that case it doesn't matter
if (!atComment)
afterEmptyLine = true;
atComment = false;
}
}
return { comment, afterEmptyLine };
}
/**
* Compose a stream of CST nodes into a stream of YAML Documents.
*
* ```ts
* import { Composer, Parser } from 'yaml'
*
* const src: string = ...
* const tokens = new Parser().parse(src)
* const docs = new Composer().compose(tokens)
* ```
*/
class Composer {
constructor(options = {}) {
this.doc = null;
this.atDirectives = false;
this.prelude = [];
this.errors = [];
this.warnings = [];
this.onError = (source, code, message, warning) => {
const pos = getErrorPos(source);
if (warning)
this.warnings.push(new YAMLWarning(pos, code, message));
else
this.errors.push(new YAMLParseError(pos, code, message));
};
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
this.directives = new Directives({ version: options.version || '1.2' });
this.options = options;
}
decorate(doc, afterDoc) {
const { comment, afterEmptyLine } = parsePrelude(this.prelude);
//console.log({ dc: doc.comment, prelude, comment })
if (comment) {
const dc = doc.contents;
if (afterDoc) {
doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
}
else if (afterEmptyLine || doc.directives.docStart || !dc) {
doc.commentBefore = comment;
}
else if (isCollection$1(dc) && !dc.flow && dc.items.length > 0) {
let it = dc.items[0];
if (isPair(it))
it = it.key;
const cb = it.commentBefore;
it.commentBefore = cb ? `${comment}\n${cb}` : comment;
}
else {
const cb = dc.commentBefore;
dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
}
}
if (afterDoc) {
Array.prototype.push.apply(doc.errors, this.errors);
Array.prototype.push.apply(doc.warnings, this.warnings);
}
else {
doc.errors = this.errors;
doc.warnings = this.warnings;
}
this.prelude = [];
this.errors = [];
this.warnings = [];
}
/**
* Current stream status information.
*
* Mostly useful at the end of input for an empty stream.
*/
streamInfo() {
return {
comment: parsePrelude(this.prelude).comment,
directives: this.directives,
errors: this.errors,
warnings: this.warnings
};
}
/**
* Compose tokens into documents.
*
* @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
* @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
*/
*compose(tokens, forceDoc = false, endOffset = -1) {
for (const token of tokens)
yield* this.next(token);
yield* this.end(forceDoc, endOffset);
}
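    /*
     * Editor's note: a minimal sketch of the `forceDoc` flag (assuming a
     * `Parser` instance as in the class-level example above; not part of the
     * library source):
     *
     * ```js
     * // Empty input still yields one empty document when forced:
     * const [doc] = new Composer().compose(new Parser().parse(''), true, 0)
     * ```
     */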
/** Advance the composer by one CST token. */
*next(token) {
switch (token.type) {
case 'directive':
this.directives.add(token.source, (offset, message, warning) => {
const pos = getErrorPos(token);
pos[0] += offset;
this.onError(pos, 'BAD_DIRECTIVE', message, warning);
});
this.prelude.push(token.source);
this.atDirectives = true;
break;
case 'document': {
const doc = composeDoc(this.options, this.directives, token, this.onError);
if (this.atDirectives && !doc.directives.docStart)
this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');
this.decorate(doc, false);
if (this.doc)
yield this.doc;
this.doc = doc;
this.atDirectives = false;
break;
}
case 'byte-order-mark':
case 'space':
break;
case 'comment':
case 'newline':
this.prelude.push(token.source);
break;
case 'error': {
const msg = token.source
? `${token.message}: ${JSON.stringify(token.source)}`
: token.message;
const error = new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);
if (this.atDirectives || !this.doc)
this.errors.push(error);
else
this.doc.errors.push(error);
break;
}
case 'doc-end': {
if (!this.doc) {
const msg = 'Unexpected doc-end without preceding document';
this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));
break;
}
this.doc.directives.docEnd = true;
const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
this.decorate(this.doc, true);
if (end.comment) {
const dc = this.doc.comment;
this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
}
this.doc.range[2] = end.offset;
break;
}
default:
this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));
}
}
/**
* Call at end of input to yield any remaining document.
*
* @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
* @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
*/
*end(forceDoc = false, endOffset = -1) {
if (this.doc) {
this.decorate(this.doc, true);
yield this.doc;
this.doc = null;
}
else if (forceDoc) {
const opts = Object.assign({ _directives: this.directives }, this.options);
const doc = new Document(undefined, opts);
if (this.atDirectives)
this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');
doc.range = [0, endOffset, endOffset];
this.decorate(doc, false);
yield doc;
}
}
}
function resolveAsScalar(token, strict = true, onError) {
if (token) {
const _onError = (pos, code, message) => {
const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;
if (onError)
onError(offset, code, message);
else
throw new YAMLParseError([offset, offset + 1], code, message);
};
switch (token.type) {
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
return resolveFlowScalar(token, strict, _onError);
case 'block-scalar':
return resolveBlockScalar(token, strict, _onError);
}
}
return null;
}
/**
* Create a new scalar token with `value`
*
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
* as this function does not support any schema operations and won't check for such conflicts.
*
* @param value The string representation of the value, which will have its content properly indented.
* @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
* @param context.indent The indent level of the token.
* @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
* @param context.offset The offset position of the token.
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
*/
function createScalarToken(value, context) {
const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;
const source = stringifyString({ type, value }, {
implicitKey,
indent: indent > 0 ? ' '.repeat(indent) : '',
inFlow,
options: { blockQuote: true, lineWidth: -1 }
});
const end = context.end ?? [
{ type: 'newline', offset: -1, indent, source: '\n' }
];
switch (source[0]) {
case '|':
case '>': {
const he = source.indexOf('\n');
const head = source.substring(0, he);
const body = source.substring(he + 1) + '\n';
const props = [
{ type: 'block-scalar-header', offset, indent, source: head }
];
if (!addEndtoBlockProps(props, end))
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
return { type: 'block-scalar', offset, indent, props, source: body };
}
case '"':
return { type: 'double-quoted-scalar', offset, indent, source, end };
case "'":
return { type: 'single-quoted-scalar', offset, indent, source, end };
default:
return { type: 'scalar', offset, indent, source, end };
}
}
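/*
 * Editor's note: an illustrative call (a sketch, not from the library docs):
 *
 * ```js
 * createScalarToken('foo', { indent: 0 })
 * // → { type: 'scalar', offset: -1, indent: 0, source: 'foo',
 * //     end: [{ type: 'newline', offset: -1, indent: 0, source: '\n' }] }
 * ```
 */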
/**
* Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
*
* Best efforts are made to retain any comments previously associated with the `token`,
* though all contents within a collection's `items` will be overwritten.
*
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
* as this function does not support any schema operations and won't check for such conflicts.
*
* @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
* @param value The string representation of the value, which will have its content properly indented.
* @param context.afterKey In most cases, values after a key should have an additional level of indentation.
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
* @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
*/
function setScalarValue(token, value, context = {}) {
let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
let indent = 'indent' in token ? token.indent : null;
if (afterKey && typeof indent === 'number')
indent += 2;
if (!type)
switch (token.type) {
case 'single-quoted-scalar':
type = 'QUOTE_SINGLE';
break;
case 'double-quoted-scalar':
type = 'QUOTE_DOUBLE';
break;
case 'block-scalar': {
const header = token.props[0];
if (header.type !== 'block-scalar-header')
throw new Error('Invalid block scalar header');
type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
break;
}
default:
type = 'PLAIN';
}
const source = stringifyString({ type, value }, {
implicitKey: implicitKey || indent === null,
indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
inFlow,
options: { blockQuote: true, lineWidth: -1 }
});
switch (source[0]) {
case '|':
case '>':
setBlockScalarValue(token, source);
break;
case '"':
setFlowScalarValue(token, source, 'double-quoted-scalar');
break;
case "'":
setFlowScalarValue(token, source, 'single-quoted-scalar');
break;
default:
setFlowScalarValue(token, source, 'scalar');
}
}
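/*
 * Editor's note: a hedged usage sketch (assuming `token` is an existing plain
 * scalar token; not part of the library source):
 *
 * ```js
 * setScalarValue(token, 'foo: bar', { type: 'QUOTE_DOUBLE' })
 * // token.type   === 'double-quoted-scalar'
 * // token.source === '"foo: bar"'
 * ```
 */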
function setBlockScalarValue(token, source) {
const he = source.indexOf('\n');
const head = source.substring(0, he);
const body = source.substring(he + 1) + '\n';
if (token.type === 'block-scalar') {
const header = token.props[0];
if (header.type !== 'block-scalar-header')
throw new Error('Invalid block scalar header');
header.source = head;
token.source = body;
}
else {
const { offset } = token;
const indent = 'indent' in token ? token.indent : -1;
const props = [
{ type: 'block-scalar-header', offset, indent, source: head }
];
if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
for (const key of Object.keys(token))
if (key !== 'type' && key !== 'offset')
delete token[key];
Object.assign(token, { type: 'block-scalar', indent, props, source: body });
}
}
/** @returns `true` if last token is a newline */
function addEndtoBlockProps(props, end) {
if (end)
for (const st of end)
switch (st.type) {
case 'space':
case 'comment':
props.push(st);
break;
case 'newline':
props.push(st);
return true;
}
return false;
}
function setFlowScalarValue(token, source, type) {
switch (token.type) {
case 'scalar':
case 'double-quoted-scalar':
case 'single-quoted-scalar':
token.type = type;
token.source = source;
break;
case 'block-scalar': {
const end = token.props.slice(1);
let oa = source.length;
if (token.props[0].type === 'block-scalar-header')
oa -= token.props[0].source.length;
for (const tok of end)
tok.offset += oa;
delete token.props;
Object.assign(token, { type, source, end });
break;
}
case 'block-map':
case 'block-seq': {
const offset = token.offset + source.length;
const nl = { type: 'newline', offset, indent: token.indent, source: '\n' };
delete token.items;
Object.assign(token, { type, source, end: [nl] });
break;
}
default: {
const indent = 'indent' in token ? token.indent : -1;
const end = 'end' in token && Array.isArray(token.end)
? token.end.filter(st => st.type === 'space' ||
st.type === 'comment' ||
st.type === 'newline')
: [];
for (const key of Object.keys(token))
if (key !== 'type' && key !== 'offset')
delete token[key];
Object.assign(token, { type, indent, source, end });
}
}
}
/**
* Stringify a CST document, token, or collection item
*
* Fair warning: This applies no validation whatsoever, and
* simply concatenates the sources in their logical order.
*/
const stringify$1 = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
function stringifyToken(token) {
switch (token.type) {
case 'block-scalar': {
let res = '';
for (const tok of token.props)
res += stringifyToken(tok);
return res + token.source;
}
case 'block-map':
case 'block-seq': {
let res = '';
for (const item of token.items)
res += stringifyItem(item);
return res;
}
case 'flow-collection': {
let res = token.start.source;
for (const item of token.items)
res += stringifyItem(item);
for (const st of token.end)
res += st.source;
return res;
}
case 'document': {
let res = stringifyItem(token);
if (token.end)
for (const st of token.end)
res += st.source;
return res;
}
default: {
let res = token.source;
if ('end' in token && token.end)
for (const st of token.end)
res += st.source;
return res;
}
}
}
function stringifyItem({ start, key, sep, value }) {
let res = '';
for (const st of start)
res += st.source;
if (key)
res += stringifyToken(key);
if (sep)
for (const st of sep)
res += st.source;
if (value)
res += stringifyToken(value);
return res;
}
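/*
 * Editor's note: a sketch of the round-trip property implied above (assuming a
 * `Parser` instance; `stringify$1` is exposed as `stringify` on the `cst`
 * namespace object defined below):
 *
 * ```js
 * const src = 'foo:\n  - bar # comment\n'
 * const out = Array.from(new Parser().parse(src)).map(stringify$1).join('')
 * // out === src
 * ```
 */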
const BREAK = Symbol('break visit');
const SKIP = Symbol('skip children');
const REMOVE = Symbol('remove item');
/**
* Apply a visitor to a CST document or item.
*
* Walks through the tree (depth-first) starting from the root, calling a
* `visitor` function with two arguments when entering each item:
 * - `item`: The current item, which includes the following members:
* - `start: SourceToken[]` Source tokens before the key or value,
* possibly including its anchor or tag.
* - `key?: Token | null` Set for pair values. May then be `null`, if
* the key before the `:` separator is empty.
* - `sep?: SourceToken[]` Source tokens between the key and the value,
* which should include the `:` map value indicator if `value` is set.
* - `value?: Token` The value of a sequence item, or of a map pair.
* - `path`: The steps from the root to the current node, as an array of
* `['key' | 'value', number]` tuples.
*
* The return value of the visitor may be used to control the traversal:
* - `undefined` (default): Do nothing and continue
* - `visit.SKIP`: Do not visit the children of this token, continue with
* next sibling
* - `visit.BREAK`: Terminate traversal completely
* - `visit.REMOVE`: Remove the current item, then continue with the next one
* - `number`: Set the index of the next step. This is useful especially if
* the index of the current token has changed.
* - `function`: Define the next visitor for this item. After the original
* visitor is called on item entry, next visitors are called after handling
* a non-empty `key` and when exiting the item.
*/
function visit(cst, visitor) {
if ('type' in cst && cst.type === 'document')
cst = { start: cst.start, value: cst.value };
_visit(Object.freeze([]), cst, visitor);
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visit.BREAK = BREAK;
/** Do not visit the children of the current item */
visit.SKIP = SKIP;
/** Remove the current item */
visit.REMOVE = REMOVE;
/** Find the item at `path` from `cst` as the root */
visit.itemAtPath = (cst, path) => {
let item = cst;
for (const [field, index] of path) {
const tok = item?.[field];
if (tok && 'items' in tok) {
item = tok.items[index];
}
else
return undefined;
}
return item;
};
/**
* Get the immediate parent collection of the item at `path` from `cst` as the root.
*
* Throws an error if the collection is not found, which should never happen if the item itself exists.
*/
visit.parentCollection = (cst, path) => {
const parent = visit.itemAtPath(cst, path.slice(0, -1));
const field = path[path.length - 1][0];
const coll = parent?.[field];
if (coll && 'items' in coll)
return coll;
throw new Error('Parent collection not found');
};
function _visit(path, item, visitor) {
let ctrl = visitor(item, path);
if (typeof ctrl === 'symbol')
return ctrl;
for (const field of ['key', 'value']) {
const token = item[field];
if (token && 'items' in token) {
for (let i = 0; i < token.items.length; ++i) {
const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);
if (typeof ci === 'number')
i = ci - 1;
else if (ci === BREAK)
return BREAK;
else if (ci === REMOVE) {
token.items.splice(i, 1);
i -= 1;
}
}
if (typeof ctrl === 'function' && field === 'key')
ctrl = ctrl(item, path);
}
}
return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;
}
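/*
 * Editor's note: an illustrative visitor (a sketch; assumes `cstDoc` is a
 * 'document' token produced by the Parser, and uses `isScalar` defined below):
 *
 * ```js
 * const scalars = []
 * visit(cstDoc, ({ key, value }) => {
 *   for (const token of [key, value])
 *     if (isScalar(token)) scalars.push(token.source)
 * })
 * ```
 */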
/** The byte order mark */
const BOM = '\u{FEFF}';
/** Start of doc-mode */
const DOCUMENT = '\x02'; // C0: Start of Text
/** Unexpected end of flow-mode */
const FLOW_END = '\x18'; // C0: Cancel
/** Next token is a scalar value */
const SCALAR = '\x1f'; // C0: Unit Separator
/** @returns `true` if `token` is a flow or block collection */
const isCollection = (token) => !!token && 'items' in token;
/** @returns `true` if `token` is a flow or block scalar; not an alias */
const isScalar = (token) => !!token &&
(token.type === 'scalar' ||
token.type === 'single-quoted-scalar' ||
token.type === 'double-quoted-scalar' ||
token.type === 'block-scalar');
/* istanbul ignore next */
/** Get a printable representation of a lexer token */
function prettyToken(token) {
switch (token) {
case BOM:
return '<BOM>';
case DOCUMENT:
return '<DOC>';
case FLOW_END:
return '<FLOW_END>';
case SCALAR:
return '<SCALAR>';
default:
return JSON.stringify(token);
}
}
/** Identify the type of a lexer token. May return `null` for unknown tokens. */
function tokenType(source) {
switch (source) {
case BOM:
return 'byte-order-mark';
case DOCUMENT:
return 'doc-mode';
case FLOW_END:
return 'flow-error-end';
case SCALAR:
return 'scalar';
case '---':
return 'doc-start';
case '...':
return 'doc-end';
case '':
case '\n':
case '\r\n':
return 'newline';
case '-':
return 'seq-item-ind';
case '?':
return 'explicit-key-ind';
case ':':
return 'map-value-ind';
case '{':
return 'flow-map-start';
case '}':
return 'flow-map-end';
case '[':
return 'flow-seq-start';
case ']':
return 'flow-seq-end';
case ',':
return 'comma';
}
switch (source[0]) {
case ' ':
case '\t':
return 'space';
case '#':
return 'comment';
case '%':
return 'directive-line';
case '*':
return 'alias';
case '&':
return 'anchor';
case '!':
return 'tag';
case "'":
return 'single-quoted-scalar';
case '"':
return 'double-quoted-scalar';
case '|':
case '>':
return 'block-scalar-header';
}
return null;
}
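/*
 * Editor's note: illustrative return values, derived from the switch above:
 *
 * ```js
 * tokenType('---')     // → 'doc-start'
 * tokenType('&anchor') // → 'anchor'
 * tokenType('# note')  // → 'comment'
 * tokenType('plain')   // → null (plain scalar content is not identified here)
 * ```
 */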
var cst = {
__proto__: null,
BOM: BOM,
DOCUMENT: DOCUMENT,
FLOW_END: FLOW_END,
SCALAR: SCALAR,
createScalarToken: createScalarToken,
isCollection: isCollection,
isScalar: isScalar,
prettyToken: prettyToken,
resolveAsScalar: resolveAsScalar,
setScalarValue: setScalarValue,
stringify: stringify$1,
tokenType: tokenType,
visit: visit
};
/*
START -> stream
stream
directive -> line-end -> stream
indent + line-end -> stream
[else] -> line-start
line-end
comment -> line-end
newline -> .
input-end -> END
line-start
doc-start -> doc
doc-end -> stream
[else] -> indent -> block-start
block-start
seq-item-start -> block-start
explicit-key-start -> block-start
map-value-start -> block-start
[else] -> doc
doc
line-end -> line-start
spaces -> doc
anchor -> doc
tag -> doc
flow-start -> flow -> doc
flow-end -> error -> doc
seq-item-start -> error -> doc
explicit-key-start -> error -> doc
map-value-start -> doc
alias -> doc
quote-start -> quoted-scalar -> doc
block-scalar-header -> line-end -> block-scalar(min) -> line-start
[else] -> plain-scalar(false, min) -> doc
flow
line-end -> flow
spaces -> flow
anchor -> flow
tag -> flow
flow-start -> flow -> flow
flow-end -> .
seq-item-start -> error -> flow
explicit-key-start -> flow
map-value-start -> flow
alias -> flow
quote-start -> quoted-scalar -> flow
comma -> flow
[else] -> plain-scalar(true, 0) -> flow
quoted-scalar
quote-end -> .
[else] -> quoted-scalar
block-scalar(min)
newline + peek(indent < min) -> .
[else] -> block-scalar(min)
plain-scalar(is-flow, min)
scalar-end(is-flow) -> .
peek(newline + (indent < min)) -> .
[else] -> plain-scalar(min)
*/
function isEmpty(ch) {
switch (ch) {
case undefined:
case ' ':
case '\n':
case '\r':
case '\t':
return true;
default:
return false;
}
}
const hexDigits = '0123456789ABCDEFabcdef'.split('');
const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
const invalidFlowScalarChars = ',[]{}'.split('');
const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
/**
* Splits an input string into lexical tokens, i.e. smaller strings that are
* easily identifiable by `tokens.tokenType()`.
*
 * Lexing always starts in a "stream" context. Incomplete input may be buffered
* until a complete token can be emitted.
*
* In addition to slices of the original input, the following control characters
* may also be emitted:
*
* - `\x02` (Start of Text): A document starts with the next token
* - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
* - `\x1f` (Unit Separator): Next token is a scalar value
* - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
*/
class Lexer {
constructor() {
/**
* Flag indicating whether the end of the current buffer marks the end of
* all input
*/
this.atEnd = false;
/**
* Explicit indent set in block scalar header, as an offset from the current
* minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
* explicitly set.
*/
this.blockScalarIndent = -1;
/**
* Block scalars that include a + (keep) chomping indicator in their header
* include trailing empty lines, which are otherwise excluded from the
* scalar's contents.
*/
this.blockScalarKeep = false;
/** Current input */
this.buffer = '';
/**
* Flag noting whether the map value indicator : can immediately follow this
* node within a flow context.
*/
this.flowKey = false;
/** Count of surrounding flow collection levels. */
this.flowLevel = 0;
/**
* Minimum level of indentation required for next lines to be parsed as a
* part of the current scalar value.
*/
this.indentNext = 0;
/** Indentation level of the current line. */
this.indentValue = 0;
/** Position of the next \n character. */
this.lineEndPos = null;
        /** Stores the state of the lexer when it reaches the end of incomplete input */
this.next = null;
/** A pointer to `buffer`; the current position of the lexer. */
this.pos = 0;
}
/**
* Generate YAML tokens from the `source` string. If `incomplete`,
* a part of the last line may be left as a buffer for the next call.
*
* @returns A generator of lexical tokens
*/
*lex(source, incomplete = false) {
if (source) {
this.buffer = this.buffer ? this.buffer + source : source;
this.lineEndPos = null;
}
this.atEnd = !incomplete;
let next = this.next ?? 'stream';
while (next && (incomplete || this.hasChars(1)))
next = yield* this.parseNext(next);
}
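    /*
     * Editor's note: a hedged usage sketch (not part of the library source):
     *
     * ```js
     * const tokens = Array.from(new Lexer().lex('key: value\n'))
     * // Yields the '\x02' doc-start and '\x1f' scalar control characters
     * // interleaved with input slices such as 'key', ':', ' ', 'value', '\n'.
     * ```
     */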
atLineEnd() {
let i = this.pos;
let ch = this.buffer[i];
while (ch === ' ' || ch === '\t')
ch = this.buffer[++i];
if (!ch || ch === '#' || ch === '\n')
return true;
if (ch === '\r')
return this.buffer[i + 1] === '\n';
return false;
}
charAt(n) {
return this.buffer[this.pos + n];
}
continueScalar(offset) {
let ch = this.buffer[offset];
if (this.indentNext > 0) {
let indent = 0;
while (ch === ' ')
ch = this.buffer[++indent + offset];
if (ch === '\r') {
const next = this.buffer[indent + offset + 1];
if (next === '\n' || (!next && !this.atEnd))
return offset + indent + 1;
}
return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
? offset + indent
: -1;
}
if (ch === '-' || ch === '.') {
const dt = this.buffer.substr(offset, 3);
if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
return -1;
}
return offset;
}
getLine() {
let end = this.lineEndPos;
if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
end = this.buffer.indexOf('\n', this.pos);
this.lineEndPos = end;
}
if (end === -1)
return this.atEnd ? this.buffer.substring(this.pos) : null;
if (this.buffer[end - 1] === '\r')
end -= 1;
return this.buffer.substring(this.pos, end);
}
hasChars(n) {
return this.pos + n <= this.buffer.length;
}
setNext(state) {
this.buffer = this.buffer.substring(this.pos);
this.pos = 0;
this.lineEndPos = null;
this.next = state;
return null;
}
peek(n) {
return this.buffer.substr(this.pos, n);
}
*parseNext(next) {
switch (next) {
case 'stream':
return yield* this.parseStream();
case 'line-start':
return yield* this.parseLineStart();
case 'block-start':
return yield* this.parseBlockStart();
case 'doc':
return yield* this.parseDocument();
case 'flow':
return yield* this.parseFlowCollection();
case 'quoted-scalar':
return yield* this.parseQuotedScalar();
case 'block-scalar':
return yield* this.parseBlockScalar();
case 'plain-scalar':
return yield* this.parsePlainScalar();
}
}
*parseStream() {
let line = this.getLine();
if (line === null)
return this.setNext('stream');
if (line[0] === BOM) {
yield* this.pushCount(1);
line = line.substring(1);
}
if (line[0] === '%') {
let dirEnd = line.length;
const cs = line.indexOf('#');
if (cs !== -1) {
const ch = line[cs - 1];
if (ch === ' ' || ch === '\t')
dirEnd = cs - 1;
}
while (true) {
const ch = line[dirEnd - 1];
if (ch === ' ' || ch === '\t')
dirEnd -= 1;
else
break;
}
const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
yield* this.pushCount(line.length - n); // possible comment
this.pushNewline();
return 'stream';
}
if (this.atLineEnd()) {
const sp = yield* this.pushSpaces(true);
yield* this.pushCount(line.length - sp);
yield* this.pushNewline();
return 'stream';
}
yield DOCUMENT;
return yield* this.parseLineStart();
}
*parseLineStart() {
const ch = this.charAt(0);
if (!ch && !this.atEnd)
return this.setNext('line-start');
if (ch === '-' || ch === '.') {
if (!this.atEnd && !this.hasChars(4))
return this.setNext('line-start');
const s = this.peek(3);
if (s === '---' && isEmpty(this.charAt(3))) {
yield* this.pushCount(3);
this.indentValue = 0;
this.indentNext = 0;
return 'doc';
}
else if (s === '...' && isEmpty(this.charAt(3))) {
yield* this.pushCount(3);
return 'stream';
}
}
this.indentValue = yield* this.pushSpaces(false);
if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
this.indentNext = this.indentValue;
return yield* this.parseBlockStart();
}
*parseBlockStart() {
const [ch0, ch1] = this.peek(2);
if (!ch1 && !this.atEnd)
return this.setNext('block-start');
if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
this.indentNext = this.indentValue + 1;
this.indentValue += n;
return yield* this.parseBlockStart();
}
return 'doc';
}
*parseDocument() {
yield* this.pushSpaces(true);
const line = this.getLine();
if (line === null)
return this.setNext('doc');
let n = yield* this.pushIndicators();
switch (line[n]) {
case '#':
yield* this.pushCount(line.length - n);
// fallthrough
case undefined:
yield* this.pushNewline();
return yield* this.parseLineStart();
case '{':
case '[':
yield* this.pushCount(1);
this.flowKey = false;
this.flowLevel = 1;
return 'flow';
case '}':
case ']':
// this is an error
yield* this.pushCount(1);
return 'doc';
case '*':
yield* this.pushUntil(isNotAnchorChar);
return 'doc';
case '"':
case "'":
return yield* this.parseQuotedScalar();
case '|':
case '>':
n += yield* this.parseBlockScalarHeader();
n += yield* this.pushSpaces(true);
yield* this.pushCount(line.length - n);
yield* this.pushNewline();
return yield* this.parseBlockScalar();
default:
return yield* this.parsePlainScalar();
}
}
*parseFlowCollection() {
let nl, sp;
let indent = -1;
do {
nl = yield* this.pushNewline();
if (nl > 0) {
sp = yield* this.pushSpaces(false);
this.indentValue = indent = sp;
}
else {
sp = 0;
}
sp += yield* this.pushSpaces(true);
} while (nl + sp > 0);
const line = this.getLine();
if (line === null)
return this.setNext('flow');
if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
(indent === 0 &&
(line.startsWith('---') || line.startsWith('...')) &&
isEmpty(line[3]))) {
// Allowing for the terminal ] or } at the same (rather than greater)
// indent level as the initial [ or { is technically invalid, but
// failing here would be surprising to users.
const atFlowEndMarker = indent === this.indentNext - 1 &&
this.flowLevel === 1 &&
(line[0] === ']' || line[0] === '}');
if (!atFlowEndMarker) {
// this is an error
this.flowLevel = 0;
yield FLOW_END;
return yield* this.parseLineStart();
}
}
let n = 0;
while (line[n] === ',') {
n += yield* this.pushCount(1);
n += yield* this.pushSpaces(true);
this.flowKey = false;
}
n += yield* this.pushIndicators();
switch (line[n]) {
case undefined:
return 'flow';
case '#':
yield* this.pushCount(line.length - n);
return 'flow';
case '{':
case '[':
yield* this.pushCount(1);
this.flowKey = false;
this.flowLevel += 1;
return 'flow';
case '}':
case ']':
yield* this.pushCount(1);
this.flowKey = true;
this.flowLevel -= 1;
return this.flowLevel ? 'flow' : 'doc';
case '*':
yield* this.pushUntil(isNotAnchorChar);
return 'flow';
case '"':
case "'":
this.flowKey = true;
return yield* this.parseQuotedScalar();
case ':': {
const next = this.charAt(1);
if (this.flowKey || isEmpty(next) || next === ',') {
this.flowKey = false;
yield* this.pushCount(1);
yield* this.pushSpaces(true);
return 'flow';
}
}
// fallthrough
default:
this.flowKey = false;
return yield* this.parsePlainScalar();
}
}
*parseQuotedScalar() {
const quote = this.charAt(0);
let end = this.buffer.indexOf(quote, this.pos + 1);
if (quote === "'") {
while (end !== -1 && this.buffer[end + 1] === "'")
end = this.buffer.indexOf("'", end + 2);
}
else {
// double-quote
while (end !== -1) {
let n = 0;
while (this.buffer[end - 1 - n] === '\\')
n += 1;
if (n % 2 === 0)
break;
end = this.buffer.indexOf('"', end + 1);
}
}
// Only looking for newlines within the quotes
const qb = this.buffer.substring(0, end);
let nl = qb.indexOf('\n', this.pos);
if (nl !== -1) {
while (nl !== -1) {
const cs = this.continueScalar(nl + 1);
if (cs === -1)
break;
nl = qb.indexOf('\n', cs);
}
if (nl !== -1) {
// this is an error caused by an unexpected unindent
end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
}
}
if (end === -1) {
if (!this.atEnd)
return this.setNext('quoted-scalar');
end = this.buffer.length;
}
yield* this.pushToIndex(end + 1, false);
return this.flowLevel ? 'flow' : 'doc';
}
*parseBlockScalarHeader() {
this.blockScalarIndent = -1;
this.blockScalarKeep = false;
let i = this.pos;
while (true) {
const ch = this.buffer[++i];
if (ch === '+')
this.blockScalarKeep = true;
else if (ch > '0' && ch <= '9')
this.blockScalarIndent = Number(ch) - 1;
else if (ch !== '-')
break;
}
return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
}
*parseBlockScalar() {
let nl = this.pos - 1; // may be -1 if this.pos === 0
let indent = 0;
let ch;
loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
switch (ch) {
case ' ':
indent += 1;
break;
case '\n':
nl = i;
indent = 0;
break;
case '\r': {
const next = this.buffer[i + 1];
if (!next && !this.atEnd)
return this.setNext('block-scalar');
if (next === '\n')
break;
} // fallthrough
default:
break loop;
}
}
if (!ch && !this.atEnd)
return this.setNext('block-scalar');
if (indent >= this.indentNext) {
if (this.blockScalarIndent === -1)
this.indentNext = indent;
else
this.indentNext += this.blockScalarIndent;
do {
const cs = this.continueScalar(nl + 1);
if (cs === -1)
break;
nl = this.buffer.indexOf('\n', cs);
} while (nl !== -1);
if (nl === -1) {
if (!this.atEnd)
return this.setNext('block-scalar');
nl = this.buffer.length;
}
}
if (!this.blockScalarKeep) {
do {
let i = nl - 1;
let ch = this.buffer[i];
if (ch === '\r')
ch = this.buffer[--i];
const lastChar = i; // Drop the line if its last char falls within the indentation
while (ch === ' ' || ch === '\t')
ch = this.buffer[--i];
if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
nl = i;
else
break;
} while (true);
}
yield SCALAR;
yield* this.pushToIndex(nl + 1, true);
return yield* this.parseLineStart();
}
*parsePlainScalar() {
const inFlow = this.flowLevel > 0;
let end = this.pos - 1;
let i = this.pos - 1;
let ch;
while ((ch = this.buffer[++i])) {
if (ch === ':') {
const next = this.buffer[i + 1];
if (isEmpty(next) || (inFlow && next === ','))
break;
end = i;
}
else if (isEmpty(ch)) {
let next = this.buffer[i + 1];
if (ch === '\r') {
if (next === '\n') {
i += 1;
ch = '\n';
next = this.buffer[i + 1];
}
else
end = i;
}
if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
break;
if (ch === '\n') {
const cs = this.continueScalar(i + 1);
if (cs === -1)
break;
i = Math.max(i, cs - 2); // to advance, but still account for ' #'
}
}
else {
if (inFlow && invalidFlowScalarChars.includes(ch))
break;
end = i;
}
}
if (!ch && !this.atEnd)
return this.setNext('plain-scalar');
yield SCALAR;
yield* this.pushToIndex(end + 1, true);
return inFlow ? 'flow' : 'doc';
}
*pushCount(n) {
if (n > 0) {
yield this.buffer.substr(this.pos, n);
this.pos += n;
return n;
}
return 0;
}
*pushToIndex(i, allowEmpty) {
const s = this.buffer.slice(this.pos, i);
if (s) {
yield s;
this.pos += s.length;
return s.length;
}
else if (allowEmpty)
yield '';
return 0;
}
*pushIndicators() {
switch (this.charAt(0)) {
case '!':
return ((yield* this.pushTag()) +
(yield* this.pushSpaces(true)) +
(yield* this.pushIndicators()));
case '&':
return ((yield* this.pushUntil(isNotAnchorChar)) +
(yield* this.pushSpaces(true)) +
(yield* this.pushIndicators()));
case '-': // this is an error
case '?': // this is an error outside flow collections
case ':': {
const inFlow = this.flowLevel > 0;
const ch1 = this.charAt(1);
if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
if (!inFlow)
this.indentNext = this.indentValue + 1;
else if (this.flowKey)
this.flowKey = false;
return ((yield* this.pushCount(1)) +
(yield* this.pushSpaces(true)) +
(yield* this.pushIndicators()));
}
}
}
return 0;
}
*pushTag() {
if (this.charAt(1) === '<') {
let i = this.pos + 2;
let ch = this.buffer[i];
while (!isEmpty(ch) && ch !== '>')
ch = this.buffer[++i];
return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
}
else {
let i = this.pos + 1;
let ch = this.buffer[i];
while (ch) {
if (tagChars.includes(ch))
ch = this.buffer[++i];
else if (ch === '%' &&
hexDigits.includes(this.buffer[i + 1]) &&
hexDigits.includes(this.buffer[i + 2])) {
ch = this.buffer[(i += 3)];
}
else
break;
}
return yield* this.pushToIndex(i, false);
}
}
*pushNewline() {
const ch = this.buffer[this.pos];
if (ch === '\n')
return yield* this.pushCount(1);
else if (ch === '\r' && this.charAt(1) === '\n')
return yield* this.pushCount(2);
else
return 0;
}
*pushSpaces(allowTabs) {
let i = this.pos - 1;
let ch;
do {
ch = this.buffer[++i];
} while (ch === ' ' || (allowTabs && ch === '\t'));
const n = i - this.pos;
if (n > 0) {
yield this.buffer.substr(this.pos, n);
this.pos = i;
}
return n;
}
*pushUntil(test) {
let i = this.pos;
let ch = this.buffer[i];
while (!test(ch))
ch = this.buffer[++i];
return yield* this.pushToIndex(i, false);
}
}
/**
* Tracks newlines during parsing in order to provide an efficient API for
* determining the one-indexed `{ line, col }` position for any offset
* within the input.
*/
class LineCounter {
constructor() {
this.lineStarts = [];
/**
* Should be called in ascending order. Otherwise, call
* `lineCounter.lineStarts.sort()` before calling `linePos()`.
*/
this.addNewLine = (offset) => this.lineStarts.push(offset);
/**
* Performs a binary search and returns the 1-indexed { line, col }
* position of `offset`. If `line === 0`, `addNewLine` has never been
* called or `offset` is before the first known newline.
*/
this.linePos = (offset) => {
let low = 0;
let high = this.lineStarts.length;
while (low < high) {
const mid = (low + high) >> 1; // Math.floor((low + high) / 2)
if (this.lineStarts[mid] < offset)
low = mid + 1;
else
high = mid;
}
if (this.lineStarts[low] === offset)
return { line: low + 1, col: 1 };
if (low === 0)
return { line: 0, col: offset };
const start = this.lineStarts[low - 1];
return { line: low, col: offset - start + 1 };
};
}
}
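/**
 * A minimal usage sketch for the LineCounter above; the counter is wired into
 * parsing via the `lineCounter` option of `parseDocument` (defined further below):
 *
 * ```ts
 * const lineCounter = new LineCounter()
 * const doc = parseDocument('a: 1\nb: 2\n', { lineCounter })
 * lineCounter.linePos(5) // { line: 2, col: 1 } (offset 5 starts the second line)
 * ```
 */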
function includesToken(list, type) {
for (let i = 0; i < list.length; ++i)
if (list[i].type === type)
return true;
return false;
}
function findNonEmptyIndex(list) {
for (let i = 0; i < list.length; ++i) {
switch (list[i].type) {
case 'space':
case 'comment':
case 'newline':
break;
default:
return i;
}
}
return -1;
}
function isFlowToken(token) {
switch (token?.type) {
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
case 'flow-collection':
return true;
default:
return false;
}
}
function getPrevProps(parent) {
switch (parent.type) {
case 'document':
return parent.start;
case 'block-map': {
const it = parent.items[parent.items.length - 1];
return it.sep ?? it.start;
}
case 'block-seq':
return parent.items[parent.items.length - 1].start;
/* istanbul ignore next should not happen */
default:
return [];
}
}
/** Note: May modify input array */
function getFirstKeyStartProps(prev) {
if (prev.length === 0)
return [];
let i = prev.length;
loop: while (--i >= 0) {
switch (prev[i].type) {
case 'doc-start':
case 'explicit-key-ind':
case 'map-value-ind':
case 'seq-item-ind':
case 'newline':
break loop;
}
}
while (prev[++i]?.type === 'space') {
/* loop */
}
return prev.splice(i, prev.length);
}
function fixFlowSeqItems(fc) {
if (fc.start.type === 'flow-seq-start') {
for (const it of fc.items) {
if (it.sep &&
!it.value &&
!includesToken(it.start, 'explicit-key-ind') &&
!includesToken(it.sep, 'map-value-ind')) {
if (it.key)
it.value = it.key;
delete it.key;
if (isFlowToken(it.value)) {
if (it.value.end)
Array.prototype.push.apply(it.value.end, it.sep);
else
it.value.end = it.sep;
}
else
Array.prototype.push.apply(it.start, it.sep);
delete it.sep;
}
}
}
}
/**
* A YAML concrete syntax tree (CST) parser
*
* ```ts
* const src: string = ...
* for (const token of new Parser().parse(src)) {
* // token: Token
* }
* ```
*
* To use the parser with a user-provided lexer:
*
* ```ts
* function* parse(source: string, lexer: Lexer) {
* const parser = new Parser()
* for (const lexeme of lexer.lex(source))
* yield* parser.next(lexeme)
* yield* parser.end()
* }
*
* const src: string = ...
* const lexer = new Lexer()
* for (const token of parse(src, lexer)) {
* // token: Token
* }
* ```
*/
class Parser {
/**
* @param onNewLine - If defined, called separately with the start position of
* each new line (in `parse()`, including the start of input).
*/
constructor(onNewLine) {
/** If true, space and sequence indicators count as indentation */
this.atNewLine = true;
/** If true, next token is a scalar value */
this.atScalar = false;
/** Current indentation level */
this.indent = 0;
/** Current offset since the start of parsing */
this.offset = 0;
/** On the same line with a block map key */
this.onKeyLine = false;
/** Top indicates the node that's currently being built */
this.stack = [];
/** The source of the current token, set in parse() */
this.source = '';
/** The type of the current token, set in parse() */
this.type = '';
// Must be defined after `next()`
this.lexer = new Lexer();
this.onNewLine = onNewLine;
}
/**
* Parse `source` as a YAML stream.
* If `incomplete`, a part of the last line may be left as a buffer for the next call.
*
* Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
*
* @returns A generator of tokens representing each directive, document, and other structure.
*/
*parse(source, incomplete = false) {
if (this.onNewLine && this.offset === 0)
this.onNewLine(0);
for (const lexeme of this.lexer.lex(source, incomplete))
yield* this.next(lexeme);
if (!incomplete)
yield* this.end();
}
/**
* Advance the parser by the `source` of one lexical token.
*/
*next(source) {
this.source = source;
if (this.atScalar) {
this.atScalar = false;
yield* this.step();
this.offset += source.length;
return;
}
const type = tokenType(source);
if (!type) {
const message = `Not a YAML token: ${source}`;
yield* this.pop({ type: 'error', offset: this.offset, message, source });
this.offset += source.length;
}
else if (type === 'scalar') {
this.atNewLine = false;
this.atScalar = true;
this.type = 'scalar';
}
else {
this.type = type;
yield* this.step();
switch (type) {
case 'newline':
this.atNewLine = true;
this.indent = 0;
if (this.onNewLine)
this.onNewLine(this.offset + source.length);
break;
case 'space':
if (this.atNewLine && source[0] === ' ')
this.indent += source.length;
break;
case 'explicit-key-ind':
case 'map-value-ind':
case 'seq-item-ind':
if (this.atNewLine)
this.indent += source.length;
break;
case 'doc-mode':
case 'flow-error-end':
return;
default:
this.atNewLine = false;
}
this.offset += source.length;
}
}
/** Call at end of input to push out any remaining constructions */
*end() {
while (this.stack.length > 0)
yield* this.pop();
}
get sourceToken() {
const st = {
type: this.type,
offset: this.offset,
indent: this.indent,
source: this.source
};
return st;
}
*step() {
const top = this.peek(1);
if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
while (this.stack.length > 0)
yield* this.pop();
this.stack.push({
type: 'doc-end',
offset: this.offset,
source: this.source
});
return;
}
if (!top)
return yield* this.stream();
switch (top.type) {
case 'document':
return yield* this.document(top);
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
return yield* this.scalar(top);
case 'block-scalar':
return yield* this.blockScalar(top);
case 'block-map':
return yield* this.blockMap(top);
case 'block-seq':
return yield* this.blockSequence(top);
case 'flow-collection':
return yield* this.flowCollection(top);
case 'doc-end':
return yield* this.documentEnd(top);
}
/* istanbul ignore next should not happen */
yield* this.pop();
}
peek(n) {
return this.stack[this.stack.length - n];
}
*pop(error) {
const token = error ?? this.stack.pop();
/* istanbul ignore if should not happen */
if (!token) {
const message = 'Tried to pop an empty stack';
yield { type: 'error', offset: this.offset, source: '', message };
}
else if (this.stack.length === 0) {
yield token;
}
else {
const top = this.peek(1);
if (token.type === 'block-scalar') {
// Block scalars use their parent rather than header indent
token.indent = 'indent' in top ? top.indent : 0;
}
else if (token.type === 'flow-collection' && top.type === 'document') {
// Ignore all indent for top-level flow collections
token.indent = 0;
}
if (token.type === 'flow-collection')
fixFlowSeqItems(token);
switch (top.type) {
case 'document':
top.value = token;
break;
case 'block-scalar':
top.props.push(token); // error
break;
case 'block-map': {
const it = top.items[top.items.length - 1];
if (it.value) {
top.items.push({ start: [], key: token, sep: [] });
this.onKeyLine = true;
return;
}
else if (it.sep) {
it.value = token;
}
else {
Object.assign(it, { key: token, sep: [] });
this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
return;
}
break;
}
case 'block-seq': {
const it = top.items[top.items.length - 1];
if (it.value)
top.items.push({ start: [], value: token });
else
it.value = token;
break;
}
case 'flow-collection': {
const it = top.items[top.items.length - 1];
if (!it || it.value)
top.items.push({ start: [], key: token, sep: [] });
else if (it.sep)
it.value = token;
else
Object.assign(it, { key: token, sep: [] });
return;
}
/* istanbul ignore next should not happen */
default:
yield* this.pop();
yield* this.pop(token);
}
if ((top.type === 'document' ||
top.type === 'block-map' ||
top.type === 'block-seq') &&
(token.type === 'block-map' || token.type === 'block-seq')) {
const last = token.items[token.items.length - 1];
if (last &&
!last.sep &&
!last.value &&
last.start.length > 0 &&
findNonEmptyIndex(last.start) === -1 &&
(token.indent === 0 ||
last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
if (top.type === 'document')
top.end = last.start;
else
top.items.push({ start: last.start });
token.items.splice(-1, 1);
}
}
}
}
*stream() {
switch (this.type) {
case 'directive-line':
yield { type: 'directive', offset: this.offset, source: this.source };
return;
case 'byte-order-mark':
case 'space':
case 'comment':
case 'newline':
yield this.sourceToken;
return;
case 'doc-mode':
case 'doc-start': {
const doc = {
type: 'document',
offset: this.offset,
start: []
};
if (this.type === 'doc-start')
doc.start.push(this.sourceToken);
this.stack.push(doc);
return;
}
}
yield {
type: 'error',
offset: this.offset,
message: `Unexpected ${this.type} token in YAML stream`,
source: this.source
};
}
*document(doc) {
if (doc.value)
return yield* this.lineEnd(doc);
switch (this.type) {
case 'doc-start': {
if (findNonEmptyIndex(doc.start) !== -1) {
yield* this.pop();
yield* this.step();
}
else
doc.start.push(this.sourceToken);
return;
}
case 'anchor':
case 'tag':
case 'space':
case 'comment':
case 'newline':
doc.start.push(this.sourceToken);
return;
}
const bv = this.startBlockValue(doc);
if (bv)
this.stack.push(bv);
else {
yield {
type: 'error',
offset: this.offset,
message: `Unexpected ${this.type} token in YAML document`,
source: this.source
};
}
}
*scalar(scalar) {
if (this.type === 'map-value-ind') {
const prev = getPrevProps(this.peek(2));
const start = getFirstKeyStartProps(prev);
let sep;
if (scalar.end) {
sep = scalar.end;
sep.push(this.sourceToken);
delete scalar.end;
}
else
sep = [this.sourceToken];
const map = {
type: 'block-map',
offset: scalar.offset,
indent: scalar.indent,
items: [{ start, key: scalar, sep }]
};
this.onKeyLine = true;
this.stack[this.stack.length - 1] = map;
}
else
yield* this.lineEnd(scalar);
}
*blockScalar(scalar) {
switch (this.type) {
case 'space':
case 'comment':
case 'newline':
scalar.props.push(this.sourceToken);
return;
case 'scalar':
scalar.source = this.source;
// block-scalar source includes trailing newline
this.atNewLine = true;
this.indent = 0;
if (this.onNewLine) {
let nl = this.source.indexOf('\n') + 1;
while (nl !== 0) {
this.onNewLine(this.offset + nl);
nl = this.source.indexOf('\n', nl) + 1;
}
}
yield* this.pop();
break;
/* istanbul ignore next should not happen */
default:
yield* this.pop();
yield* this.step();
}
}
*blockMap(map) {
const it = map.items[map.items.length - 1];
// it.sep is truthy if the pair already has a key or ':' separator
switch (this.type) {
case 'newline':
this.onKeyLine = false;
if (it.value) {
const end = 'end' in it.value ? it.value.end : undefined;
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
if (last?.type === 'comment')
end?.push(this.sourceToken);
else
map.items.push({ start: [this.sourceToken] });
}
else if (it.sep) {
it.sep.push(this.sourceToken);
}
else {
it.start.push(this.sourceToken);
}
return;
case 'space':
case 'comment':
if (it.value) {
map.items.push({ start: [this.sourceToken] });
}
else if (it.sep) {
it.sep.push(this.sourceToken);
}
else {
if (this.atIndentedComment(it.start, map.indent)) {
const prev = map.items[map.items.length - 2];
const end = prev?.value?.end;
if (Array.isArray(end)) {
Array.prototype.push.apply(end, it.start);
end.push(this.sourceToken);
map.items.pop();
return;
}
}
it.start.push(this.sourceToken);
}
return;
}
if (this.indent >= map.indent) {
const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;
// For empty nodes, assign newline-separated, non-indented empty tokens to the following node
let start = [];
if (atNextItem && it.sep && !it.value) {
const nl = [];
for (let i = 0; i < it.sep.length; ++i) {
const st = it.sep[i];
switch (st.type) {
case 'newline':
nl.push(i);
break;
case 'space':
break;
case 'comment':
if (st.indent > map.indent)
nl.length = 0;
break;
default:
nl.length = 0;
}
}
if (nl.length >= 2)
start = it.sep.splice(nl[1]);
}
switch (this.type) {
case 'anchor':
case 'tag':
if (atNextItem || it.value) {
start.push(this.sourceToken);
map.items.push({ start });
this.onKeyLine = true;
}
else if (it.sep) {
it.sep.push(this.sourceToken);
}
else {
it.start.push(this.sourceToken);
}
return;
case 'explicit-key-ind':
if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
it.start.push(this.sourceToken);
}
else if (atNextItem || it.value) {
start.push(this.sourceToken);
map.items.push({ start });
}
else {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start: [this.sourceToken] }]
});
}
this.onKeyLine = true;
return;
case 'map-value-ind':
if (includesToken(it.start, 'explicit-key-ind')) {
if (!it.sep) {
if (includesToken(it.start, 'newline')) {
Object.assign(it, { key: null, sep: [this.sourceToken] });
}
else {
const start = getFirstKeyStartProps(it.start);
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key: null, sep: [this.sourceToken] }]
});
}
}
else if (it.value) {
map.items.push({ start: [], key: null, sep: [this.sourceToken] });
}
else if (includesToken(it.sep, 'map-value-ind')) {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key: null, sep: [this.sourceToken] }]
});
}
else if (isFlowToken(it.key) &&
!includesToken(it.sep, 'newline')) {
const start = getFirstKeyStartProps(it.start);
const key = it.key;
const sep = it.sep;
sep.push(this.sourceToken);
// @ts-expect-error type guard is wrong here
delete it.key, delete it.sep;
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key, sep }]
});
}
else if (start.length > 0) {
// Not actually at next item
it.sep = it.sep.concat(start, this.sourceToken);
}
else {
it.sep.push(this.sourceToken);
}
}
else {
if (!it.sep) {
Object.assign(it, { key: null, sep: [this.sourceToken] });
}
else if (it.value || atNextItem) {
map.items.push({ start, key: null, sep: [this.sourceToken] });
}
else if (includesToken(it.sep, 'map-value-ind')) {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start: [], key: null, sep: [this.sourceToken] }]
});
}
else {
it.sep.push(this.sourceToken);
}
}
this.onKeyLine = true;
return;
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar': {
const fs = this.flowScalar(this.type);
if (atNextItem || it.value) {
map.items.push({ start, key: fs, sep: [] });
this.onKeyLine = true;
}
else if (it.sep) {
this.stack.push(fs);
}
else {
Object.assign(it, { key: fs, sep: [] });
this.onKeyLine = true;
}
return;
}
default: {
const bv = this.startBlockValue(map);
if (bv) {
if (atNextItem &&
bv.type !== 'block-seq' &&
includesToken(it.start, 'explicit-key-ind')) {
map.items.push({ start });
}
this.stack.push(bv);
return;
}
}
}
}
yield* this.pop();
yield* this.step();
}
*blockSequence(seq) {
const it = seq.items[seq.items.length - 1];
switch (this.type) {
case 'newline':
if (it.value) {
const end = 'end' in it.value ? it.value.end : undefined;
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
if (last?.type === 'comment')
end?.push(this.sourceToken);
else
seq.items.push({ start: [this.sourceToken] });
}
else
it.start.push(this.sourceToken);
return;
case 'space':
case 'comment':
if (it.value)
seq.items.push({ start: [this.sourceToken] });
else {
if (this.atIndentedComment(it.start, seq.indent)) {
const prev = seq.items[seq.items.length - 2];
const end = prev?.value?.end;
if (Array.isArray(end)) {
Array.prototype.push.apply(end, it.start);
end.push(this.sourceToken);
seq.items.pop();
return;
}
}
it.start.push(this.sourceToken);
}
return;
case 'anchor':
case 'tag':
if (it.value || this.indent <= seq.indent)
break;
it.start.push(this.sourceToken);
return;
case 'seq-item-ind':
if (this.indent !== seq.indent)
break;
if (it.value || includesToken(it.start, 'seq-item-ind'))
seq.items.push({ start: [this.sourceToken] });
else
it.start.push(this.sourceToken);
return;
}
if (this.indent > seq.indent) {
const bv = this.startBlockValue(seq);
if (bv) {
this.stack.push(bv);
return;
}
}
yield* this.pop();
yield* this.step();
}
*flowCollection(fc) {
const it = fc.items[fc.items.length - 1];
if (this.type === 'flow-error-end') {
let top;
do {
yield* this.pop();
top = this.peek(1);
} while (top && top.type === 'flow-collection');
}
else if (fc.end.length === 0) {
switch (this.type) {
case 'comma':
case 'explicit-key-ind':
if (!it || it.sep)
fc.items.push({ start: [this.sourceToken] });
else
it.start.push(this.sourceToken);
return;
case 'map-value-ind':
if (!it || it.value)
fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
else if (it.sep)
it.sep.push(this.sourceToken);
else
Object.assign(it, { key: null, sep: [this.sourceToken] });
return;
case 'space':
case 'comment':
case 'newline':
case 'anchor':
case 'tag':
if (!it || it.value)
fc.items.push({ start: [this.sourceToken] });
else if (it.sep)
it.sep.push(this.sourceToken);
else
it.start.push(this.sourceToken);
return;
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar': {
const fs = this.flowScalar(this.type);
if (!it || it.value)
fc.items.push({ start: [], key: fs, sep: [] });
else if (it.sep)
this.stack.push(fs);
else
Object.assign(it, { key: fs, sep: [] });
return;
}
case 'flow-map-end':
case 'flow-seq-end':
fc.end.push(this.sourceToken);
return;
}
const bv = this.startBlockValue(fc);
/* istanbul ignore else should not happen */
if (bv)
this.stack.push(bv);
else {
yield* this.pop();
yield* this.step();
}
}
else {
const parent = this.peek(2);
if (parent.type === 'block-map' &&
((this.type === 'map-value-ind' && parent.indent === fc.indent) ||
(this.type === 'newline' &&
!parent.items[parent.items.length - 1].sep))) {
yield* this.pop();
yield* this.step();
}
else if (this.type === 'map-value-ind' &&
parent.type !== 'flow-collection') {
const prev = getPrevProps(parent);
const start = getFirstKeyStartProps(prev);
fixFlowSeqItems(fc);
const sep = fc.end.splice(1, fc.end.length);
sep.push(this.sourceToken);
const map = {
type: 'block-map',
offset: fc.offset,
indent: fc.indent,
items: [{ start, key: fc, sep }]
};
this.onKeyLine = true;
this.stack[this.stack.length - 1] = map;
}
else {
yield* this.lineEnd(fc);
}
}
}
flowScalar(type) {
if (this.onNewLine) {
let nl = this.source.indexOf('\n') + 1;
while (nl !== 0) {
this.onNewLine(this.offset + nl);
nl = this.source.indexOf('\n', nl) + 1;
}
}
return {
type,
offset: this.offset,
indent: this.indent,
source: this.source
};
}
startBlockValue(parent) {
switch (this.type) {
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
return this.flowScalar(this.type);
case 'block-scalar-header':
return {
type: 'block-scalar',
offset: this.offset,
indent: this.indent,
props: [this.sourceToken],
source: ''
};
case 'flow-map-start':
case 'flow-seq-start':
return {
type: 'flow-collection',
offset: this.offset,
indent: this.indent,
start: this.sourceToken,
items: [],
end: []
};
case 'seq-item-ind':
return {
type: 'block-seq',
offset: this.offset,
indent: this.indent,
items: [{ start: [this.sourceToken] }]
};
case 'explicit-key-ind': {
this.onKeyLine = true;
const prev = getPrevProps(parent);
const start = getFirstKeyStartProps(prev);
start.push(this.sourceToken);
return {
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start }]
};
}
case 'map-value-ind': {
this.onKeyLine = true;
const prev = getPrevProps(parent);
const start = getFirstKeyStartProps(prev);
return {
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key: null, sep: [this.sourceToken] }]
};
}
}
return null;
}
atIndentedComment(start, indent) {
if (this.type !== 'comment')
return false;
if (this.indent <= indent)
return false;
return start.every(st => st.type === 'newline' || st.type === 'space');
}
*documentEnd(docEnd) {
if (this.type !== 'doc-mode') {
if (docEnd.end)
docEnd.end.push(this.sourceToken);
else
docEnd.end = [this.sourceToken];
if (this.type === 'newline')
yield* this.pop();
}
}
*lineEnd(token) {
switch (this.type) {
case 'comma':
case 'doc-start':
case 'doc-end':
case 'flow-seq-end':
case 'flow-map-end':
case 'map-value-ind':
yield* this.pop();
yield* this.step();
break;
case 'newline':
this.onKeyLine = false;
// fallthrough
case 'space':
case 'comment':
default:
// all other values are errors
if (token.end)
token.end.push(this.sourceToken);
else
token.end = [this.sourceToken];
if (this.type === 'newline')
yield* this.pop();
}
}
}
function parseOptions(options) {
const prettyErrors = options.prettyErrors !== false;
const lineCounter = options.lineCounter || (prettyErrors && new LineCounter()) || null;
return { lineCounter, prettyErrors };
}
/**
* Parse the input as a stream of YAML documents.
*
* Documents should be separated from each other by `...` or `---` marker lines.
*
* @returns If an empty `docs` array is returned, it will be of type
* EmptyStream and contain additional stream information. In
* TypeScript, you should use `'empty' in docs` as a type guard for it.
*/
function parseAllDocuments(source, options = {}) {
const { lineCounter, prettyErrors } = parseOptions(options);
const parser = new Parser(lineCounter?.addNewLine);
const composer = new Composer(options);
const docs = Array.from(composer.compose(parser.parse(source)));
if (prettyErrors && lineCounter)
for (const doc of docs) {
doc.errors.forEach(prettifyError(source, lineCounter));
doc.warnings.forEach(prettifyError(source, lineCounter));
}
if (docs.length > 0)
return docs;
return Object.assign([], { empty: true }, composer.streamInfo());
}
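/**
 * A short sketch of the `'empty' in docs` guard described above (illustrative input):
 *
 * ```ts
 * const docs = parseAllDocuments('a: 1\n---\nb: 2\n')
 * if ('empty' in docs) {
 *   // the stream contained no documents
 * } else {
 *   for (const doc of docs) doc.toJS() // { a: 1 }, then { b: 2 }
 * }
 * ```
 */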
/** Parse an input string into a single YAML.Document */
function parseDocument(source, options = {}) {
const { lineCounter, prettyErrors } = parseOptions(options);
const parser = new Parser(lineCounter?.addNewLine);
const composer = new Composer(options);
// `doc` is always set by compose.end(true) at the very latest
let doc = null;
for (const _doc of composer.compose(parser.parse(source), true, source.length)) {
if (!doc)
doc = _doc;
else if (doc.options.logLevel !== 'silent') {
doc.errors.push(new YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));
break;
}
}
if (prettyErrors && lineCounter) {
doc.errors.forEach(prettifyError(source, lineCounter));
doc.warnings.forEach(prettifyError(source, lineCounter));
}
return doc;
}
function parse$a(src, reviver, options) {
let _reviver = undefined;
if (typeof reviver === 'function') {
_reviver = reviver;
}
else if (options === undefined && reviver && typeof reviver === 'object') {
options = reviver;
}
const doc = parseDocument(src, options);
if (!doc)
return null;
doc.warnings.forEach(warning => warn(doc.options.logLevel, warning));
if (doc.errors.length > 0) {
if (doc.options.logLevel !== 'silent')
throw doc.errors[0];
else
doc.errors = [];
}
return doc.toJS(Object.assign({ reviver: _reviver }, options));
}
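/**
 * Example for the `parse` function above; the optional `reviver` behaves like
 * the one accepted by `JSON.parse` (a minimal sketch):
 *
 * ```ts
 * parse('a: 1\nb: [2, 3]\n') // { a: 1, b: [ 2, 3 ] }
 * parse('n: 2\n', (key, value) => (typeof value === 'number' ? value * 10 : value))
 * // { n: 20 }
 * ```
 */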
function stringify(value, replacer, options) {
let _replacer = null;
if (typeof replacer === 'function' || Array.isArray(replacer)) {
_replacer = replacer;
}
else if (options === undefined && replacer) {
options = replacer;
}
if (typeof options === 'string')
options = options.length;
if (typeof options === 'number') {
const indent = Math.round(options);
options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };
}
if (value === undefined) {
const { keepUndefined } = options ?? replacer ?? {};
if (!keepUndefined)
return undefined;
}
return new Document(value, _replacer, options).toString(options);
}
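/**
 * Example for `stringify` above, showing how the third argument is coerced
 * (numbers and strings mirror the `space` argument of `JSON.stringify`):
 *
 * ```ts
 * stringify({ a: 1 })               // 'a: 1\n'
 * stringify({ a: 1 }, null, 4)      // number: used as { indent: 4 }, clamped to 1..8
 * stringify({ a: 1 }, null, '    ') // string: its length is used as the indent
 * stringify(undefined)              // undefined, unless `keepUndefined` is set
 * ```
 */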
var YAML = {
__proto__: null,
Alias: Alias,
CST: cst,
Composer: Composer,
Document: Document,
Lexer: Lexer,
LineCounter: LineCounter,
Pair: Pair,
Parser: Parser,
Scalar: Scalar,
Schema: Schema,
YAMLError: YAMLError,
YAMLMap: YAMLMap,
YAMLParseError: YAMLParseError,
YAMLSeq: YAMLSeq,
YAMLWarning: YAMLWarning,
isAlias: isAlias,
isCollection: isCollection$1,
isDocument: isDocument,
isMap: isMap,
isNode: isNode$1,
isPair: isPair,
isScalar: isScalar$1,
isSeq: isSeq,
parse: parse$a,
parseAllDocuments: parseAllDocuments,
parseDocument: parseDocument,
stringify: stringify,
visit: visit$1,
visitAsync: visitAsync
};
// `export * as default from ...` fails on Webpack v4
// https://github.com/eemeli/yaml/issues/228
var browser$2 = {
__proto__: null,
Alias: Alias,
CST: cst,
Composer: Composer,
Document: Document,
Lexer: Lexer,
LineCounter: LineCounter,
Pair: Pair,
Parser: Parser,
Scalar: Scalar,
Schema: Schema,
YAMLError: YAMLError,
YAMLMap: YAMLMap,
YAMLParseError: YAMLParseError,
YAMLSeq: YAMLSeq,
YAMLWarning: YAMLWarning,
default: YAML,
isAlias: isAlias,
isCollection: isCollection$1,
isDocument: isDocument,
isMap: isMap,
isNode: isNode$1,
isPair: isPair,
isScalar: isScalar$1,
isSeq: isSeq,
parse: parse$a,
parseAllDocuments: parseAllDocuments,
parseDocument: parseDocument,
stringify: stringify,
visit: visit$1,
visitAsync: visitAsync
};
var require$$3 = /*@__PURE__*/getAugmentedNamespace(browser$2);
// eslint-disable-next-line n/no-deprecated-api
const { createRequire, createRequireFromPath } = require$$0$8;
function req$2 (name, rootFile) {
const create = createRequire || createRequireFromPath;
const require = create(rootFile);
return require(name)
}
var req_1 = req$2;
const req$1 = req_1;
/**
* Load Options
*
* @private
* @method options
*
* @param {Object} config PostCSS Config
*
* @return {Object} options PostCSS Options
*/
const options = (config, file) => {
if (config.parser && typeof config.parser === 'string') {
try {
config.parser = req$1(config.parser, file);
} catch (err) {
throw new Error(`Loading PostCSS Parser failed: ${err.message}\n\n(@${file})`)
}
}
if (config.syntax && typeof config.syntax === 'string') {
try {
config.syntax = req$1(config.syntax, file);
} catch (err) {
throw new Error(`Loading PostCSS Syntax failed: ${err.message}\n\n(@${file})`)
}
}
if (config.stringifier && typeof config.stringifier === 'string') {
try {
config.stringifier = req$1(config.stringifier, file);
} catch (err) {
throw new Error(`Loading PostCSS Stringifier failed: ${err.message}\n\n(@${file})`)
}
}
if (config.plugins) {
delete config.plugins;
}
return config
};
var options_1 = options;
const req = req_1;
/**
* Plugin Loader
*
* @private
* @method load
*
* @param {String} plugin PostCSS Plugin Name
* @param {Object} options PostCSS Plugin Options
*
* @return {Function} PostCSS Plugin
*/
const load = (plugin, options, file) => {
try {
if (
options === null ||
options === undefined ||
Object.keys(options).length === 0
) {
return req(plugin, file)
} else {
return req(plugin, file)(options)
}
} catch (err) {
throw new Error(`Loading PostCSS Plugin failed: ${err.message}\n\n(@${file})`)
}
};
/**
* Load Plugins
*
* @private
* @method plugins
*
* @param {Object} config PostCSS Config Plugins
*
* @return {Array} plugins PostCSS Plugins
*/
const plugins = (config, file) => {
let plugins = [];
if (Array.isArray(config.plugins)) {
plugins = config.plugins.filter(Boolean);
} else {
plugins = Object.keys(config.plugins)
.filter((plugin) => {
return config.plugins[plugin] !== false ? plugin : ''
})
.map((plugin) => {
return load(plugin, config.plugins[plugin], file)
});
}
if (plugins.length > 0) {
plugins.forEach((plugin, i) => {
if (plugin.default) {
plugin = plugin.default;
}
if (plugin.postcss === true) {
plugin = plugin();
} else if (plugin.postcss) {
plugin = plugin.postcss;
}
if (
// eslint-disable-next-line
!(
(typeof plugin === 'object' && Array.isArray(plugin.plugins)) ||
(typeof plugin === 'object' && plugin.postcssPlugin) ||
(typeof plugin === 'function')
)
) {
throw new TypeError(`Invalid PostCSS Plugin found at: plugins[${i}]\n\n(@${file})`)
}
});
}
return plugins
};
var plugins_1 = plugins;
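/**
 * Illustrative input for the `plugins` loader above: the object form is filtered
 * and loaded by name, while an array form is passed through. A sketch with
 * hypothetical plugin names and config path:
 *
 * ```ts
 * plugins({ plugins: { autoprefixer: {}, cssnano: false } }, 'postcss.config.js')
 * // loads 'autoprefixer' and skips 'cssnano' (set to false)
 * ```
 */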
const resolve = require$$0$4.resolve;
const url$4 = require$$0$9;
const config$1 = dist;
const yaml = require$$3;
const loadOptions = options_1;
const loadPlugins = plugins_1;
/* istanbul ignore next */
const interopRequireDefault = (obj) => obj && obj.__esModule ? obj : { default: obj };
/**
* Process the result from cosmiconfig
*
* @param {Object} ctx Config Context
* @param {Object} result Cosmiconfig result
*
* @return {Object} PostCSS Config
*/
const processResult = (ctx, result) => {
const file = result.filepath || '';
let config = interopRequireDefault(result.config).default || {};
if (typeof config === 'function') {
config = config(ctx);
} else {
config = Object.assign({}, config, ctx);
}
if (!config.plugins) {
config.plugins = [];
}
return {
plugins: loadPlugins(config, file),
options: loadOptions(config, file),
file
}
};
/**
* Builds the Config Context
*
* @param {Object} ctx Config Context
*
* @return {Object} Config Context
*/
const createContext = (ctx) => {
/**
* @type {Object}
*
* @prop {String} cwd=process.cwd() Config search start location
* @prop {String} env=process.env.NODE_ENV Config Environment, will be set to `development` by `postcss-load-config` if `process.env.NODE_ENV` is `undefined`
*/
ctx = Object.assign({
cwd: process.cwd(),
env: process.env.NODE_ENV
}, ctx);
if (!ctx.env) {
process.env.NODE_ENV = 'development';
}
return ctx
};
const importDefault = async filepath => {
const module = await import(url$4.pathToFileURL(filepath).href);
return module.default
};
const addTypeScriptLoader = (options = {}, loader) => {
const moduleName = 'postcss';
return {
...options,
searchPlaces: [
...(options.searchPlaces || []),
'package.json',
`.${moduleName}rc`,
`.${moduleName}rc.json`,
`.${moduleName}rc.yaml`,
`.${moduleName}rc.yml`,
`.${moduleName}rc.ts`,
`.${moduleName}rc.js`,
`.${moduleName}rc.cjs`,
`.${moduleName}rc.mjs`,
`${moduleName}.config.ts`,
`${moduleName}.config.js`,
`${moduleName}.config.cjs`,
`${moduleName}.config.mjs`
],
loaders: {
...options.loaders,
'.yaml': (filepath, content) => yaml.parse(content),
'.yml': (filepath, content) => yaml.parse(content),
'.js': importDefault,
'.cjs': importDefault,
'.mjs': importDefault,
'.ts': loader
}
}
};
const withTypeScriptLoader = (rcFunc) => {
return (ctx, path, options) => {
return rcFunc(ctx, path, addTypeScriptLoader(options, (configFile) => {
let registerer = { enabled () {} };
try {
// Register TypeScript compiler instance
registerer = __require('ts-node').register();
return __require(configFile)
} catch (err) {
if (err.code === 'MODULE_NOT_FOUND') {
throw new Error(
`'ts-node' is required for the TypeScript configuration files. Make sure it is installed\nError: ${err.message}`
)
}
throw err
} finally {
registerer.enabled(false);
}
}))
}
};
/**
* Load Config
*
* @method rc
*
* @param {Object} ctx Config Context
* @param {String} path Config Path
* @param {Object} options Config Options
*
* @return {Promise} config PostCSS Config
*/
const rc = withTypeScriptLoader((ctx, path, options) => {
/**
* @type {Object} The full Config Context
*/
ctx = createContext(ctx);
/**
* @type {String} `process.cwd()`
*/
path = path ? resolve(path) : process.cwd();
return config$1.lilconfig('postcss', options)
.search(path)
.then((result) => {
if (!result) {
throw new Error(`No PostCSS Config found in: ${path}`)
}
return processResult(ctx, result)
})
});
/**
* Autoload Config for PostCSS
*
* @author Michael Ciniawsky @michael-ciniawsky <michael.ciniawsky@gmail.com>
* @license MIT
*
* @module postcss-load-config
* @version 2.1.0
*
* @requires cosmiconfig
* @requires ./options
* @requires ./plugins
*/
var src$1 = rc;
var postcssrc = /*@__PURE__*/getDefaultExportFromCjs(src$1);
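/**
 * A minimal usage sketch for `postcssrc` above (the search path is an
 * illustrative assumption); it resolves to the processed config and rejects
 * if no PostCSS config is found:
 *
 * ```ts
 * const { plugins, options, file } = await postcssrc({}, '/path/to/project')
 * ```
 */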
function _stripLiteralAcorn(code, options) {
const FILL = options?.fillChar ?? " ";
const FILL_COMMENT = " ";
let result = "";
const filter = options?.filter ?? (() => true);
function fillupTo(index) {
if (index > result.length)
result += code.slice(result.length, index).replace(/[^\n]/g, FILL_COMMENT);
}
const tokens = [];
const tokenStream = tokenizer(code, {
ecmaVersion: "latest",
sourceType: "module",
allowHashBang: true,
allowAwaitOutsideFunction: true,
allowImportExportEverywhere: true
});
const iter = tokenStream[Symbol.iterator]();
let error;
try {
while (true) {
const { done, value: token } = iter.next();
if (done)
break;
tokens.push(token);
fillupTo(token.start);
if (token.type.label === "string") {
const body = code.slice(token.start + 1, token.end - 1);
if (filter(body)) {
result += code[token.start] + FILL.repeat(token.end - token.start - 2) + code[token.end - 1];
continue;
}
} else if (token.type.label === "template") {
const body = code.slice(token.start, token.end);
if (filter(body)) {
result += FILL.repeat(token.end - token.start);
continue;
}
} else if (token.type.label === "regexp") {
const body = code.slice(token.start, token.end);
if (filter(body)) {
result += body.replace(/\/(.*)\/(\w?)$/g, (_, $1, $2) => `/${FILL.repeat($1.length)}/${$2}`);
continue;
}
}
result += code.slice(token.start, token.end);
}
fillupTo(code.length);
} catch (e) {
error = e;
}
return {
error,
result,
tokens
};
}
const multilineCommentsRE = /\/\*([^*\/])*?\*\//gms;
const singlelineCommentsRE = /(?:^|\n|\r)\s*\/\/.*(?:\r|\n|$)/gm;
const templateLiteralRE$1 = /\$\{(\s*(?:|{.*}|(?!\$\{).|\n|\r)*?\s*)\}/g;
const quotesRE = [
/(["'`])((?:\\\1|(?!\1)|.|\r)*?)\1/gm,
/([`])((?:\\\1|(?!\1)|.|\n|\r)*?)\1/gm
// multi-line strings (i.e. template literals only)
];
function stripLiteralRegex(code, options) {
const FILL_COMMENT = " ";
const FILL = options?.fillChar ?? " ";
const filter = options?.filter ?? (() => true);
code = code.replace(multilineCommentsRE, (s) => filter(s) ? FILL_COMMENT.repeat(s.length) : s).replace(singlelineCommentsRE, (s) => filter(s) ? FILL_COMMENT.repeat(s.length) : s);
let expanded = code;
for (let i = 0; i < 16; i++) {
const before = expanded;
expanded = expanded.replace(templateLiteralRE$1, "` $1`");
if (expanded === before)
break;
}
quotesRE.forEach((re) => {
expanded = expanded.replace(re, (s, quote, body, index) => {
if (!filter(s.slice(1, -1)))
return s;
code = code.slice(0, index + 1) + FILL.repeat(s.length - 2) + code.slice(index + s.length - 1);
return quote + FILL.repeat(s.length - 2) + quote;
});
});
return code;
}
function stripLiteral(code, options) {
return stripLiteralDetailed(code, options).result;
}
function stripLiteralDetailed(code, options) {
const acorn = _stripLiteralAcorn(code, options);
if (!acorn.error) {
return {
mode: "acorn",
result: acorn.result,
acorn
};
}
return {
mode: "regex",
result: stripLiteralRegex(acorn.result + code.slice(acorn.result.length), options),
acorn
};
}
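/**
 * Example for `stripLiteral` above: string and comment contents are blanked out
 * while offsets are preserved, so positions found in the result map back to the
 * original source:
 *
 * ```ts
 * stripLiteral('const a = "hello" // note')
 * // => 'const a = "     "        '  (same length, literal and comment blanked)
 * ```
 */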
var main$1 = {exports: {}};
var name = "dotenv";
var version$1 = "16.3.1";
var description = "Loads environment variables from .env file";
var main = "lib/main.js";
var types = "lib/main.d.ts";
var exports = {
".": {
types: "./lib/main.d.ts",
require: "./lib/main.js",
"default": "./lib/main.js"
},
"./config": "./config.js",
"./config.js": "./config.js",
"./lib/env-options": "./lib/env-options.js",
"./lib/env-options.js": "./lib/env-options.js",
"./lib/cli-options": "./lib/cli-options.js",
"./lib/cli-options.js": "./lib/cli-options.js",
"./package.json": "./package.json"
};
var scripts = {
"dts-check": "tsc --project tests/types/tsconfig.json",
lint: "standard",
"lint-readme": "standard-markdown",
pretest: "npm run lint && npm run dts-check",
test: "tap tests/*.js --100 -Rspec",
prerelease: "npm test",
release: "standard-version"
};
var repository = {
type: "git",
url: "git://github.com/motdotla/dotenv.git"
};
var funding = "https://github.com/motdotla/dotenv?sponsor=1";
var keywords = [
"dotenv",
"env",
".env",
"environment",
"variables",
"config",
"settings"
];
var readmeFilename = "README.md";
var license = "BSD-2-Clause";
var devDependencies = {
"@definitelytyped/dtslint": "^0.0.133",
"@types/node": "^18.11.3",
decache: "^4.6.1",
sinon: "^14.0.1",
standard: "^17.0.0",
"standard-markdown": "^7.1.0",
"standard-version": "^9.5.0",
tap: "^16.3.0",
tar: "^6.1.11",
typescript: "^4.8.4"
};
var engines = {
node: ">=12"
};
var browser$1 = {
fs: false
};
var require$$4 = {
name: name,
version: version$1,
description: description,
main: main,
types: types,
exports: exports,
scripts: scripts,
repository: repository,
funding: funding,
keywords: keywords,
readmeFilename: readmeFilename,
license: license,
devDependencies: devDependencies,
engines: engines,
browser: browser$1
};
const fs$9 = require$$0__default;
const path$9 = require$$0$4;
const os$2 = require$$2;
const crypto$1 = require$$3$1;
const packageJson = require$$4;
const version = packageJson.version;
const LINE = /(?:^|^)\s*(?:export\s+)?([\w.-]+)(?:\s*=\s*?|:\s+?)(\s*'(?:\\'|[^'])*'|\s*"(?:\\"|[^"])*"|\s*`(?:\\`|[^`])*`|[^#\r\n]+)?\s*(?:#.*)?(?:$|$)/mg;
// Parse src into an Object
function parse$9 (src) {
const obj = {};
// Convert buffer to string
let lines = src.toString();
// Convert line breaks to same format
lines = lines.replace(/\r\n?/mg, '\n');
let match;
while ((match = LINE.exec(lines)) != null) {
const key = match[1];
// Default undefined or null to empty string
let value = (match[2] || '');
// Remove whitespace
value = value.trim();
// Check if double quoted
const maybeQuote = value[0];
// Remove surrounding quotes
value = value.replace(/^(['"`])([\s\S]*)\1$/mg, '$2');
// Expand newlines if double quoted
if (maybeQuote === '"') {
value = value.replace(/\\n/g, '\n');
value = value.replace(/\\r/g, '\r');
}
// Add to object
obj[key] = value;
}
return obj
}
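/**
 * Example for the dotenv parse function above (bundled here as `parse$9`):
 * surrounding quotes are stripped, and `\n`/`\r` escapes are only expanded
 * inside double-quoted values.
 *
 * ```ts
 * parse$9('FOO=bar\nMSG="hello\\nworld" # comment\n')
 * // => { FOO: 'bar', MSG: 'hello\nworld' }
 * ```
 */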
function _parseVault (options) {
const vaultPath = _vaultPath(options);
// Parse .env.vault
const result = DotenvModule.configDotenv({ path: vaultPath });
if (!result.parsed) {
throw new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`)
}
// handle scenario for comma separated keys - for use with key rotation
// example: DOTENV_KEY="dotenv://:key_1234@dotenv.org/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenv.org/vault/.env.vault?environment=prod"
const keys = _dotenvKey(options).split(',');
const length = keys.length;
let decrypted;
for (let i = 0; i < length; i++) {
try {
// Get full key
const key = keys[i].trim();
// Get instructions for decrypt
const attrs = _instructions(result, key);
// Decrypt
decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key);
break
} catch (error) {
// last key
if (i + 1 >= length) {
throw error
}
// try next key
}
}
// Parse decrypted .env string
return DotenvModule.parse(decrypted)
}
function _log (message) {
console.log(`[dotenv@${version}][INFO] ${message}`);
}
function _warn (message) {
console.log(`[dotenv@${version}][WARN] ${message}`);
}
function _debug (message) {
console.log(`[dotenv@${version}][DEBUG] ${message}`);
}
function _dotenvKey (options) {
// prioritize developer directly setting options.DOTENV_KEY
if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {
return options.DOTENV_KEY
}
// secondary infra already contains a DOTENV_KEY environment variable
if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {
return process.env.DOTENV_KEY
}
// fallback to empty string
return ''
}
function _instructions (result, dotenvKey) {
// Parse DOTENV_KEY. Format is a URI
let uri;
try {
uri = new URL(dotenvKey);
} catch (error) {
if (error.code === 'ERR_INVALID_URL') {
throw new Error('INVALID_DOTENV_KEY: Wrong format. Must be in valid uri format like dotenv://:key_1234@dotenv.org/vault/.env.vault?environment=development')
}
throw error
}
// Get decrypt key
const key = uri.password;
if (!key) {
throw new Error('INVALID_DOTENV_KEY: Missing key part')
}
// Get environment
const environment = uri.searchParams.get('environment');
if (!environment) {
throw new Error('INVALID_DOTENV_KEY: Missing environment part')
}
// Get ciphertext payload
const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`;
const ciphertext = result.parsed[environmentKey]; // DOTENV_VAULT_PRODUCTION
if (!ciphertext) {
throw new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`)
}
return { ciphertext, key }
}
function _vaultPath (options) {
let dotenvPath = path$9.resolve(process.cwd(), '.env');
if (options && options.path && options.path.length > 0) {
dotenvPath = options.path;
}
// Locate .env.vault
return dotenvPath.endsWith('.vault') ? dotenvPath : `${dotenvPath}.vault`
}
function _resolveHome (envPath) {
return envPath[0] === '~' ? path$9.join(os$2.homedir(), envPath.slice(1)) : envPath
}
function _configVault (options) {
_log('Loading env from encrypted .env.vault');
const parsed = DotenvModule._parseVault(options);
let processEnv = process.env;
if (options && options.processEnv != null) {
processEnv = options.processEnv;
}
DotenvModule.populate(processEnv, parsed, options);
return { parsed }
}
function configDotenv (options) {
let dotenvPath = path$9.resolve(process.cwd(), '.env');
let encoding = 'utf8';
const debug = Boolean(options && options.debug);
if (options) {
if (options.path != null) {
dotenvPath = _resolveHome(options.path);
}
if (options.encoding != null) {
encoding = options.encoding;
}
}
try {
// Specifying an encoding returns a string instead of a buffer
const parsed = DotenvModule.parse(fs$9.readFileSync(dotenvPath, { encoding }));
let processEnv = process.env;
if (options && options.processEnv != null) {
processEnv = options.processEnv;
}
DotenvModule.populate(processEnv, parsed, options);
return { parsed }
} catch (e) {
if (debug) {
_debug(`Failed to load ${dotenvPath} ${e.message}`);
}
return { error: e }
}
}
// Populates process.env from .env file
function config (options) {
const vaultPath = _vaultPath(options);
// fallback to original dotenv if DOTENV_KEY is not set
if (_dotenvKey(options).length === 0) {
return DotenvModule.configDotenv(options)
}
// dotenvKey exists but .env.vault file does not exist
if (!fs$9.existsSync(vaultPath)) {
_warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. Did you forget to build it?`);
return DotenvModule.configDotenv(options)
}
return DotenvModule._configVault(options)
}
function decrypt (encrypted, keyStr) {
const key = Buffer.from(keyStr.slice(-64), 'hex');
let ciphertext = Buffer.from(encrypted, 'base64');
const nonce = ciphertext.slice(0, 12);
const authTag = ciphertext.slice(-16);
ciphertext = ciphertext.slice(12, -16);
try {
const aesgcm = crypto$1.createDecipheriv('aes-256-gcm', key, nonce);
aesgcm.setAuthTag(authTag);
return `${aesgcm.update(ciphertext)}${aesgcm.final()}`
} catch (error) {
const isRange = error instanceof RangeError;
const invalidKeyLength = error.message === 'Invalid key length';
const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data';
if (isRange || invalidKeyLength) {
const msg = 'INVALID_DOTENV_KEY: It must be 64 characters long (or more)';
throw new Error(msg)
} else if (decryptionFailed) {
const msg = 'DECRYPTION_FAILED: Please check your DOTENV_KEY';
throw new Error(msg)
} else {
console.error('Error: ', error.code);
console.error('Error: ', error.message);
throw error
}
}
}
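/**
 * The `encrypted` blob consumed by `decrypt` above is the base64 of
 * 12-byte nonce || ciphertext || 16-byte auth tag, encrypted with AES-256-GCM
 * using the last 64 hex characters of the key string. A hypothetical
 * counterpart sketch for producing such a blob (not part of dotenv; `crypto`,
 * `keyStr` and `plaintext` are assumed names):
 *
 * ```ts
 * const nonce = crypto.randomBytes(12)
 * const key = Buffer.from(keyStr.slice(-64), 'hex')
 * const cipher = crypto.createCipheriv('aes-256-gcm', key, nonce)
 * const ciphertext = Buffer.concat([cipher.update(plaintext, 'utf8'), cipher.final()])
 * const blob = Buffer.concat([nonce, ciphertext, cipher.getAuthTag()]).toString('base64')
 * ```
 */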
// Populate process.env with parsed values
function populate (processEnv, parsed, options = {}) {
const debug = Boolean(options && options.debug);
const override = Boolean(options && options.override);
if (typeof parsed !== 'object') {
throw new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate')
}
// Set process.env
for (const key of Object.keys(parsed)) {
if (Object.prototype.hasOwnProperty.call(processEnv, key)) {
if (override === true) {
processEnv[key] = parsed[key];
}
if (debug) {
if (override === true) {
_debug(`"${key}" is already defined and WAS overwritten`);
} else {
_debug(`"${key}" is already defined and was NOT overwritten`);
}
}
} else {
processEnv[key] = parsed[key];
}
}
}
const DotenvModule = {
configDotenv,
_configVault,
_parseVault,
config,
decrypt,
parse: parse$9,
populate
};
main$1.exports.configDotenv = DotenvModule.configDotenv;
main$1.exports._configVault = DotenvModule._configVault;
main$1.exports._parseVault = DotenvModule._parseVault;
main$1.exports.config = DotenvModule.config;
main$1.exports.decrypt = DotenvModule.decrypt;
var parse_1$1 = main$1.exports.parse = DotenvModule.parse;
main$1.exports.populate = DotenvModule.populate;
main$1.exports = DotenvModule;
function _interpolate (envValue, environment, config) {
const matches = envValue.match(/(.?\${*[\w]*(?::-[\w/]*)?}*)/g) || [];
return matches.reduce(function (newEnv, match, index) {
const parts = /(.?)\${*([\w]*(?::-[\w/]*)?)?}*/g.exec(match);
if (!parts || parts.length === 0) {
return newEnv
}
const prefix = parts[1];
let value, replacePart;
if (prefix === '\\') {
replacePart = parts[0];
value = replacePart.replace('\\$', '$');
} else {
// PATCH: compatible with env variables ending with an unescaped $
if(!parts[2]) {
return newEnv
}
const keyParts = parts[2].split(':-');
const key = keyParts[0];
replacePart = parts[0].substring(prefix.length);
// process.env value 'wins' over .env file's value
value = Object.prototype.hasOwnProperty.call(environment, key)
? environment[key]
: (config.parsed[key] || keyParts[1] || '');
// If the value is found, remove nested expansions.
if (keyParts.length > 1 && value) {
const replaceNested = matches[index + 1];
matches[index + 1] = '';
newEnv = newEnv.replace(replaceNested, '');
}
// Resolve recursive interpolations
value = _interpolate(value, environment, config);
}
return newEnv.replace(replacePart, value)
}, envValue)
}
function expand (config) {
// if ignoring process.env, use a blank object
const environment = config.ignoreProcessEnv ? {} : process.env;
for (const configKey in config.parsed) {
const value = Object.prototype.hasOwnProperty.call(environment, configKey) ? environment[configKey] : config.parsed[configKey];
config.parsed[configKey] = _interpolate(value, environment, config);
}
// PATCH: don't write to process.env
// for (const processKey in config.parsed) {
// environment[processKey] = config.parsed[processKey]
// }
return config
}
var expand_1 = expand;
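/**
 * Example for the patched `expand` above (a sketch): values may reference each
 * other with `${VAR}`; matching `process.env` entries take precedence, and the
 * result is only written back to `config.parsed`, not to `process.env`.
 *
 * ```ts
 * expand({ parsed: { HOST: 'localhost', URL: 'http://${HOST}:3000' } })
 * // parsed.URL === 'http://localhost:3000' (assuming process.env.HOST is unset)
 * ```
 */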
function loadEnv(mode, envDir, prefixes = 'VITE_') {
if (mode === 'local') {
throw new Error(`"local" cannot be used as a mode name because it conflicts with ` +
`the .local postfix for .env files.`);
}
prefixes = arraify(prefixes);
const env = {};
const envFiles = [
/** default file */ `.env`,
/** local file */ `.env.local`,
/** mode file */ `.env.${mode}`,
/** mode local file */ `.env.${mode}.local`,
];
const parsed = Object.fromEntries(envFiles.flatMap((file) => {
const filePath = path$o.join(envDir, file);
if (!tryStatSync(filePath)?.isFile())
return [];
return Object.entries(parse_1$1(fs$l.readFileSync(filePath)));
}));
// test NODE_ENV override before expand as otherwise process.env.NODE_ENV would override this
if (parsed.NODE_ENV && process.env.VITE_USER_NODE_ENV === undefined) {
process.env.VITE_USER_NODE_ENV = parsed.NODE_ENV;
}
// support BROWSER and BROWSER_ARGS env variables
if (parsed.BROWSER && process.env.BROWSER === undefined) {
process.env.BROWSER = parsed.BROWSER;
}
if (parsed.BROWSER_ARGS && process.env.BROWSER_ARGS === undefined) {
process.env.BROWSER_ARGS = parsed.BROWSER_ARGS;
}
// let environment variables use each other
// `expand` patched in patches/dotenv-expand@9.0.0.patch
expand_1({ parsed });
// only keys that start with prefix are exposed to client
for (const [key, value] of Object.entries(parsed)) {
if (prefixes.some((prefix) => key.startsWith(prefix))) {
env[key] = value;
}
}
// check if there are actual env variables starting with VITE_*
// these are typically provided inline and should be prioritized
for (const key in process.env) {
if (prefixes.some((prefix) => key.startsWith(prefix))) {
env[key] = process.env[key];
}
}
return env;
}
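/**
 * A minimal usage sketch for `loadEnv` above (the prefixes shown are illustrative):
 *
 * ```ts
 * const env = loadEnv('development', process.cwd(), ['VITE_', 'PUBLIC_'])
 * // reads .env, .env.local, .env.development and .env.development.local from the
 * // directory and returns only the keys matching one of the prefixes
 * ```
 */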
function resolveEnvPrefix({ envPrefix = 'VITE_', }) {
envPrefix = arraify(envPrefix);
if (envPrefix.some((prefix) => prefix === '')) {
throw new Error(`envPrefix option contains value '', which could lead to unexpected exposure of sensitive information.`);
}
return envPrefix;
}
const modulePreloadPolyfillId = 'vite/modulepreload-polyfill';
const resolvedModulePreloadPolyfillId = '\0' + modulePreloadPolyfillId;
function modulePreloadPolyfillPlugin(config) {
// `isModernFlag` is only available during build since it is resolved by `vite:build-import-analysis`
const skip = config.command !== 'build' || config.build.ssr;
let polyfillString;
return {
name: 'vite:modulepreload-polyfill',
resolveId(id) {
if (id === modulePreloadPolyfillId) {
return resolvedModulePreloadPolyfillId;
}
},
load(id) {
if (id === resolvedModulePreloadPolyfillId) {
if (skip) {
return '';
}
if (!polyfillString) {
polyfillString = `${isModernFlag}&&(${polyfill.toString()}());`;
}
return { code: polyfillString, moduleSideEffects: true };
}
},
};
}
function polyfill() {
const relList = document.createElement('link').relList;
if (relList && relList.supports && relList.supports('modulepreload')) {
return;
}
for (const link of document.querySelectorAll('link[rel="modulepreload"]')) {
processPreload(link);
}
new MutationObserver((mutations) => {
for (const mutation of mutations) {
if (mutation.type !== 'childList') {
continue;
}
for (const node of mutation.addedNodes) {
if (node.tagName === 'LINK' && node.rel === 'modulepreload')
processPreload(node);
}
}
}).observe(document, { childList: true, subtree: true });
function getFetchOpts(link) {
const fetchOpts = {};
if (link.integrity)
fetchOpts.integrity = link.integrity;
if (link.referrerPolicy)
fetchOpts.referrerPolicy = link.referrerPolicy;
if (link.crossOrigin === 'use-credentials')
fetchOpts.credentials = 'include';
else if (link.crossOrigin === 'anonymous')
fetchOpts.credentials = 'omit';
else
fetchOpts.credentials = 'same-origin';
return fetchOpts;
}
function processPreload(link) {
if (link.ep)
// ep marker = processed
return;
link.ep = true;
// prepopulate the load record
const fetchOpts = getFetchOpts(link);
fetch(link.href, fetchOpts);
}
}
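// Illustrative note (not executed; path is hypothetical): in a browser without native
// `modulepreload` support, the polyfill above turns markup such as
//   <link rel="modulepreload" href="/assets/chunk-abc123.js">
// into an early fetch('/assets/chunk-abc123.js', { credentials: 'same-origin' }), and keeps
// watching the document for links added later via the MutationObserver.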
const htmlProxyRE$1 = /\?html-proxy=?(?:&inline-css)?(?:&style-attr)?&index=(\d+)\.(js|css)$/;
const inlineCSSRE$1 = /__VITE_INLINE_CSS__([a-z\d]{8}_\d+)__/g;
// Do not allow preceding '.', but do allow preceding '...' for spread operations
const inlineImportRE = /(?<!(?<!\.\.)\.)\bimport\s*\(("(?:[^"]|(?<=\\)")*"|'(?:[^']|(?<=\\)')*')\)/g;
const htmlLangRE = /\.(?:html|htm)$/;
const importMapRE = /[ \t]*<script[^>]*type\s*=\s*(?:"importmap"|'importmap'|importmap)[^>]*>.*?<\/script>/is;
const moduleScriptRE = /[ \t]*<script[^>]*type\s*=\s*(?:"module"|'module'|module)[^>]*>/i;
const modulePreloadLinkRE = /[ \t]*<link[^>]*rel\s*=\s*(?:"modulepreload"|'modulepreload'|modulepreload)[\s\S]*?\/>/i;
const importMapAppendRE = new RegExp([moduleScriptRE, modulePreloadLinkRE].map((r) => r.source).join('|'), 'i');
const isHTMLProxy = (id) => htmlProxyRE$1.test(id);
const isHTMLRequest = (request) => htmlLangRE.test(request);
// HTML Proxy Caches are stored by config -> filePath -> index
const htmlProxyMap = new WeakMap();
// HTML Proxy Transform results are stored by config
// `${hash(importer)}_${query.index}` -> transformed css code
// PS: a key looks like `hash(/vite/playground/assets/index.html)_1`
const htmlProxyResult = new Map();
function htmlInlineProxyPlugin(config) {
// This should be done when the plugin is created rather than in `buildStart`,
// since `buildStart` can be triggered multiple times, which would empty the cached result.
// https://github.com/vitejs/vite/issues/6372
htmlProxyMap.set(config, new Map());
return {
name: 'vite:html-inline-proxy',
resolveId(id) {
if (htmlProxyRE$1.test(id)) {
return id;
}
},
load(id) {
const proxyMatch = id.match(htmlProxyRE$1);
if (proxyMatch) {
const index = Number(proxyMatch[1]);
const file = cleanUrl(id);
const url = file.replace(normalizePath$3(config.root), '');
const result = htmlProxyMap.get(config).get(url)?.[index];
if (result) {
return result;
}
else {
throw new Error(`No matching HTML proxy module found from ${id}`);
}
}
},
};
}
function addToHTMLProxyCache(config, filePath, index, result) {
if (!htmlProxyMap.get(config)) {
htmlProxyMap.set(config, new Map());
}
if (!htmlProxyMap.get(config).get(filePath)) {
htmlProxyMap.get(config).set(filePath, []);
}
htmlProxyMap.get(config).get(filePath)[index] = result;
}
function addToHTMLProxyTransformResult(hash, code) {
htmlProxyResult.set(hash, code);
}
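// Illustrative sketch of the html-proxy id shapes handled above (paths are hypothetical):
//   /project/index.html?html-proxy&index=0.js              -> first inline <script type="module">
//   /project/index.html?html-proxy&inline-css&index=1.css  -> second inline style block
// `htmlProxyMap` is keyed by config, then by the root-relative file path, then by index;
// `htmlProxyResult` is keyed by `${hash(cleanUrl(id))}_${index}` for transformed inline CSS.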
// this extends the config in @vue/compiler-sfc with <link href>
const assetAttrsConfig = {
link: ['href'],
video: ['src', 'poster'],
source: ['src', 'srcset'],
img: ['src', 'srcset'],
image: ['xlink:href', 'href'],
use: ['xlink:href', 'href'],
};
const isAsyncScriptMap = new WeakMap();
function nodeIsElement(node) {
return node.nodeName[0] !== '#';
}
function traverseNodes(node, visitor) {
visitor(node);
if (nodeIsElement(node) ||
node.nodeName === '#document' ||
node.nodeName === '#document-fragment') {
node.childNodes.forEach((childNode) => traverseNodes(childNode, visitor));
}
}
async function traverseHtml(html, filePath, visitor) {
// lazy load compiler
const { parse } = await import('./dep-f0c7dae0.js');
const ast = parse(html, {
scriptingEnabled: false,
sourceCodeLocationInfo: true,
onParseError: (e) => {
handleParseError(e, html, filePath);
},
});
traverseNodes(ast, visitor);
}
function getScriptInfo(node) {
let src;
let sourceCodeLocation;
let isModule = false;
let isAsync = false;
for (const p of node.attrs) {
if (p.prefix !== undefined)
continue;
if (p.name === 'src') {
if (!src) {
src = p;
sourceCodeLocation = node.sourceCodeLocation?.attrs['src'];
}
}
else if (p.name === 'type' && p.value && p.value === 'module') {
isModule = true;
}
else if (p.name === 'async') {
isAsync = true;
}
}
return { src, sourceCodeLocation, isModule, isAsync };
}
const attrValueStartRE = /=\s*(.)/;
function overwriteAttrValue(s, sourceCodeLocation, newValue) {
const srcString = s.slice(sourceCodeLocation.startOffset, sourceCodeLocation.endOffset);
const valueStart = srcString.match(attrValueStartRE);
if (!valueStart) {
// overwriteAttrValue can only be called for a well-defined attribute value
throw new Error(`[vite:html] internal error, failed to overwrite attribute value`);
}
const wrapOffset = valueStart[1] === '"' || valueStart[1] === "'" ? 1 : 0;
const valueOffset = valueStart.index + valueStart[0].length - 1;
s.update(sourceCodeLocation.startOffset + valueOffset + wrapOffset, sourceCodeLocation.endOffset - wrapOffset, newValue);
return s;
}
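// Hedged example for `overwriteAttrValue` above (paths are hypothetical): given the source
//   <script src="./main.ts"></script>
// and the sourceCodeLocation of the `src` attribute, calling it with "/assets/main.js"
// rewrites only the quoted value, producing <script src="/assets/main.js"></script>;
// the surrounding quotes and the attribute name are left untouched.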
/**
* Format parse5 @type {ParserError} to @type {RollupError}
*/
function formatParseError(parserError, id, html) {
const formattedError = {
code: parserError.code,
message: `parse5 error code ${parserError.code}`,
frame: generateCodeFrame(html, parserError.startOffset),
loc: {
file: id,
line: parserError.startLine,
column: parserError.startCol,
},
};
return formattedError;
}
function handleParseError(parserError, html, filePath) {
switch (parserError.code) {
case 'missing-doctype':
// ignore missing DOCTYPE
return;
case 'abandoned-head-element-child':
// Accept elements without closing tag in <head>
return;
case 'duplicate-attribute':
// Accept duplicate attributes #9566
// The first attribute is used, browsers silently ignore duplicates
return;
case 'non-void-html-element-start-tag-with-trailing-solidus':
// Allow self closing on non-void elements #10439
return;
}
const parseError = formatParseError(parserError, filePath, html);
throw new Error(`Unable to parse HTML; ${parseError.message}\n` +
` at ${parseError.loc.file}:${parseError.loc.line}:${parseError.loc.column}\n` +
`${parseError.frame}`);
}
/**
* Compiles index.html into an entry js module
*/
function buildHtmlPlugin(config) {
const [preHooks, normalHooks, postHooks] = resolveHtmlTransforms(config.plugins);
preHooks.unshift(preImportMapHook(config));
preHooks.push(htmlEnvHook(config));
postHooks.push(postImportMapHook());
const processedHtml = new Map();
const isExcludedUrl = (url) => url[0] === '#' ||
isExternalUrl(url) ||
isDataUrl(url) ||
checkPublicFile(url, config);
// Same reason as in `htmlInlineProxyPlugin`
isAsyncScriptMap.set(config, new Map());
return {
name: 'vite:build-html',
async transform(html, id) {
if (id.endsWith('.html')) {
const relativeUrlPath = path$o.posix.relative(config.root, normalizePath$3(id));
const publicPath = `/${relativeUrlPath}`;
const publicBase = getBaseInHTML(relativeUrlPath, config);
const publicToRelative = (filename, importer) => publicBase + filename;
const toOutputPublicFilePath = (url) => toOutputFilePathInHtml(url.slice(1), 'public', relativeUrlPath, 'html', config, publicToRelative);
// pre-transform
html = await applyHtmlTransforms(html, preHooks, {
path: publicPath,
filename: id,
});
let js = '';
const s = new MagicString(html);
const assetUrls = [];
const scriptUrls = [];
const styleUrls = [];
let inlineModuleIndex = -1;
let everyScriptIsAsync = true;
let someScriptsAreAsync = false;
let someScriptsAreDefer = false;
await traverseHtml(html, id, (node) => {
if (!nodeIsElement(node)) {
return;
}
let shouldRemove = false;
// script tags
if (node.nodeName === 'script') {
const { src, sourceCodeLocation, isModule, isAsync } = getScriptInfo(node);
const url = src && src.value;
const isPublicFile = !!(url && checkPublicFile(url, config));
if (isPublicFile) {
// referencing public dir url, prefix with base
overwriteAttrValue(s, sourceCodeLocation, toOutputPublicFilePath(url));
}
if (isModule) {
inlineModuleIndex++;
if (url && !isExcludedUrl(url)) {
// <script type="module" src="..."/>
// add it as an import
js += `\nimport ${JSON.stringify(url)}`;
shouldRemove = true;
}
else if (node.childNodes.length) {
const scriptNode = node.childNodes.pop();
const contents = scriptNode.value;
// <script type="module">...</script>
const filePath = id.replace(normalizePath$3(config.root), '');
addToHTMLProxyCache(config, filePath, inlineModuleIndex, {
code: contents,
});
js += `\nimport "${id}?html-proxy&index=${inlineModuleIndex}.js"`;
shouldRemove = true;
}
everyScriptIsAsync && (everyScriptIsAsync = isAsync);
someScriptsAreAsync || (someScriptsAreAsync = isAsync);
someScriptsAreDefer || (someScriptsAreDefer = !isAsync);
}
else if (url && !isPublicFile) {
if (!isExcludedUrl(url)) {
config.logger.warn(`<script src="${url}"> in "${publicPath}" can't be bundled without type="module" attribute`);
}
}
else if (node.childNodes.length) {
const scriptNode = node.childNodes.pop();
const cleanCode = stripLiteral(scriptNode.value);
let match;
inlineImportRE.lastIndex = 0;
while ((match = inlineImportRE.exec(cleanCode))) {
const { 1: url, index } = match;
const startUrl = cleanCode.indexOf(url, index);
const start = startUrl + 1;
const end = start + url.length - 2;
const startOffset = scriptNode.sourceCodeLocation.startOffset;
scriptUrls.push({
start: start + startOffset,
end: end + startOffset,
url: scriptNode.value.slice(start, end),
});
}
}
}
// For asset references in index.html, also generate an import
// statement for each - this will be handled by the asset plugin
const assetAttrs = assetAttrsConfig[node.nodeName];
if (assetAttrs) {
for (const p of node.attrs) {
const attrKey = getAttrKey(p);
if (p.value && assetAttrs.includes(attrKey)) {
const attrSourceCodeLocation = node.sourceCodeLocation.attrs[attrKey];
// the asset url may be URI-encoded
const url = decodeURI(p.value);
if (!isExcludedUrl(url)) {
if (node.nodeName === 'link' &&
isCSSRequest(url) &&
// should not be converted if following attributes are present (#6748)
!node.attrs.some((p) => p.prefix === undefined &&
(p.name === 'media' || p.name === 'disabled'))) {
// CSS references, convert to import
const importExpression = `\nimport ${JSON.stringify(url)}`;
styleUrls.push({
url,
start: node.sourceCodeLocation.startOffset,
end: node.sourceCodeLocation.endOffset,
});
js += importExpression;
}
else {
assetUrls.push({
attr: p,
sourceCodeLocation: attrSourceCodeLocation,
});
}
}
else if (checkPublicFile(url, config)) {
overwriteAttrValue(s, attrSourceCodeLocation, toOutputPublicFilePath(url));
}
}
}
}
// <tag style="... url(...) or image-set(...) ..."></tag>
// extract inline styles as virtual css and add class attribute to tag for selecting
const inlineStyle = node.attrs.find((prop) => prop.prefix === undefined &&
prop.name === 'style' &&
// only url(...) or image-set(...) in css need to emit file
(prop.value.includes('url(') ||
prop.value.includes('image-set(')));
if (inlineStyle) {
inlineModuleIndex++;
// replace the inline style with a class
// and import the css in the js code
const code = inlineStyle.value;
const filePath = id.replace(normalizePath$3(config.root), '');
addToHTMLProxyCache(config, filePath, inlineModuleIndex, { code });
// will transform with css plugin and cache result with css-post plugin
js += `\nimport "${id}?html-proxy&inline-css&style-attr&index=${inlineModuleIndex}.css"`;
const hash = getHash(cleanUrl(id));
// will transform in `applyHtmlTransforms`
const sourceCodeLocation = node.sourceCodeLocation.attrs['style'];
overwriteAttrValue(s, sourceCodeLocation, `__VITE_INLINE_CSS__${hash}_${inlineModuleIndex}__`);
}
// <style>...</style>
if (node.nodeName === 'style' && node.childNodes.length) {
const styleNode = node.childNodes.pop();
const filePath = id.replace(normalizePath$3(config.root), '');
inlineModuleIndex++;
addToHTMLProxyCache(config, filePath, inlineModuleIndex, {
code: styleNode.value,
});
js += `\nimport "${id}?html-proxy&inline-css&index=${inlineModuleIndex}.css"`;
const hash = getHash(cleanUrl(id));
// will transform in `applyHtmlTransforms`
s.update(styleNode.sourceCodeLocation.startOffset, styleNode.sourceCodeLocation.endOffset, `__VITE_INLINE_CSS__${hash}_${inlineModuleIndex}__`);
}
if (shouldRemove) {
// remove the script tag from the html. we are going to inject new
// ones at the end.
s.remove(node.sourceCodeLocation.startOffset, node.sourceCodeLocation.endOffset);
}
});
isAsyncScriptMap.get(config).set(id, everyScriptIsAsync);
if (someScriptsAreAsync && someScriptsAreDefer) {
config.logger.warn(`\nMixed async and defer script modules in ${id}, output script will fall back to defer. Every script, including inline ones, needs to be marked as async for your output script to be async.`);
}
// for each encountered asset url, rewrite original html so that it
// references the post-build location, ignoring empty attributes and
// attributes that directly reference named output.
const namedOutput = Object.keys(config?.build?.rollupOptions?.input || {});
for (const { attr, sourceCodeLocation } of assetUrls) {
// the asset url may be URI-encoded
const content = decodeURI(attr.value);
if (content !== '' && // Empty attribute
!namedOutput.includes(content) && // Direct reference to named output
!namedOutput.includes(removeLeadingSlash(content)) // Allow for absolute references as named output can't be an absolute path
) {
try {
const url = attr.prefix === undefined && attr.name === 'srcset'
? await processSrcSet(content, ({ url }) => urlToBuiltUrl(url, id, config, this))
: await urlToBuiltUrl(content, id, config, this);
overwriteAttrValue(s, sourceCodeLocation, url);
}
catch (e) {
if (e.code !== 'ENOENT') {
throw e;
}
}
}
}
// emit <script>import("./aaa")</script> asset
for (const { start, end, url } of scriptUrls) {
if (!isExcludedUrl(url)) {
s.update(start, end, await urlToBuiltUrl(url, id, config, this));
}
else if (checkPublicFile(url, config)) {
s.update(start, end, toOutputPublicFilePath(url));
}
}
// ignore <link rel="stylesheet"> if its url can't be resolved
const resolvedStyleUrls = await Promise.all(styleUrls.map(async (styleUrl) => ({
...styleUrl,
resolved: await this.resolve(styleUrl.url, id),
})));
for (const { start, end, url, resolved } of resolvedStyleUrls) {
if (resolved == null) {
config.logger.warnOnce(`\n${url} doesn't exist at build time; it will remain unchanged and be resolved at runtime`);
const importExpression = `\nimport ${JSON.stringify(url)}`;
js = js.replace(importExpression, '');
}
else {
s.remove(start, end);
}
}
processedHtml.set(id, s.toString());
// inject module preload polyfill only when configured and needed
const { modulePreload } = config.build;
if (modulePreload !== false &&
modulePreload.polyfill &&
(someScriptsAreAsync || someScriptsAreDefer)) {
js = `import "${modulePreloadPolyfillId}";\n${js}`;
}
// Force rollup to keep this module from being shared between other entry points.
// If the resulting chunk is empty, it will be removed in generateBundle.
return { code: js, moduleSideEffects: 'no-treeshake' };
}
},
async generateBundle(options, bundle) {
const analyzedChunk = new Map();
const inlineEntryChunk = new Set();
const getImportedChunks = (chunk, seen = new Set()) => {
const chunks = [];
chunk.imports.forEach((file) => {
const importee = bundle[file];
if (importee?.type === 'chunk' && !seen.has(file)) {
seen.add(file);
// post-order traversal
chunks.push(...getImportedChunks(importee, seen));
chunks.push(importee);
}
});
return chunks;
};
const toScriptTag = (chunk, toOutputPath, isAsync) => ({
tag: 'script',
attrs: {
...(isAsync ? { async: true } : {}),
type: 'module',
crossorigin: true,
src: toOutputPath(chunk.fileName),
},
});
const toPreloadTag = (filename, toOutputPath) => ({
tag: 'link',
attrs: {
rel: 'modulepreload',
crossorigin: true,
href: toOutputPath(filename),
},
});
const getCssTagsForChunk = (chunk, toOutputPath, seen = new Set()) => {
const tags = [];
if (!analyzedChunk.has(chunk)) {
analyzedChunk.set(chunk, 1);
chunk.imports.forEach((file) => {
const importee = bundle[file];
if (importee?.type === 'chunk') {
tags.push(...getCssTagsForChunk(importee, toOutputPath, seen));
}
});
}
chunk.viteMetadata.importedCss.forEach((file) => {
if (!seen.has(file)) {
seen.add(file);
tags.push({
tag: 'link',
attrs: {
rel: 'stylesheet',
href: toOutputPath(file),
},
});
}
});
return tags;
};
for (const [id, html] of processedHtml) {
const relativeUrlPath = path$o.posix.relative(config.root, normalizePath$3(id));
const assetsBase = getBaseInHTML(relativeUrlPath, config);
const toOutputFilePath = (filename, type) => {
if (isExternalUrl(filename)) {
return filename;
}
else {
return toOutputFilePathInHtml(filename, type, relativeUrlPath, 'html', config, (filename, importer) => assetsBase + filename);
}
};
const toOutputAssetFilePath = (filename) => toOutputFilePath(filename, 'asset');
const toOutputPublicAssetFilePath = (filename) => toOutputFilePath(filename, 'public');
const isAsync = isAsyncScriptMap.get(config).get(id);
let result = html;
// find corresponding entry chunk
const chunk = Object.values(bundle).find((chunk) => chunk.type === 'chunk' &&
chunk.isEntry &&
chunk.facadeModuleId === id);
let canInlineEntry = false;
// inject chunk asset links
if (chunk) {
// an entry chunk can be inlined if
// - it's an ES module (e.g. not generated by the legacy plugin)
// - it contains no meaningful code other than import statements
if (options.format === 'es' && isEntirelyImport(chunk.code)) {
canInlineEntry = true;
}
// when not inlined, inject <script> for entry and modulepreload its dependencies
// when inlined, discard entry chunk and inject <script> for everything in post-order
const imports = getImportedChunks(chunk);
let assetTags;
if (canInlineEntry) {
assetTags = imports.map((chunk) => toScriptTag(chunk, toOutputAssetFilePath, isAsync));
}
else {
assetTags = [toScriptTag(chunk, toOutputAssetFilePath, isAsync)];
const { modulePreload } = config.build;
if (modulePreload !== false) {
const resolveDependencies = typeof modulePreload === 'object' &&
modulePreload.resolveDependencies;
const importsFileNames = imports.map((chunk) => chunk.fileName);
const resolvedDeps = resolveDependencies
? resolveDependencies(chunk.fileName, importsFileNames, {
hostId: relativeUrlPath,
hostType: 'html',
})
: importsFileNames;
assetTags.push(...resolvedDeps.map((i) => toPreloadTag(i, toOutputAssetFilePath)));
}
}
assetTags.push(...getCssTagsForChunk(chunk, toOutputAssetFilePath));
result = injectToHead(result, assetTags);
}
// inject css link when cssCodeSplit is false
if (!config.build.cssCodeSplit) {
const cssChunk = Object.values(bundle).find((chunk) => chunk.type === 'asset' && chunk.name === 'style.css');
if (cssChunk) {
result = injectToHead(result, [
{
tag: 'link',
attrs: {
rel: 'stylesheet',
href: toOutputAssetFilePath(cssChunk.fileName),
},
},
]);
}
}
// don't use the assets plugin here because it would emit the file
let match;
let s;
inlineCSSRE$1.lastIndex = 0;
while ((match = inlineCSSRE$1.exec(result))) {
s || (s = new MagicString(result));
const { 0: full, 1: scopedName } = match;
const cssTransformedCode = htmlProxyResult.get(scopedName);
s.update(match.index, match.index + full.length, cssTransformedCode);
}
if (s) {
result = s.toString();
}
result = await applyHtmlTransforms(result, [...normalHooks, ...postHooks], {
path: '/' + relativeUrlPath,
filename: id,
bundle,
chunk,
});
// resolve asset url references
result = result.replace(assetUrlRE, (_, fileHash, postfix = '') => {
return toOutputAssetFilePath(this.getFileName(fileHash)) + postfix;
});
result = result.replace(publicAssetUrlRE, (_, fileHash) => {
const publicAssetPath = toOutputPublicAssetFilePath(getPublicAssetFilename(fileHash, config));
return isUrl(publicAssetPath)
? publicAssetPath
: normalizePath$3(publicAssetPath);
});
if (chunk && canInlineEntry) {
inlineEntryChunk.add(chunk.fileName);
}
const shortEmitName = normalizePath$3(path$o.relative(config.root, id));
this.emitFile({
type: 'asset',
fileName: shortEmitName,
source: result,
});
}
for (const fileName of inlineEntryChunk) {
// all imports from entry have been inlined to html, prevent rollup from outputting it
delete bundle[fileName];
}
},
};
}
function preImportMapHook(config) {
return (html, ctx) => {
const importMapIndex = html.match(importMapRE)?.index;
if (importMapIndex === undefined)
return;
const importMapAppendIndex = html.match(importMapAppendRE)?.index;
if (importMapAppendIndex === undefined)
return;
if (importMapAppendIndex < importMapIndex) {
const relativeHtml = normalizePath$3(path$o.relative(config.root, ctx.filename));
config.logger.warnOnce(colors$1.yellow(colors$1.bold(`(!) <script type="importmap"> should come before <script type="module"> and <link rel="modulepreload"> in /${relativeHtml}`)));
}
};
}
/**
* Move importmap before the first module script and modulepreload link
*/
function postImportMapHook() {
return (html) => {
if (!importMapAppendRE.test(html))
return;
let importMap;
html = html.replace(importMapRE, (match) => {
importMap = match;
return '';
});
if (importMap) {
html = html.replace(importMapAppendRE, (match) => `${importMap}\n${match}`);
}
return html;
};
}
/**
* Support `%ENV_NAME%` syntax in html files
*/
function htmlEnvHook(config) {
const pattern = /%(\S+?)%/g;
const envPrefix = resolveEnvPrefix({ envPrefix: config.envPrefix });
const env = { ...config.env };
// account for user env defines
for (const key in config.define) {
if (key.startsWith(`import.meta.env.`)) {
const val = config.define[key];
if (typeof val === 'string') {
try {
const parsed = JSON.parse(val);
env[key.slice(16)] = typeof parsed === 'string' ? parsed : val;
}
catch {
env[key.slice(16)] = val;
}
}
else {
env[key.slice(16)] = JSON.stringify(val);
}
}
}
return (html, ctx) => {
return html.replace(pattern, (text, key) => {
if (key in env) {
return env[key];
}
else {
if (envPrefix.some((prefix) => key.startsWith(prefix))) {
const relativeHtml = normalizePath$3(path$o.relative(config.root, ctx.filename));
config.logger.warn(colors$1.yellow(colors$1.bold(`(!) ${text} is not defined in env variables found in /${relativeHtml}. ` +
`Is the variable mistyped?`)));
}
return text;
}
});
};
}
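// Illustrative sketch of the %ENV_NAME% replacement above (values are hypothetical):
//   <!-- index.html, with VITE_APP_TITLE="My App" defined in env -->
//   <title>%VITE_APP_TITLE%</title>   ->   <title>My App</title>
// Unknown placeholders are left as-is; if a placeholder starts with a configured env
// prefix (default VITE_), a warning pointing at the html file is logged.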
function resolveHtmlTransforms(plugins) {
const preHooks = [];
const normalHooks = [];
const postHooks = [];
for (const plugin of plugins) {
const hook = plugin.transformIndexHtml;
if (!hook)
continue;
if (typeof hook === 'function') {
normalHooks.push(hook);
}
else {
// `enforce` had only two possible values for the `transformIndexHtml` hook
// `'pre'` and `'post'` (the default). `order` now works with three values
// to align with other hooks (`'pre'`, normal, and `'post'`). We map
// both the old default and an explicit `enforce: 'post'` to `order: undefined` to avoid a breaking change
const order = hook.order ?? (hook.enforce === 'pre' ? 'pre' : undefined);
// @ts-expect-error union type
const handler = hook.handler ?? hook.transform;
if (order === 'pre') {
preHooks.push(handler);
}
else if (order === 'post') {
postHooks.push(handler);
}
else {
normalHooks.push(handler);
}
}
}
return [preHooks, normalHooks, postHooks];
}
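// Hedged plugin-author sketch of the hook shapes sorted above (plugin name is hypothetical):
//   const htmlBannerPlugin = {
//     name: 'example:html-banner',
//     transformIndexHtml: {
//       order: 'pre', // or 'post'; a bare function runs with the normal hooks
//       handler(html) {
//         return html.replace('<title>', '<!-- built with vite -->\n<title>')
//       },
//     },
//   }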
async function applyHtmlTransforms(html, hooks, ctx) {
for (const hook of hooks) {
const res = await hook(html, ctx);
if (!res) {
continue;
}
if (typeof res === 'string') {
html = res;
}
else {
let tags;
if (Array.isArray(res)) {
tags = res;
}
else {
html = res.html || html;
tags = res.tags;
}
const headTags = [];
const headPrependTags = [];
const bodyTags = [];
const bodyPrependTags = [];
for (const tag of tags) {
if (tag.injectTo === 'body') {
bodyTags.push(tag);
}
else if (tag.injectTo === 'body-prepend') {
bodyPrependTags.push(tag);
}
else if (tag.injectTo === 'head') {
headTags.push(tag);
}
else {
headPrependTags.push(tag);
}
}
html = injectToHead(html, headPrependTags, true);
html = injectToHead(html, headTags);
html = injectToBody(html, bodyPrependTags, true);
html = injectToBody(html, bodyTags);
}
}
return html;
}
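// Illustrative note on the return shapes accepted above: a hook may return
//   - a string (which replaces the html),
//   - an array of tag descriptors, e.g. [{ tag: 'meta', attrs: { name: 'x', content: 'y' } }], or
//   - { html, tags }, where each tag may set injectTo: 'head' | 'head-prepend' (default) |
//     'body' | 'body-prepend' to pick the injection point handled by injectToHead/injectToBody.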
const importRE = /\bimport\s*("[^"]*[^\\]"|'[^']*[^\\]');*/g;
const commentRE$1 = /\/\*[\s\S]*?\*\/|\/\/.*$/gm;
function isEntirelyImport(code) {
// only consider "side-effect" imports, which match <script type=module> semantics exactly
// the regexes will remove too little in some exotic cases, but false-negatives are alright
return !code.replace(importRE, '').replace(commentRE$1, '').trim().length;
}
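// Hedged example for `isEntirelyImport` above: a chunk whose code is only
//   import "./polyfills.js"; // side-effect import
//   import './style.css';
// counts as "entirely import" (and may be inlined into the html), whereas any statement
// remaining after stripping imports and comments makes it return false.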
function getBaseInHTML(urlRelativePath, config) {
// Prefer explicit URL if defined for linking to assets and public files from HTML,
// even when base relative is specified
return config.base === './' || config.base === ''
? path$o.posix.join(path$o.posix.relative(urlRelativePath, '').slice(0, -2), './')
: config.base;
}
const headInjectRE = /([ \t]*)<\/head>/i;
const headPrependInjectRE = /([ \t]*)<head[^>]*>/i;
const htmlInjectRE = /<\/html>/i;
const htmlPrependInjectRE = /([ \t]*)<html[^>]*>/i;
const bodyInjectRE = /([ \t]*)<\/body>/i;
const bodyPrependInjectRE = /([ \t]*)<body[^>]*>/i;
const doctypePrependInjectRE = /<!doctype html>/i;
function injectToHead(html, tags, prepend = false) {
if (tags.length === 0)
return html;
if (prepend) {
// inject as the first element of head
if (headPrependInjectRE.test(html)) {
return html.replace(headPrependInjectRE, (match, p1) => `${match}\n${serializeTags(tags, incrementIndent(p1))}`);
}
}
else {
// inject before head close
if (headInjectRE.test(html)) {
// respect indentation of head tag
return html.replace(headInjectRE, (match, p1) => `${serializeTags(tags, incrementIndent(p1))}${match}`);
}
// try to inject before the body tag
if (bodyPrependInjectRE.test(html)) {
return html.replace(bodyPrependInjectRE, (match, p1) => `${serializeTags(tags, p1)}\n${match}`);
}
}
// if no head tag is present, we prepend the tag for both prepend and append
return prependInjectFallback(html, tags);
}
function injectToBody(html, tags, prepend = false) {
if (tags.length === 0)
return html;
if (prepend) {
// inject after body open
if (bodyPrependInjectRE.test(html)) {
return html.replace(bodyPrependInjectRE, (match, p1) => `${match}\n${serializeTags(tags, incrementIndent(p1))}`);
}
// if there is no body tag, inject after head or fall back to prepending in html
if (headInjectRE.test(html)) {
return html.replace(headInjectRE, (match, p1) => `${match}\n${serializeTags(tags, p1)}`);
}
return prependInjectFallback(html, tags);
}
else {
// inject before body close
if (bodyInjectRE.test(html)) {
return html.replace(bodyInjectRE, (match, p1) => `${serializeTags(tags, incrementIndent(p1))}${match}`);
}
// if no body tag is present, append to the html tag, or at the end of the file
if (htmlInjectRE.test(html)) {
return html.replace(htmlInjectRE, `${serializeTags(tags)}\n$&`);
}
return html + `\n` + serializeTags(tags);
}
}
function prependInjectFallback(html, tags) {
// prepend to the html tag, append after the doctype, or fall back to the document start
if (htmlPrependInjectRE.test(html)) {
return html.replace(htmlPrependInjectRE, `$&\n${serializeTags(tags)}`);
}
if (doctypePrependInjectRE.test(html)) {
return html.replace(doctypePrependInjectRE, `$&\n${serializeTags(tags)}`);
}
return serializeTags(tags) + html;
}
const unaryTags = new Set(['link', 'meta', 'base']);
function serializeTag({ tag, attrs, children }, indent = '') {
if (unaryTags.has(tag)) {
return `<${tag}${serializeAttrs(attrs)}>`;
}
else {
return `<${tag}${serializeAttrs(attrs)}>${serializeTags(children, incrementIndent(indent))}</${tag}>`;
}
}
function serializeTags(tags, indent = '') {
if (typeof tags === 'string') {
return tags;
}
else if (tags && tags.length) {
return tags.map((tag) => `${indent}${serializeTag(tag, indent)}\n`).join('');
}
return '';
}
function serializeAttrs(attrs) {
let res = '';
for (const key in attrs) {
if (typeof attrs[key] === 'boolean') {
res += attrs[key] ? ` ${key}` : ``;
}
else {
res += ` ${key}=${JSON.stringify(attrs[key])}`;
}
}
return res;
}
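// Illustrative output of the serializers above: the descriptor
//   { tag: 'script', attrs: { type: 'module', async: true, src: '/assets/index.js' } }
// serializes to `<script type="module" async src="/assets/index.js"></script>`, while
// unary tags such as { tag: 'link', attrs: { rel: 'stylesheet', href: '/a.css' } }
// are emitted without a closing tag.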
function incrementIndent(indent = '') {
return `${indent}${indent[0] === '\t' ? '\t' : ' '}`;
}
function getAttrKey(attr) {
return attr.prefix === undefined ? attr.name : `${attr.prefix}:${attr.name}`;
}
function resolveCSSOptions(options) {
if (options?.lightningcss) {
return {
...options,
lightningcss: {
...options.lightningcss,
targets: options.lightningcss.targets ??
convertTargets(ESBUILD_MODULES_TARGET),
},
};
}
// TS doesn't narrow the type with the previous if :/
return options;
}
const cssModuleRE = new RegExp(`\\.module${CSS_LANGS_RE.source}`);
const directRequestRE = /[?&]direct\b/;
const htmlProxyRE = /[?&]html-proxy\b/;
const commonjsProxyRE = /\?commonjs-proxy/;
const inlineRE = /[?&]inline\b/;
const inlineCSSRE = /[?&]inline-css\b/;
const styleAttrRE = /[?&]style-attr\b/;
const usedRE = /[?&]used\b/;
const varRE = /^var\(/i;
const cssBundleName = 'style.css';
const isCSSRequest = (request) => CSS_LANGS_RE.test(request);
const isModuleCSSRequest = (request) => cssModuleRE.test(request);
const isDirectCSSRequest = (request) => CSS_LANGS_RE.test(request) && directRequestRE.test(request);
const isDirectRequest = (request) => directRequestRE.test(request);
const cssModulesCache = new WeakMap();
const removedPureCssFilesCache = new WeakMap();
const postcssConfigCache = new WeakMap();
function encodePublicUrlsInCSS(config) {
return config.command === 'build';
}
/**
* Plugin applied before user plugins
*/
function cssPlugin(config) {
let server;
let moduleCache;
const resolveUrl = config.createResolver({
preferRelative: true,
tryIndex: false,
extensions: [],
});
// warm up cache for resolved postcss config
if (config.css?.transformer !== 'lightningcss') {
resolvePostcssConfig(config);
}
return {
name: 'vite:css',
configureServer(_server) {
server = _server;
},
buildStart() {
// Ensure a new cache for every build (i.e. rebuilding in watch mode)
moduleCache = new Map();
cssModulesCache.set(config, moduleCache);
removedPureCssFilesCache.set(config, new Map());
},
async transform(raw, id, options) {
if (!isCSSRequest(id) ||
commonjsProxyRE.test(id) ||
SPECIAL_QUERY_RE.test(id)) {
return;
}
const ssr = options?.ssr === true;
const urlReplacer = async (url, importer) => {
if (checkPublicFile(url, config)) {
if (encodePublicUrlsInCSS(config)) {
return publicFileToBuiltUrl(url, config);
}
else {
return joinUrlSegments(config.base, url);
}
}
const resolved = await resolveUrl(url, importer);
if (resolved) {
return fileToUrl(resolved, config, this);
}
if (config.command === 'build') {
const isExternal = config.build.rollupOptions.external
? resolveUserExternal(config.build.rollupOptions.external, url, // use URL as id since id could not be resolved
id, false)
: false;
if (!isExternal) {
// #9800 If we cannot resolve the css url, leave a warning.
config.logger.warnOnce(`\n${url} referenced in ${id} didn't resolve at build time; it will remain unchanged and be resolved at runtime`);
}
}
return url;
};
const { code: css, modules, deps, map, } = await compileCSS(id, raw, config, urlReplacer);
if (modules) {
moduleCache.set(id, modules);
}
// track deps for build watch mode
if (config.command === 'build' && config.build.watch && deps) {
for (const file of deps) {
this.addWatchFile(file);
}
}
// dev
if (server) {
// server only logic for handling CSS @import dependency hmr
const { moduleGraph } = server;
const thisModule = moduleGraph.getModuleById(id);
if (thisModule) {
// CSS modules cannot self-accept since they export values
const isSelfAccepting = !modules && !inlineRE.test(id) && !htmlProxyRE.test(id);
if (deps) {
// record deps in the module graph so edits to @import css can trigger
// main import to hot update
const depModules = new Set();
const devBase = config.base;
for (const file of deps) {
depModules.add(isCSSRequest(file)
? moduleGraph.createFileOnlyEntry(file)
: await moduleGraph.ensureEntryFromUrl(stripBase(await fileToUrl(file, config, this), (config.server?.origin ?? '') + devBase), ssr));
}
moduleGraph.updateModuleInfo(thisModule, depModules, null,
// The root CSS proxy module is self-accepting and should not
// have an explicit accept list
new Set(), null, isSelfAccepting, ssr);
for (const file of deps) {
this.addWatchFile(file);
}
}
else {
thisModule.isSelfAccepting = isSelfAccepting;
}
}
}
return {
code: css,
map,
};
},
};
}
/**
* Plugin applied after user plugins
*/
function cssPostPlugin(config) {
// initializing `styles` in buildStart would cause a loss of styles in watch mode
const styles = new Map();
// list of css emit tasks to guarantee the files are emitted in a deterministic order
let emitTasks = [];
let pureCssChunks;
// when there are multiple rollup outputs and extracting CSS, only emit once,
// since output formats have no effect on the generated CSS.
let outputToExtractedCSSMap;
let hasEmitted = false;
const rollupOptionsOutput = config.build.rollupOptions.output;
const assetFileNames = (Array.isArray(rollupOptionsOutput)
? rollupOptionsOutput[0]
: rollupOptionsOutput)?.assetFileNames;
const getCssAssetDirname = (cssAssetName) => {
if (!assetFileNames) {
return config.build.assetsDir;
}
else if (typeof assetFileNames === 'string') {
return path$o.dirname(assetFileNames);
}
else {
return path$o.dirname(assetFileNames({
name: cssAssetName,
type: 'asset',
source: '/* vite internal call, ignore */',
}));
}
};
return {
name: 'vite:css-post',
renderStart() {
// Ensure new caches for every build (i.e. rebuilding in watch mode)
pureCssChunks = new Set();
outputToExtractedCSSMap = new Map();
hasEmitted = false;
emitTasks = [];
},
async transform(css, id, options) {
if (!isCSSRequest(id) ||
commonjsProxyRE.test(id) ||
SPECIAL_QUERY_RE.test(id)) {
return;
}
css = stripBomTag(css);
const inlined = inlineRE.test(id);
const modules = cssModulesCache.get(config).get(id);
// #6984, #7552
// `foo.module.css` => modulesCode
// `foo.module.css?inline` => cssContent
const modulesCode = modules &&
!inlined &&
dataToEsm(modules, { namedExports: true, preferConst: true });
if (config.command === 'serve') {
const getContentWithSourcemap = async (content) => {
if (config.css?.devSourcemap) {
const sourcemap = this.getCombinedSourcemap();
if (sourcemap.mappings) {
await injectSourcesContent(sourcemap, cleanUrl(id), config.logger);
}
return getCodeWithSourcemap('css', content, sourcemap);
}
return content;
};
if (isDirectCSSRequest(id)) {
return null;
}
// server only
if (options?.ssr) {
return modulesCode || `export default ${JSON.stringify(css)}`;
}
if (inlined) {
return `export default ${JSON.stringify(css)}`;
}
const cssContent = await getContentWithSourcemap(css);
const code = [
`import { updateStyle as __vite__updateStyle, removeStyle as __vite__removeStyle } from ${JSON.stringify(path$o.posix.join(config.base, CLIENT_PUBLIC_PATH))}`,
`const __vite__id = ${JSON.stringify(id)}`,
`const __vite__css = ${JSON.stringify(cssContent)}`,
`__vite__updateStyle(__vite__id, __vite__css)`,
// css module exports change on edit so the module can't self-accept
`${modulesCode ||
`import.meta.hot.accept()\nexport default __vite__css`}`,
`import.meta.hot.prune(() => __vite__removeStyle(__vite__id))`,
].join('\n');
return { code, map: { mappings: '' } };
}
// build CSS handling ----------------------------------------------------
// record css
// cache the css compile result in a map,
// then use the cache to replace the inline style flag in `generateBundle` of the vite:build-html plugin
const inlineCSS = inlineCSSRE.test(id);
const isHTMLProxy = htmlProxyRE.test(id);
const query = parseRequest(id);
if (inlineCSS && isHTMLProxy) {
if (styleAttrRE.test(id)) {
css = css.replace(/"/g, '&quot;');
}
addToHTMLProxyTransformResult(`${getHash(cleanUrl(id))}_${Number.parseInt(query.index)}`, css);
return `export default ''`;
}
if (!inlined) {
styles.set(id, css);
}
let code;
if (usedRE.test(id)) {
if (modulesCode) {
code = modulesCode;
}
else {
let content = css;
if (config.build.cssMinify) {
content = await minifyCSS(content, config);
}
code = `export default ${JSON.stringify(content)}`;
}
}
else {
// if modulesCode exists, return it **even if** the request does not have `?used`
// this disables tree-shaking for `import './foo.module.css'`, but that usually does not happen
// this is a limitation of the current `?used` approach to making tree-shaking work
// See #8936 for more details
code = modulesCode || `export default ''`;
}
return {
code,
map: { mappings: '' },
// prevent the css module from being tree-shaken so that we can retrieve
// it in renderChunk()
moduleSideEffects: inlined ? false : 'no-treeshake',
};
},
async renderChunk(code, chunk, opts) {
let chunkCSS = '';
let isPureCssChunk = true;
const ids = Object.keys(chunk.modules);
for (const id of ids) {
if (styles.has(id)) {
chunkCSS += styles.get(id);
// a css module contains JS, so this is not a pure css chunk
if (cssModuleRE.test(id)) {
isPureCssChunk = false;
}
}
else {
// if the module does not have a style, then it's not a pure css chunk.
// this is true because in the `transform` hook above, only modules
// that are css get added to the `styles` map.
isPureCssChunk = false;
}
}
if (!chunkCSS) {
return null;
}
const publicAssetUrlMap = publicAssetUrlCache.get(config);
// resolve asset URL placeholders to their built file URLs
const resolveAssetUrlsInCss = (chunkCSS, cssAssetName) => {
const encodedPublicUrls = encodePublicUrlsInCSS(config);
const relative = config.base === './' || config.base === '';
const cssAssetDirname = encodedPublicUrls || relative
? getCssAssetDirname(cssAssetName)
: undefined;
const toRelative = (filename) => {
// relative base + extracted CSS
const relativePath = path$o.posix.relative(cssAssetDirname, filename);
return relativePath[0] === '.' ? relativePath : './' + relativePath;
};
// replace asset url references with resolved url.
chunkCSS = chunkCSS.replace(assetUrlRE, (_, fileHash, postfix = '') => {
const filename = this.getFileName(fileHash) + postfix;
chunk.viteMetadata.importedAssets.add(cleanUrl(filename));
return toOutputFilePathInCss(filename, 'asset', cssAssetName, 'css', config, toRelative);
});
// resolve public URL from CSS paths
if (encodedPublicUrls) {
const relativePathToPublicFromCSS = path$o.posix.relative(cssAssetDirname, '');
chunkCSS = chunkCSS.replace(publicAssetUrlRE, (_, hash) => {
const publicUrl = publicAssetUrlMap.get(hash).slice(1);
return toOutputFilePathInCss(publicUrl, 'public', cssAssetName, 'css', config, () => `${relativePathToPublicFromCSS}/${publicUrl}`);
});
}
return chunkCSS;
};
function ensureFileExt(name, ext) {
return normalizePath$3(path$o.format({ ...path$o.parse(name), base: undefined, ext }));
}
if (config.build.cssCodeSplit) {
if (isPureCssChunk) {
// this is a shared CSS-only chunk that is empty.
pureCssChunks.add(chunk);
}
if (opts.format === 'es' || opts.format === 'cjs') {
const cssAssetName = chunk.facadeModuleId
? normalizePath$3(path$o.relative(config.root, chunk.facadeModuleId))
: chunk.name;
const lang = path$o.extname(cssAssetName).slice(1);
const cssFileName = ensureFileExt(cssAssetName, '.css');
chunkCSS = resolveAssetUrlsInCss(chunkCSS, cssAssetName);
const previousTask = emitTasks[emitTasks.length - 1];
// finalizeCss is async which makes `emitFile` non-deterministic, so
// we use a `.then` to wait for previous tasks before finishing this
const thisTask = finalizeCss(chunkCSS, true, config).then((css) => {
chunkCSS = css;
// make sure the previous task is also finished, this works recursively
return previousTask;
});
// push this task so the next task can wait for this one
emitTasks.push(thisTask);
const emitTasksLength = emitTasks.length;
// wait for this and previous tasks to finish
await thisTask;
// emit corresponding css file
const referenceId = this.emitFile({
name: path$o.basename(cssFileName),
type: 'asset',
source: chunkCSS,
});
const originalName = isPreProcessor(lang) ? cssAssetName : cssFileName;
const isEntry = chunk.isEntry && isPureCssChunk;
generatedAssets
.get(config)
.set(referenceId, { originalName, isEntry });
chunk.viteMetadata.importedCss.add(this.getFileName(referenceId));
if (emitTasksLength === emitTasks.length) {
// this is the last task, clear `emitTasks` to free up memory
emitTasks = [];
}
}
else if (!config.build.ssr) {
// legacy build and inline css
// Entry chunk CSS will be collected into `chunk.viteMetadata.importedCss`
// and injected later by the `'vite:build-html'` plugin into the `index.html`
// so it will be duplicated. (https://github.com/vitejs/vite/issues/2062#issuecomment-782388010)
// But because entry chunk can be imported by dynamic import,
// we shouldn't remove the inlined CSS. (#10285)
chunkCSS = await finalizeCss(chunkCSS, true, config);
let cssString = JSON.stringify(chunkCSS);
cssString =
renderAssetUrlInJS(this, config, chunk, opts, cssString)?.toString() || cssString;
const style = `__vite_style__`;
const injectCode = `var ${style} = document.createElement('style');` +
`${style}.textContent = ${cssString};` +
`document.head.appendChild(${style});`;
let injectionPoint;
const wrapIdx = code.indexOf('System.register');
if (wrapIdx >= 0) {
const executeFnStart = code.indexOf('execute:', wrapIdx);
injectionPoint = code.indexOf('{', executeFnStart) + 1;
}
else {
const insertMark = "'use strict';";
injectionPoint = code.indexOf(insertMark) + insertMark.length;
}
const s = new MagicString(code);
s.appendRight(injectionPoint, injectCode);
if (config.build.sourcemap) {
// resolve public URL from CSS paths, we need to use absolute paths
return {
code: s.toString(),
map: s.generateMap({ hires: 'boundary' }),
};
}
else {
return { code: s.toString() };
}
}
}
else {
chunkCSS = resolveAssetUrlsInCss(chunkCSS, cssBundleName);
// finalizeCss is called for the aggregated chunk in generateBundle
outputToExtractedCSSMap.set(opts, (outputToExtractedCSSMap.get(opts) || '') + chunkCSS);
}
return null;
},
augmentChunkHash(chunk) {
if (chunk.viteMetadata?.importedCss.size) {
let hash = '';
for (const id of chunk.viteMetadata.importedCss) {
hash += id;
}
return hash;
}
},
async generateBundle(opts, bundle) {
// @ts-expect-error asset emits are skipped in legacy bundle
if (opts.__vite_skip_asset_emit__) {
return;
}
// remove empty css chunks and their imports
if (pureCssChunks.size) {
// map each pure css chunk (rendered chunk) to its corresponding bundle
// chunk. we check that by comparing the `moduleIds` as they have different
// filenames (rendered chunk has the !~{XXX}~ placeholder)
const pureCssChunkNames = [];
for (const pureCssChunk of pureCssChunks) {
for (const key in bundle) {
const bundleChunk = bundle[key];
if (bundleChunk.type === 'chunk' &&
arrayEqual(bundleChunk.moduleIds, pureCssChunk.moduleIds)) {
pureCssChunkNames.push(key);
break;
}
}
}
const emptyChunkFiles = pureCssChunkNames
.map((file) => path$o.basename(file))
.join('|')
.replace(/\./g, '\\.');
const emptyChunkRE = new RegExp(opts.format === 'es'
? `\\bimport\\s*["'][^"']*(?:${emptyChunkFiles})["'];\n?`
: `\\brequire\\(\\s*["'][^"']*(?:${emptyChunkFiles})["']\\);\n?`, 'g');
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === 'chunk') {
// remove pure css chunk from other chunk's imports,
// and also register the emitted CSS files under the importer
// chunks instead.
chunk.imports = chunk.imports.filter((file) => {
if (pureCssChunkNames.includes(file)) {
const { importedCss, importedAssets } = bundle[file].viteMetadata;
importedCss.forEach((file) => chunk.viteMetadata.importedCss.add(file));
importedAssets.forEach((file) => chunk.viteMetadata.importedAssets.add(file));
return false;
}
return true;
});
chunk.code = chunk.code.replace(emptyChunkRE,
// remove css import while preserving source map location
(m) => `/* empty css ${''.padEnd(m.length - 15)}*/`);
}
}
const removedPureCssFiles = removedPureCssFilesCache.get(config);
pureCssChunkNames.forEach((fileName) => {
removedPureCssFiles.set(fileName, bundle[fileName]);
delete bundle[fileName];
delete bundle[`${fileName}.map`];
});
}
let extractedCss = outputToExtractedCSSMap.get(opts);
if (extractedCss && !hasEmitted) {
hasEmitted = true;
extractedCss = await finalizeCss(extractedCss, true, config);
this.emitFile({
name: cssBundleName,
type: 'asset',
source: extractedCss,
});
}
},
};
}
function createCSSResolvers(config) {
let cssResolve;
let sassResolve;
let lessResolve;
return {
get css() {
return (cssResolve ||
(cssResolve = config.createResolver({
extensions: ['.css'],
mainFields: ['style'],
conditions: ['style'],
tryIndex: false,
preferRelative: true,
})));
},
get sass() {
return (sassResolve ||
(sassResolve = config.createResolver({
extensions: ['.scss', '.sass', '.css'],
mainFields: ['sass', 'style'],
conditions: ['sass', 'style'],
tryIndex: true,
tryPrefix: '_',
preferRelative: true,
})));
},
get less() {
return (lessResolve ||
(lessResolve = config.createResolver({
extensions: ['.less', '.css'],
mainFields: ['less', 'style'],
conditions: ['less', 'style'],
tryIndex: false,
preferRelative: true,
})));
},
};
}
function getCssResolversKeys(resolvers) {
return Object.keys(resolvers);
}
async function compileCSSPreprocessors(id, lang, code, config) {
const { preprocessorOptions, devSourcemap } = config.css ?? {};
const atImportResolvers = getAtImportResolvers(config);
const preProcessor = preProcessors[lang];
let opts = (preprocessorOptions && preprocessorOptions[lang]) || {};
// support @import from node dependencies by default
switch (lang) {
case "scss" /* PreprocessLang.scss */:
case "sass" /* PreprocessLang.sass */:
opts = {
includePaths: ['node_modules'],
alias: config.resolve.alias,
...opts,
};
break;
case "less" /* PreprocessLang.less */:
case "styl" /* PreprocessLang.styl */:
case "stylus" /* PreprocessLang.stylus */:
opts = {
paths: ['node_modules'],
alias: config.resolve.alias,
...opts,
};
}
// important: set this for relative import resolving
opts.filename = cleanUrl(id);
opts.enableSourcemap = devSourcemap ?? false;
const preprocessResult = await preProcessor(code, config.root, opts, atImportResolvers);
if (preprocessResult.error) {
throw preprocessResult.error;
}
let deps;
if (preprocessResult.deps) {
const normalizedFilename = normalizePath$3(opts.filename);
// sometimes sass registers the file itself as a dep
deps = new Set([...preprocessResult.deps].filter((dep) => normalizePath$3(dep) !== normalizedFilename));
}
return {
code: preprocessResult.code,
map: combineSourcemapsIfExists(opts.filename, preprocessResult.map, preprocessResult.additionalMap),
deps,
};
}
const configToAtImportResolvers = new WeakMap();
function getAtImportResolvers(config) {
let atImportResolvers = configToAtImportResolvers.get(config);
if (!atImportResolvers) {
atImportResolvers = createCSSResolvers(config);
configToAtImportResolvers.set(config, atImportResolvers);
}
return atImportResolvers;
}
async function compileCSS(id, code, config, urlReplacer) {
if (config.css?.transformer === 'lightningcss') {
return compileLightningCSS(id, code, config, urlReplacer);
}
const { modules: modulesOptions, devSourcemap } = config.css || {};
const isModule = modulesOptions !== false && cssModuleRE.test(id);
// although at serve time it can work without processing, we do need to
// crawl them in order to register watch dependencies.
const needInlineImport = code.includes('@import');
const hasUrl = cssUrlRE.test(code) || cssImageSetRE.test(code);
const lang = id.match(CSS_LANGS_RE)?.[1];
const postcssConfig = await resolvePostcssConfig(config);
// 1. plain css that needs no processing
if (lang === 'css' &&
!postcssConfig &&
!isModule &&
!needInlineImport &&
!hasUrl) {
return { code, map: null };
}
let modules;
const deps = new Set();
// 2. pre-processors: sass etc.
let preprocessorMap;
if (isPreProcessor(lang)) {
const preprocessorResult = await compileCSSPreprocessors(id, lang, code, config);
code = preprocessorResult.code;
preprocessorMap = preprocessorResult.map;
preprocessorResult.deps?.forEach((dep) => deps.add(dep));
}
// 3. postcss
const atImportResolvers = getAtImportResolvers(config);
const postcssOptions = (postcssConfig && postcssConfig.options) || {};
const postcssPlugins = postcssConfig && postcssConfig.plugins ? postcssConfig.plugins.slice() : [];
if (needInlineImport) {
postcssPlugins.unshift((await importPostcssImport()).default({
async resolve(id, basedir) {
const publicFile = checkPublicFile(id, config);
if (publicFile) {
return publicFile;
}
const resolved = await atImportResolvers.css(id, path$o.join(basedir, '*'));
if (resolved) {
return path$o.resolve(resolved);
}
// postcss-import falls back to `resolve` dep if this is unresolved,
// but we've shimmed to remove the `resolve` dep to cut down on bundle size.
// warn here to provide a better error message.
if (!path$o.isAbsolute(id)) {
config.logger.error(colors$1.red(`Unable to resolve \`@import "${id}"\` from ${basedir}`));
}
return id;
},
async load(id) {
const code = await fs$l.promises.readFile(id, 'utf-8');
const lang = id.match(CSS_LANGS_RE)?.[1];
if (isPreProcessor(lang)) {
const result = await compileCSSPreprocessors(id, lang, code, config);
result.deps?.forEach((dep) => deps.add(dep));
// TODO: support source map
return result.code;
}
return code;
},
nameLayer(index) {
return `vite--anon-layer-${getHash(id)}-${index}`;
},
}));
}
if (urlReplacer) {
postcssPlugins.push(UrlRewritePostcssPlugin({
replacer: urlReplacer,
logger: config.logger,
}));
}
if (isModule) {
postcssPlugins.unshift((await importPostcssModules()).default({
...modulesOptions,
localsConvention: modulesOptions?.localsConvention,
getJSON(cssFileName, _modules, outputFileName) {
modules = _modules;
if (modulesOptions && typeof modulesOptions.getJSON === 'function') {
modulesOptions.getJSON(cssFileName, _modules, outputFileName);
}
},
async resolve(id, importer) {
for (const key of getCssResolversKeys(atImportResolvers)) {
const resolved = await atImportResolvers[key](id, importer);
if (resolved) {
return path$o.resolve(resolved);
}
}
return id;
},
}));
}
if (!postcssPlugins.length) {
return {
code,
map: preprocessorMap,
deps,
};
}
let postcssResult;
try {
const source = removeDirectQuery(id);
const postcss = await importPostcss();
// postcss is an unbundled dep and should be lazy imported
postcssResult = await postcss.default(postcssPlugins).process(code, {
...postcssOptions,
parser: lang === 'sss'
? loadPreprocessor("sugarss" /* PostCssDialectLang.sss */, config.root)
: postcssOptions.parser,
to: source,
from: source,
...(devSourcemap
? {
map: {
inline: false,
annotation: false,
// postcss may return virtual files
// we cannot obtain content of them, so this needs to be enabled
sourcesContent: true,
// when "prev: preprocessorMap", the result map may include duplicate filename in `postcssResult.map.sources`
// prev: preprocessorMap,
},
}
: {}),
});
// record CSS dependencies from @imports
for (const message of postcssResult.messages) {
if (message.type === 'dependency') {
deps.add(normalizePath$3(message.file));
}
else if (message.type === 'dir-dependency') {
// https://github.com/postcss/postcss/blob/main/docs/guidelines/plugin.md#3-dependencies
const { dir, glob: globPattern = '**' } = message;
const pattern = glob.escapePath(normalizePath$3(path$o.resolve(path$o.dirname(id), dir))) +
`/` +
globPattern;
const files = glob.sync(pattern, {
ignore: ['**/node_modules/**'],
});
for (let i = 0; i < files.length; i++) {
deps.add(files[i]);
}
}
else if (message.type === 'warning') {
let msg = `[vite:css] ${message.text}`;
if (message.line && message.column) {
msg += `\n${generateCodeFrame(code, {
line: message.line,
column: message.column,
})}`;
}
config.logger.warn(colors$1.yellow(msg));
}
}
}
catch (e) {
e.message = `[postcss] ${e.message}`;
e.code = code;
e.loc = {
column: e.column,
line: e.line,
};
throw e;
}
if (!devSourcemap) {
return {
ast: postcssResult,
code: postcssResult.css,
map: { mappings: '' },
modules,
deps,
};
}
const rawPostcssMap = postcssResult.map.toJSON();
const postcssMap = await formatPostcssSourceMap(
// version property of rawPostcssMap is declared as string
// but actually it is a number
rawPostcssMap, cleanUrl(id));
return {
ast: postcssResult,
code: postcssResult.css,
map: combineSourcemapsIfExists(cleanUrl(id), postcssMap, preprocessorMap),
modules,
deps,
};
}
function createCachedImport(imp) {
let cached;
return () => {
if (!cached) {
cached = imp().then((module) => {
cached = module;
return module;
});
}
return cached;
};
}
const importPostcssImport = createCachedImport(() => import('./dep-e4a495ce.js').then(function (n) { return n.i; }));
const importPostcssModules = createCachedImport(() => import('./dep-98d4beff.js').then(function (n) { return n.i; }));
const importPostcss = createCachedImport(() => import('postcss'));
/**
* @experimental
*/
async function preprocessCSS(code, filename, config) {
return await compileCSS(filename, code, config);
}
const postcssReturnsVirtualFilesRE = /^<.+>$/;
async function formatPostcssSourceMap(rawMap, file) {
const inputFileDir = path$o.dirname(file);
const sources = rawMap.sources.map((source) => {
const cleanSource = cleanUrl(decodeURIComponent(source));
if (postcssReturnsVirtualFilesRE.test(cleanSource)) {
return `\0${cleanSource}`;
}
return normalizePath$3(path$o.resolve(inputFileDir, cleanSource));
});
return {
file,
mappings: rawMap.mappings,
names: rawMap.names,
sources,
sourcesContent: rawMap.sourcesContent,
version: rawMap.version,
};
}
function combineSourcemapsIfExists(filename, map1, map2) {
return map1 && map2
? combineSourcemaps(filename, [
// type of version property of ExistingRawSourceMap is number
// but it is always 3
map1,
map2,
])
: map1;
}
async function finalizeCss(css, minify, config) {
// hoist external @imports and @charset to the top of the CSS chunk per spec (#1845 and #6333)
if (css.includes('@import') || css.includes('@charset')) {
css = await hoistAtRules(css);
}
if (minify && config.build.cssMinify) {
css = await minifyCSS(css, config);
}
return css;
}
async function resolvePostcssConfig(config) {
let result = postcssConfigCache.get(config);
if (result !== undefined) {
return await result;
}
// inline postcss config via vite config
const inlineOptions = config.css?.postcss;
if (isObject$2(inlineOptions)) {
const options = { ...inlineOptions };
delete options.plugins;
result = {
options,
plugins: inlineOptions.plugins || [],
};
}
else {
const searchPath = typeof inlineOptions === 'string' ? inlineOptions : config.root;
result = postcssrc({}, searchPath).catch((e) => {
if (!/No PostCSS Config found/.test(e.message)) {
if (e instanceof Error) {
const { name, message, stack } = e;
e.name = 'Failed to load PostCSS config';
e.message = `Failed to load PostCSS config (searchPath: ${searchPath}): [${name}] ${message}\n${stack}`;
e.stack = ''; // add stack to message to retain stack
throw e;
}
else {
throw new Error(`Failed to load PostCSS config: ${e}`);
}
}
return null;
});
// replace the cached promise with the result object when finished
result.then((resolved) => {
postcssConfigCache.set(config, resolved);
});
}
postcssConfigCache.set(config, result);
return result;
}
// https://drafts.csswg.org/css-syntax-3/#identifier-code-point
const cssUrlRE = /(?<=^|[^\w\-\u0080-\uffff])url\((\s*('[^']+'|"[^"]+")\s*|[^'")]+)\)/;
const cssDataUriRE = /(?<=^|[^\w\-\u0080-\uffff])data-uri\((\s*('[^']+'|"[^"]+")\s*|[^'")]+)\)/;
const importCssRE = /@import ('[^']+\.css'|"[^"]+\.css"|[^'")]+\.css)/;
// Assuming a function name won't be longer than 256 chars
// eslint-disable-next-line regexp/no-unused-capturing-group -- doesn't detect asyncReplace usage
const cssImageSetRE = /(?<=image-set\()((?:[\w\-]{1,256}\([^)]*\)|[^)])*)(?=\))/;
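// Illustrative matches for the regexes above (paths are hypothetical):
//   cssUrlRE       matches the inner part of  url(./img/logo.png)  and  url("./img/logo.png")
//   cssDataUriRE   matches the inner part of  data-uri('./icon.svg')   (a Less helper)
//   importCssRE    matches  @import "./base.css"  and  @import './base.css'
//   cssImageSetRE  captures the full argument list of  image-set("a.png" 1x, "a@2x.png" 2x)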
const UrlRewritePostcssPlugin = (opts) => {
if (!opts) {
throw new Error('base or replace is required');
}
return {
postcssPlugin: 'vite-url-rewrite',
Once(root) {
const promises = [];
root.walkDecls((declaration) => {
const importer = declaration.source?.input.file;
if (!importer) {
opts.logger.warnOnce('\nA PostCSS plugin did not pass the `from` option to `postcss.parse`. ' +
'This may cause imported assets to be incorrectly transformed. ' +
"If you've recently added a PostCSS plugin that raised this warning, " +
'please contact the package author to fix the issue.');
}
const isCssUrl = cssUrlRE.test(declaration.value);
const isCssImageSet = cssImageSetRE.test(declaration.value);
if (isCssUrl || isCssImageSet) {
const replacerForDeclaration = (rawUrl) => {
return opts.replacer(rawUrl, importer);
};
const rewriterToUse = isCssImageSet
? rewriteCssImageSet
: rewriteCssUrls;
promises.push(rewriterToUse(declaration.value, replacerForDeclaration).then((url) => {
declaration.value = url;
}));
}
});
if (promises.length) {
return Promise.all(promises);
}
},
};
};
UrlRewritePostcssPlugin.postcss = true;
function rewriteCssUrls(css, replacer) {
return asyncReplace(css, cssUrlRE, async (match) => {
const [matched, rawUrl] = match;
return await doUrlReplace(rawUrl.trim(), matched, replacer);
});
}
function rewriteCssDataUris(css, replacer) {
return asyncReplace(css, cssDataUriRE, async (match) => {
const [matched, rawUrl] = match;
return await doUrlReplace(rawUrl.trim(), matched, replacer, 'data-uri');
});
}
function rewriteImportCss(css, replacer) {
return asyncReplace(css, importCssRE, async (match) => {
const [matched, rawUrl] = match;
return await doImportCSSReplace(rawUrl, matched, replacer);
});
}
// TODO: image and cross-fade could contain a "url" that needs to be processed
// https://drafts.csswg.org/css-images-4/#image-notation
// https://drafts.csswg.org/css-images-4/#cross-fade-function
const cssNotProcessedRE = /(?:gradient|element|cross-fade|image)\(/;
async function rewriteCssImageSet(css, replacer) {
return await asyncReplace(css, cssImageSetRE, async (match) => {
const [, rawUrl] = match;
const url = await processSrcSet(rawUrl, async ({ url }) => {
// the url may be wrapped in url(...)
if (cssUrlRE.test(url)) {
return await rewriteCssUrls(url, replacer);
}
if (!cssNotProcessedRE.test(url)) {
return await doUrlReplace(url, url, replacer);
}
return url;
});
return url;
});
}
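// URLs that should never be rewritten: external URLs, data URIs, hash fragments,
// and values matched by `varRE` (CSS variable references such as `var(...)`).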
function skipUrlReplacer(rawUrl) {
return (isExternalUrl(rawUrl) ||
isDataUrl(rawUrl) ||
rawUrl[0] === '#' ||
varRE.test(rawUrl));
}
async function doUrlReplace(rawUrl, matched, replacer, funcName = 'url') {
let wrap = '';
const first = rawUrl[0];
if (first === `"` || first === `'`) {
wrap = first;
rawUrl = rawUrl.slice(1, -1);
}
if (skipUrlReplacer(rawUrl)) {
return matched;
}
const newUrl = await replacer(rawUrl);
if (wrap === '' && newUrl !== encodeURI(newUrl)) {
// The new url might need wrapping even if the original did not have it, e.g. if a space was added during replacement
wrap = "'";
}
return `${funcName}(${wrap}${newUrl}${wrap})`;
}
async function doImportCSSReplace(rawUrl, matched, replacer) {
let wrap = '';
const first = rawUrl[0];
if (first === `"` || first === `'`) {
wrap = first;
rawUrl = rawUrl.slice(1, -1);
}
if (isExternalUrl(rawUrl) || isDataUrl(rawUrl) || rawUrl[0] === '#') {
return matched;
}
return `@import ${wrap}${await replacer(rawUrl)}${wrap}`;
}
async function minifyCSS(css, config) {
if (config.build.cssMinify === 'lightningcss') {
const { code, warnings } = (await importLightningCSS()).transform({
...config.css?.lightningcss,
targets: convertTargets(config.build.cssTarget),
cssModules: undefined,
filename: cssBundleName,
code: Buffer.from(css),
minify: true,
});
if (warnings.length) {
config.logger.warn(colors$1.yellow(`warnings when minifying css:\n${warnings
.map((w) => w.message)
.join('\n')}`));
}
return code.toString();
}
try {
const { code, warnings } = await transform$1(css, {
loader: 'css',
target: config.build.cssTarget || undefined,
...resolveMinifyCssEsbuildOptions(config.esbuild || {}),
});
if (warnings.length) {
const msgs = await formatMessages(warnings, { kind: 'warning' });
config.logger.warn(colors$1.yellow(`warnings when minifying css:\n${msgs.join('\n')}`));
}
return code;
}
catch (e) {
if (e.errors) {
e.message = '[esbuild css minify] ' + e.message;
const msgs = await formatMessages(e.errors, { kind: 'error' });
e.frame = '\n' + msgs.join('\n');
e.loc = e.errors[0].location;
}
throw e;
}
}
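// Derive the esbuild options used for CSS minification: if the user set any of the
// granular minify* flags, honor them (defaulting the rest to true); otherwise fall
// back to plain `minify: true`.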
function resolveMinifyCssEsbuildOptions(options) {
const base = {
charset: options.charset ?? 'utf8',
logLevel: options.logLevel,
logLimit: options.logLimit,
logOverride: options.logOverride,
legalComments: options.legalComments,
};
if (options.minifyIdentifiers != null ||
options.minifySyntax != null ||
options.minifyWhitespace != null) {
return {
...base,
minifyIdentifiers: options.minifyIdentifiers ?? true,
minifySyntax: options.minifySyntax ?? true,
minifyWhitespace: options.minifyWhitespace ?? true,
};
}
else {
return { ...base, minify: true };
}
}
async function hoistAtRules(css) {
const s = new MagicString(css);
const cleanCss = emptyCssComments(css);
let match;
// #1845
// CSS @import can only appear at top of the file. We need to hoist all @import
// to top when multiple files are concatenated.
// match until semicolon that's not in quotes
const atImportRE = /@import(?:\s*(?:url\([^)]*\)|"(?:[^"]|(?<=\\)")*"|'(?:[^']|(?<=\\)')*').*?|[^;]*);/g;
while ((match = atImportRE.exec(cleanCss))) {
s.remove(match.index, match.index + match[0].length);
// Use `appendLeft` instead of `prepend` to preserve original @import order
s.appendLeft(0, match[0]);
}
// #6333
// CSS @charset must come first in the file, so hoist the first one to the top
const atCharsetRE = /@charset(?:\s*(?:"(?:[^"]|(?<=\\)")*"|'(?:[^']|(?<=\\)')*').*?|[^;]*);/g;
let foundCharset = false;
while ((match = atCharsetRE.exec(cleanCss))) {
s.remove(match.index, match.index + match[0].length);
if (!foundCharset) {
s.prepend(match[0]);
foundCharset = true;
}
}
return s.toString();
}
const loadedPreprocessors = {};
// TODO: use dynamic import
const _require$2 = createRequire$1(import.meta.url);
function loadPreprocessor(lang, root) {
if (lang in loadedPreprocessors) {
return loadedPreprocessors[lang];
}
try {
const resolved = requireResolveFromRootWithFallback(root, lang);
return (loadedPreprocessors[lang] = _require$2(resolved));
}
catch (e) {
if (e.code === 'MODULE_NOT_FOUND') {
const installCommand = getPackageManagerCommand('install');
throw new Error(`Preprocessor dependency "${lang}" not found. Did you install it? Try \`${installCommand} -D ${lang}\`.`);
}
else {
const message = new Error(`Preprocessor dependency "${lang}" failed to load:\n${e.message}`);
message.stack = e.stack + '\n' + message.stack;
throw message;
}
}
}
// in unix, scss might append `location.href` in environments that shim `location`
// see https://github.com/sass/dart-sass/issues/710
function cleanScssBugUrl(url) {
if (
// check bug via `window` and `location` global
typeof window !== 'undefined' &&
typeof location !== 'undefined' &&
typeof location?.href === 'string') {
const prefix = location.href.replace(/\/$/, '');
return url.replace(prefix, '');
}
else {
return url;
}
}
function fixScssBugImportValue(data) {
// the scss bug doesn't load files properly, so we load them ourselves
// to prevent an internal error when sass tries to load them itself
if (
// check bug via `window` and `location` global
typeof window !== 'undefined' &&
typeof location !== 'undefined' &&
data &&
'file' in data &&
(!('contents' in data) || data.contents == null)) {
// @ts-expect-error we need to preserve file property for HMR
data.contents = fs$l.readFileSync(data.file, 'utf-8');
}
return data;
}
// .scss/.sass processor
const scss = async (source, root, options, resolvers) => {
const render = loadPreprocessor("sass" /* PreprocessLang.sass */, root).render;
// NOTE: `sass` always runs its own importer first, and only falls back to
// the `importer` option when it can't resolve a path
const internalImporter = (url, importer, done) => {
importer = cleanScssBugUrl(importer);
resolvers.sass(url, importer).then((resolved) => {
if (resolved) {
rebaseUrls(resolved, options.filename, options.alias, '$')
.then((data) => done?.(fixScssBugImportValue(data)))
.catch((data) => done?.(data));
}
else {
done?.(null);
}
});
};
const importer = [internalImporter];
if (options.importer) {
Array.isArray(options.importer)
? importer.unshift(...options.importer)
: importer.unshift(options.importer);
}
const { content: data, map: additionalMap } = await getSource(source, options.filename, options.additionalData, options.enableSourcemap);
const finalOptions = {
...options,
data,
file: options.filename,
outFile: options.filename,
importer,
...(options.enableSourcemap
? {
sourceMap: true,
omitSourceMapUrl: true,
sourceMapRoot: path$o.dirname(options.filename),
}
: {}),
};
try {
const result = await new Promise((resolve, reject) => {
render(finalOptions, (err, res) => {
if (err) {
reject(err);
}
else {
resolve(res);
}
});
});
const deps = result.stats.includedFiles.map((f) => cleanScssBugUrl(f));
const map = result.map
? JSON.parse(result.map.toString())
: undefined;
return {
code: result.css.toString(),
map,
additionalMap,
deps,
};
}
catch (e) {
// normalize SASS error
e.message = `[sass] ${e.message}`;
e.id = e.file;
e.frame = e.formatted;
return { code: '', error: e, deps: [] };
}
};
const sass = (source, root, options, aliasResolver) => scss(source, root, {
...options,
indentedSyntax: true,
}, aliasResolver);
/**
* relative url() inside \@imported sass and less files must be rebased to use
* root file as base.
*/
async function rebaseUrls(file, rootFile, alias, variablePrefix) {
file = path$o.resolve(file); // ensure os-specific slashes
// in the same dir, no need to rebase
const fileDir = path$o.dirname(file);
const rootDir = path$o.dirname(rootFile);
if (fileDir === rootDir) {
return { file };
}
const content = await fsp.readFile(file, 'utf-8');
// url() usages
const hasUrls = cssUrlRE.test(content);
// data-uri() usages
const hasDataUris = cssDataUriRE.test(content);
// @import xxx.css usages
const hasImportCss = importCssRE.test(content);
if (!hasUrls && !hasDataUris && !hasImportCss) {
return { file };
}
let rebased;
const rebaseFn = (url) => {
if (url[0] === '/')
return url;
// ignore urls starting with a variable
if (url.startsWith(variablePrefix))
return url;
// match alias, no need to rewrite
for (const { find } of alias) {
const matches = typeof find === 'string' ? url.startsWith(find) : find.test(url);
if (matches) {
return url;
}
}
const absolute = path$o.resolve(fileDir, url);
const relative = path$o.relative(rootDir, absolute);
return normalizePath$3(relative);
};
// fix css imports in less such as `@import "foo.css"`
if (hasImportCss) {
rebased = await rewriteImportCss(content, rebaseFn);
}
if (hasUrls) {
rebased = await rewriteCssUrls(rebased || content, rebaseFn);
}
if (hasDataUris) {
rebased = await rewriteCssDataUris(rebased || content, rebaseFn);
}
return {
file,
contents: rebased,
};
}
// .less
const less = async (source, root, options, resolvers) => {
const nodeLess = loadPreprocessor("less" /* PreprocessLang.less */, root);
const viteResolverPlugin = createViteLessPlugin(nodeLess, options.filename, options.alias, resolvers);
const { content, map: additionalMap } = await getSource(source, options.filename, options.additionalData, options.enableSourcemap);
let result;
try {
result = await nodeLess.render(content, {
...options,
plugins: [viteResolverPlugin, ...(options.plugins || [])],
...(options.enableSourcemap
? {
sourceMap: {
outputSourceFiles: true,
sourceMapFileInline: false,
},
}
: {}),
});
}
catch (e) {
const error = e;
// normalize error info
const normalizedError = new Error(`[less] ${error.message || error.type}`);
normalizedError.loc = {
file: error.filename || options.filename,
line: error.line,
column: error.column,
};
return { code: '', error: normalizedError, deps: [] };
}
const map = result.map && JSON.parse(result.map);
if (map) {
delete map.sourcesContent;
}
return {
code: result.css.toString(),
map,
additionalMap,
deps: result.imports,
};
};
/**
* Less manager, lazy initialized
*/
let ViteLessManager;
function createViteLessPlugin(less, rootFile, alias, resolvers) {
if (!ViteLessManager) {
ViteLessManager = class ViteManager extends less.FileManager {
constructor(rootFile, resolvers, alias) {
super();
this.rootFile = rootFile;
this.resolvers = resolvers;
this.alias = alias;
}
supports(filename) {
return !isExternalUrl(filename);
}
supportsSync() {
return false;
}
async loadFile(filename, dir, opts, env) {
const resolved = await this.resolvers.less(filename, path$o.join(dir, '*'));
if (resolved) {
const result = await rebaseUrls(resolved, this.rootFile, this.alias, '@');
let contents;
if (result && 'contents' in result) {
contents = result.contents;
}
else {
contents = await fsp.readFile(resolved, 'utf-8');
}
return {
filename: path$o.resolve(resolved),
contents,
};
}
else {
return super.loadFile(filename, dir, opts, env);
}
}
};
}
return {
install(_, pluginManager) {
pluginManager.addFileManager(new ViteLessManager(rootFile, resolvers, alias));
},
minVersion: [3, 0, 0],
};
}
// .styl
const styl = async (source, root, options) => {
const nodeStylus = loadPreprocessor("stylus" /* PreprocessLang.stylus */, root);
// Get source with preprocessor options.additionalData. Make sure a new line separator
// is added to avoid any render error, as added stylus content may not have semi-colon separators
const { content, map: additionalMap } = await getSource(source, options.filename, options.additionalData, options.enableSourcemap, '\n');
// Get preprocessor options.imports dependencies as stylus
// does not return them with its builtin `.deps()` method
const importsDeps = (options.imports ?? []).map((dep) => path$o.resolve(dep));
try {
const ref = nodeStylus(content, options);
if (options.define) {
for (const key in options.define) {
ref.define(key, options.define[key]);
}
}
if (options.enableSourcemap) {
ref.set('sourcemap', {
comment: false,
inline: false,
basePath: root,
});
}
const result = ref.render();
// Concat imports deps with computed deps
const deps = [...ref.deps(), ...importsDeps];
// @ts-expect-error sourcemap exists
const map = ref.sourcemap;
return {
code: result,
map: formatStylusSourceMap(map, root),
additionalMap,
deps,
};
}
catch (e) {
e.message = `[stylus] ${e.message}`;
return { code: '', error: e, deps: [] };
}
};
function formatStylusSourceMap(mapBefore, root) {
if (!mapBefore)
return undefined;
const map = { ...mapBefore };
const resolveFromRoot = (p) => normalizePath$3(path$o.resolve(root, p));
if (map.file) {
map.file = resolveFromRoot(map.file);
}
map.sources = map.sources.map(resolveFromRoot);
return map;
}
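// Apply `additionalData` to the source: a function replaces the content outright,
// a string is prepended (with `sep`); when sourcemaps are enabled, a MagicString
// map records the injected prefix so original positions are preserved.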
async function getSource(source, filename, additionalData, enableSourcemap, sep = '') {
if (!additionalData)
return { content: source };
if (typeof additionalData === 'function') {
const newContent = await additionalData(source, filename);
if (typeof newContent === 'string') {
return { content: newContent };
}
return newContent;
}
if (!enableSourcemap) {
return { content: additionalData + sep + source };
}
const ms = new MagicString(source);
ms.appendLeft(0, sep);
ms.appendLeft(0, additionalData);
const map = ms.generateMap({ hires: 'boundary' });
map.file = filename;
map.sources = [filename];
return {
content: ms.toString(),
map,
};
}
const preProcessors = Object.freeze({
["less" /* PreprocessLang.less */]: less,
["sass" /* PreprocessLang.sass */]: sass,
["scss" /* PreprocessLang.scss */]: scss,
["styl" /* PreprocessLang.styl */]: styl,
["stylus" /* PreprocessLang.stylus */]: styl,
});
function isPreProcessor(lang) {
return lang && lang in preProcessors;
}
const importLightningCSS = createCachedImport(() => import('lightningcss'));
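// Lightning CSS pipeline: style attributes go through transformStyleAttribute,
// everything else through bundleAsync with a custom resolver that reads from the
// in-memory source or disk, resolves @imports via Vite's resolvers while tracking
// deps, then url() dependency placeholders are rewritten and CSS modules exports
// are collected.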
async function compileLightningCSS(id, src, config, urlReplacer) {
const deps = new Set();
// Relative path is needed to get stable hash when using CSS modules
const filename = cleanUrl(path$o.relative(config.root, id));
const toAbsolute = (filePath) => path$o.isAbsolute(filePath) ? filePath : path$o.join(config.root, filePath);
const res = styleAttrRE.test(id)
? (await importLightningCSS()).transformStyleAttribute({
filename,
code: Buffer.from(src),
targets: config.css?.lightningcss?.targets,
minify: config.isProduction && !!config.build.cssMinify,
analyzeDependencies: true,
})
: await (await importLightningCSS()).bundleAsync({
filename,
resolver: {
read(filePath) {
if (filePath === filename) {
return src;
}
// This happens with html-proxy (#13776)
if (!filePath.endsWith('.css')) {
return src;
}
return fs$l.readFileSync(toAbsolute(filePath), 'utf-8');
},
async resolve(id, from) {
const publicFile = checkPublicFile(id, config);
if (publicFile) {
return publicFile;
}
const resolved = await getAtImportResolvers(config).css(id, toAbsolute(from));
if (resolved) {
deps.add(resolved);
return resolved;
}
return id;
},
},
targets: config.css?.lightningcss?.targets,
minify: config.isProduction && !!config.build.cssMinify,
sourceMap: config.css?.devSourcemap,
analyzeDependencies: true,
cssModules: cssModuleRE.test(id)
? config.css?.lightningcss?.cssModules ?? true
: undefined,
drafts: config.css?.lightningcss?.drafts,
});
let css = res.code.toString();
for (const dep of res.dependencies) {
switch (dep.type) {
case 'url':
if (skipUrlReplacer(dep.url)) {
css = css.replace(dep.placeholder, dep.url);
break;
}
deps.add(dep.url);
if (urlReplacer) {
css = css.replace(dep.placeholder, await urlReplacer(dep.url, id));
}
break;
default:
throw new Error(`Unsupported dependency type: ${dep.type}`);
}
}
let modules;
if ('exports' in res && res.exports) {
modules = {};
// https://github.com/parcel-bundler/lightningcss/issues/291
const sortedEntries = Object.entries(res.exports).sort((a, b) => a[0].localeCompare(b[0]));
for (const [key, value] of sortedEntries) {
modules[key] = value.name;
// https://lightningcss.dev/css-modules.html#class-composition
for (const c of value.composes) {
modules[key] += ' ' + c.name;
}
}
}
return {
code: css,
map: 'map' in res ? res.map?.toString() : undefined,
deps,
modules,
};
}
// Convert https://esbuild.github.io/api/#target
// To https://github.com/parcel-bundler/lightningcss/blob/master/node/targets.d.ts
const map = {
chrome: 'chrome',
edge: 'edge',
firefox: 'firefox',
hermes: false,
ie: 'ie',
ios: 'ios_saf',
node: false,
opera: 'opera',
rhino: false,
safari: 'safari',
};
const esMap = {
// https://caniuse.com/?search=es2015
2015: ['chrome49', 'edge13', 'safari10', 'firefox44', 'opera36'],
// https://caniuse.com/?search=es2016
2016: ['chrome50', 'edge13', 'safari10', 'firefox43', 'opera37'],
// https://caniuse.com/?search=es2017
2017: ['chrome58', 'edge15', 'safari11', 'firefox52', 'opera45'],
// https://caniuse.com/?search=es2018
2018: ['chrome63', 'edge79', 'safari12', 'firefox58', 'opera50'],
// https://caniuse.com/?search=es2019
2019: ['chrome73', 'edge79', 'safari12.1', 'firefox64', 'opera60'],
// https://caniuse.com/?search=es2020
2020: ['chrome80', 'edge80', 'safari14.1', 'firefox80', 'opera67'],
// https://caniuse.com/?search=es2021
2021: ['chrome85', 'edge85', 'safari14.1', 'firefox80', 'opera71'],
// https://caniuse.com/?search=es2022
2022: ['chrome94', 'edge94', 'safari16.4', 'firefox93', 'opera80'],
};
const esRE = /es(\d{4})/;
const versionRE = /\d/;
const convertTargetsCache = new Map();
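// Translate esbuild target strings (e.g. 'chrome87', 'es2020') into the Lightning CSS
// `targets` shape; each browser version is encoded as (major << 16) | (minor << 8),
// and unsupported targets throw.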
const convertTargets = (esbuildTarget) => {
if (!esbuildTarget)
return {};
const cached = convertTargetsCache.get(esbuildTarget);
if (cached)
return cached;
const targets = {};
const entriesWithoutES = arraify(esbuildTarget).flatMap((e) => {
const match = e.match(esRE);
if (!match)
return e;
const year = Number(match[1]);
if (!esMap[year])
throw new Error(`Unsupported target "${e}"`);
return esMap[year];
});
for (const entry of entriesWithoutES) {
if (entry === 'esnext')
continue;
const index = entry.match(versionRE)?.index;
if (index) {
const browser = map[entry.slice(0, index)];
if (browser === false)
continue; // No mapping available
if (browser) {
const [major, minor = 0] = entry
.slice(index)
.split('.')
.map((v) => parseInt(v, 10));
if (!isNaN(major) && !isNaN(minor)) {
const version = (major << 16) | (minor << 8);
if (!targets[browser] || version < targets[browser]) {
targets[browser] = version;
}
continue;
}
}
}
throw new Error(`Unsupported target "${entry}"`);
}
convertTargetsCache.set(esbuildTarget, targets);
return targets;
};
const externalWithConversionNamespace = 'vite:dep-pre-bundle:external-conversion';
const convertedExternalPrefix = 'vite-dep-pre-bundle-external:';
const cjsExternalFacadeNamespace = 'vite:cjs-external-facade';
const nonFacadePrefix = 'vite-cjs-external-facade:';
const externalTypes = [
'css',
// supported pre-processor types
'less',
'sass',
'scss',
'styl',
'stylus',
'pcss',
'postcss',
// wasm
'wasm',
// known SFC types
'vue',
'svelte',
'marko',
'astro',
'imba',
// JSX/TSX may be configured to be compiled differently from how esbuild
// handles it by default, so exclude them as well
'jsx',
'tsx',
...KNOWN_ASSET_TYPES,
];
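// esbuild plugin used during dependency pre-bundling: externalizes assets and other
// non-JS file types, resolves ids with Vite's own resolvers (separate ESM and CJS
// resolvers with their own package caches), maps flat entry ids to their source
// files, and stubs browser-external / optional-peer-dep modules.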
function esbuildDepPlugin(qualified, external, config, ssr) {
const { extensions } = getDepOptimizationConfig(config, ssr);
// remove optimizable extensions from `externalTypes` list
const allExternalTypes = extensions
? externalTypes.filter((type) => !extensions?.includes('.' + type))
: externalTypes;
// use separate package cache for optimizer as it caches paths around node_modules
// and it's unlikely for the core Vite process to traverse into node_modules again
const esmPackageCache = new Map();
const cjsPackageCache = new Map();
// default resolver which prefers ESM
const _resolve = config.createResolver({
asSrc: false,
scan: true,
packageCache: esmPackageCache,
});
// cjs resolver that prefers Node
const _resolveRequire = config.createResolver({
asSrc: false,
isRequire: true,
scan: true,
packageCache: cjsPackageCache,
});
const resolve = (id, importer, kind, resolveDir) => {
let _importer;
// explicit resolveDir - this is passed only during yarn pnp resolve for
// entries
if (resolveDir) {
_importer = normalizePath$3(path$o.join(resolveDir, '*'));
}
else {
// map importer ids to file paths for correct resolution
_importer = importer in qualified ? qualified[importer] : importer;
}
const resolver = kind.startsWith('require') ? _resolveRequire : _resolve;
return resolver(id, _importer, undefined, ssr);
};
const resolveResult = (id, resolved) => {
if (resolved.startsWith(browserExternalId)) {
return {
path: id,
namespace: 'browser-external',
};
}
if (resolved.startsWith(optionalPeerDepId)) {
return {
path: resolved,
namespace: 'optional-peer-dep',
};
}
if (ssr && isBuiltin(resolved)) {
return;
}
if (isExternalUrl(resolved)) {
return {
path: resolved,
external: true,
};
}
return {
path: path$o.resolve(resolved),
};
};
return {
name: 'vite:dep-pre-bundle',
setup(build) {
// clear package cache when esbuild is finished
build.onEnd(() => {
esmPackageCache.clear();
cjsPackageCache.clear();
});
// externalize assets and commonly known non-js file types
// See #8459 for more details about this require-import conversion
build.onResolve({
filter: new RegExp(`\\.(` + allExternalTypes.join('|') + `)(\\?.*)?$`),
}, async ({ path: id, importer, kind }) => {
// if the prefix exists, it is already converted to `import`, so set `external: true`
if (id.startsWith(convertedExternalPrefix)) {
return {
path: id.slice(convertedExternalPrefix.length),
external: true,
};
}
const resolved = await resolve(id, importer, kind);
if (resolved) {
if (kind === 'require-call') {
// `external: true` is intentionally not set here so that `require` gets converted to `import`
return {
path: resolved,
namespace: externalWithConversionNamespace,
};
}
return {
path: resolved,
external: true,
};
}
});
build.onLoad({ filter: /./, namespace: externalWithConversionNamespace }, (args) => {
// import itself with prefix (this is the actual part of require-import conversion)
const modulePath = `"${convertedExternalPrefix}${args.path}"`;
return {
contents: isCSSRequest(args.path) && !isModuleCSSRequest(args.path)
? `import ${modulePath};`
: `export { default } from ${modulePath};` +
`export * from ${modulePath};`,
loader: 'js',
};
});
function resolveEntry(id) {
const flatId = flattenId(id);
if (flatId in qualified) {
return {
path: qualified[flatId],
};
}
}
build.onResolve({ filter: /^[\w@][^:]/ }, async ({ path: id, importer, kind }) => {
if (moduleListContains(external, id)) {
return {
path: id,
external: true,
};
}
// ensure esbuild uses our resolved entries
let entry;
// if this is an entry, return entry namespace resolve result
if (!importer) {
if ((entry = resolveEntry(id)))
return entry;
// check if this is aliased to an entry - also return entry namespace
const aliased = await _resolve(id, undefined, true);
if (aliased && (entry = resolveEntry(aliased))) {
return entry;
}
}
// use vite's own resolver
const resolved = await resolve(id, importer, kind);
if (resolved) {
return resolveResult(id, resolved);
}
});
build.onLoad({ filter: /.*/, namespace: 'browser-external' }, ({ path }) => {
if (config.isProduction) {
return {
contents: 'module.exports = {}',
};
}
else {
return {
// Return in CJS to intercept named imports. Use `Object.create` to
// create the Proxy in the prototype to work around an esbuild issue. Why?
//
// In short, esbuild cjs->esm flow:
// 1. Create empty object using `Object.create(Object.getPrototypeOf(module.exports))`.
// 2. Assign props of `module.exports` to the object.
// 3. Return object for ESM use.
//
// If we do `module.exports = new Proxy({}, {})`, step 1 returns empty object,
// step 2 does nothing as there's no props for `module.exports`. The final object
// is just an empty object.
//
// Creating the Proxy in the prototype satisfies step 1 immediately, which means
// the returned object is a Proxy that we can intercept.
//
// Note: Skip keys that are accessed by esbuild and browser devtools.
contents: `\
module.exports = Object.create(new Proxy({}, {
get(_, key) {
if (
key !== '__esModule' &&
key !== '__proto__' &&
key !== 'constructor' &&
key !== 'splice'
) {
console.warn(\`Module "${path}" has been externalized for browser compatibility. Cannot access "${path}.\${key}" in client code. See http://vitejs.dev/guide/troubleshooting.html#module-externalized-for-browser-compatibility for more details.\`)
}
}
}))`,
};
}
});
build.onLoad({ filter: /.*/, namespace: 'optional-peer-dep' }, ({ path }) => {
if (config.isProduction) {
return {
contents: 'module.exports = {}',
};
}
else {
const [, peerDep, parentDep] = path.split(':');
return {
contents: `throw new Error(\`Could not resolve "${peerDep}" imported by "${parentDep}". Is it installed?\`)`,
};
}
});
},
};
}
const matchesEntireLine = (text) => `^${escapeRegex(text)}$`;
// esbuild doesn't transpile `require('foo')` into `import` statements if 'foo' is externalized
// https://github.com/evanw/esbuild/issues/566#issuecomment-735551834
function esbuildCjsExternalPlugin(externals, platform) {
return {
name: 'cjs-external',
setup(build) {
const filter = new RegExp(externals.map(matchesEntireLine).join('|'));
build.onResolve({ filter: new RegExp(`^${nonFacadePrefix}`) }, (args) => {
return {
path: args.path.slice(nonFacadePrefix.length),
external: true,
};
});
build.onResolve({ filter }, (args) => {
// preserve `require` for node because it's more accurate than converting it to import
if (args.kind === 'require-call' && platform !== 'node') {
return {
path: args.path,
namespace: cjsExternalFacadeNamespace,
};
}
return {
path: args.path,
external: true,
};
});
build.onLoad({ filter: /.*/, namespace: cjsExternalFacadeNamespace }, (args) => ({
contents: `import * as m from ${JSON.stringify(nonFacadePrefix + args.path)};` + `module.exports = m;`,
}));
},
};
}
const debug$c = createDebugger('vite:ssr-external');
/**
* Converts "parent > child" syntax to just "child"
*/
function stripNesting(packages) {
return packages.map((s) => {
const arr = s.split('>');
return arr[arr.length - 1].trim();
});
}
/**
* Heuristics for determining whether a dependency should be externalized for
* server-side rendering.
*/
function cjsSsrResolveExternals(config, knownImports) {
// strip nesting since knownImports may be passed in from optimizeDeps which
// supports a "parent > child" syntax
knownImports = stripNesting(knownImports);
const ssrConfig = config.ssr;
if (ssrConfig?.noExternal === true) {
return [];
}
const ssrExternals = new Set();
const seen = new Set();
ssrConfig?.external?.forEach((id) => {
ssrExternals.add(id);
seen.add(id);
});
cjsSsrCollectExternals(config.root, config.resolve, ssrExternals, seen, config.logger);
const importedDeps = knownImports.map(getNpmPackageName).filter(isDefined);
for (const dep of importedDeps) {
// Assume external if not yet seen
// At this point, the project root and any linked packages have had their dependencies checked,
// so we can safely mark any knownImports not yet seen as external. They are guaranteed to be
// dependencies of packages in node_modules.
if (!seen.has(dep)) {
ssrExternals.add(dep);
}
}
// ensure `vite/dynamic-import-polyfill` is bundled (issue #1865)
ssrExternals.delete('vite');
let externals = [...ssrExternals];
if (ssrConfig?.noExternal) {
externals = externals.filter(createFilter(undefined, ssrConfig.noExternal, { resolve: false }));
}
return externals;
}
const CJS_CONTENT_RE = /\bmodule\.exports\b|\bexports[.[]|\brequire\s*\(|\bObject\.(?:defineProperty|defineProperties|assign)\s*\(\s*exports\b/;
// TODO: use import()
const _require$1 = createRequire$1(import.meta.url);
const isSsrExternalCache = new WeakMap();
function shouldExternalizeForSSR(id, importer, config) {
let isSsrExternal = isSsrExternalCache.get(config);
if (!isSsrExternal) {
isSsrExternal = createIsSsrExternal(config);
isSsrExternalCache.set(config, isSsrExternal);
}
return isSsrExternal(id, importer);
}
function createIsConfiguredAsSsrExternal(config) {
const { ssr, root } = config;
const noExternal = ssr?.noExternal;
const noExternalFilter = noExternal !== 'undefined' &&
typeof noExternal !== 'boolean' &&
createFilter(undefined, noExternal, { resolve: false });
const targetConditions = config.ssr.resolve?.externalConditions || [];
const resolveOptions = {
...config.resolve,
root,
isProduction: false,
isBuild: true,
conditions: targetConditions,
};
const isExternalizable = (id, importer, configuredAsExternal) => {
if (!bareImportRE.test(id) || id.includes('\0')) {
return false;
}
try {
return !!tryNodeResolve(id,
// Skip passing importer in build to avoid externalizing non-hoisted dependencies
// unresolvable from root (which would also be unresolvable from output bundles)
config.command === 'build' ? undefined : importer, resolveOptions, ssr?.target === 'webworker', undefined, true,
// try to externalize; will return undefined, or an object without
// an external flag if it isn't externalizable
true,
// Allow linked packages to be externalized if they are explicitly
// configured as external
!!configuredAsExternal)?.external;
}
catch (e) {
debug$c?.(`Failed to node resolve "${id}". Skipping externalizing it by default.`);
// may be an invalid import that's resolved by a plugin
return false;
}
};
// Returns true if it is configured as external, false if it is filtered
// by noExternal and undefined if it isn't affected by the explicit config
return (id, importer) => {
const { ssr } = config;
if (ssr) {
if (
// If this id is defined as external, force it as external
// Note that individual package entries are allowed in ssr.external
ssr.external?.includes(id)) {
return true;
}
const pkgName = getNpmPackageName(id);
if (!pkgName) {
return isExternalizable(id, importer);
}
if (
// A package name in ssr.external externalizes every
// externalizable package entry
ssr.external?.includes(pkgName)) {
return isExternalizable(id, importer, true);
}
if (typeof noExternal === 'boolean') {
return !noExternal;
}
if (noExternalFilter && !noExternalFilter(pkgName)) {
return false;
}
}
return isExternalizable(id, importer);
};
}
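// Per-config memoized check: only bare, non-absolute ids can be SSR-external, and
// they are external when they are Node builtins or configured/heuristically
// externalizable (see createIsConfiguredAsSsrExternal above).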
function createIsSsrExternal(config) {
const processedIds = new Map();
const isConfiguredAsExternal = createIsConfiguredAsSsrExternal(config);
return (id, importer) => {
if (processedIds.has(id)) {
return processedIds.get(id);
}
let external = false;
if (id[0] !== '.' && !path$o.isAbsolute(id)) {
external = isBuiltin(id) || isConfiguredAsExternal(id, importer);
}
processedIds.set(id, external);
return external;
};
}
// When config.experimental.buildSsrCjsExternalHeuristics is enabled, this function
// is used, reverting to the Vite 2.9 SSR externalization heuristics
function cjsSsrCollectExternals(root, resolveOptions, ssrExternals, seen, logger) {
const rootPkgPath = lookupFile(root, ['package.json']);
if (!rootPkgPath) {
return;
}
const rootPkgContent = fs$l.readFileSync(rootPkgPath, 'utf-8');
if (!rootPkgContent) {
return;
}
const rootPkg = JSON.parse(rootPkgContent);
const deps = {
...rootPkg.devDependencies,
...rootPkg.dependencies,
};
const internalResolveOptions = {
...resolveOptions,
root,
isProduction: false,
isBuild: true,
};
const depsToTrace = new Set();
for (const id in deps) {
if (seen.has(id))
continue;
seen.add(id);
let esmEntry;
let requireEntry;
try {
esmEntry = tryNodeResolve(id, undefined, internalResolveOptions, true, // we set `targetWeb` to `true` to get the ESM entry
undefined, true)?.id;
// normalizePath required for windows. tryNodeResolve uses normalizePath
// which returns with '/', require.resolve returns with '\\'
requireEntry = normalizePath$3(_require$1.resolve(id, { paths: [root] }));
}
catch (e) {
// no main entry, but deep imports may be allowed
const pkgDir = resolvePackageData(id, root)?.dir;
if (pkgDir) {
if (isInNodeModules(pkgDir)) {
ssrExternals.add(id);
}
else {
depsToTrace.add(path$o.dirname(pkgDir));
}
continue;
}
// resolve failed, assume include
debug$c?.(`Failed to resolve entries for package "${id}"\n`, e);
continue;
}
// no esm entry but has require entry
if (!esmEntry) {
ssrExternals.add(id);
}
// trace the dependencies of linked packages
else if (!isInNodeModules(esmEntry)) {
const pkgDir = resolvePackageData(id, root)?.dir;
if (pkgDir) {
depsToTrace.add(pkgDir);
}
}
// has separate esm/require entry, assume require entry is cjs
else if (esmEntry !== requireEntry) {
ssrExternals.add(id);
}
// if we're externalizing ESM and CJS should basically just always do it?
// or are there others like SystemJS / AMD that we'd need to handle?
// for now, we'll just leave this as is
else if (/\.m?js$/.test(esmEntry)) {
const pkg = resolvePackageData(id, root)?.data;
if (!pkg) {
continue;
}
if (pkg.type === 'module' || esmEntry.endsWith('.mjs')) {
ssrExternals.add(id);
continue;
}
// check if the entry is cjs
const content = fs$l.readFileSync(esmEntry, 'utf-8');
if (CJS_CONTENT_RE.test(content)) {
ssrExternals.add(id);
continue;
}
logger.warn(`${id} doesn't appear to be written in CJS, but also doesn't appear to be a valid ES module (i.e. it doesn't have "type": "module" or an .mjs extension for the entry point). Please contact the package author to fix.`);
}
}
for (const depRoot of depsToTrace) {
cjsSsrCollectExternals(depRoot, resolveOptions, ssrExternals, seen, logger);
}
}
function cjsShouldExternalizeForSSR(id, externals) {
if (!externals) {
return false;
}
const should = externals.some((e) => {
if (id === e) {
return true;
}
// deep imports, check ext before externalizing - only externalize
// extension-less imports and explicit .js imports
if (id.startsWith(withTrailingSlash(e)) &&
(!path$o.extname(id) || id.endsWith('.js'))) {
return true;
}
});
return should;
}
/**
* https://github.com/rollup/plugins/blob/master/packages/json/src/index.js
*
* This source code is licensed under the MIT license found in the
* LICENSE file at
* https://github.com/rollup/plugins/blob/master/LICENSE
*/
// Custom json filter for vite
const jsonExtRE = /\.json(?:$|\?)(?!commonjs-(?:proxy|external))/;
const jsonLangs = `\\.(?:json|json5)(?:$|\\?)`;
const jsonLangRE = new RegExp(jsonLangs);
const isJSONRequest = (request) => jsonLangRE.test(request);
function jsonPlugin(options = {}, isBuild) {
return {
name: 'vite:json',
transform(json, id) {
if (!jsonExtRE.test(id))
return null;
if (SPECIAL_QUERY_RE.test(id))
return null;
json = stripBomTag(json);
try {
if (options.stringify) {
if (isBuild) {
return {
// during build, parse then double-stringify to remove all
// unnecessary whitespaces to reduce bundle size.
code: `export default JSON.parse(${JSON.stringify(JSON.stringify(JSON.parse(json)))})`,
map: { mappings: '' },
};
}
else {
return `export default JSON.parse(${JSON.stringify(json)})`;
}
}
const parsed = JSON.parse(json);
return {
code: dataToEsm(parsed, {
preferConst: true,
namedExports: options.namedExports,
}),
map: { mappings: '' },
};
}
catch (e) {
const errorMessageList = /\d+/.exec(e.message);
const position = errorMessageList && parseInt(errorMessageList[0], 10);
const msg = position
? `, invalid JSON syntax found at line ${position}`
: `.`;
this.error(`Failed to parse JSON file` + msg, e.idx);
}
},
};
}
const ERR_OPTIMIZE_DEPS_PROCESSING_ERROR = 'ERR_OPTIMIZE_DEPS_PROCESSING_ERROR';
const ERR_OUTDATED_OPTIMIZED_DEP = 'ERR_OUTDATED_OPTIMIZED_DEP';
const debug$b = createDebugger('vite:optimize-deps');
function optimizedDepsPlugin(config) {
return {
name: 'vite:optimized-deps',
resolveId(id, source, { ssr }) {
if (getDepsOptimizer(config, ssr)?.isOptimizedDepFile(id)) {
return id;
}
},
// this.load({ id }) isn't implemented in PluginContainer
// The logic to register an id to wait until it is processed
// is in importAnalysis, see call to delayDepsOptimizerUntil
async load(id, options) {
const ssr = options?.ssr === true;
const depsOptimizer = getDepsOptimizer(config, ssr);
if (depsOptimizer?.isOptimizedDepFile(id)) {
const metadata = depsOptimizer.metadata;
const file = cleanUrl(id);
const versionMatch = id.match(DEP_VERSION_RE);
const browserHash = versionMatch
? versionMatch[1].split('=')[1]
: undefined;
// Search in both the currently optimized and newly discovered deps
const info = optimizedDepInfoFromFile(metadata, file);
if (info) {
if (browserHash && info.browserHash !== browserHash) {
throwOutdatedRequest(id);
}
try {
// This is an entry point, it may still not be bundled
await info.processing;
}
catch {
// If the refresh has not happened after timeout, Vite considers
// something unexpected has happened. In this case, Vite
// returns an empty response that will error.
throwProcessingError(id);
return;
}
const newMetadata = depsOptimizer.metadata;
if (metadata !== newMetadata) {
const currentInfo = optimizedDepInfoFromFile(newMetadata, file);
if (info.browserHash !== currentInfo?.browserHash) {
throwOutdatedRequest(id);
}
}
}
debug$b?.(`load ${colors$1.cyan(file)}`);
// Load the file from the cache instead of waiting for other plugin
// load hooks to avoid race conditions; once processing is resolved,
// we are sure that the file has been properly saved to disk
try {
return await fsp.readFile(file, 'utf-8');
}
catch (e) {
// Outdated non-entry points (CHUNK), loaded after a rerun
throwOutdatedRequest(id);
}
}
},
};
}
function optimizedDepsBuildPlugin(config) {
let buildStartCalled = false;
return {
name: 'vite:optimized-deps-build',
buildStart() {
// Only reset the registered ids after a rebuild during build --watch
if (!config.isWorker && buildStartCalled) {
getDepsOptimizer(config)?.resetRegisteredIds();
}
buildStartCalled = true;
},
async resolveId(id, importer, options) {
const depsOptimizer = getDepsOptimizer(config);
if (!depsOptimizer)
return;
if (depsOptimizer.isOptimizedDepFile(id)) {
return id;
}
else {
if (options?.custom?.['vite:pre-alias']) {
// Skip registering the id if it is being resolved from the pre-alias plugin
// When an optimized dep is aliased, we need to avoid waiting for it before optimizing
return;
}
const resolved = await this.resolve(id, importer, {
...options,
skipSelf: true,
});
if (resolved && !resolved.external) {
depsOptimizer.delayDepsOptimizerUntil(resolved.id, async () => {
await this.load(resolved);
});
}
return resolved;
}
},
async load(id) {
const depsOptimizer = getDepsOptimizer(config);
if (!depsOptimizer?.isOptimizedDepFile(id)) {
return;
}
depsOptimizer?.ensureFirstRun();
const file = cleanUrl(id);
// Search in both the currently optimized and newly discovered deps
// If all the inputs are dependencies, we aren't going to get any
const info = optimizedDepInfoFromFile(depsOptimizer.metadata, file);
if (info) {
await info.processing;
debug$b?.(`load ${colors$1.cyan(file)}`);
}
else {
throw new Error(`Something unexpected happened while optimizing "${id}".`);
}
// Load the file from the cache instead of waiting for other plugin
// load hooks to avoid race conditions; once processing is resolved,
// we are sure that the file has been properly saved to disk
return fsp.readFile(file, 'utf-8');
},
};
}
function throwProcessingError(id) {
const err = new Error(`Something unexpected happened while optimizing "${id}". ` +
`The current page should have reloaded by now`);
err.code = ERR_OPTIMIZE_DEPS_PROCESSING_ERROR;
// This error will be caught by the transform middleware, which will
// respond with a 504 (request timeout) status code
throw err;
}
function throwOutdatedRequest(id) {
const err = new Error(`There is a new version of the pre-bundle for "${id}", ` +
`a page reload is going to ask for it.`);
err.code = ERR_OUTDATED_OPTIMIZED_DEP;
// This error will be caught by the transform middleware, which will
// respond with a 504 (request timeout) status code
throw err;
}
// AST walker module for Mozilla Parser API compatible trees
function makeTest(test) {
if (typeof test === "string")
{ return function (type) { return type === test; } }
else if (!test)
{ return function () { return true; } }
else
{ return test }
}
var Found = function Found(node, state) { this.node = node; this.state = state; };
// Find a node with a given start, end, and type (all are optional,
// null can be used as wildcard). Returns a {node, state} object, or
// undefined when it doesn't find a matching node.
function findNodeAt(node, start, end, test, baseVisitor, state) {
if (!baseVisitor) { baseVisitor = base; }
test = makeTest(test);
try {
(function c(node, st, override) {
var type = override || node.type;
if ((start == null || node.start <= start) &&
(end == null || node.end >= end))
{ baseVisitor[type](node, st, c); }
if ((start == null || node.start === start) &&
(end == null || node.end === end) &&
test(type, node))
{ throw new Found(node, st) }
})(node, state);
} catch (e) {
if (e instanceof Found) { return e }
throw e
}
}
function skipThrough(node, st, c) { c(node, st); }
function ignore(_node, _st, _c) {}
// Node walkers.
var base = {};
base.Program = base.BlockStatement = base.StaticBlock = function (node, st, c) {
for (var i = 0, list = node.body; i < list.length; i += 1)
{
var stmt = list[i];
c(stmt, st, "Statement");
}
};
base.Statement = skipThrough;
base.EmptyStatement = ignore;
base.ExpressionStatement = base.ParenthesizedExpression = base.ChainExpression =
function (node, st, c) { return c(node.expression, st, "Expression"); };
base.IfStatement = function (node, st, c) {
c(node.test, st, "Expression");
c(node.consequent, st, "Statement");
if (node.alternate) { c(node.alternate, st, "Statement"); }
};
base.LabeledStatement = function (node, st, c) { return c(node.body, st, "Statement"); };
base.BreakStatement = base.ContinueStatement = ignore;
base.WithStatement = function (node, st, c) {
c(node.object, st, "Expression");
c(node.body, st, "Statement");
};
base.SwitchStatement = function (node, st, c) {
c(node.discriminant, st, "Expression");
for (var i$1 = 0, list$1 = node.cases; i$1 < list$1.length; i$1 += 1) {
var cs = list$1[i$1];
if (cs.test) { c(cs.test, st, "Expression"); }
for (var i = 0, list = cs.consequent; i < list.length; i += 1)
{
var cons = list[i];
c(cons, st, "Statement");
}
}
};
base.SwitchCase = function (node, st, c) {
if (node.test) { c(node.test, st, "Expression"); }
for (var i = 0, list = node.consequent; i < list.length; i += 1)
{
var cons = list[i];
c(cons, st, "Statement");
}
};
base.ReturnStatement = base.YieldExpression = base.AwaitExpression = function (node, st, c) {
if (node.argument) { c(node.argument, st, "Expression"); }
};
base.ThrowStatement = base.SpreadElement =
function (node, st, c) { return c(node.argument, st, "Expression"); };
base.TryStatement = function (node, st, c) {
c(node.block, st, "Statement");
if (node.handler) { c(node.handler, st); }
if (node.finalizer) { c(node.finalizer, st, "Statement"); }
};
base.CatchClause = function (node, st, c) {
if (node.param) { c(node.param, st, "Pattern"); }
c(node.body, st, "Statement");
};
base.WhileStatement = base.DoWhileStatement = function (node, st, c) {
c(node.test, st, "Expression");
c(node.body, st, "Statement");
};
base.ForStatement = function (node, st, c) {
if (node.init) { c(node.init, st, "ForInit"); }
if (node.test) { c(node.test, st, "Expression"); }
if (node.update) { c(node.update, st, "Expression"); }
c(node.body, st, "Statement");
};
base.ForInStatement = base.ForOfStatement = function (node, st, c) {
c(node.left, st, "ForInit");
c(node.right, st, "Expression");
c(node.body, st, "Statement");
};
base.ForInit = function (node, st, c) {
if (node.type === "VariableDeclaration") { c(node, st); }
else { c(node, st, "Expression"); }
};
base.DebuggerStatement = ignore;
base.FunctionDeclaration = function (node, st, c) { return c(node, st, "Function"); };
base.VariableDeclaration = function (node, st, c) {
for (var i = 0, list = node.declarations; i < list.length; i += 1)
{
var decl = list[i];
c(decl, st);
}
};
base.VariableDeclarator = function (node, st, c) {
c(node.id, st, "Pattern");
if (node.init) { c(node.init, st, "Expression"); }
};
base.Function = function (node, st, c) {
if (node.id) { c(node.id, st, "Pattern"); }
for (var i = 0, list = node.params; i < list.length; i += 1)
{
var param = list[i];
c(param, st, "Pattern");
}
c(node.body, st, node.expression ? "Expression" : "Statement");
};
base.Pattern = function (node, st, c) {
if (node.type === "Identifier")
{ c(node, st, "VariablePattern"); }
else if (node.type === "MemberExpression")
{ c(node, st, "MemberPattern"); }
else
{ c(node, st); }
};
base.VariablePattern = ignore;
base.MemberPattern = skipThrough;
base.RestElement = function (node, st, c) { return c(node.argument, st, "Pattern"); };
base.ArrayPattern = function (node, st, c) {
for (var i = 0, list = node.elements; i < list.length; i += 1) {
var elt = list[i];
if (elt) { c(elt, st, "Pattern"); }
}
};
base.ObjectPattern = function (node, st, c) {
for (var i = 0, list = node.properties; i < list.length; i += 1) {
var prop = list[i];
if (prop.type === "Property") {
if (prop.computed) { c(prop.key, st, "Expression"); }
c(prop.value, st, "Pattern");
} else if (prop.type === "RestElement") {
c(prop.argument, st, "Pattern");
}
}
};
base.Expression = skipThrough;
base.ThisExpression = base.Super = base.MetaProperty = ignore;
base.ArrayExpression = function (node, st, c) {
for (var i = 0, list = node.elements; i < list.length; i += 1) {
var elt = list[i];
if (elt) { c(elt, st, "Expression"); }
}
};
base.ObjectExpression = function (node, st, c) {
for (var i = 0, list = node.properties; i < list.length; i += 1)
{
var prop = list[i];
c(prop, st);
}
};
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration;
base.SequenceExpression = function (node, st, c) {
for (var i = 0, list = node.expressions; i < list.length; i += 1)
{
var expr = list[i];
c(expr, st, "Expression");
}
};
base.TemplateLiteral = function (node, st, c) {
for (var i = 0, list = node.quasis; i < list.length; i += 1)
{
var quasi = list[i];
c(quasi, st);
}
for (var i$1 = 0, list$1 = node.expressions; i$1 < list$1.length; i$1 += 1)
{
var expr = list$1[i$1];
c(expr, st, "Expression");
}
};
base.TemplateElement = ignore;
base.UnaryExpression = base.UpdateExpression = function (node, st, c) {
c(node.argument, st, "Expression");
};
base.BinaryExpression = base.LogicalExpression = function (node, st, c) {
c(node.left, st, "Expression");
c(node.right, st, "Expression");
};
base.AssignmentExpression = base.AssignmentPattern = function (node, st, c) {
c(node.left, st, "Pattern");
c(node.right, st, "Expression");
};
base.ConditionalExpression = function (node, st, c) {
c(node.test, st, "Expression");
c(node.consequent, st, "Expression");
c(node.alternate, st, "Expression");
};
base.NewExpression = base.CallExpression = function (node, st, c) {
c(node.callee, st, "Expression");
if (node.arguments)
{ for (var i = 0, list = node.arguments; i < list.length; i += 1)
{
var arg = list[i];
c(arg, st, "Expression");
} }
};
base.MemberExpression = function (node, st, c) {
c(node.object, st, "Expression");
if (node.computed) { c(node.property, st, "Expression"); }
};
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st, c) {
if (node.declaration)
{ c(node.declaration, st, node.type === "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression"); }
if (node.source) { c(node.source, st, "Expression"); }
};
base.ExportAllDeclaration = function (node, st, c) {
if (node.exported)
{ c(node.exported, st); }
c(node.source, st, "Expression");
};
base.ImportDeclaration = function (node, st, c) {
for (var i = 0, list = node.specifiers; i < list.length; i += 1)
{
var spec = list[i];
c(spec, st);
}
c(node.source, st, "Expression");
};
base.ImportExpression = function (node, st, c) {
c(node.source, st, "Expression");
};
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore;
base.TaggedTemplateExpression = function (node, st, c) {
c(node.tag, st, "Expression");
c(node.quasi, st, "Expression");
};
base.ClassDeclaration = base.ClassExpression = function (node, st, c) { return c(node, st, "Class"); };
base.Class = function (node, st, c) {
if (node.id) { c(node.id, st, "Pattern"); }
if (node.superClass) { c(node.superClass, st, "Expression"); }
c(node.body, st);
};
base.ClassBody = function (node, st, c) {
for (var i = 0, list = node.body; i < list.length; i += 1)
{
var elt = list[i];
c(elt, st);
}
};
base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) {
if (node.computed) { c(node.key, st, "Expression"); }
if (node.value) { c(node.value, st, "Expression"); }
};
const { isMatch: isMatch$1, scan } = micromatch$2;
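// For HMR: collect the modules whose registered import.meta.glob patterns match the
// changed file (at least one affirmative glob matches, and no negated glob excludes
// it), then invalidate them via moduleGraph.onFileChange.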
function getAffectedGlobModules(file, server) {
const modules = [];
for (const [id, allGlobs] of server._importGlobMap) {
// (glob1 || glob2) && !glob3 && !glob4...
if (allGlobs.some(({ affirmed, negated }) => (!affirmed.length || affirmed.some((glob) => isMatch$1(file, glob))) &&
(!negated.length || negated.every((glob) => isMatch$1(file, glob))))) {
const mod = server.moduleGraph.getModuleById(id);
if (mod)
modules.push(mod);
}
}
modules.forEach((i) => {
if (i?.file)
server.moduleGraph.onFileChange(i.file);
});
return modules;
}
function importGlobPlugin(config) {
let server;
return {
name: 'vite:import-glob',
configureServer(_server) {
server = _server;
server._importGlobMap.clear();
},
async transform(code, id) {
if (!code.includes('import.meta.glob'))
return;
const result = await transformGlobImport(code, id, config.root, (im, _, options) => this.resolve(im, id, options).then((i) => i?.id || im), config.isProduction, config.experimental.importGlobRestoreExtension);
if (result) {
if (server) {
const allGlobs = result.matches.map((i) => i.globsResolved);
server._importGlobMap.set(id, allGlobs.map((globs) => {
const affirmed = [];
const negated = [];
for (const glob of globs) {
(glob[0] === '!' ? negated : affirmed).push(glob);
}
return { affirmed, negated };
}));
}
return transformStableResult(result.s, id, config);
}
},
};
}
const importGlobRE = /\bimport\.meta\.(glob|globEager|globEagerDefault)(?:<\w+>)?\s*\(/g;
const knownOptions = {
as: ['string'],
eager: ['boolean'],
import: ['string'],
exhaustive: ['boolean'],
query: ['object', 'string'],
};
const forceDefaultAs = ['raw', 'url'];
function err$1(e, pos) {
const error = new Error(e);
error.pos = pos;
return error;
}
function parseGlobOptions(rawOpts, optsStartIndex) {
let opts = {};
try {
opts = evalValue(rawOpts);
}
catch {
throw err$1('Vite is unable to parse the glob options as the value is not static', optsStartIndex);
}
if (opts == null) {
return {};
}
for (const key in opts) {
if (!(key in knownOptions)) {
throw err$1(`Unknown glob option "${key}"`, optsStartIndex);
}
const allowedTypes = knownOptions[key];
const valueType = typeof opts[key];
if (!allowedTypes.includes(valueType)) {
throw err$1(`Expected glob option "${key}" to be of type ${allowedTypes.join(' or ')}, but got ${valueType}`, optsStartIndex);
}
}
if (typeof opts.query === 'object') {
for (const key in opts.query) {
const value = opts.query[key];
if (!['string', 'number', 'boolean'].includes(typeof value)) {
throw err$1(`Expected glob option "query.${key}" to be of type string, number, or boolean, but got ${typeof value}`, optsStartIndex);
}
}
}
if (opts.as && forceDefaultAs.includes(opts.as)) {
if (opts.import && opts.import !== 'default' && opts.import !== '*')
throw err$1(`Option "import" can only be "default" or "*" when "as" is "${opts.as}", but got "${opts.import}"`, optsStartIndex);
opts.import = opts.import || 'default';
}
if (opts.as && opts.query)
throw err$1('Options "as" and "query" cannot be used together', optsStartIndex);
if (opts.as)
opts.query = opts.as;
return opts;
}
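// Scan the (string-literal-stripped) source for import.meta.glob calls, parse each
// call with acorn, validate the glob literals and options, and resolve the globs to
// absolute patterns relative to the importer / root.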
async function parseImportGlob(code, importer, root, resolveId) {
let cleanCode;
try {
cleanCode = stripLiteral(code);
}
catch (e) {
// skip invalid js code
return [];
}
const matches = Array.from(cleanCode.matchAll(importGlobRE));
const tasks = matches.map(async (match, index) => {
const type = match[1];
const start = match.index;
const err = (msg) => {
const e = new Error(`Invalid glob import syntax: ${msg}`);
e.pos = start;
return e;
};
let ast;
let lastTokenPos;
try {
ast = parseExpressionAt(code, start, {
ecmaVersion: 'latest',
sourceType: 'module',
ranges: true,
onToken: (token) => {
lastTokenPos = token.end;
},
});
}
catch (e) {
const _e = e;
if (_e.message && _e.message.startsWith('Unterminated string constant'))
return undefined;
if (lastTokenPos == null || lastTokenPos <= start)
throw _e;
// a trailing comma in an object or array will make the parser think it's a comma expression;
// we try to parse again with the comma removed
try {
const statement = code.slice(start, lastTokenPos).replace(/[,\s]*$/, '');
ast = parseExpressionAt(' '.repeat(start) + statement, // to keep the ast position
start, {
ecmaVersion: 'latest',
sourceType: 'module',
ranges: true,
});
}
catch {
throw _e;
}
}
const found = findNodeAt(ast, start, undefined, 'CallExpression');
if (!found)
throw err(`Expect CallExpression, got ${ast.type}`);
ast = found.node;
if (ast.arguments.length < 1 || ast.arguments.length > 2)
throw err(`Expected 1-2 arguments, but got ${ast.arguments.length}`);
const arg1 = ast.arguments[0];
const arg2 = ast.arguments[1];
const globs = [];
const validateLiteral = (element) => {
if (!element)
return;
if (element.type === 'Literal') {
if (typeof element.value !== 'string')
throw err(`Expected glob to be a string, but got "${typeof element.value}"`);
globs.push(element.value);
}
else if (element.type === 'TemplateLiteral') {
if (element.expressions.length !== 0) {
throw err(`Expected glob to be a string, but got dynamic template literal`);
}
globs.push(element.quasis[0].value.raw);
}
else {
throw err('Could only use literals');
}
};
if (arg1.type === 'ArrayExpression') {
for (const element of arg1.elements) {
validateLiteral(element);
}
}
else {
validateLiteral(arg1);
}
// arg2
let options = {};
if (arg2) {
if (arg2.type !== 'ObjectExpression')
throw err(`Expected the second argument to be an object literal, but got "${arg2.type}"`);
options = parseGlobOptions(code.slice(arg2.range[0], arg2.range[1]), arg2.range[0]);
}
const end = ast.range[1];
const globsResolved = await Promise.all(globs.map((glob) => toAbsoluteGlob(glob, root, importer, resolveId)));
const isRelative = globs.every((i) => '.!'.includes(i[0]));
return {
match,
index,
globs,
globsResolved,
isRelative,
options,
type,
start,
end,
};
});
return (await Promise.all(tasks)).filter(Boolean);
}
const importPrefix = '__vite_glob_';
const { basename, dirname, relative, join } = posix$1;
const warnedCSSDefaultImportVarName = '__vite_warned_css_default_import';
const jsonStringifyInOneline = (input) => JSON.stringify(input).replace(/[{,:]/g, '$& ').replace(/\}/g, ' }');
const createCssDefaultImportWarning = (globs, options) => `if (!${warnedCSSDefaultImportVarName}) {` +
`${warnedCSSDefaultImportVarName} = true;` +
`console.warn(${JSON.stringify('Default import of CSS without `?inline` is deprecated. ' +
"Add the `{ query: '?inline' }` glob option to fix this.\n" +
`For example: \`import.meta.glob(${jsonStringifyInOneline(globs.length === 1 ? globs[0] : globs)}, ${jsonStringifyInOneline({ ...options, query: '?inline' })})\``)});` +
`}`;
/**
 * @param optimizeExport the dynamicImportVar plugin does not need to optimize exports.
*/
async function transformGlobImport(code, id, root, resolveId, isProduction, restoreQueryExtension = false) {
id = slash$1(id);
root = slash$1(root);
const isVirtual = isVirtualModule(id);
const dir = isVirtual ? undefined : dirname(id);
const matches = await parseImportGlob(code, isVirtual ? undefined : id, root, resolveId);
const matchedFiles = new Set();
// TODO: backwards compatibility
matches.forEach((i) => {
if (i.type === 'globEager')
i.options.eager = true;
if (i.type === 'globEagerDefault') {
i.options.eager = true;
i.options.import = 'default';
}
});
if (!matches.length)
return null;
const s = new MagicString(code);
const staticImports = (await Promise.all(matches.map(async ({ globs, globsResolved, isRelative, options, index, start, end, }) => {
const cwd = getCommonBase(globsResolved) ?? root;
const files = (await glob(globsResolved, {
cwd,
absolute: true,
dot: !!options.exhaustive,
ignore: options.exhaustive
? []
: [join(cwd, '**/node_modules/**')],
}))
.filter((file) => file !== id)
.sort();
const objectProps = [];
const staticImports = [];
let query = !options.query
? ''
: typeof options.query === 'string'
? options.query
: stringifyQuery(options.query);
if (query && query[0] !== '?')
query = `?${query}`;
const resolvePaths = (file) => {
if (!dir) {
if (isRelative)
throw new Error("In virtual modules, all globs must start with '/'");
const filePath = `/${relative(root, file)}`;
return { filePath, importPath: filePath };
}
let importPath = relative(dir, file);
if (importPath[0] !== '.')
importPath = `./${importPath}`;
let filePath;
if (isRelative) {
filePath = importPath;
}
else {
filePath = relative(root, file);
if (filePath[0] !== '.')
filePath = `/${filePath}`;
}
return { filePath, importPath };
};
let includesCSS = false;
files.forEach((file, i) => {
const paths = resolvePaths(file);
const filePath = paths.filePath;
let importPath = paths.importPath;
let importQuery = query;
if (importQuery && importQuery !== '?raw') {
const fileExtension = basename(file).split('.').slice(-1)[0];
if (fileExtension && restoreQueryExtension)
importQuery = `${importQuery}&lang.${fileExtension}`;
}
importPath = `${importPath}${importQuery}`;
const isCSS = !query && isCSSRequest(file) && !isModuleCSSRequest(file);
includesCSS || (includesCSS = isCSS);
const importKey = options.import && options.import !== '*'
? options.import
: undefined;
if (options.eager) {
const variableName = `${importPrefix}${index}_${i}`;
const expression = importKey
? `{ ${importKey} as ${variableName} }`
: `* as ${variableName}`;
staticImports.push(`import ${expression} from ${JSON.stringify(importPath)}`);
if (!isProduction && isCSS) {
objectProps.push(`get ${JSON.stringify(filePath)}() { ${createCssDefaultImportWarning(globs, options)} return ${variableName} }`);
}
else {
objectProps.push(`${JSON.stringify(filePath)}: ${variableName}`);
}
}
else {
let importStatement = `import(${JSON.stringify(importPath)})`;
if (importKey)
importStatement += `.then(m => m[${JSON.stringify(importKey)}])`;
if (!isProduction && isCSS) {
objectProps.push(`${JSON.stringify(filePath)}: () => { ${createCssDefaultImportWarning(globs, options)} return ${importStatement}}`);
}
else {
objectProps.push(`${JSON.stringify(filePath)}: () => ${importStatement}`);
}
}
});
files.forEach((i) => matchedFiles.add(i));
const originalLineBreakCount = code.slice(start, end).match(/\n/g)?.length ?? 0;
const lineBreaks = originalLineBreakCount > 0
? '\n'.repeat(originalLineBreakCount)
: '';
let replacement;
if (!isProduction && includesCSS) {
replacement =
'/* #__PURE__ */ Object.assign(' +
'(() => {' +
`let ${warnedCSSDefaultImportVarName} = false;` +
`return {${objectProps.join(',')}${lineBreaks}};` +
'})()' +
')';
}
else {
replacement = `/* #__PURE__ */ Object.assign({${objectProps.join(',')}${lineBreaks}})`;
}
s.overwrite(start, end, replacement);
return staticImports;
}))).flat();
if (staticImports.length)
s.prepend(`${staticImports.join(';')};`);
return {
s,
matches,
files: matchedFiles,
};
}
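// For illustration (paths and output are a rough sketch, not the exact emitted code):
// a source line such as
//
//   const mods = import.meta.glob('./mods/*.js')
//
// is rewritten by transformGlobImport above into an object of lazy importers, roughly
//
//   const mods = /* #__PURE__ */ Object.assign({
//     './mods/a.js': () => import('./mods/a.js'),
//     './mods/b.js': () => import('./mods/b.js'),
//   })
//
// while `{ eager: true }` instead hoists static `import * as ...` statements to the
// top of the module and references them in the object.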
function globSafePath(path) {
// slash path to ensure \ is converted to / as \ could lead to a double escape scenario
// see https://github.com/mrmlnc/fast-glob#advanced-syntax
return glob.escapePath(normalizePath$3(path));
}
function lastNthChar(str, n) {
return str.charAt(str.length - 1 - n);
}
function globSafeResolvedPath(resolved, glob) {
// we have to escape special glob characters in the resolved path, but keep the user-specified glob suffix
// walk back both strings until a character difference is found
// then slice the resolved path at that position and escape the first part
let numEqual = 0;
const maxEqual = Math.min(resolved.length, glob.length);
while (numEqual < maxEqual &&
lastNthChar(resolved, numEqual) === lastNthChar(glob, numEqual)) {
numEqual += 1;
}
const staticPartEnd = resolved.length - numEqual;
const staticPart = resolved.slice(0, staticPartEnd);
const dynamicPart = resolved.slice(staticPartEnd);
return globSafePath(staticPart) + dynamicPart;
}
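// Hypothetical example of the escaping above: with
//   resolved = '/src/pages(group)/**/*.js' and glob = './**/*.js'
// the shared suffix '/**/*.js' is found by walking back from the end, only the static
// prefix '/src/pages(group)' is escaped, and the result is roughly
// '/src/pages\(group\)/**/*.js', so fast-glob does not treat the parentheses in the
// directory name as glob syntax while the user's glob suffix stays intact.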
async function toAbsoluteGlob(glob, root, importer, resolveId) {
let pre = '';
if (glob[0] === '!') {
pre = '!';
glob = glob.slice(1);
}
root = globSafePath(root);
const dir = importer ? globSafePath(dirname(importer)) : root;
if (glob[0] === '/')
return pre + posix$1.join(root, glob.slice(1));
if (glob.startsWith('./'))
return pre + posix$1.join(dir, glob.slice(2));
if (glob.startsWith('../'))
return pre + posix$1.join(dir, glob);
if (glob.startsWith('**'))
return pre + glob;
const isSubImportsPattern = glob[0] === '#' && glob.includes('*');
const resolved = normalizePath$3((await resolveId(glob, importer, {
custom: { 'vite:import-glob': { isSubImportsPattern } },
})) || glob);
if (isSubImportsPattern) {
return join(root, resolved);
}
if (isAbsolute$2(resolved)) {
return pre + globSafeResolvedPath(resolved, glob);
}
throw new Error(`Invalid glob: "${glob}" (resolved: "${resolved}"). It must start with '/' or './'`);
}
function getCommonBase(globsResolved) {
const bases = globsResolved
.filter((g) => g[0] !== '!')
.map((glob) => {
let { base } = scan(glob);
// `scan('a/foo.js')` returns `base: 'a/foo.js'`
if (posix$1.basename(base).includes('.'))
base = posix$1.dirname(base);
return base;
});
if (!bases.length)
return null;
let commonAncestor = '';
const dirS = bases[0].split('/');
for (let i = 0; i < dirS.length; i++) {
const candidate = dirS.slice(0, i + 1).join('/');
if (bases.every((base) => base.startsWith(candidate)))
commonAncestor = candidate;
else
break;
}
if (!commonAncestor)
commonAncestor = '/';
return commonAncestor;
}
function isVirtualModule(id) {
// https://vitejs.dev/guide/api-plugin.html#virtual-modules-convention
return id.startsWith('virtual:') || id[0] === '\0' || !id.includes('/');
}
const debugHmr = createDebugger('vite:hmr');
const whitespaceRE = /\s/;
const normalizedClientDir = normalizePath$3(CLIENT_DIR);
function getShortName(file, root) {
return file.startsWith(withTrailingSlash(root))
? path$o.posix.relative(root, file)
: file;
}
async function handleHMRUpdate(file, server, configOnly) {
const { ws, config, moduleGraph } = server;
const shortFile = getShortName(file, config.root);
const fileName = path$o.basename(file);
const isConfig = file === config.configFile;
const isConfigDependency = config.configFileDependencies.some((name) => file === name);
const isEnv = config.inlineConfig.envFile !== false &&
(fileName === '.env' || fileName.startsWith('.env.'));
if (isConfig || isConfigDependency || isEnv) {
// auto restart server
debugHmr?.(`[config change] ${colors$1.dim(shortFile)}`);
config.logger.info(colors$1.green(`${path$o.relative(process.cwd(), file)} changed, restarting server...`), { clear: true, timestamp: true });
try {
await server.restart();
}
catch (e) {
config.logger.error(colors$1.red(e));
}
return;
}
if (configOnly) {
return;
}
debugHmr?.(`[file change] ${colors$1.dim(shortFile)}`);
// (dev only) the client itself cannot be hot updated.
if (file.startsWith(withTrailingSlash(normalizedClientDir))) {
ws.send({
type: 'full-reload',
path: '*',
});
return;
}
const mods = moduleGraph.getModulesByFile(file);
// check if any plugin wants to perform custom HMR handling
const timestamp = Date.now();
const hmrContext = {
file,
timestamp,
modules: mods ? [...mods] : [],
read: () => readModifiedFile(file),
server,
};
for (const hook of config.getSortedPluginHooks('handleHotUpdate')) {
const filteredModules = await hook(hmrContext);
if (filteredModules) {
hmrContext.modules = filteredModules;
}
}
if (!hmrContext.modules.length) {
// html file cannot be hot updated
if (file.endsWith('.html')) {
config.logger.info(colors$1.green(`page reload `) + colors$1.dim(shortFile), {
clear: true,
timestamp: true,
});
ws.send({
type: 'full-reload',
path: config.server.middlewareMode
? '*'
: '/' + normalizePath$3(path$o.relative(config.root, file)),
});
}
else {
// loaded but not in the module graph, probably not js
debugHmr?.(`[no modules matched] ${colors$1.dim(shortFile)}`);
}
return;
}
updateModules(shortFile, hmrContext.modules, timestamp, server);
}
function updateModules(file, modules, timestamp, { config, ws, moduleGraph }, afterInvalidation) {
const updates = [];
const invalidatedModules = new Set();
const traversedModules = new Set();
let needFullReload = false;
for (const mod of modules) {
const boundaries = [];
const hasDeadEnd = propagateUpdate(mod, traversedModules, boundaries);
moduleGraph.invalidateModule(mod, invalidatedModules, timestamp, true, boundaries.map((b) => b.boundary));
if (needFullReload) {
continue;
}
if (hasDeadEnd) {
needFullReload = true;
continue;
}
updates.push(...boundaries.map(({ boundary, acceptedVia }) => ({
type: `${boundary.type}-update`,
timestamp,
path: normalizeHmrUrl(boundary.url),
explicitImportRequired: boundary.type === 'js'
? isExplicitImportRequired(acceptedVia.url)
: undefined,
acceptedPath: normalizeHmrUrl(acceptedVia.url),
})));
}
if (needFullReload) {
config.logger.info(colors$1.green(`page reload `) + colors$1.dim(file), {
clear: !afterInvalidation,
timestamp: true,
});
ws.send({
type: 'full-reload',
});
return;
}
if (updates.length === 0) {
debugHmr?.(colors$1.yellow(`no update happened `) + colors$1.dim(file));
return;
}
config.logger.info(colors$1.green(`hmr update `) +
colors$1.dim([...new Set(updates.map((u) => u.path))].join(', ')), { clear: !afterInvalidation, timestamp: true });
ws.send({
type: 'update',
updates,
});
}
async function handleFileAddUnlink(file, server) {
const modules = [...(server.moduleGraph.getModulesByFile(file) || [])];
modules.push(...getAffectedGlobModules(file, server));
if (modules.length > 0) {
updateModules(getShortName(file, server.config.root), unique(modules), Date.now(), server);
}
}
function areAllImportsAccepted(importedBindings, acceptedExports) {
for (const binding of importedBindings) {
if (!acceptedExports.has(binding)) {
return false;
}
}
return true;
}
function propagateUpdate(node, traversedModules, boundaries, currentChain = [node]) {
if (traversedModules.has(node)) {
return false;
}
traversedModules.add(node);
// #7561
// if the imports of `node` have not been analyzed, then `node` has not
// been loaded in the browser and we should stop propagation.
if (node.id && node.isSelfAccepting === undefined) {
debugHmr?.(`[propagate update] stop propagation because not analyzed: ${colors$1.dim(node.id)}`);
return false;
}
if (node.isSelfAccepting) {
boundaries.push({ boundary: node, acceptedVia: node });
// additionally check for CSS importers, since a PostCSS plugin like
// Tailwind JIT may register any file as a dependency to a CSS file.
for (const importer of node.importers) {
if (isCSSRequest(importer.url) && !currentChain.includes(importer)) {
propagateUpdate(importer, traversedModules, boundaries, currentChain.concat(importer));
}
}
return false;
}
// A partially accepted module with no importers is considered self accepting,
// because the deal is "there are parts of myself I can't self accept if they
// are used outside of me".
// Also, the imported module (this one) must be updated before the importers,
// so that they do get the fresh imported module when/if they are reloaded.
if (node.acceptedHmrExports) {
boundaries.push({ boundary: node, acceptedVia: node });
}
else {
if (!node.importers.size) {
return true;
}
// #3716, #3913
// For a non-CSS file, if all of its importers are CSS files (registered via
// PostCSS plugins) it should be considered a dead end and force full reload.
if (!isCSSRequest(node.url) &&
[...node.importers].every((i) => isCSSRequest(i.url))) {
return true;
}
}
for (const importer of node.importers) {
const subChain = currentChain.concat(importer);
if (importer.acceptedHmrDeps.has(node)) {
boundaries.push({ boundary: importer, acceptedVia: node });
continue;
}
if (node.id && node.acceptedHmrExports && importer.importedBindings) {
const importedBindingsFromNode = importer.importedBindings.get(node.id);
if (importedBindingsFromNode &&
areAllImportsAccepted(importedBindingsFromNode, node.acceptedHmrExports)) {
continue;
}
}
if (currentChain.includes(importer)) {
// circular deps are considered a dead end
return true;
}
if (propagateUpdate(importer, traversedModules, boundaries, subChain)) {
return true;
}
}
return false;
}
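// Rough walk-through of the propagation above (module names are hypothetical):
// when ./util.js changes and its only importer ./app.js calls
// `import.meta.hot.accept('./util.js', cb)`, the importer loop pushes
// { boundary: app, acceptedVia: util } and propagateUpdate returns false, so
// updateModules sends a js-update for app. If neither the module itself nor any
// importer accepts the change, a dead end is reported (return true) and
// updateModules falls back to a full page reload instead.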
function handlePrunedModules(mods, { ws }) {
// update the disposed modules' hmr timestamp
// so that, if a module is re-imported, it re-applies side effects;
// without the timestamp the browser would not re-import it!
const t = Date.now();
mods.forEach((mod) => {
mod.lastHMRTimestamp = t;
debugHmr?.(`[dispose] ${colors$1.dim(mod.file)}`);
});
ws.send({
type: 'prune',
paths: [...mods].map((m) => m.url),
});
}
/**
* Lex import.meta.hot.accept() for accepted deps.
 * Since hot.accept() can only accept string literals or an array of string
* literals, we don't really need a heavy @babel/parse call on the entire source.
*
* @returns selfAccepts
*/
function lexAcceptedHmrDeps(code, start, urls) {
let state = 0 /* LexerState.inCall */;
// the state can only be 2 levels deep so no need for a stack
let prevState = 0 /* LexerState.inCall */;
let currentDep = '';
function addDep(index) {
urls.add({
url: currentDep,
start: index - currentDep.length - 1,
end: index + 1,
});
currentDep = '';
}
for (let i = start; i < code.length; i++) {
const char = code.charAt(i);
switch (state) {
case 0 /* LexerState.inCall */:
case 4 /* LexerState.inArray */:
if (char === `'`) {
prevState = state;
state = 1 /* LexerState.inSingleQuoteString */;
}
else if (char === `"`) {
prevState = state;
state = 2 /* LexerState.inDoubleQuoteString */;
}
else if (char === '`') {
prevState = state;
state = 3 /* LexerState.inTemplateString */;
}
else if (whitespaceRE.test(char)) {
continue;
}
else {
if (state === 0 /* LexerState.inCall */) {
if (char === `[`) {
state = 4 /* LexerState.inArray */;
}
else {
// reaching here means the first arg is neither a string literal
// nor an array literal (i.e. a direct callback), or there is no arg at all;
// in both cases this indicates a self-accepting module
return true; // done
}
}
else if (state === 4 /* LexerState.inArray */) {
if (char === `]`) {
return false; // done
}
else if (char === ',') {
continue;
}
else {
error$1(i);
}
}
}
break;
case 1 /* LexerState.inSingleQuoteString */:
if (char === `'`) {
addDep(i);
if (prevState === 0 /* LexerState.inCall */) {
// accept('foo', ...)
return false;
}
else {
state = prevState;
}
}
else {
currentDep += char;
}
break;
case 2 /* LexerState.inDoubleQuoteString */:
if (char === `"`) {
addDep(i);
if (prevState === 0 /* LexerState.inCall */) {
// accept('foo', ...)
return false;
}
else {
state = prevState;
}
}
else {
currentDep += char;
}
break;
case 3 /* LexerState.inTemplateString */:
if (char === '`') {
addDep(i);
if (prevState === 0 /* LexerState.inCall */) {
// accept('foo', ...)
return false;
}
else {
state = prevState;
}
}
else if (char === '$' && code.charAt(i + 1) === '{') {
error$1(i);
}
else {
currentDep += char;
}
break;
default:
throw new Error('unknown import.meta.hot lexer state');
}
}
return false;
}
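// Illustration of the lexer above (hypothetical snippet): for
//
//   import.meta.hot.accept(['./a.js', "./b.js"], (mods) => {})
//
// with `start` pointing just past the opening paren, the two string literals are
// added to `urls` and the function returns false, while a bare
// `import.meta.hot.accept()` or `import.meta.hot.accept(() => {})` returns true,
// marking the module as self-accepting.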
function lexAcceptedHmrExports(code, start, exportNames) {
const urls = new Set();
lexAcceptedHmrDeps(code, start, urls);
for (const { url } of urls) {
exportNames.add(url);
}
return urls.size > 0;
}
function normalizeHmrUrl(url) {
if (url[0] !== '.' && url[0] !== '/') {
url = wrapId(url);
}
return url;
}
function error$1(pos) {
const err = new Error(`import.meta.hot.accept() can only accept string literals or an ` +
`Array of string literals.`);
err.pos = pos;
throw err;
}
// vitejs/vite#610 when hot-reloading Vue files, we read immediately on file
// change event and sometimes this can be too early and get an empty buffer.
// Poll until the file's modified time has changed before reading again.
async function readModifiedFile(file) {
const content = await fsp.readFile(file, 'utf-8');
if (!content) {
const mtime = (await fsp.stat(file)).mtimeMs;
await new Promise((r) => {
let n = 0;
const poll = async () => {
n++;
const newMtime = (await fsp.stat(file)).mtimeMs;
if (newMtime !== mtime || n > 10) {
r(0);
}
else {
setTimeout(poll, 10);
}
};
setTimeout(poll, 10);
});
return await fsp.readFile(file, 'utf-8');
}
else {
return content;
}
}
const debug$a = createDebugger('vite:import-analysis');
const clientDir = normalizePath$3(CLIENT_DIR);
const skipRE = /\.(?:map|json)(?:$|\?)/;
const canSkipImportAnalysis = (id) => skipRE.test(id) || isDirectCSSRequest(id);
const optimizedDepChunkRE$1 = /\/chunk-[A-Z\d]{8}\.js/;
const optimizedDepDynamicRE$1 = /-[A-Z\d]{8}\.js/;
const hasImportInQueryParamsRE = /[?&]import=?\b/;
const hasViteIgnoreRE = /\/\*\s*@vite-ignore\s*\*\//;
const cleanUpRawUrlRE = /\/\*[\s\S]*?\*\/|([^\\:]|^)\/\/.*$/gm;
const urlIsStringRE = /^(?:'.*'|".*"|`.*`)$/;
const templateLiteralRE = /^\s*`(.*)`\s*$/;
function isExplicitImportRequired(url) {
return !isJSRequest(cleanUrl(url)) && !isCSSRequest(url);
}
function markExplicitImport(url) {
if (isExplicitImportRequired(url)) {
return injectQuery(url, 'import');
}
return url;
}
function extractImportedBindings(id, source, importSpec, importedBindings) {
let bindings = importedBindings.get(id);
if (!bindings) {
bindings = new Set();
importedBindings.set(id, bindings);
}
const isDynamic = importSpec.d > -1;
const isMeta = importSpec.d === -2;
if (isDynamic || isMeta) {
// this basically means the module will be impacted by any change in its dep
bindings.add('*');
return;
}
const exp = source.slice(importSpec.ss, importSpec.se);
const [match0] = findStaticImports(exp);
if (!match0) {
return;
}
const parsed = parseStaticImport(match0);
if (!parsed) {
return;
}
if (parsed.namespacedImport) {
bindings.add('*');
}
if (parsed.defaultImport) {
bindings.add('default');
}
if (parsed.namedImports) {
for (const name of Object.keys(parsed.namedImports)) {
bindings.add(name);
}
}
}
/**
* Server-only plugin that lexes, resolves, rewrites and analyzes url imports.
*
* - Imports are resolved to ensure they exist on disk
*
* - Lexes HMR accept calls and updates import relationships in the module graph
*
* - Bare module imports are resolved (by @rollup-plugin/node-resolve) to
* absolute file paths, e.g.
*
* ```js
* import 'foo'
* ```
* is rewritten to
* ```js
* import '/@fs//project/node_modules/foo/dist/foo.js'
* ```
*
* - CSS imports are appended with `.js` since both the js module and the actual
* css (referenced via `<link>`) may go through the transform pipeline:
*
* ```js
* import './style.css'
* ```
* is rewritten to
* ```js
* import './style.css.js'
* ```
*/
function importAnalysisPlugin(config) {
const { root, base } = config;
const clientPublicPath = path$o.posix.join(base, CLIENT_PUBLIC_PATH);
const enablePartialAccept = config.experimental?.hmrPartialAccept;
let server;
let _env;
function getEnv(ssr) {
if (!_env) {
_env = `import.meta.env = ${JSON.stringify({
...config.env,
SSR: '__vite__ssr__',
})};`;
// account for user env defines
for (const key in config.define) {
if (key.startsWith(`import.meta.env.`)) {
const val = config.define[key];
_env += `${key} = ${typeof val === 'string' ? val : JSON.stringify(val)};`;
}
}
}
return _env.replace('"__vite__ssr__"', ssr + '');
}
return {
name: 'vite:import-analysis',
configureServer(_server) {
server = _server;
},
async transform(source, importer, options) {
// In a real app `server` is always defined, but it is undefined when
// running src/node/server/__tests__/pluginContainer.spec.ts
if (!server) {
return null;
}
const ssr = options?.ssr === true;
const prettyImporter = prettifyUrl(importer, root);
if (canSkipImportAnalysis(importer)) {
debug$a?.(colors$1.dim(`[skipped] ${prettyImporter}`));
return null;
}
const start = performance.now();
await init;
let imports;
let exports;
source = stripBomTag(source);
try {
[imports, exports] = parse$e(source);
}
catch (e) {
const isVue = importer.endsWith('.vue');
const isJsx = importer.endsWith('.jsx') || importer.endsWith('.tsx');
const maybeJSX = !isVue && isJSRequest(importer);
const msg = isVue
? `Install @vitejs/plugin-vue to handle .vue files.`
: maybeJSX
? isJsx
? `If you use tsconfig.json, make sure to not set jsx to preserve.`
: `If you are using JSX, make sure to name the file with the .jsx or .tsx extension.`
: `You may need to install appropriate plugins to handle the ${path$o.extname(importer)} file format, or if it's an asset, add "**/*${path$o.extname(importer)}" to \`assetsInclude\` in your configuration.`;
this.error(`Failed to parse source for import analysis because the content ` +
`contains invalid JS syntax. ` +
msg, e.idx);
}
const depsOptimizer = getDepsOptimizer(config, ssr);
const { moduleGraph } = server;
// since we are already in the transform phase of the importer, it must
// have been loaded so its entry is guaranteed in the module graph.
const importerModule = moduleGraph.getModuleById(importer);
if (!importerModule) {
// This request is no longer valid. It could happen for optimized deps
// requests. A full reload is going to request this id again.
// Throwing an outdated error so we properly finish the request with a
// 504 sent to the browser.
throwOutdatedRequest(importer);
}
if (!imports.length && !this._addedImports) {
importerModule.isSelfAccepting = false;
debug$a?.(`${timeFrom(start)} ${colors$1.dim(`[no imports] ${prettyImporter}`)}`);
return source;
}
let hasHMR = false;
let isSelfAccepting = false;
let hasEnv = false;
let needQueryInjectHelper = false;
let s;
const str = () => s || (s = new MagicString(source));
let isPartiallySelfAccepting = false;
const importedBindings = enablePartialAccept
? new Map()
: null;
const toAbsoluteUrl = (url) => path$o.posix.resolve(path$o.posix.dirname(importerModule.url), url);
const normalizeUrl = async (url, pos, forceSkipImportAnalysis = false) => {
url = stripBase(url, base);
let importerFile = importer;
const optimizeDeps = getDepOptimizationConfig(config, ssr);
if (moduleListContains(optimizeDeps?.exclude, url)) {
if (depsOptimizer) {
await depsOptimizer.scanProcessing;
// if the dependency encountered in the optimized file was excluded from the optimization
// the dependency needs to be resolved starting from the original source location of the optimized file
// because starting from node_modules/.vite will not find the dependency if it was not hoisted
// (that is, if it is under node_modules directory in the package source of the optimized file)
for (const optimizedModule of depsOptimizer.metadata.depInfoList) {
if (!optimizedModule.src)
continue; // Ignore chunks
if (optimizedModule.file === importerModule.file) {
importerFile = optimizedModule.src;
}
}
}
}
const resolved = await this.resolve(url, importerFile);
if (!resolved) {
// in ssr, we should let node handle the missing modules
if (ssr) {
return [url, url];
}
// fix #9534: ensure the importer module node is not blocked from propagating updates
importerModule.isSelfAccepting = false;
return this.error(`Failed to resolve import "${url}" from "${path$o.relative(process.cwd(), importerFile)}". Does the file exist?`, pos);
}
const isRelative = url[0] === '.';
const isSelfImport = !isRelative && cleanUrl(url) === cleanUrl(importer);
// normalize all imports into resolved URLs
// e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'`
if (resolved.id.startsWith(withTrailingSlash(root))) {
// in root: infer short absolute path from root
url = resolved.id.slice(root.length);
}
else if (depsOptimizer?.isOptimizedDepFile(resolved.id) ||
fs$l.existsSync(cleanUrl(resolved.id))) {
// an optimized dep may not yet exist in the filesystem, or
// a regular file exists but is out of root: rewrite to absolute /@fs/ paths
url = path$o.posix.join(FS_PREFIX, resolved.id);
}
else {
url = resolved.id;
}
if (isExternalUrl(url)) {
return [url, url];
}
// if the resolved id is not a valid browser import specifier,
// prefix it to make it valid. We will strip this before feeding it
// back into the transform pipeline
if (url[0] !== '.' && url[0] !== '/') {
url = wrapId(resolved.id);
}
// make the URL browser-valid if not SSR
if (!ssr) {
// mark non-js/css imports with `?import`
url = markExplicitImport(url);
// If the url isn't a request for a pre-bundled common chunk, inherit the
// importer's version query for relative js/css imports and self-module
// virtual imports (e.g. vue blocks).
// Do not do this for unknown-type imports, otherwise the appended
// query can break 3rd party plugins' extension checks.
if ((isRelative || isSelfImport) &&
!hasImportInQueryParamsRE.test(url) &&
!url.match(DEP_VERSION_RE)) {
const versionMatch = importer.match(DEP_VERSION_RE);
if (versionMatch) {
url = injectQuery(url, versionMatch[1]);
}
}
// check if the dep has been hmr updated. If yes, we need to attach
// its last updated timestamp to force the browser to fetch the most
// up-to-date version of this module.
try {
// delay setting `isSelfAccepting` until the file is actually used (#7870)
// We use an internal function to avoid resolving the url again
const depModule = await moduleGraph._ensureEntryFromUrl(unwrapId(url), ssr, canSkipImportAnalysis(url) || forceSkipImportAnalysis, resolved);
if (depModule.lastHMRTimestamp > 0) {
url = injectQuery(url, `t=${depModule.lastHMRTimestamp}`);
}
}
catch (e) {
// it's possible that the dep fails to resolve (non-existent import)
// attach location to the missing import
e.pos = pos;
throw e;
}
// prepend base
url = joinUrlSegments(base, url);
}
return [url, resolved.id];
};
const orderedImportedUrls = new Array(imports.length);
const orderedAcceptedUrls = new Array(imports.length);
const orderedAcceptedExports = new Array(imports.length);
await Promise.all(imports.map(async (importSpecifier, index) => {
const { s: start, e: end, ss: expStart, se: expEnd, d: dynamicIndex, a: assertIndex, } = importSpecifier;
// #2083 User may use escape path,
// so use imports[index].n to get the unescaped string
let specifier = importSpecifier.n;
const rawUrl = source.slice(start, end);
// check import.meta usage
if (rawUrl === 'import.meta') {
const prop = source.slice(end, end + 4);
if (prop === '.hot') {
hasHMR = true;
const endHot = end + 4 + (source[end + 4] === '?' ? 1 : 0);
if (source.slice(endHot, endHot + 7) === '.accept') {
// further analyze accepted modules
if (source.slice(endHot, endHot + 14) === '.acceptExports') {
const importAcceptedExports = (orderedAcceptedExports[index] =
new Set());
lexAcceptedHmrExports(source, source.indexOf('(', endHot + 14) + 1, importAcceptedExports);
isPartiallySelfAccepting = true;
}
else {
const importAcceptedUrls = (orderedAcceptedUrls[index] =
new Set());
if (lexAcceptedHmrDeps(source, source.indexOf('(', endHot + 7) + 1, importAcceptedUrls)) {
isSelfAccepting = true;
}
}
}
}
else if (prop === '.env') {
hasEnv = true;
}
return;
}
else if (templateLiteralRE.test(rawUrl)) {
// If the import has backticks but isn't transformed as a glob import
// (as there's nothing to glob), check if it's simply a plain string.
// If so, we can replace the specifier as a plain string to prevent
// an incorrect "cannot be analyzed" warning.
if (!(rawUrl.includes('${') && rawUrl.includes('}'))) {
specifier = rawUrl.replace(templateLiteralRE, '$1');
}
}
const isDynamicImport = dynamicIndex > -1;
// strip import assertions as we can process them ourselves
if (!isDynamicImport && assertIndex > -1) {
str().remove(end + 1, expEnd);
}
// static import or valid string in dynamic import
// If resolvable, let's resolve it
if (specifier) {
// skip external / data uri
if (isExternalUrl(specifier) || isDataUrl(specifier)) {
return;
}
// skip ssr external
if (ssr) {
if (config.legacy?.buildSsrCjsExternalHeuristics) {
if (cjsShouldExternalizeForSSR(specifier, server._ssrExternals)) {
return;
}
}
else if (shouldExternalizeForSSR(specifier, importer, config)) {
return;
}
if (isBuiltin(specifier)) {
return;
}
}
// skip client
if (specifier === clientPublicPath) {
return;
}
// disallow imports to non-asset /public files
if (specifier[0] === '/' &&
!(config.assetsInclude(cleanUrl(specifier)) ||
urlRE.test(specifier)) &&
checkPublicFile(specifier, config)) {
throw new Error(`Cannot import non-asset file ${specifier} which is inside /public. ` +
`JS/CSS files inside /public are copied as-is on build and ` +
`can only be referenced via <script src> or <link href> in html. ` +
`If you want to get the URL of that file, use ${injectQuery(specifier, 'url')} instead.`);
}
// normalize
const [url, resolvedId] = await normalizeUrl(specifier, start);
if (!isDynamicImport &&
specifier &&
!specifier.includes('?') && // ignore custom queries
isCSSRequest(resolvedId) &&
!isModuleCSSRequest(resolvedId)) {
const sourceExp = source.slice(expStart, start);
if (sourceExp.includes('from') && // check default and named imports
!sourceExp.includes('__vite_glob_') // glob handles deprecation message itself
) {
const newImport = sourceExp + specifier + `?inline` + source.slice(end, expEnd);
this.warn(`\n` +
colors$1.cyan(importerModule.file) +
`\n` +
colors$1.reset(generateCodeFrame(source, start)) +
`\n` +
colors$1.yellow(`Default and named imports from CSS files are deprecated. ` +
`Use the ?inline query instead. ` +
`For example: ${newImport}`));
}
}
// record as safe modules
// safeModulesPath should not include the base prefix.
// See https://github.com/vitejs/vite/issues/9438#issuecomment-1465270409
server?.moduleGraph.safeModulesPath.add(fsPathFromUrl(stripBase(url, base)));
if (url !== specifier) {
let rewriteDone = false;
if (depsOptimizer?.isOptimizedDepFile(resolvedId) &&
!resolvedId.match(optimizedDepChunkRE$1)) {
// for optimized cjs deps, support named imports by rewriting named imports to const assignments.
// internal optimized chunks don't need es interop and are excluded
// The browserHash in resolvedId could be stale in which case there will be a full
// page reload. We could return a 404 in that case but it is safe to return the request
const file = cleanUrl(resolvedId); // Remove ?v={hash}
const needsInterop = await optimizedDepNeedsInterop(depsOptimizer.metadata, file, config, ssr);
if (needsInterop === undefined) {
// Non-entry dynamic imports from dependencies will reach here as there isn't
// optimize info for them, but they don't need es interop. If the request isn't
// a dynamic import, then it is an internal Vite error
if (!file.match(optimizedDepDynamicRE$1)) {
config.logger.error(colors$1.red(`Vite Error, ${url} optimized info should be defined`));
}
}
else if (needsInterop) {
debug$a?.(`${url} needs interop`);
interopNamedImports(str(), importSpecifier, url, index, importer, config);
rewriteDone = true;
}
}
// If source code imports builtin modules via named imports, the stub proxy export
// would fail as it's `export default` only. Apply interop for builtin modules to
// correctly throw the error message.
else if (url.includes(browserExternalId) &&
source.slice(expStart, start).includes('{')) {
interopNamedImports(str(), importSpecifier, url, index, importer, config);
rewriteDone = true;
}
if (!rewriteDone) {
const rewrittenUrl = JSON.stringify(url);
const s = isDynamicImport ? start : start - 1;
const e = isDynamicImport ? end : end + 1;
str().overwrite(s, e, rewrittenUrl, {
contentOnly: true,
});
}
}
// record for HMR import chain analysis
// make sure to unwrap and normalize away base
const hmrUrl = unwrapId(stripBase(url, base));
const isLocalImport = !isExternalUrl(hmrUrl) && !isDataUrl(hmrUrl);
if (isLocalImport) {
orderedImportedUrls[index] = hmrUrl;
}
if (enablePartialAccept && importedBindings) {
extractImportedBindings(resolvedId, source, importSpecifier, importedBindings);
}
if (!isDynamicImport &&
isLocalImport &&
config.server.preTransformRequests) {
// pre-transform known direct imports
// These requests will also be registered in transformRequest to be awaited
// by the deps optimizer
const url = removeImportQuery(hmrUrl);
server.transformRequest(url, { ssr }).catch((e) => {
if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP ||
e?.code === ERR_CLOSED_SERVER) {
// these are expected errors
return;
}
// Unexpected error, log the issue but avoid an unhandled exception
config.logger.error(e.message, { error: e });
});
}
}
else if (!importer.startsWith(withTrailingSlash(clientDir))) {
if (!isInNodeModules(importer)) {
// check @vite-ignore which suppresses dynamic import warning
const hasViteIgnore = hasViteIgnoreRE.test(
// complete expression inside parens
source.slice(dynamicIndex + 1, end));
if (!hasViteIgnore) {
this.warn(`\n` +
colors$1.cyan(importerModule.file) +
`\n` +
colors$1.reset(generateCodeFrame(source, start)) +
colors$1.yellow(`\nThe above dynamic import cannot be analyzed by Vite.\n` +
`See ${colors$1.blue(`https://github.com/rollup/plugins/tree/master/packages/dynamic-import-vars#limitations`)} ` +
`for supported dynamic import formats. ` +
`If this is intended to be left as-is, you can use the ` +
`/* @vite-ignore */ comment inside the import() call to suppress this warning.\n`));
}
}
if (!ssr) {
const url = rawUrl.replace(cleanUpRawUrlRE, '').trim();
if (!urlIsStringRE.test(url) ||
isExplicitImportRequired(url.slice(1, -1))) {
needQueryInjectHelper = true;
str().overwrite(start, end, `__vite__injectQuery(${url}, 'import')`, { contentOnly: true });
}
}
}
}));
const importedUrls = new Set(orderedImportedUrls.filter(Boolean));
const acceptedUrls = mergeAcceptedUrls(orderedAcceptedUrls);
const acceptedExports = mergeAcceptedUrls(orderedAcceptedExports);
if (hasEnv) {
// inject import.meta.env
str().prepend(getEnv(ssr));
}
if (hasHMR && !ssr) {
debugHmr?.(`${isSelfAccepting
? `[self-accepts]`
: isPartiallySelfAccepting
? `[accepts-exports]`
: acceptedUrls.size
? `[accepts-deps]`
: `[detected api usage]`} ${prettyImporter}`);
// inject hot context
str().prepend(`import { createHotContext as __vite__createHotContext } from "${clientPublicPath}";` +
`import.meta.hot = __vite__createHotContext(${JSON.stringify(normalizeHmrUrl(importerModule.url))});`);
}
if (needQueryInjectHelper) {
str().prepend(`import { injectQuery as __vite__injectQuery } from "${clientPublicPath}";`);
}
// normalize and rewrite accepted urls
const normalizedAcceptedUrls = new Set();
for (const { url, start, end } of acceptedUrls) {
const [normalized] = await moduleGraph.resolveUrl(toAbsoluteUrl(url), ssr);
normalizedAcceptedUrls.add(normalized);
str().overwrite(start, end, JSON.stringify(normalized), {
contentOnly: true,
});
}
// update the module graph for HMR analysis.
// note: CSS imports do their own graph update in the css plugin, so we
// only handle js graph updates here.
if (!isCSSRequest(importer)) {
// attached by pluginContainer.addWatchFile
const pluginImports = this._addedImports;
if (pluginImports) {
(await Promise.all([...pluginImports].map((id) => normalizeUrl(id, 0, true)))).forEach(([url]) => importedUrls.add(url));
}
// HMR transforms are no-ops in SSR, so an `accept` call will
// never be injected. Avoid updating the `isSelfAccepting`
// property for our module node in that case.
if (ssr && importerModule.isSelfAccepting) {
isSelfAccepting = true;
}
// a partially accepted module that accepts all its exports
// behaves like a self-accepted module in practice
if (!isSelfAccepting &&
isPartiallySelfAccepting &&
acceptedExports.size >= exports.length &&
exports.every((e) => acceptedExports.has(e.n))) {
isSelfAccepting = true;
}
const prunedImports = await moduleGraph.updateModuleInfo(importerModule, importedUrls, importedBindings, normalizedAcceptedUrls, isPartiallySelfAccepting ? acceptedExports : null, isSelfAccepting, ssr);
if (hasHMR && prunedImports) {
handlePrunedModules(prunedImports, server);
}
}
debug$a?.(`${timeFrom(start)} ${colors$1.dim(`[${importedUrls.size} imports rewritten] ${prettyImporter}`)}`);
if (s) {
return transformStableResult(s, importer, config);
}
else {
return source;
}
},
};
}
function mergeAcceptedUrls(orderedUrls) {
const acceptedUrls = new Set();
for (const urls of orderedUrls) {
if (!urls)
continue;
for (const url of urls)
acceptedUrls.add(url);
}
return acceptedUrls;
}
function interopNamedImports(str, importSpecifier, rewrittenUrl, importIndex, importer, config) {
const source = str.original;
const { s: start, e: end, ss: expStart, se: expEnd, d: dynamicIndex, } = importSpecifier;
const exp = source.slice(expStart, expEnd);
if (dynamicIndex > -1) {
// rewrite `import('package')` to expose the default directly
str.overwrite(expStart, expEnd, `import('${rewrittenUrl}').then(m => m.default && m.default.__esModule ? m.default : ({ ...m.default, default: m.default }))` +
getLineBreaks(exp), { contentOnly: true });
}
else {
const rawUrl = source.slice(start, end);
const rewritten = transformCjsImport(exp, rewrittenUrl, rawUrl, importIndex, importer, config);
if (rewritten) {
str.overwrite(expStart, expEnd, rewritten + getLineBreaks(exp), {
contentOnly: true,
});
}
else {
// #1439 export * from '...'
str.overwrite(start, end, rewrittenUrl + getLineBreaks(source.slice(start, end)), {
contentOnly: true,
});
}
}
}
// get line breaks to preserve the line count so source maps are not broken
function getLineBreaks(str) {
return str.includes('\n') ? '\n'.repeat(str.split('\n').length - 1) : '';
}
/**
* Detect import statements to a known optimized CJS dependency and provide
* ES named imports interop. We do this by rewriting named imports to a variable
* assignment to the corresponding property on the `module.exports` of the cjs
* module. Note this doesn't support dynamic re-assignments from within the cjs
* module.
*
* Note that es-module-lexer treats `export * from '...'` as an import as well,
* so, we may encounter ExportAllDeclaration here, in which case `undefined`
* will be returned.
*
* Credits \@csr632 via #837
*/
function transformCjsImport(importExp, url, rawUrl, importIndex, importer, config) {
const node = parse$b(importExp, {
ecmaVersion: 'latest',
sourceType: 'module',
}).body[0];
// `export * from '...'` may cause unexpected problems, so give it a warning
if (config.command === 'serve' &&
node.type === 'ExportAllDeclaration' &&
!node.exported) {
config.logger.warn(colors$1.yellow(`\nUnable to interop \`${importExp}\` in ${importer}, this may lose module exports. Please export "${rawUrl}" as ESM or use named exports instead, e.g. \`export { A, B } from "${rawUrl}"\``));
}
else if (node.type === 'ImportDeclaration' ||
node.type === 'ExportNamedDeclaration') {
if (!node.specifiers.length) {
return `import "${url}"`;
}
const importNames = [];
const exportNames = [];
let defaultExports = '';
for (const spec of node.specifiers) {
if (spec.type === 'ImportSpecifier' &&
spec.imported.type === 'Identifier') {
const importedName = spec.imported.name;
const localName = spec.local.name;
importNames.push({ importedName, localName });
}
else if (spec.type === 'ImportDefaultSpecifier') {
importNames.push({
importedName: 'default',
localName: spec.local.name,
});
}
else if (spec.type === 'ImportNamespaceSpecifier') {
importNames.push({ importedName: '*', localName: spec.local.name });
}
else if (spec.type === 'ExportSpecifier' &&
spec.exported.type === 'Identifier') {
// for ExportSpecifier, local name is same as imported name
// prefix the variable name to avoid clashing with other local variables
const importedName = spec.local.name;
// we want to specify exported name as variable and re-export it
const exportedName = spec.exported.name;
if (exportedName === 'default') {
defaultExports = makeLegalIdentifier(`__vite__cjsExportDefault_${importIndex}`);
importNames.push({ importedName, localName: defaultExports });
}
else {
const localName = makeLegalIdentifier(`__vite__cjsExport_${exportedName}`);
importNames.push({ importedName, localName });
exportNames.push(`${localName} as ${exportedName}`);
}
}
}
// If there are multiple imports of the same id in one file,
// importIndex prevents the cjsModuleName from being duplicated
const cjsModuleName = makeLegalIdentifier(`__vite__cjsImport${importIndex}_${rawUrl}`);
const lines = [`import ${cjsModuleName} from "${url}"`];
importNames.forEach(({ importedName, localName }) => {
if (importedName === '*') {
lines.push(`const ${localName} = ${cjsModuleName}`);
}
else if (importedName === 'default') {
lines.push(`const ${localName} = ${cjsModuleName}.__esModule ? ${cjsModuleName}.default : ${cjsModuleName}`);
}
else {
lines.push(`const ${localName} = ${cjsModuleName}["${importedName}"]`);
}
});
if (defaultExports) {
lines.push(`export default ${defaultExports}`);
}
if (exportNames.length) {
lines.push(`export { ${exportNames.join(', ')} }`);
}
return lines.join('; ');
}
}
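// Minimal sketch of the interop rewrite above (the url and index are hypothetical):
//
//   import React, { useState } from 'react'
//
// becomes roughly
//
//   import __vite__cjsImport0_react from "/node_modules/.vite/deps/react.js?v=abcd1234";
//   const React = __vite__cjsImport0_react.__esModule ? __vite__cjsImport0_react.default : __vite__cjsImport0_react;
//   const useState = __vite__cjsImport0_react["useState"]
//
// so named imports read the corresponding properties off the CJS module's exports.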
const process_env_NODE_ENV_RE = /(\bglobal(This)?\.)?\bprocess\.env\.NODE_ENV\b/g;
// ids in transform are normalized to unix style
const normalizedClientEntry = normalizePath$3(CLIENT_ENTRY);
const normalizedEnvEntry = normalizePath$3(ENV_ENTRY);
/**
 * some values used by the client need to be dynamically injected by the server
* @server-only
*/
function clientInjectionsPlugin(config) {
let injectConfigValues;
return {
name: 'vite:client-inject',
async buildStart() {
const resolvedServerHostname = (await resolveHostname(config.server.host))
.name;
const resolvedServerPort = config.server.port;
const devBase = config.base;
const serverHost = `${resolvedServerHostname}:${resolvedServerPort}${devBase}`;
let hmrConfig = config.server.hmr;
hmrConfig = isObject$2(hmrConfig) ? hmrConfig : undefined;
const host = hmrConfig?.host || null;
const protocol = hmrConfig?.protocol || null;
const timeout = hmrConfig?.timeout || 30000;
const overlay = hmrConfig?.overlay !== false;
const isHmrServerSpecified = !!hmrConfig?.server;
// hmr.clientPort -> hmr.port
// -> (24678 if middleware mode and HMR server is not specified) -> new URL(import.meta.url).port
let port = hmrConfig?.clientPort || hmrConfig?.port || null;
if (config.server.middlewareMode && !isHmrServerSpecified) {
port || (port = 24678);
}
let directTarget = hmrConfig?.host || resolvedServerHostname;
directTarget += `:${hmrConfig?.port || resolvedServerPort}`;
directTarget += devBase;
let hmrBase = devBase;
if (hmrConfig?.path) {
hmrBase = path$o.posix.join(hmrBase, hmrConfig.path);
}
const serializedDefines = serializeDefine(config.define || {});
const modeReplacement = escapeReplacement(config.mode);
const baseReplacement = escapeReplacement(devBase);
const definesReplacement = () => serializedDefines;
const serverHostReplacement = escapeReplacement(serverHost);
const hmrProtocolReplacement = escapeReplacement(protocol);
const hmrHostnameReplacement = escapeReplacement(host);
const hmrPortReplacement = escapeReplacement(port);
const hmrDirectTargetReplacement = escapeReplacement(directTarget);
const hmrBaseReplacement = escapeReplacement(hmrBase);
const hmrTimeoutReplacement = escapeReplacement(timeout);
const hmrEnableOverlayReplacement = escapeReplacement(overlay);
injectConfigValues = (code) => {
return code
.replace(`__MODE__`, modeReplacement)
.replace(/__BASE__/g, baseReplacement)
.replace(`__DEFINES__`, definesReplacement)
.replace(`__SERVER_HOST__`, serverHostReplacement)
.replace(`__HMR_PROTOCOL__`, hmrProtocolReplacement)
.replace(`__HMR_HOSTNAME__`, hmrHostnameReplacement)
.replace(`__HMR_PORT__`, hmrPortReplacement)
.replace(`__HMR_DIRECT_TARGET__`, hmrDirectTargetReplacement)
.replace(`__HMR_BASE__`, hmrBaseReplacement)
.replace(`__HMR_TIMEOUT__`, hmrTimeoutReplacement)
.replace(`__HMR_ENABLE_OVERLAY__`, hmrEnableOverlayReplacement);
};
},
transform(code, id, options) {
if (id === normalizedClientEntry || id === normalizedEnvEntry) {
return injectConfigValues(code);
}
else if (!options?.ssr && code.includes('process.env.NODE_ENV')) {
// replace process.env.NODE_ENV instead of defining a global
// for it to avoid shimming a `process` object during dev,
// avoiding inconsistencies between dev and build
return code.replace(process_env_NODE_ENV_RE, config.define?.['process.env.NODE_ENV'] ||
JSON.stringify(process.env.NODE_ENV || config.mode));
}
},
};
}
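// For illustration: in the client entry a placeholder such as `__HMR_PORT__` is
// replaced with the JSON-serialized resolved value (e.g. 24678 or null) and
// `__DEFINES__` with the serialized `define` object, so every injected value is a
// valid JS expression rather than a bare string.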
function escapeReplacement(value) {
const jsonValue = JSON.stringify(value);
return () => jsonValue;
}
function serializeDefine(define) {
let res = `{`;
for (const key in define) {
const val = define[key];
res += `${JSON.stringify(key)}: ${typeof val === 'string' ? `(${val})` : JSON.stringify(val)}, `;
}
return res + `}`;
}
const wasmHelperId = '\0vite/wasm-helper';
const wasmHelper = async (opts = {}, url) => {
let result;
if (url.startsWith('data:')) {
const urlContent = url.replace(/^data:.*?base64,/, '');
let bytes;
if (typeof Buffer === 'function' && typeof Buffer.from === 'function') {
bytes = Buffer.from(urlContent, 'base64');
}
else if (typeof atob === 'function') {
const binaryString = atob(urlContent);
bytes = new Uint8Array(binaryString.length);
for (let i = 0; i < binaryString.length; i++) {
bytes[i] = binaryString.charCodeAt(i);
}
}
else {
throw new Error('Failed to decode base64-encoded data URL, Buffer and atob are not supported');
}
result = await WebAssembly.instantiate(bytes, opts);
}
else {
// https://github.com/mdn/webassembly-examples/issues/5
// WebAssembly.instantiateStreaming requires the server to provide the
// correct MIME type for .wasm files, which unfortunately doesn't work for
// a lot of static file servers, so we just work around it by getting the
// raw buffer.
const response = await fetch(url);
const contentType = response.headers.get('Content-Type') || '';
if ('instantiateStreaming' in WebAssembly &&
contentType.startsWith('application/wasm')) {
result = await WebAssembly.instantiateStreaming(response, opts);
}
else {
const buffer = await response.arrayBuffer();
result = await WebAssembly.instantiate(buffer, opts);
}
}
return result.instance;
};
const wasmHelperCode = wasmHelper.toString();
const wasmHelperPlugin = (config) => {
return {
name: 'vite:wasm-helper',
resolveId(id) {
if (id === wasmHelperId) {
return id;
}
},
async load(id) {
if (id === wasmHelperId) {
return `export default ${wasmHelperCode}`;
}
if (!id.endsWith('.wasm?init')) {
return;
}
const url = await fileToUrl(id, config, this);
return `
import initWasm from "${wasmHelperId}"
export default opts => initWasm(opts, ${JSON.stringify(url)})
`;
},
};
};
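// Usage sketch on the application side (the file name is hypothetical):
//
//   import init from './example.wasm?init'
//   const instance = await init({ /* optional imports object */ })
//   instance.exports.someExportedFn()
//
// The `?init` import resolves to the `export default opts => initWasm(opts, url)`
// wrapper returned above, which instantiates the module via wasmHelper and resolves
// to the WebAssembly.Instance.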
const wasmFallbackPlugin = () => {
return {
name: 'vite:wasm-fallback',
async load(id) {
if (!id.endsWith('.wasm')) {
return;
}
throw new Error('"ESM integration proposal for Wasm" is not supported currently. ' +
'Use vite-plugin-wasm or other community plugins to handle this. ' +
'Alternatively, you can use `.wasm?init` or `.wasm?url`. ' +
'See https://vitejs.dev/guide/features.html#webassembly for more details.');
},
};
};
const WORKER_FILE_ID = 'worker_file';
const workerCache = new WeakMap();
function saveEmitWorkerAsset(config, asset) {
const fileName = asset.fileName;
const workerMap = workerCache.get(config.mainConfig || config);
workerMap.assets.set(fileName, asset);
}
// Ensure that only one rollup build is called at the same time to avoid
// leaking state in plugins between worker builds.
// TODO: Review if we can parallelize the bundling of workers.
const workerConfigSemaphore = new WeakMap();
async function bundleWorkerEntry(config, id, query) {
const processing = workerConfigSemaphore.get(config);
if (processing) {
await processing;
return bundleWorkerEntry(config, id, query);
}
const promise = serialBundleWorkerEntry(config, id, query);
workerConfigSemaphore.set(config, promise);
promise.then(() => workerConfigSemaphore.delete(config));
return promise;
}
async function serialBundleWorkerEntry(config, id, query) {
// bundle the file as entry to support imports
const { rollup } = await import('rollup');
const { plugins, rollupOptions, format } = config.worker;
const bundle = await rollup({
...rollupOptions,
input: cleanUrl(id),
plugins,
onwarn(warning, warn) {
onRollupWarning(warning, warn, config);
},
preserveEntrySignatures: false,
});
let chunk;
try {
const workerOutputConfig = config.worker.rollupOptions.output;
const workerConfig = workerOutputConfig
? Array.isArray(workerOutputConfig)
? workerOutputConfig[0] || {}
: workerOutputConfig
: {};
const { output: [outputChunk, ...outputChunks], } = await bundle.generate({
entryFileNames: path$o.posix.join(config.build.assetsDir, '[name]-[hash].js'),
chunkFileNames: path$o.posix.join(config.build.assetsDir, '[name]-[hash].js'),
assetFileNames: path$o.posix.join(config.build.assetsDir, '[name]-[hash].[ext]'),
...workerConfig,
format,
sourcemap: config.build.sourcemap,
});
chunk = outputChunk;
outputChunks.forEach((outputChunk) => {
if (outputChunk.type === 'asset') {
saveEmitWorkerAsset(config, outputChunk);
}
else if (outputChunk.type === 'chunk') {
saveEmitWorkerAsset(config, {
fileName: outputChunk.fileName,
source: outputChunk.code,
type: 'asset',
});
}
});
}
finally {
await bundle.close();
}
return emitSourcemapForWorkerEntry(config, query, chunk);
}
function emitSourcemapForWorkerEntry(config, query, chunk) {
const { map: sourcemap } = chunk;
if (sourcemap) {
if (config.build.sourcemap === 'hidden' ||
config.build.sourcemap === true) {
const data = sourcemap.toString();
const mapFileName = chunk.fileName + '.map';
saveEmitWorkerAsset(config, {
fileName: mapFileName,
type: 'asset',
source: data,
});
}
}
return chunk;
}
const workerAssetUrlRE = /__VITE_WORKER_ASSET__([a-z\d]{8})__/g;
function encodeWorkerAssetFileName(fileName, workerCache) {
const { fileNameHash } = workerCache;
const hash = getHash(fileName);
if (!fileNameHash.get(hash)) {
fileNameHash.set(hash, fileName);
}
return `__VITE_WORKER_ASSET__${hash}__`;
}
async function workerFileToUrl(config, id, query) {
const workerMap = workerCache.get(config.mainConfig || config);
let fileName = workerMap.bundle.get(id);
if (!fileName) {
const outputChunk = await bundleWorkerEntry(config, id, query);
fileName = outputChunk.fileName;
saveEmitWorkerAsset(config, {
fileName,
source: outputChunk.code,
type: 'asset',
});
workerMap.bundle.set(id, fileName);
}
return encodeWorkerAssetFileName(fileName, workerMap);
}
function webWorkerPostPlugin() {
return {
name: 'vite:worker-post',
resolveImportMeta(property, { chunkId, format }) {
// document is undefined in the worker, so we need to avoid it in iife
if (property === 'url' && format === 'iife') {
return 'self.location.href';
}
return null;
},
};
}
function webWorkerPlugin(config) {
const isBuild = config.command === 'build';
let server;
const isWorker = config.isWorker;
const isWorkerQueryId = (id) => {
const parsedQuery = parseRequest(id);
if (parsedQuery &&
(parsedQuery.worker ?? parsedQuery.sharedworker) != null) {
return true;
}
return false;
};
return {
name: 'vite:worker',
configureServer(_server) {
server = _server;
},
buildStart() {
if (isWorker) {
return;
}
workerCache.set(config, {
assets: new Map(),
bundle: new Map(),
fileNameHash: new Map(),
});
},
load(id) {
if (isBuild && isWorkerQueryId(id)) {
return '';
}
},
shouldTransformCachedModule({ id }) {
if (isBuild && isWorkerQueryId(id) && config.build.watch) {
return true;
}
},
async transform(raw, id, options) {
const ssr = options?.ssr === true;
const query = parseRequest(id);
if (query && query[WORKER_FILE_ID] != null) {
// a worker imported via a worker constructor will have query.type;
// other types are workers imported as ESM
const workerType = query['type'];
let injectEnv = '';
const scriptPath = JSON.stringify(path$o.posix.join(config.base, ENV_PUBLIC_PATH));
if (workerType === 'classic') {
injectEnv = `importScripts(${scriptPath})\n`;
}
else if (workerType === 'module') {
injectEnv = `import ${scriptPath}\n`;
}
else if (workerType === 'ignore') {
if (isBuild) {
injectEnv = '';
}
else if (server) {
// with a dynamic worker type we can't know how to import the env,
// so we copy the server transform result of /@vite/env into the file header
const { moduleGraph } = server;
const module = moduleGraph.getModuleById(ENV_ENTRY);
injectEnv = module?.transformResult?.code || '';
}
}
if (injectEnv) {
const s = new MagicString(raw);
s.prepend(injectEnv);
return {
code: s.toString(),
map: s.generateMap({ hires: 'boundary' }),
};
}
return;
}
if (query == null ||
(query && (query.worker ?? query.sharedworker) == null)) {
return;
}
// stringified url or `new URL(...)`
let url;
const { format } = config.worker;
const workerConstructor = query.sharedworker != null ? 'SharedWorker' : 'Worker';
const workerType = isBuild
? format === 'es'
? 'module'
: 'classic'
: 'module';
const workerOptions = workerType === 'classic' ? '' : ',{type: "module"}';
if (isBuild) {
getDepsOptimizer(config, ssr)?.registerWorkersSource(id);
if (query.inline != null) {
const chunk = await bundleWorkerEntry(config, id, query);
const encodedJs = `const encodedJs = "${Buffer.from(chunk.code).toString('base64')}";`;
const code =
// Using a blob URL for SharedWorker results in multiple instances of the same worker
workerConstructor === 'Worker'
? `${encodedJs}
const blob = typeof window !== "undefined" && window.Blob && new Blob([atob(encodedJs)], { type: "text/javascript;charset=utf-8" });
export default function WorkerWrapper() {
let objURL;
try {
objURL = blob && (window.URL || window.webkitURL).createObjectURL(blob);
if (!objURL) throw ''
return new ${workerConstructor}(objURL)
} catch(e) {
return new ${workerConstructor}("data:application/javascript;base64," + encodedJs${workerOptions});
} finally {
objURL && (window.URL || window.webkitURL).revokeObjectURL(objURL);
}
}`
: `${encodedJs}
export default function WorkerWrapper() {
return new ${workerConstructor}("data:application/javascript;base64," + encodedJs${workerOptions});
}
`;
return {
code,
// Empty sourcemap to suppress Rollup warning
map: { mappings: '' },
};
}
else {
url = await workerFileToUrl(config, id, query);
}
}
else {
url = await fileToUrl(cleanUrl(id), config, this);
url = injectQuery(url, WORKER_FILE_ID);
url = injectQuery(url, `type=${workerType}`);
}
if (query.url != null) {
return {
code: `export default ${JSON.stringify(url)}`,
map: { mappings: '' }, // Empty sourcemap to suppress Rollup warning
};
}
return {
code: `export default function WorkerWrapper() {
return new ${workerConstructor}(${JSON.stringify(url)}${workerOptions})
}`,
map: { mappings: '' }, // Empty sourcemap to suppress Rollup warning
};
},
renderChunk(code, chunk, outputOptions) {
let s;
const result = () => {
return (s && {
code: s.toString(),
map: config.build.sourcemap
? s.generateMap({ hires: 'boundary' })
: null,
});
};
if (code.match(workerAssetUrlRE)) {
const toRelativeRuntime = createToImportMetaURLBasedRelativeRuntime(outputOptions.format, config.isWorker);
let match;
s = new MagicString(code);
workerAssetUrlRE.lastIndex = 0;
// Replace "__VITE_WORKER_ASSET__5aa0ddc0__" using relative paths
const workerMap = workerCache.get(config.mainConfig || config);
const { fileNameHash } = workerMap;
while ((match = workerAssetUrlRE.exec(code))) {
const [full, hash] = match;
const filename = fileNameHash.get(hash);
const replacement = toOutputFilePathInJS(filename, 'asset', chunk.fileName, 'js', config, toRelativeRuntime);
const replacementString = typeof replacement === 'string'
? JSON.stringify(replacement).slice(1, -1)
: `"+${replacement.runtime}+"`;
s.update(match.index, match.index + full.length, replacementString);
}
}
return result();
},
generateBundle(opts) {
// @ts-expect-error asset emits are skipped in legacy bundle
if (opts.__vite_skip_asset_emit__ || isWorker) {
return;
}
const workerMap = workerCache.get(config);
workerMap.assets.forEach((asset) => {
this.emitFile(asset);
workerMap.assets.delete(asset.fileName);
});
},
};
}
/**
 * A plugin to prevent an aliased AND optimized dep from being aliased in src
*/
function preAliasPlugin(config) {
const findPatterns = getAliasPatterns(config.resolve.alias);
const isConfiguredAsExternal = createIsConfiguredAsSsrExternal(config);
const isBuild = config.command === 'build';
return {
name: 'vite:pre-alias',
async resolveId(id, importer, options) {
const ssr = options?.ssr === true;
const depsOptimizer = getDepsOptimizer(config, ssr);
if (importer &&
depsOptimizer &&
bareImportRE.test(id) &&
!options?.scan &&
id !== '@vite/client' &&
id !== '@vite/env') {
if (findPatterns.find((pattern) => matches(pattern, id))) {
const optimizedId = await tryOptimizedResolve(depsOptimizer, id, importer, config.resolve.preserveSymlinks, config.packageCache);
if (optimizedId) {
return optimizedId; // aliased dep already optimized
}
if (depsOptimizer.options.noDiscovery) {
return;
}
const resolved = await this.resolve(id, importer, {
...options,
custom: { ...options.custom, 'vite:pre-alias': true },
skipSelf: true,
});
if (resolved && !depsOptimizer.isOptimizedDepFile(resolved.id)) {
const optimizeDeps = depsOptimizer.options;
const resolvedId = cleanUrl(resolved.id);
const isVirtual = resolvedId === id || resolvedId.includes('\0');
if (!isVirtual &&
fs$l.existsSync(resolvedId) &&
!moduleListContains(optimizeDeps.exclude, id) &&
path$o.isAbsolute(resolvedId) &&
(isInNodeModules(resolvedId) ||
optimizeDeps.include?.includes(id)) &&
isOptimizable(resolvedId, optimizeDeps) &&
!(isBuild && ssr && isConfiguredAsExternal(id, importer)) &&
(!ssr || optimizeAliasReplacementForSSR(resolvedId, optimizeDeps))) {
// aliased dep has not yet been optimized
const optimizedInfo = depsOptimizer.registerMissingImport(id, resolvedId);
return { id: depsOptimizer.getOptimizedDepId(optimizedInfo) };
}
}
return resolved;
}
}
},
};
}
function optimizeAliasReplacementForSSR(id, optimizeDeps) {
if (optimizeDeps.include?.includes(id)) {
return true;
}
// In the regular resolution, the default for non-external modules is to
// be optimized if they are CJS. Here, we don't have the package id but
// only the replacement file path. We could find the package.json from
// the id and respect the same default in the future.
// Default to not optimize an aliased replacement for now, forcing the
// user to explicitly add it to the ssr.optimizeDeps.include list.
return false;
}
// In sync with rollup plugin alias logic
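// e.g. (hypothetical alias entry '@'): matches('@', '@/App.vue') -> true, since the
// importee starts with the pattern plus a trailing slash; matches('@', '@vitejs/plugin-vue')
// -> false; RegExp patterns such as /^virtual:/ are simply tested against the importee.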
function matches(pattern, importee) {
if (pattern instanceof RegExp) {
return pattern.test(importee);
}
if (importee.length < pattern.length) {
return false;
}
if (importee === pattern) {
return true;
}
return importee.startsWith(withTrailingSlash(pattern));
}
function getAliasPatterns(entries) {
if (!entries) {
return [];
}
if (Array.isArray(entries)) {
return entries.map((entry) => entry.find);
}
return Object.entries(entries).map(([find]) => find);
}
const nonJsRe = /\.json(?:$|\?)/;
const metaEnvRe = /import\.meta\.env\.(.+)/;
const isNonJsRequest = (request) => nonJsRe.test(request);
function definePlugin(config) {
const isBuild = config.command === 'build';
const isBuildLib = isBuild && config.build.lib;
// don't replace process.env in lib builds
const processEnv = {};
const processNodeEnv = {};
if (!isBuildLib) {
const nodeEnv = process.env.NODE_ENV || config.mode;
Object.assign(processEnv, {
'process.env.': `({}).`,
'global.process.env.': `({}).`,
'globalThis.process.env.': `({}).`,
});
Object.assign(processNodeEnv, {
'process.env.NODE_ENV': JSON.stringify(nodeEnv),
'global.process.env.NODE_ENV': JSON.stringify(nodeEnv),
'globalThis.process.env.NODE_ENV': JSON.stringify(nodeEnv),
__vite_process_env_NODE_ENV: JSON.stringify(nodeEnv),
});
}
const userDefine = {};
const userDefineEnv = {};
for (const key in config.define) {
const val = config.define[key];
userDefine[key] = typeof val === 'string' ? val : JSON.stringify(val);
// make sure `import.meta.env` object has user define properties
if (isBuild) {
const match = key.match(metaEnvRe);
if (match) {
userDefineEnv[match[1]] = `__vite__define__${key}__define__vite__`;
}
}
}
// during dev, import.meta properties are handled by importAnalysis plugin.
const importMetaKeys = {};
const importMetaFallbackKeys = {};
if (isBuild) {
// set here to allow override with config.define
importMetaKeys['import.meta.hot'] = `undefined`;
for (const key in config.env) {
importMetaKeys[`import.meta.env.${key}`] = JSON.stringify(config.env[key]);
}
Object.assign(importMetaFallbackKeys, {
'import.meta.env.': `({}).`,
'import.meta.env': JSON.stringify({
...config.env,
SSR: '__vite__ssr__',
...userDefineEnv,
}).replace(/"__vite__define__(.+?)__define__vite__"/g, (_, key) => userDefine[key]),
});
}
function getImportMetaKeys(ssr) {
if (!isBuild)
return {};
return {
...importMetaKeys,
'import.meta.env.SSR': ssr + '',
};
}
function getImportMetaFallbackKeys(ssr) {
if (!isBuild)
return {};
return {
...importMetaFallbackKeys,
'import.meta.env': importMetaFallbackKeys['import.meta.env'].replace('"__vite__ssr__"', ssr + ''),
};
}
function generatePattern(ssr) {
const replaceProcessEnv = !ssr || config.ssr?.target === 'webworker';
const replacements = {
...(replaceProcessEnv ? processNodeEnv : {}),
...getImportMetaKeys(ssr),
...userDefine,
...getImportMetaFallbackKeys(ssr),
...(replaceProcessEnv ? processEnv : {}),
};
if (isBuild && !replaceProcessEnv) {
replacements['__vite_process_env_NODE_ENV'] = 'process.env.NODE_ENV';
}
const replacementsKeys = Object.keys(replacements);
const pattern = replacementsKeys.length
? new RegExp(
// Mustn't be preceded by a char that can be part of an identifier
// or a '.' that isn't part of a spread operator
'(?<![\\p{L}\\p{N}_$]|(?<!\\.\\.)\\.)(' +
replacementsKeys.map(escapeRegex).join('|') +
// Mustn't be followed by a char that can be part of an identifier
// or an assignment (but allow equality operators)
')(?:(?<=\\.)|(?![\\p{L}\\p{N}_$]|\\s*?=[^=]))', 'gu')
: null;
return [replacements, pattern];
}
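// Illustrative behavior of the pattern above: in `process.env.NODE_ENV === 'production'`
// the key is replaced (equality comparisons pass the trailing lookahead), whereas
// `foo.process.env.NODE_ENV` is skipped because the key is preceded by a '.', and a plain
// write like `__APP_VERSION__ = x` (a hypothetical `define` key) fails the lookahead and
// is left untouched.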
const defaultPattern = generatePattern(false);
const ssrPattern = generatePattern(true);
return {
name: 'vite:define',
transform(code, id, options) {
const ssr = options?.ssr === true;
if (!ssr && !isBuild) {
// for dev we inject actual global defines in the vite client to
// avoid the transform cost.
return;
}
if (
// exclude html, css and static assets for performance
isHTMLRequest(id) ||
isCSSRequest(id) ||
isNonJsRequest(id) ||
config.assetsInclude(id)) {
return;
}
const [replacements, pattern] = ssr ? ssrPattern : defaultPattern;
if (!pattern) {
return null;
}
if (ssr && !isBuild) {
// ssr + dev, simple replace
return code.replace(pattern, (_, match) => {
return '' + replacements[match];
});
}
const s = new MagicString(code);
let hasReplaced = false;
let match;
while ((match = pattern.exec(code))) {
hasReplaced = true;
const start = match.index;
const end = start + match[0].length;
const replacement = '' + replacements[match[1]];
s.update(start, end, replacement);
}
if (!hasReplaced) {
return null;
}
return transformStableResult(s, id, config);
},
};
}
function err(e, pos) {
const error = new Error(e);
error.pos = pos;
return error;
}
function parseWorkerOptions(rawOpts, optsStartIndex) {
let opts = {};
try {
opts = evalValue(rawOpts);
}
catch {
throw err('Vite is unable to parse the worker options as the value is not static.' +
'To ignore this error, please use /* @vite-ignore */ in the worker options.', optsStartIndex);
}
if (opts == null) {
return {};
}
if (typeof opts !== 'object') {
throw err(`Expected worker options to be an object, got ${typeof opts}`, optsStartIndex);
}
return opts;
}
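// Derives the worker type from the constructor's options argument, e.g.
// `new Worker(url, { type: 'module' })` -> 'module'; a call without an options object
// -> 'classic'; options annotated with /* @vite-ignore */ -> 'ignore'.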
function getWorkerType(raw, clean, i) {
const commaIndex = clean.indexOf(',', i);
if (commaIndex === -1) {
return 'classic';
}
const endIndex = clean.indexOf(')', i);
// case: ') ... ,' means there are no worker options params
if (commaIndex > endIndex) {
return 'classic';
}
// search the raw code (comments included) for /* @vite-ignore */
const workerOptString = raw
.substring(commaIndex + 1, endIndex)
.replace(/\}[\s\S]*,/g, '}'); // strip trailing comma for parsing
const hasViteIgnore = hasViteIgnoreRE.test(workerOptString);
if (hasViteIgnore) {
return 'ignore';
}
// search the comment-stripped code for the actual options
const cleanWorkerOptString = clean.substring(commaIndex + 1, endIndex).trim();
if (!cleanWorkerOptString.length) {
return 'classic';
}
const workerOpts = parseWorkerOptions(workerOptString, commaIndex + 1);
if (workerOpts.type && ['classic', 'module'].includes(workerOpts.type)) {
return workerOpts.type;
}
return 'classic';
}
function workerImportMetaUrlPlugin(config) {
const isBuild = config.command === 'build';
let workerResolver;
const fsResolveOptions = {
...config.resolve,
root: config.root,
isProduction: config.isProduction,
isBuild: config.command === 'build',
packageCache: config.packageCache,
ssrConfig: config.ssr,
asSrc: true,
};
return {
name: 'vite:worker-import-meta-url',
async transform(code, id, options) {
const ssr = options?.ssr === true;
if (!options?.ssr &&
(code.includes('new Worker') || code.includes('new SharedWorker')) &&
code.includes('new URL') &&
code.includes(`import.meta.url`)) {
const query = parseRequest(id);
let s;
const cleanString = stripLiteral(code);
const workerImportMetaUrlRE = /\bnew\s+(?:Worker|SharedWorker)\s*\(\s*(new\s+URL\s*\(\s*('[^']+'|"[^"]+"|`[^`]+`)\s*,\s*import\.meta\.url\s*\))/g;
let match;
while ((match = workerImportMetaUrlRE.exec(cleanString))) {
const { 0: allExp, 1: exp, 2: emptyUrl, index } = match;
const urlIndex = allExp.indexOf(exp) + index;
const urlStart = cleanString.indexOf(emptyUrl, index);
const urlEnd = urlStart + emptyUrl.length;
const rawUrl = code.slice(urlStart, urlEnd);
// potential dynamic template string
if (rawUrl[0] === '`' && rawUrl.includes('${')) {
this.error(`\`new URL(url, import.meta.url)\` is not supported in dynamic template string.`, urlIndex);
}
s || (s = new MagicString(code));
const workerType = getWorkerType(code, cleanString, index + allExp.length);
const url = rawUrl.slice(1, -1);
let file;
if (url[0] === '.') {
file = path$o.resolve(path$o.dirname(id), url);
file = tryFsResolve(file, fsResolveOptions) ?? file;
}
else {
workerResolver ?? (workerResolver = config.createResolver({
extensions: [],
tryIndex: false,
preferRelative: true,
}));
file = await workerResolver(url, id);
file ?? (file = url[0] === '/'
? slash$1(path$o.join(config.publicDir, url))
: slash$1(path$o.resolve(path$o.dirname(id), url)));
}
let builtUrl;
if (isBuild) {
getDepsOptimizer(config, ssr)?.registerWorkersSource(id);
builtUrl = await workerFileToUrl(config, file, query);
}
else {
builtUrl = await fileToUrl(cleanUrl(file), config, this);
builtUrl = injectQuery(builtUrl, WORKER_FILE_ID);
builtUrl = injectQuery(builtUrl, `type=${workerType}`);
}
s.update(urlIndex, urlIndex + exp.length, `new URL(${JSON.stringify(builtUrl)}, self.location)`);
}
if (s) {
return transformStableResult(s, id, config);
}
return null;
}
},
};
}
/**
* Convert `new URL('./foo.png', import.meta.url)` to its resolved built URL
*
* Supports template string with dynamic segments:
* ```
* new URL(`./dir/${name}.png`, import.meta.url)
* // transformed to
* import.meta.glob('./dir/**.png', { eager: true, import: 'default' })[`./dir/${name}.png`]
* ```
*/
function assetImportMetaUrlPlugin(config) {
const normalizedPublicDir = normalizePath$3(config.publicDir);
let assetResolver;
const fsResolveOptions = {
...config.resolve,
root: config.root,
isProduction: config.isProduction,
isBuild: config.command === 'build',
packageCache: config.packageCache,
ssrConfig: config.ssr,
asSrc: true,
};
return {
name: 'vite:asset-import-meta-url',
async transform(code, id, options) {
if (!options?.ssr &&
id !== preloadHelperId &&
id !== CLIENT_ENTRY &&
code.includes('new URL') &&
code.includes(`import.meta.url`)) {
let s;
const assetImportMetaUrlRE = /\bnew\s+URL\s*\(\s*('[^']+'|"[^"]+"|`[^`]+`)\s*,\s*import\.meta\.url\s*(?:,\s*)?\)/g;
const cleanString = stripLiteral(code);
let match;
while ((match = assetImportMetaUrlRE.exec(cleanString))) {
const { 0: exp, 1: emptyUrl, index } = match;
const urlStart = cleanString.indexOf(emptyUrl, index);
const urlEnd = urlStart + emptyUrl.length;
const rawUrl = code.slice(urlStart, urlEnd);
if (!s)
s = new MagicString(code);
// potential dynamic template string
if (rawUrl[0] === '`' && rawUrl.includes('${')) {
const queryDelimiterIndex = getQueryDelimiterIndex(rawUrl);
const hasQueryDelimiter = queryDelimiterIndex !== -1;
const pureUrl = hasQueryDelimiter
? rawUrl.slice(0, queryDelimiterIndex) + '`'
: rawUrl;
const queryString = hasQueryDelimiter
? rawUrl.slice(queryDelimiterIndex, -1)
: '';
const ast = this.parse(pureUrl);
const templateLiteral = ast.body[0].expression;
if (templateLiteral.expressions.length) {
const pattern = buildGlobPattern(templateLiteral);
if (pattern.startsWith('**')) {
// don't transform for patterns like this
// because users won't intend to do that in most cases
continue;
}
const globOptions = {
eager: true,
import: 'default',
// A hack to allow 'as' & 'query' to exist at the same time
query: injectQuery(queryString, 'url'),
};
// Note: native import.meta.url is not supported in the baseline
// target so we use the global location here. It can be
// window.location or self.location in case it is used in a Web Worker.
// @see https://developer.mozilla.org/en-US/docs/Web/API/Window/self
s.update(index, index + exp.length, `new URL((import.meta.glob(${JSON.stringify(pattern)}, ${JSON.stringify(globOptions)}))[${pureUrl}], self.location)`);
continue;
}
}
const url = rawUrl.slice(1, -1);
let file;
if (url[0] === '.') {
file = slash$1(path$o.resolve(path$o.dirname(id), url));
file = tryFsResolve(file, fsResolveOptions) ?? file;
}
else {
assetResolver ?? (assetResolver = config.createResolver({
extensions: [],
mainFields: [],
tryIndex: false,
preferRelative: true,
}));
file = await assetResolver(url, id);
file ?? (file = url[0] === '/'
? slash$1(path$o.join(config.publicDir, url))
: slash$1(path$o.resolve(path$o.dirname(id), url)));
}
// Get final asset URL. If the file does not exist,
// we fall back to the initial URL and let it resolve in runtime
let builtUrl;
if (file) {
try {
if (isParentDirectory(normalizedPublicDir, file)) {
const publicPath = '/' + path$o.posix.relative(normalizedPublicDir, file);
builtUrl = await fileToUrl(publicPath, config, this);
}
else {
builtUrl = await fileToUrl(file, config, this);
}
}
catch {
// do nothing, we'll log a warning after this
}
}
if (!builtUrl) {
const rawExp = code.slice(index, index + exp.length);
config.logger.warnOnce(`\n${rawExp} doesn't exist at build time, it will remain unchanged to be resolved at runtime`);
builtUrl = url;
}
s.update(index, index + exp.length, `new URL(${JSON.stringify(builtUrl)}, self.location)`);
}
if (s) {
return transformStableResult(s, id, config);
}
}
return null;
},
};
}
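// Builds an import.meta.glob pattern from a template literal AST by replacing each
// dynamic segment with '**', e.g. `./dir/${name}.png` -> './dir/**.png' (matching the
// transform documented above).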
function buildGlobPattern(ast) {
let pattern = '';
let lastElementIndex = -1;
for (const exp of ast.expressions) {
for (let i = lastElementIndex + 1; i < ast.quasis.length; i++) {
const el = ast.quasis[i];
if (el.end < exp.start) {
pattern += el.value.raw;
lastElementIndex = i;
}
}
pattern += '**';
}
for (let i = lastElementIndex + 1; i < ast.quasis.length; i++) {
pattern += ast.quasis[i].value.raw;
}
return pattern;
}
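// Finds the index of the first '?' that sits outside any ${...} expression, e.g. for
// `./dir/${name}.png?url` it returns the index of the '?' before 'url', while a '?'
// inside an expression is ignored because the brace depth is non-zero there.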
function getQueryDelimiterIndex(rawUrl) {
let bracketsStack = 0;
for (let i = 0; i < rawUrl.length; i++) {
if (rawUrl[i] === '{') {
bracketsStack++;
}
else if (rawUrl[i] === '}') {
bracketsStack--;
}
else if (rawUrl[i] === '?' && bracketsStack === 0) {
return i;
}
}
return -1;
}
/**
* plugin to ensure rollup can watch correctly.
*/
function ensureWatchPlugin() {
return {
name: 'vite:ensure-watch',
load(id) {
if (queryRE.test(id)) {
this.addWatchFile(cleanUrl(id));
}
return null;
},
};
}
/**
* Prepares the rendered chunks to contain additional metadata during build.
*/
function metadataPlugin() {
return {
name: 'vite:build-metadata',
async renderChunk(_code, chunk) {
chunk.viteMetadata = {
importedAssets: new Set(),
importedCss: new Set(),
};
return null;
},
};
}
class VariableDynamicImportError extends Error {}
/* eslint-disable-next-line no-template-curly-in-string */
const example = 'For example: import(`./foo/${bar}.js`).';
function sanitizeString(str) {
if (str.includes('*')) {
throw new VariableDynamicImportError('A dynamic import cannot contain * characters.');
}
return str;
}
function templateLiteralToGlob(node) {
let glob = '';
for (let i = 0; i < node.quasis.length; i += 1) {
glob += sanitizeString(node.quasis[i].value.raw);
if (node.expressions[i]) {
glob += expressionToGlob(node.expressions[i]);
}
}
return glob;
}
function callExpressionToGlob(node) {
const { callee } = node;
if (
callee.type === 'MemberExpression' &&
callee.property.type === 'Identifier' &&
callee.property.name === 'concat'
) {
return `${expressionToGlob(callee.object)}${node.arguments.map(expressionToGlob).join('')}`;
}
return '*';
}
function binaryExpressionToGlob(node) {
if (node.operator !== '+') {
throw new VariableDynamicImportError(`${node.operator} operator is not supported.`);
}
return `${expressionToGlob(node.left)}${expressionToGlob(node.right)}`;
}
function expressionToGlob(node) {
switch (node.type) {
case 'TemplateLiteral':
return templateLiteralToGlob(node);
case 'CallExpression':
return callExpressionToGlob(node);
case 'BinaryExpression':
return binaryExpressionToGlob(node);
case 'Literal': {
return sanitizeString(node.value);
}
default:
return '*';
}
}
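// Illustrative conversions (hypothetical paths): the argument of
// import(`./locales/${lang}.json`) becomes './locales/*.json'; a concatenation like
// './mods/' + name + '.js' becomes './mods/*.js'; any other expression collapses to '*'.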
const defaultProtocol = 'file:';
const ignoredProtocols = ['data:', 'http:', 'https:'];
function shouldIgnore(glob) {
const containsAsterisk = glob.includes('*');
const globURL = new URL(glob, defaultProtocol);
const containsIgnoredProtocol = ignoredProtocols.some(
(ignoredProtocol) => ignoredProtocol === globURL.protocol
);
return !containsAsterisk || containsIgnoredProtocol;
}
function dynamicImportToGlob(node, sourceString) {
let glob = expressionToGlob(node);
if (shouldIgnore(glob)) {
return null;
}
glob = glob.replace(/\*\*/g, '*');
if (glob.startsWith('*')) {
throw new VariableDynamicImportError(
`invalid import "${sourceString}". It cannot be statically analyzed. Variable dynamic imports must start with ./ and be limited to a specific directory. ${example}`
);
}
if (glob.startsWith('/')) {
throw new VariableDynamicImportError(
`invalid import "${sourceString}". Variable absolute imports are not supported, imports must start with ./ in the static part of the import. ${example}`
);
}
if (!glob.startsWith('./') && !glob.startsWith('../')) {
throw new VariableDynamicImportError(
`invalid import "${sourceString}". Variable bare imports are not supported, imports must start with ./ in the static part of the import. ${example}`
);
}
// Disallow ./*.ext
const ownDirectoryStarExtension = /^\.\/\*\.[\w]+$/;
if (ownDirectoryStarExtension.test(glob)) {
throw new VariableDynamicImportError(
`${
`invalid import "${sourceString}". Variable imports cannot import their own directory, ` +
'place imports in a separate directory or make the import filename more specific. '
}${example}`
);
}
if (require$$0$4.extname(glob) === '') {
throw new VariableDynamicImportError(
`invalid import "${sourceString}". A file extension must be included in the static part of the import. ${example}`
);
}
return glob;
}
const dynamicImportHelperId = '\0vite/dynamic-import-helper';
const relativePathRE = /^\.{1,2}\//;
// fast path to check if source contains a dynamic import. we check for a
// trailing slash too as a dynamic import statement can have comments between
// the `import` and the `(`.
const hasDynamicImportRE = /\bimport\s*[(/]/;
const dynamicImportHelper = (glob, path) => {
const v = glob[path];
if (v) {
return typeof v === 'function' ? v() : Promise.resolve(v);
}
return new Promise((_, reject) => {
(typeof queueMicrotask === 'function' ? queueMicrotask : setTimeout)(reject.bind(null, new Error('Unknown variable dynamic import: ' + path)));
});
};
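// A variable dynamic import such as import(`./locales/${lang}.json`) is rewritten by the
// transform below into (roughly)
//   __variableDynamicImportRuntimeHelper((import.meta.glob("./locales/*.json")), `./locales/${lang}.json`)
// so the helper above resolves the matching glob entry at runtime.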
function parseDynamicImportPattern(strings) {
const filename = strings.slice(1, -1);
const rawQuery = parseRequest(filename);
let globParams = null;
const ast = parse$b(strings, {
ecmaVersion: 'latest',
sourceType: 'module',
}).body[0].expression;
const userPatternQuery = dynamicImportToGlob(ast, filename);
if (!userPatternQuery) {
return null;
}
const [userPattern] = userPatternQuery.split(requestQuerySplitRE, 2);
const [rawPattern] = filename.split(requestQuerySplitRE, 2);
if (rawQuery?.raw !== undefined) {
globParams = { as: 'raw' };
}
if (rawQuery?.url !== undefined) {
globParams = { as: 'url' };
}
if (rawQuery?.worker !== undefined) {
globParams = { as: 'worker' };
}
return {
globParams,
userPattern,
rawPattern,
};
}
async function transformDynamicImport(importSource, importer, resolve, root) {
if (importSource[1] !== '.' && importSource[1] !== '/') {
const resolvedFileName = await resolve(importSource.slice(1, -1), importer);
if (!resolvedFileName) {
return null;
}
const relativeFileName = posix$1.relative(posix$1.dirname(normalizePath$3(importer)), normalizePath$3(resolvedFileName));
importSource = normalizePath$3('`' + (relativeFileName[0] === '.' ? '' : './') + relativeFileName + '`');
}
const dynamicImportPattern = parseDynamicImportPattern(importSource);
if (!dynamicImportPattern) {
return null;
}
const { globParams, rawPattern, userPattern } = dynamicImportPattern;
const params = globParams
? `, ${JSON.stringify({ ...globParams, import: '*' })}`
: '';
let newRawPattern = posix$1.relative(posix$1.dirname(importer), await toAbsoluteGlob(rawPattern, root, importer, resolve));
if (!relativePathRE.test(newRawPattern)) {
newRawPattern = `./${newRawPattern}`;
}
const exp = `(import.meta.glob(${JSON.stringify(userPattern)}${params}))`;
return {
rawPattern: newRawPattern,
pattern: userPattern,
glob: exp,
};
}
function dynamicImportVarsPlugin(config) {
const resolve = config.createResolver({
preferRelative: true,
tryIndex: false,
extensions: [],
});
const { include, exclude, warnOnError } = config.build.dynamicImportVarsOptions;
const filter = createFilter(include, exclude);
return {
name: 'vite:dynamic-import-vars',
resolveId(id) {
if (id === dynamicImportHelperId) {
return id;
}
},
load(id) {
if (id === dynamicImportHelperId) {
return 'export default ' + dynamicImportHelper.toString();
}
},
async transform(source, importer) {
if (!filter(importer) ||
importer === CLIENT_ENTRY ||
!hasDynamicImportRE.test(source)) {
return;
}
await init;
let imports = [];
try {
imports = parse$e(source)[0];
}
catch (e) {
// ignore as it might not be a JS file; the subsequent plugins will catch the error
return null;
}
if (!imports.length) {
return null;
}
let s;
let needDynamicImportHelper = false;
for (let index = 0; index < imports.length; index++) {
const { s: start, e: end, ss: expStart, se: expEnd, d: dynamicIndex, } = imports[index];
if (dynamicIndex === -1 || source[start] !== '`') {
continue;
}
if (hasViteIgnoreRE.test(source.slice(expStart, expEnd))) {
continue;
}
s || (s = new MagicString(source));
let result;
try {
// When import string is using backticks, es-module-lexer `end` captures
// until the closing parenthesis, instead of the closing backtick.
// There may be inline comments between the backtick and the closing
// parenthesis, so we manually remove them for now.
// See https://github.com/guybedford/es-module-lexer/issues/118
const importSource = removeComments(source.slice(start, end)).trim();
result = await transformDynamicImport(importSource, importer, resolve, config.root);
}
catch (error) {
if (warnOnError) {
this.warn(error);
}
else {
this.error(error);
}
}
if (!result) {
continue;
}
const { rawPattern, glob } = result;
needDynamicImportHelper = true;
s.overwrite(expStart, expEnd, `__variableDynamicImportRuntimeHelper(${glob}, \`${rawPattern}\`)`);
}
if (s) {
if (needDynamicImportHelper) {
s.prepend(`import __variableDynamicImportRuntimeHelper from "${dynamicImportHelperId}";`);
}
return transformStableResult(s, importer, config);
}
},
};
}
async function resolvePlugins(config, prePlugins, normalPlugins, postPlugins) {
const isBuild = config.command === 'build';
const isWatch = isBuild && !!config.build.watch;
const buildPlugins = isBuild
? await (await Promise.resolve().then(function () { return build$1; })).resolveBuildPlugins(config)
: { pre: [], post: [] };
const { modulePreload } = config.build;
return [
...(isDepsOptimizerEnabled(config, false) ||
isDepsOptimizerEnabled(config, true)
? [
isBuild
? optimizedDepsBuildPlugin(config)
: optimizedDepsPlugin(config),
]
: []),
isWatch ? ensureWatchPlugin() : null,
isBuild ? metadataPlugin() : null,
watchPackageDataPlugin(config.packageCache),
preAliasPlugin(config),
alias$1({ entries: config.resolve.alias }),
...prePlugins,
modulePreload !== false && modulePreload.polyfill
? modulePreloadPolyfillPlugin(config)
: null,
resolvePlugin({
...config.resolve,
root: config.root,
isProduction: config.isProduction,
isBuild,
packageCache: config.packageCache,
ssrConfig: config.ssr,
asSrc: true,
getDepsOptimizer: (ssr) => getDepsOptimizer(config, ssr),
shouldExternalize: isBuild && config.build.ssr && config.ssr?.format !== 'cjs'
? (id, importer) => shouldExternalizeForSSR(id, importer, config)
: undefined,
}),
htmlInlineProxyPlugin(config),
cssPlugin(config),
config.esbuild !== false ? esbuildPlugin(config) : null,
jsonPlugin({
namedExports: true,
...config.json,
}, isBuild),
wasmHelperPlugin(config),
webWorkerPlugin(config),
assetPlugin(config),
...normalPlugins,
wasmFallbackPlugin(),
definePlugin(config),
cssPostPlugin(config),
isBuild && buildHtmlPlugin(config),
workerImportMetaUrlPlugin(config),
assetImportMetaUrlPlugin(config),
...buildPlugins.pre,
dynamicImportVarsPlugin(config),
importGlobPlugin(config),
...postPlugins,
...buildPlugins.post,
// internal server-only plugins are always applied after everything else
...(isBuild
? []
: [clientInjectionsPlugin(config), importAnalysisPlugin(config)]),
].filter(Boolean);
}
function createPluginHookUtils(plugins) {
// sort plugins per hook
const sortedPluginsCache = new Map();
function getSortedPlugins(hookName) {
if (sortedPluginsCache.has(hookName))
return sortedPluginsCache.get(hookName);
const sorted = getSortedPluginsByHook(hookName, plugins);
sortedPluginsCache.set(hookName, sorted);
return sorted;
}
function getSortedPluginHooks(hookName) {
const plugins = getSortedPlugins(hookName);
return plugins
.map((p) => {
const hook = p[hookName];
return typeof hook === 'object' && 'handler' in hook
? hook.handler
: hook;
})
.filter(Boolean);
}
return {
getSortedPlugins,
getSortedPluginHooks,
};
}
function getSortedPluginsByHook(hookName, plugins) {
const pre = [];
const normal = [];
const post = [];
for (const plugin of plugins) {
const hook = plugin[hookName];
if (hook) {
if (typeof hook === 'object') {
if (hook.order === 'pre') {
pre.push(plugin);
continue;
}
if (hook.order === 'post') {
post.push(plugin);
continue;
}
}
normal.push(plugin);
}
}
return [...pre, ...normal, ...post];
}
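// e.g. a plugin declaring `transform: { order: 'pre', handler }` runs before plugins
// with a plain `transform` function, which in turn run before hooks declared with
// `order: 'post'`; plugins without the hook are excluded from the sorted list entirely.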
function ansiRegex({onlyFirst = false} = {}) {
const pattern = [
'[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
'(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
].join('|');
return new RegExp(pattern, onlyFirst ? undefined : 'g');
}
const regex = ansiRegex();
function stripAnsi(string) {
if (typeof string !== 'string') {
throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
}
// Even though the regex is global, we don't need to reset the `.lastIndex`
// because unlike `.exec()` and `.test()`, `.replace()` does it automatically
// and doing it manually has a performance penalty.
return string.replace(regex, '');
}
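// e.g. stripAnsi('\u001B[31mInternal server error\u001B[39m') === 'Internal server error',
// which keeps serialized errors (see prepareError below) free of terminal color codes.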
function prepareError(err) {
// only copy the information we need and avoid serializing unnecessary
// properties, since some errors may attach full objects (e.g. PostCSS)
return {
message: stripAnsi(err.message),
stack: stripAnsi(cleanStack(err.stack || '')),
id: err.id,
frame: stripAnsi(err.frame || ''),
plugin: err.plugin,
pluginCode: err.pluginCode?.toString(),
loc: err.loc,
};
}
function buildErrorMessage(err, args = [], includeStack = true) {
if (err.plugin)
args.push(` Plugin: ${colors$1.magenta(err.plugin)}`);
const loc = err.loc ? `:${err.loc.line}:${err.loc.column}` : '';
if (err.id)
args.push(` File: ${colors$1.cyan(err.id)}${loc}`);
if (err.frame)
args.push(colors$1.yellow(pad$1(err.frame)));
if (includeStack && err.stack)
args.push(pad$1(cleanStack(err.stack)));
return args.join('\n');
}
function cleanStack(stack) {
return stack
.split(/\n/g)
.filter((l) => /^\s*at/.test(l))
.join('\n');
}
function logError(server, err) {
const msg = buildErrorMessage(err, [
colors$1.red(`Internal server error: ${err.message}`),
]);
server.config.logger.error(msg, {
clear: true,
timestamp: true,
error: err,
});
server.ws.send({
type: 'error',
err: prepareError(err),
});
}
function errorMiddleware(server, allowNext = false) {
// note the 4 args must be kept for connect to treat this as error middleware
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return function viteErrorMiddleware(err, _req, res, next) {
logError(server, err);
if (allowNext) {
next();
}
else {
res.statusCode = 500;
res.end(`
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>Error</title>
<script type="module">
import { ErrorOverlay } from '/@vite/client'
document.body.appendChild(new ErrorOverlay(${JSON.stringify(prepareError(err)).replace(/</g, '\\u003c')}))
</script>
</head>
<body>
</body>
</html>
`);
}
};
}
/**
* This file is refactored into TypeScript based on
* https://github.com/preactjs/wmr/blob/main/packages/wmr/src/lib/rollup-plugin-container.js
*/
/**
https://github.com/preactjs/wmr/blob/master/LICENSE
MIT License
Copyright (c) 2020 The Preact Authors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
const noop$3 = () => { };
const ERR_CLOSED_SERVER = 'ERR_CLOSED_SERVER';
function throwClosedServerError() {
const err = new Error('The server is being restarted or closed. Request is outdated');
err.code = ERR_CLOSED_SERVER;
// This error will be caught by the transform middleware that will
// send a 504 (request timeout) status code
throw err;
}
let parser = Parser$1;
async function createPluginContainer(config, moduleGraph, watcher) {
const { plugins, logger, root, build: { rollupOptions }, } = config;
const { getSortedPluginHooks, getSortedPlugins } = createPluginHookUtils(plugins);
const seenResolves = {};
const debugResolve = createDebugger('vite:resolve');
const debugPluginResolve = createDebugger('vite:plugin-resolve', {
onlyWhenFocused: 'vite:plugin',
});
const debugPluginTransform = createDebugger('vite:plugin-transform', {
onlyWhenFocused: 'vite:plugin',
});
const debugSourcemapCombineFilter = process.env.DEBUG_VITE_SOURCEMAP_COMBINE_FILTER;
const debugSourcemapCombine = createDebugger('vite:sourcemap-combine', {
onlyWhenFocused: true,
});
// ---------------------------------------------------------------------------
const watchFiles = new Set();
const minimalContext = {
meta: {
rollupVersion: VERSION,
watchMode: true,
},
debug: noop$3,
info: noop$3,
warn: noop$3,
// @ts-expect-error noop
error: noop$3,
};
function warnIncompatibleMethod(method, plugin) {
logger.warn(colors$1.cyan(`[plugin:${plugin}] `) +
colors$1.yellow(`context method ${colors$1.bold(`${method}()`)} is not supported in serve mode. This plugin is likely not vite-compatible.`));
}
// parallel, ignores returns
async function hookParallel(hookName, context, args) {
const parallelPromises = [];
for (const plugin of getSortedPlugins(hookName)) {
// Don't throw here if closed, so buildEnd and closeBundle hooks can finish running
const hook = plugin[hookName];
if (!hook)
continue;
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore hook is not a primitive
const handler = 'handler' in hook ? hook.handler : hook;
if (hook.sequential) {
await Promise.all(parallelPromises);
parallelPromises.length = 0;
await handler.apply(context(plugin), args(plugin));
}
else {
parallelPromises.push(handler.apply(context(plugin), args(plugin)));
}
}
await Promise.all(parallelPromises);
}
// throw when an unsupported ModuleInfo property is accessed,
// so that incompatible plugins fail in a non-cryptic way.
const ModuleInfoProxy = {
get(info, key) {
if (key in info) {
return info[key];
}
// Don't throw an error when returning from an async function
if (key === 'then') {
return undefined;
}
throw Error(`[vite] The "${key}" property of ModuleInfo is not supported.`);
},
};
// same default value of "moduleInfo.meta" as in Rollup
const EMPTY_OBJECT = Object.freeze({});
function getModuleInfo(id) {
const module = moduleGraph?.getModuleById(id);
if (!module) {
return null;
}
if (!module.info) {
module.info = new Proxy({ id, meta: module.meta || EMPTY_OBJECT }, ModuleInfoProxy);
}
return module.info;
}
function updateModuleInfo(id, { meta }) {
if (meta) {
const moduleInfo = getModuleInfo(id);
if (moduleInfo) {
moduleInfo.meta = { ...moduleInfo.meta, ...meta };
}
}
}
// we should create a new context for each async hook pipeline so that the
// active plugin in that pipeline can be tracked in a concurrency-safe manner.
// using a class to make creating new contexts more efficient
class Context {
constructor(initialPlugin) {
this.meta = minimalContext.meta;
this.ssr = false;
this._scan = false;
this._activeId = null;
this._activeCode = null;
this._addedImports = null;
this.debug = noop$3;
this.info = noop$3;
this._activePlugin = initialPlugin || null;
}
parse(code, opts = {}) {
return parser.parse(code, {
sourceType: 'module',
ecmaVersion: 'latest',
locations: true,
...opts,
});
}
async resolve(id, importer, options) {
let skip;
if (options?.skipSelf && this._activePlugin) {
skip = new Set(this._resolveSkips);
skip.add(this._activePlugin);
}
let out = await container.resolveId(id, importer, {
assertions: options?.assertions,
custom: options?.custom,
isEntry: !!options?.isEntry,
skip,
ssr: this.ssr,
scan: this._scan,
});
if (typeof out === 'string')
out = { id: out };
return out;
}
async load(options) {
// We may not have added this to our module graph yet, so ensure it exists
await moduleGraph?.ensureEntryFromUrl(unwrapId(options.id), this.ssr);
// Not all options passed to this function make sense in the context of loading individual files,
// but we can at least update the module info properties we support
updateModuleInfo(options.id, options);
await container.load(options.id, { ssr: this.ssr });
const moduleInfo = this.getModuleInfo(options.id);
// This shouldn't happen due to calling ensureEntryFromUrl, but 1) our types can't ensure that
// and 2) moduleGraph may not have been provided (though in the situations where that happens,
// we should never have plugins calling this.load)
if (!moduleInfo)
throw Error(`Failed to load module with id ${options.id}`);
return moduleInfo;
}
getModuleInfo(id) {
return getModuleInfo(id);
}
getModuleIds() {
return moduleGraph
? moduleGraph.idToModuleMap.keys()
: Array.prototype[Symbol.iterator]();
}
addWatchFile(id) {
watchFiles.add(id);
(this._addedImports || (this._addedImports = new Set())).add(id);
if (watcher)
ensureWatchedFile(watcher, id, root);
}
getWatchFiles() {
return [...watchFiles];
}
emitFile(assetOrFile) {
warnIncompatibleMethod(`emitFile`, this._activePlugin.name);
return '';
}
setAssetSource() {
warnIncompatibleMethod(`setAssetSource`, this._activePlugin.name);
}
getFileName() {
warnIncompatibleMethod(`getFileName`, this._activePlugin.name);
return '';
}
warn(e, position) {
const err = formatError(typeof e === 'function' ? e() : e, position, this);
const msg = buildErrorMessage(err, [colors$1.yellow(`warning: ${err.message}`)], false);
logger.warn(msg, {
clear: true,
timestamp: true,
});
}
error(e, position) {
// error thrown here is caught by the transform middleware and passed on
// to the error middleware.
throw formatError(e, position, this);
}
}
function formatError(e, position, ctx) {
const err = (typeof e === 'string' ? new Error(e) : e);
if (err.pluginCode) {
return err; // The plugin likely called `this.error`
}
if (err.file && err.name === 'CssSyntaxError') {
err.id = normalizePath$3(err.file);
}
if (ctx._activePlugin)
err.plugin = ctx._activePlugin.name;
if (ctx._activeId && !err.id)
err.id = ctx._activeId;
if (ctx._activeCode) {
err.pluginCode = ctx._activeCode;
// some rollup plugins, e.g. json, set err.position instead of err.pos
const pos = position ?? err.pos ?? err.position;
if (pos != null) {
let errLocation;
try {
errLocation = numberToPos(ctx._activeCode, pos);
}
catch (err2) {
logger.error(colors$1.red(`Error in error handler:\n${err2.stack || err2.message}\n`),
// print extra newline to separate the two errors
{ error: err2 });
throw err;
}
err.loc = err.loc || {
file: err.id,
...errLocation,
};
err.frame = err.frame || generateCodeFrame(ctx._activeCode, pos);
}
else if (err.loc) {
// css preprocessors may report errors in an included file
if (!err.frame) {
let code = ctx._activeCode;
if (err.loc.file) {
err.id = normalizePath$3(err.loc.file);
try {
code = fs$l.readFileSync(err.loc.file, 'utf-8');
}
catch { }
}
err.frame = generateCodeFrame(code, err.loc);
}
}
else if (err.line && err.column) {
err.loc = {
file: err.id,
line: err.line,
column: err.column,
};
err.frame = err.frame || generateCodeFrame(err.id, err.loc);
}
if (ctx instanceof TransformContext &&
typeof err.loc?.line === 'number' &&
typeof err.loc?.column === 'number') {
const rawSourceMap = ctx._getCombinedSourcemap();
if (rawSourceMap) {
const traced = new TraceMap(rawSourceMap);
const { source, line, column } = originalPositionFor$1(traced, {
line: Number(err.loc.line),
column: Number(err.loc.column),
});
if (source && line != null && column != null) {
err.loc = { file: source, line, column };
}
}
}
}
else if (err.loc) {
if (!err.frame) {
let code = err.pluginCode;
if (err.loc.file) {
err.id = normalizePath$3(err.loc.file);
if (!code) {
try {
code = fs$l.readFileSync(err.loc.file, 'utf-8');
}
catch { }
}
}
if (code) {
err.frame = generateCodeFrame(`${code}`, err.loc);
}
}
}
if (typeof err.loc?.column !== 'number' &&
typeof err.loc?.line !== 'number' &&
!err.loc?.file) {
delete err.loc;
}
return err;
}
class TransformContext extends Context {
constructor(filename, code, inMap) {
super();
this.originalSourcemap = null;
this.sourcemapChain = [];
this.combinedMap = null;
this.filename = filename;
this.originalCode = code;
if (inMap) {
if (debugSourcemapCombine) {
// @ts-expect-error inject name for debug purpose
inMap.name = '$inMap';
}
this.sourcemapChain.push(inMap);
}
}
_getCombinedSourcemap(createIfNull = false) {
if (debugSourcemapCombine &&
debugSourcemapCombineFilter &&
this.filename.includes(debugSourcemapCombineFilter)) {
debugSourcemapCombine('----------', this.filename);
debugSourcemapCombine(this.combinedMap);
debugSourcemapCombine(this.sourcemapChain);
debugSourcemapCombine('----------');
}
let combinedMap = this.combinedMap;
for (let m of this.sourcemapChain) {
if (typeof m === 'string')
m = JSON.parse(m);
if (!('version' in m)) {
// empty, nullified source map
combinedMap = this.combinedMap = null;
this.sourcemapChain.length = 0;
break;
}
if (!combinedMap) {
combinedMap = m;
}
else {
combinedMap = combineSourcemaps(cleanUrl(this.filename), [
m,
combinedMap,
]);
}
}
if (!combinedMap) {
return createIfNull
? new MagicString(this.originalCode).generateMap({
includeContent: true,
hires: 'boundary',
source: cleanUrl(this.filename),
})
: null;
}
if (combinedMap !== this.combinedMap) {
this.combinedMap = combinedMap;
this.sourcemapChain.length = 0;
}
return this.combinedMap;
}
getCombinedSourcemap() {
return this._getCombinedSourcemap(true);
}
}
let closed = false;
const processing = new Set();
// keeps track of hook promises so that we can wait for them all to finish upon closing the server
function handleHookPromise(maybePromise) {
if (!maybePromise?.then) {
return maybePromise;
}
const promise = maybePromise;
processing.add(promise);
return promise.finally(() => processing.delete(promise));
}
const container = {
options: await (async () => {
let options = rollupOptions;
for (const optionsHook of getSortedPluginHooks('options')) {
if (closed)
throwClosedServerError();
options =
(await handleHookPromise(optionsHook.call(minimalContext, options))) || options;
}
if (options.acornInjectPlugins) {
parser = Parser$1.extend(...arraify(options.acornInjectPlugins));
}
return {
acorn,
acornInjectPlugins: [],
...options,
};
})(),
getModuleInfo,
async buildStart() {
await handleHookPromise(hookParallel('buildStart', (plugin) => new Context(plugin), () => [container.options]));
},
async resolveId(rawId, importer = join$2(root, 'index.html'), options) {
const skip = options?.skip;
const ssr = options?.ssr;
const scan = !!options?.scan;
const ctx = new Context();
ctx.ssr = !!ssr;
ctx._scan = scan;
ctx._resolveSkips = skip;
const resolveStart = debugResolve ? performance.now() : 0;
let id = null;
const partial = {};
for (const plugin of getSortedPlugins('resolveId')) {
if (closed && !ssr)
throwClosedServerError();
if (!plugin.resolveId)
continue;
if (skip?.has(plugin))
continue;
ctx._activePlugin = plugin;
const pluginResolveStart = debugPluginResolve ? performance.now() : 0;
const handler = 'handler' in plugin.resolveId
? plugin.resolveId.handler
: plugin.resolveId;
const result = await handleHookPromise(handler.call(ctx, rawId, importer, {
assertions: options?.assertions ?? {},
custom: options?.custom,
isEntry: !!options?.isEntry,
ssr,
scan,
}));
if (!result)
continue;
if (typeof result === 'string') {
id = result;
}
else {
id = result.id;
Object.assign(partial, result);
}
debugPluginResolve?.(timeFrom(pluginResolveStart), plugin.name, prettifyUrl(id, root));
// resolveId() is hookFirst - first non-null result is returned.
break;
}
if (debugResolve && rawId !== id && !rawId.startsWith(FS_PREFIX)) {
const key = rawId + id;
// avoid spamming
if (!seenResolves[key]) {
seenResolves[key] = true;
debugResolve(`${timeFrom(resolveStart)} ${colors$1.cyan(rawId)} -> ${colors$1.dim(id)}`);
}
}
if (id) {
partial.id = isExternalUrl(id) ? id : normalizePath$3(id);
return partial;
}
else {
return null;
}
},
async load(id, options) {
const ssr = options?.ssr;
const ctx = new Context();
ctx.ssr = !!ssr;
for (const plugin of getSortedPlugins('load')) {
if (closed && !ssr)
throwClosedServerError();
if (!plugin.load)
continue;
ctx._activePlugin = plugin;
const handler = 'handler' in plugin.load ? plugin.load.handler : plugin.load;
const result = await handleHookPromise(handler.call(ctx, id, { ssr }));
if (result != null) {
if (isObject$2(result)) {
updateModuleInfo(id, result);
}
return result;
}
}
return null;
},
async transform(code, id, options) {
const inMap = options?.inMap;
const ssr = options?.ssr;
const ctx = new TransformContext(id, code, inMap);
ctx.ssr = !!ssr;
for (const plugin of getSortedPlugins('transform')) {
if (closed && !ssr)
throwClosedServerError();
if (!plugin.transform)
continue;
ctx._activePlugin = plugin;
ctx._activeId = id;
ctx._activeCode = code;
const start = debugPluginTransform ? performance.now() : 0;
let result;
const handler = 'handler' in plugin.transform
? plugin.transform.handler
: plugin.transform;
try {
result = await handleHookPromise(handler.call(ctx, code, id, { ssr }));
}
catch (e) {
ctx.error(e);
}
if (!result)
continue;
debugPluginTransform?.(timeFrom(start), plugin.name, prettifyUrl(id, root));
if (isObject$2(result)) {
if (result.code !== undefined) {
code = result.code;
if (result.map) {
if (debugSourcemapCombine) {
// @ts-expect-error inject plugin name for debug purpose
result.map.name = plugin.name;
}
ctx.sourcemapChain.push(result.map);
}
}
updateModuleInfo(id, result);
}
else {
code = result;
}
}
return {
code,
map: ctx._getCombinedSourcemap(),
};
},
async close() {
if (closed)
return;
closed = true;
await Promise.allSettled(Array.from(processing));
const ctx = new Context();
await hookParallel('buildEnd', () => ctx, () => []);
await hookParallel('closeBundle', () => ctx, () => []);
},
};
return container;
}
const debug$9 = createDebugger('vite:deps');
const htmlTypesRE = /\.(html|vue|svelte|astro|imba)$/;
// A simple regex to detect import sources. This is only used on
// <script lang="ts"> blocks in vue (setup only) or svelte files, since
// seemingly unused imports are dropped by esbuild when transpiling TS which
// prevents it from crawling further.
// We can't use es-module-lexer because it can't handle TS, and don't want to
// use Acorn because it's slow. Luckily this doesn't have to be bullet proof
// since even missed imports can be caught at runtime, and false positives will
// simply be ignored.
const importsRE = /(?<!\/\/.*)(?<=^|;|\*\/)\s*import(?!\s+type)(?:[\w*{}\n\r\t, ]+from)?\s*("[^"]+"|'[^']+')\s*(?=$|;|\/\/|\/\*)/gm;
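// e.g. it captures the source of `import { ref } from 'vue'` and of a bare
// `import 'virtual:foo'`, while `import type { Foo } from './types'` is skipped by the
// negative lookahead (module names here are purely illustrative).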
function scanImports(config) {
// Only used to scan non-ssr code
const start = performance.now();
const deps = {};
const missing = {};
let entries;
const scanContext = { cancelled: false };
const esbuildContext = computeEntries(config).then((computedEntries) => {
entries = computedEntries;
if (!entries.length) {
if (!config.optimizeDeps.entries && !config.optimizeDeps.include) {
config.logger.warn(colors$1.yellow('(!) Could not auto-determine entry point from rollupOptions or html files ' +
'and there are no explicit optimizeDeps.include patterns. ' +
'Skipping dependency pre-bundling.'));
}
return;
}
if (scanContext.cancelled)
return;
debug$9?.(`Crawling dependencies using entries: ${entries
.map((entry) => `\n ${colors$1.dim(entry)}`)
.join('')}`);
return prepareEsbuildScanner(config, entries, deps, missing, scanContext);
});
const result = esbuildContext
.then((context) => {
function disposeContext() {
return context?.dispose().catch((e) => {
config.logger.error('Failed to dispose esbuild context', { error: e });
});
}
if (!context || scanContext?.cancelled) {
disposeContext();
return { deps: {}, missing: {} };
}
return context
.rebuild()
.then(() => {
return {
// Ensure a fixed order so hashes are stable and improve logs
deps: orderedDependencies(deps),
missing,
};
})
.finally(() => {
return disposeContext();
});
})
.catch(async (e) => {
if (e.errors && e.message.includes('The build was canceled')) {
// esbuild logs an error when cancelling, but this is expected so
// return an empty result instead
return { deps: {}, missing: {} };
}
const prependMessage = colors$1.red(`\
Failed to scan for dependencies from entries:
${entries.join('\n')}
`);
if (e.errors) {
const msgs = await formatMessages(e.errors, {
kind: 'error',
color: true,
});
e.message = prependMessage + msgs.join('\n');
}
else {
e.message = prependMessage + e.message;
}
throw e;
})
.finally(() => {
if (debug$9) {
const duration = (performance.now() - start).toFixed(2);
const depsStr = Object.keys(orderedDependencies(deps))
.sort()
.map((id) => `\n ${colors$1.cyan(id)} -> ${colors$1.dim(deps[id])}`)
.join('') || colors$1.dim('no dependencies found');
debug$9(`Scan completed in ${duration}ms: ${depsStr}`);
}
});
return {
cancel: async () => {
scanContext.cancelled = true;
return esbuildContext.then((context) => context?.cancel());
},
result,
};
}
async function computeEntries(config) {
let entries = [];
const explicitEntryPatterns = config.optimizeDeps.entries;
const buildInput = config.build.rollupOptions?.input;
if (explicitEntryPatterns) {
entries = await globEntries(explicitEntryPatterns, config);
}
else if (buildInput) {
const resolvePath = (p) => path$o.resolve(config.root, p);
if (typeof buildInput === 'string') {
entries = [resolvePath(buildInput)];
}
else if (Array.isArray(buildInput)) {
entries = buildInput.map(resolvePath);
}
else if (isObject$2(buildInput)) {
entries = Object.values(buildInput).map(resolvePath);
}
else {
throw new Error('invalid rollupOptions.input value.');
}
}
else {
entries = await globEntries('**/*.html', config);
}
// Non-supported entry file types and virtual files should not be scanned for
// dependencies.
entries = entries.filter((entry) => isScannable(entry) && fs$l.existsSync(entry));
return entries;
}
async function prepareEsbuildScanner(config, entries, deps, missing, scanContext) {
const container = await createPluginContainer(config);
if (scanContext?.cancelled)
return;
const plugin = esbuildScanPlugin(config, container, deps, missing, entries);
const { plugins = [], tsconfig, tsconfigRaw, ...esbuildOptions } = config.optimizeDeps?.esbuildOptions ?? {};
return await esbuild.context({
absWorkingDir: process.cwd(),
write: false,
stdin: {
contents: entries.map((e) => `import ${JSON.stringify(e)}`).join('\n'),
loader: 'js',
},
bundle: true,
format: 'esm',
logLevel: 'silent',
plugins: [...plugins, plugin],
tsconfig,
tsconfigRaw: resolveTsconfigRaw(tsconfig, tsconfigRaw),
...esbuildOptions,
});
}
function orderedDependencies(deps) {
const depsList = Object.entries(deps);
// Ensure the same browserHash for the same set of dependencies
depsList.sort((a, b) => a[0].localeCompare(b[0]));
return Object.fromEntries(depsList);
}
function globEntries(pattern, config) {
return glob(pattern, {
cwd: config.root,
ignore: [
'**/node_modules/**',
`**/${config.build.outDir}/**`,
// if there aren't explicit entries, also ignore other common folders
...(config.optimizeDeps.entries
? []
: [`**/__tests__/**`, `**/coverage/**`]),
],
absolute: true,
suppressErrors: true, // suppress EACCES errors
});
}
const scriptRE = /(<script(?:\s+[a-z_:][-\w:]*(?:\s*=\s*(?:"[^"]*"|'[^']*'|[^"'<>=\s]+))?)*\s*>)(.*?)<\/script>/gis;
const commentRE = /<!--.*?-->/gs;
const srcRE = /\bsrc\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s'">]+))/i;
const typeRE = /\btype\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s'">]+))/i;
const langRE = /\blang\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s'">]+))/i;
const contextRE = /\bcontext\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s'">]+))/i;
function esbuildScanPlugin(config, container, depImports, missing, entries) {
const seen = new Map();
const resolve = async (id, importer, options) => {
const key = id + (importer && path$o.dirname(importer));
if (seen.has(key)) {
return seen.get(key);
}
const resolved = await container.resolveId(id, importer && normalizePath$3(importer), {
...options,
scan: true,
});
const res = resolved?.id;
seen.set(key, res);
return res;
};
const include = config.optimizeDeps?.include;
const exclude = [
...(config.optimizeDeps?.exclude || []),
'@vite/client',
'@vite/env',
];
const externalUnlessEntry = ({ path }) => ({
path,
external: !entries.includes(path),
});
const doTransformGlobImport = async (contents, id, loader) => {
let transpiledContents;
// transpile because `transformGlobImport` only expects js
if (loader !== 'js') {
transpiledContents = (await transform$1(contents, { loader })).code;
}
else {
transpiledContents = contents;
}
const result = await transformGlobImport(transpiledContents, id, config.root, resolve, config.isProduction);
return result?.s.toString() || transpiledContents;
};
return {
name: 'vite:dep-scan',
setup(build) {
const scripts = {};
// external urls
build.onResolve({ filter: externalRE }, ({ path }) => ({
path,
external: true,
}));
// data urls
build.onResolve({ filter: dataUrlRE }, ({ path }) => ({
path,
external: true,
}));
// local scripts (`<script>` in Svelte and `<script setup>` in Vue)
build.onResolve({ filter: virtualModuleRE }, ({ path }) => {
return {
// strip prefix to get valid filesystem path so esbuild can resolve imports in the file
path: path.replace(virtualModulePrefix, ''),
namespace: 'script',
};
});
build.onLoad({ filter: /.*/, namespace: 'script' }, ({ path }) => {
return scripts[path];
});
// html types: extract script contents -----------------------------------
build.onResolve({ filter: htmlTypesRE }, async ({ path, importer }) => {
const resolved = await resolve(path, importer);
if (!resolved)
return;
// It is possible for the scanner to scan html types in node_modules.
// If we can optimize this html type, skip it so it's handled by the
// bare import resolve, and recorded as an optimized dep.
if (isInNodeModules(resolved) &&
isOptimizable(resolved, config.optimizeDeps))
return;
return {
path: resolved,
namespace: 'html',
};
});
// extract scripts inside HTML-like files and treat them as js modules
build.onLoad({ filter: htmlTypesRE, namespace: 'html' }, async ({ path }) => {
let raw = await fsp.readFile(path, 'utf-8');
// Avoid matching the content of the comment
raw = raw.replace(commentRE, '<!---->');
const isHtml = path.endsWith('.html');
scriptRE.lastIndex = 0;
let js = '';
let scriptId = 0;
let match;
while ((match = scriptRE.exec(raw))) {
const [, openTag, content] = match;
const typeMatch = openTag.match(typeRE);
const type = typeMatch && (typeMatch[1] || typeMatch[2] || typeMatch[3]);
const langMatch = openTag.match(langRE);
const lang = langMatch && (langMatch[1] || langMatch[2] || langMatch[3]);
// in plain HTML, skip scripts that are not type="module"
if (isHtml && type !== 'module') {
continue;
}
// skip type="application/ld+json" and other non-JS types
if (type &&
!(type.includes('javascript') ||
type.includes('ecmascript') ||
type === 'module')) {
continue;
}
let loader = 'js';
if (lang === 'ts' || lang === 'tsx' || lang === 'jsx') {
loader = lang;
}
else if (path.endsWith('.astro')) {
loader = 'ts';
}
const srcMatch = openTag.match(srcRE);
if (srcMatch) {
const src = srcMatch[1] || srcMatch[2] || srcMatch[3];
js += `import ${JSON.stringify(src)}\n`;
}
else if (content.trim()) {
// The reason why virtual modules are needed:
// 1. There can be module scripts (`<script context="module">` in Svelte and `<script>` in Vue)
// or local scripts (`<script>` in Svelte and `<script setup>` in Vue)
// 2. There can be multiple module scripts in html
// We need to handle these separately in case variable names are reused between them
// append imports in TS to prevent esbuild from removing them
// since they may be used in the template
const contents = content +
(loader.startsWith('ts') ? extractImportPaths(content) : '');
const key = `${path}?id=${scriptId++}`;
if (contents.includes('import.meta.glob')) {
scripts[key] = {
loader: 'js',
contents: await doTransformGlobImport(contents, path, loader),
pluginData: {
htmlType: { loader },
},
};
}
else {
scripts[key] = {
loader,
contents,
pluginData: {
htmlType: { loader },
},
};
}
const virtualModulePath = JSON.stringify(virtualModulePrefix + key);
const contextMatch = openTag.match(contextRE);
const context = contextMatch &&
(contextMatch[1] || contextMatch[2] || contextMatch[3]);
// Especially for Svelte files, exports in <script context="module"> means module exports,
// exports in <script> means component props. To avoid having two same export name from the
// star exports, we need to ignore exports in <script>
if (path.endsWith('.svelte') && context !== 'module') {
js += `import ${virtualModulePath}\n`;
}
else {
js += `export * from ${virtualModulePath}\n`;
}
}
}
// This will trigger incorrectly if `export default` is contained
// anywhere in a string. Svelte and Astro files can't have
// `export default` as code so we know if it's encountered it's a
// false positive (e.g. contained in a string)
if (!path.endsWith('.vue') || !js.includes('export default')) {
js += '\nexport default {}';
}
return {
loader: 'js',
contents: js,
};
});
// bare imports: record and externalize ----------------------------------
build.onResolve({
// avoid matching windows volume
filter: /^[\w@][^:]/,
}, async ({ path: id, importer, pluginData }) => {
if (moduleListContains(exclude, id)) {
return externalUnlessEntry({ path: id });
}
if (depImports[id]) {
return externalUnlessEntry({ path: id });
}
const resolved = await resolve(id, importer, {
custom: {
depScan: { loader: pluginData?.htmlType?.loader },
},
});
if (resolved) {
if (shouldExternalizeDep(resolved, id)) {
return externalUnlessEntry({ path: id });
}
if (isInNodeModules(resolved) || include?.includes(id)) {
// dependency or forced included, externalize and stop crawling
if (isOptimizable(resolved, config.optimizeDeps)) {
depImports[id] = resolved;
}
return externalUnlessEntry({ path: id });
}
else if (isScannable(resolved)) {
const namespace = htmlTypesRE.test(resolved) ? 'html' : undefined;
// linked package, keep crawling
return {
path: path$o.resolve(resolved),
namespace,
};
}
else {
return externalUnlessEntry({ path: id });
}
}
else {
missing[id] = normalizePath$3(importer);
}
});
// Externalized file types -----------------------------------------------
// these are done on raw ids using esbuild's native regex filter so it
// should be faster than doing it in the catch-all via js
// they are done after the bare import resolve because a package name
// may end with these extensions
// css
build.onResolve({ filter: CSS_LANGS_RE }, externalUnlessEntry);
// json & wasm
build.onResolve({ filter: /\.(json|json5|wasm)$/ }, externalUnlessEntry);
// known asset types
build.onResolve({
filter: new RegExp(`\\.(${KNOWN_ASSET_TYPES.join('|')})$`),
}, externalUnlessEntry);
// known vite query types: ?worker, ?raw
build.onResolve({ filter: SPECIAL_QUERY_RE }, ({ path }) => ({
path,
external: true,
}));
// catch all -------------------------------------------------------------
build.onResolve({
filter: /.*/,
}, async ({ path: id, importer, pluginData }) => {
// use vite resolver to support urls and omitted extensions
const resolved = await resolve(id, importer, {
custom: {
depScan: { loader: pluginData?.htmlType?.loader },
},
});
if (resolved) {
if (shouldExternalizeDep(resolved, id) || !isScannable(resolved)) {
return externalUnlessEntry({ path: id });
}
const namespace = htmlTypesRE.test(resolved) ? 'html' : undefined;
return {
path: path$o.resolve(cleanUrl(resolved)),
namespace,
};
}
else {
// resolve failed... probably unsupported type
return externalUnlessEntry({ path: id });
}
});
// for jsx/tsx, we need to access the content and check for
// presence of import.meta.glob, since it results in import relationships
// but isn't crawled by esbuild.
build.onLoad({ filter: JS_TYPES_RE }, async ({ path: id }) => {
let ext = path$o.extname(id).slice(1);
if (ext === 'mjs')
ext = 'js';
let contents = await fsp.readFile(id, 'utf-8');
if (ext.endsWith('x') && config.esbuild && config.esbuild.jsxInject) {
contents = config.esbuild.jsxInject + `\n` + contents;
}
const loader = config.optimizeDeps?.esbuildOptions?.loader?.[`.${ext}`] ||
ext;
if (contents.includes('import.meta.glob')) {
return {
loader: 'js',
contents: await doTransformGlobImport(contents, id, loader),
};
}
return {
loader,
contents,
};
});
},
};
}
/**
* when using TS + (Vue + `<script setup>`) or Svelte, imports may appear
* unused to esbuild and be dropped in the build output, which prevents
* esbuild from crawling further.
* the solution is to add `import 'x'` for every source to force
* esbuild to keep crawling due to potential side effects.
*/
function extractImportPaths(code) {
// empty singleline & multiline comments to avoid matching comments
code = code
.replace(multilineCommentsRE$1, '/* */')
.replace(singlelineCommentsRE$1, '');
let js = '';
let m;
importsRE.lastIndex = 0;
while ((m = importsRE.exec(code)) != null) {
js += `\nimport ${m[1]}`;
}
return js;
}
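// Illustrative sketch (not part of the bundle): given a TS `<script setup>` body like the
// one below, extractImportPaths re-emits bare side-effect imports so esbuild keeps crawling
// even when the original bindings look unused. This assumes `importsRE` (defined elsewhere
// in this bundle) captures the quoted module specifier.
//
//   const code = `import { ref } from 'vue'\nimport Comp from './Comp.vue'`
//   extractImportPaths(code)
//   // roughly: "\nimport 'vue'\nimport './Comp.vue'"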
function shouldExternalizeDep(resolvedId, rawId) {
// not a valid file path
if (!path$o.isAbsolute(resolvedId)) {
return true;
}
// virtual id
if (resolvedId === rawId || resolvedId.includes('\0')) {
return true;
}
return false;
}
function isScannable(id) {
// From Vite 5, all optimizeDeps.extensions are scannable. We hardcode .marko for 4.5.0 to avoid
// potential regressions. See https://github.com/vitejs/vite/pull/14543
return (JS_TYPES_RE.test(id) ||
htmlTypesRE.test(id) ||
path$o.extname(id) === '.marko');
}
// esbuild v0.18 only transforms decorators when `experimentalDecorators` is set to `true`.
// To preserve compat with the esbuild breaking change, we set `experimentalDecorators` to
// `true` by default if it's unset.
// TODO: Remove this in Vite 5 and check https://github.com/vitejs/vite/pull/13805#issuecomment-1633612320
function resolveTsconfigRaw(tsconfig, tsconfigRaw) {
return tsconfig || typeof tsconfigRaw === 'string'
? tsconfigRaw
: {
...tsconfigRaw,
compilerOptions: {
experimentalDecorators: true,
...tsconfigRaw?.compilerOptions,
},
};
}
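// Illustrative sketch (not part of the bundle): how the experimentalDecorators default is
// applied when no tsconfig path is given and tsconfigRaw is an object (or unset).
//
//   resolveTsconfigRaw(undefined, undefined)
//   // -> { compilerOptions: { experimentalDecorators: true } }
//   resolveTsconfigRaw(undefined, { compilerOptions: { experimentalDecorators: false } })
//   // -> { compilerOptions: { experimentalDecorators: false } }   (user setting wins)
//   resolveTsconfigRaw('./tsconfig.json', undefined)
//   // -> undefined   (the tsconfig file on disk is used as-is instead)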
function createOptimizeDepsIncludeResolver(config, ssr) {
const resolve = config.createResolver({
asSrc: false,
scan: true,
ssrOptimizeCheck: ssr,
ssrConfig: config.ssr,
packageCache: new Map(),
});
return async (id) => {
const lastArrowIndex = id.lastIndexOf('>');
if (lastArrowIndex === -1) {
return await resolve(id, undefined, undefined, ssr);
}
// split nested selected id by last '>', for example:
// 'foo > bar > baz' => 'foo > bar' & 'baz'
const nestedRoot = id.substring(0, lastArrowIndex).trim();
const nestedPath = id.substring(lastArrowIndex + 1).trim();
const basedir = nestedResolveBasedir(nestedRoot, config.root, config.resolve.preserveSymlinks);
return await resolve(nestedPath, path$o.resolve(basedir, 'package.json'), undefined, ssr);
};
}
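// Illustrative sketch (not part of the bundle): how a nested optimizeDeps.include id is
// resolved. 'a > b' means "resolve b starting from inside the installed package a".
// Package names here are hypothetical.
//
//   const resolveInclude = createOptimizeDepsIncludeResolver(config, false)
//   await resolveInclude('esm-pkg')            // resolved like a normal bare import
//   await resolveInclude('cjs-pkg > esm-dep')  // basedir walks into cjs-pkg first,
//                                              // then 'esm-dep' is resolved from there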
/**
* Expand the glob syntax in `optimizeDeps.include` to proper import paths
*/
function expandGlobIds(id, config) {
const pkgName = getNpmPackageName(id);
if (!pkgName)
return [];
const pkgData = resolvePackageData(pkgName, config.root, config.resolve.preserveSymlinks, config.packageCache);
if (!pkgData)
return [];
const pattern = '.' + id.slice(pkgName.length);
const exports = pkgData.data.exports;
// if package has exports field, get all possible export paths and apply
// glob on them with micromatch
if (exports) {
if (typeof exports === 'string' || Array.isArray(exports)) {
return [pkgName];
}
const possibleExportPaths = [];
for (const key in exports) {
if (key.startsWith('.')) {
if (key.includes('*')) {
// "./glob/*": {
// "browser": "./dist/glob/*-browser/*.js", <-- get this one
// "default": "./dist/glob/*/*.js"
// }
// NOTE: theoretically the "default" condition could map to a different
// set of files, but that complicates the resolve logic, so we assume
// all conditions map to the same set of files, and get the first one.
const exportsValue = getFirstExportStringValue(exports[key]);
if (!exportsValue)
continue;
// "./dist/glob/*-browser/*.js" => "./dist/glob/**/*-browser/**/*.js"
// NOTE: in some cases, this could expand to consecutive /**/*/**/* etc
// but it's fine since fast-glob handles it the same.
const exportValuePattern = exportsValue.replace(/\*/g, '**/*');
// "./dist/glob/*-browser/*.js" => /dist\/glob\/(.*)-browser\/(.*)\.js/
const exportsValueGlobRe = new RegExp(exportsValue.split('*').map(escapeRegex).join('(.*)'));
possibleExportPaths.push(...glob
.sync(exportValuePattern, {
cwd: pkgData.dir,
ignore: ['node_modules'],
})
.map((filePath) => {
// "./glob/*": "./dist/glob/*-browser/*.js"
// `filePath`: "./dist/glob/foo-browser/foo.js"
// we need to map the file path back to the export key by
// matching it against the value regex and substituting the capture groups into the key
const matched = slash$1(filePath).match(exportsValueGlobRe);
// `matched`: [..., 'foo', 'foo']
if (matched) {
let allGlobSame = matched.length === 2;
// exports key can only have one *, so for >=2 matched groups,
// make sure they have the same value
if (!allGlobSame) {
// assume true, if one group is different, set false and break
allGlobSame = true;
for (let i = 2; i < matched.length; i++) {
if (matched[i] !== matched[i - 1]) {
allGlobSame = false;
break;
}
}
}
if (allGlobSame) {
return key.replace('*', matched[1]).slice(2);
}
}
return '';
})
.filter(Boolean));
}
else {
possibleExportPaths.push(key.slice(2));
}
}
}
const matched = micromatch$2(possibleExportPaths, pattern).map((match) => path$o.posix.join(pkgName, match));
matched.unshift(pkgName);
return matched;
}
else {
// for packages without exports, we can do a simple glob
const matched = glob
.sync(pattern, { cwd: pkgData.dir, ignore: ['node_modules'] })
.map((match) => path$o.posix.join(pkgName, slash$1(match)));
matched.unshift(pkgName);
return matched;
}
}
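// Illustrative sketch (not part of the bundle): expanding a glob include entry against a
// package's "exports" field. The package name and export keys here are hypothetical.
//
//   // package "my-lib" with exports: { ".": "./index.js", "./plugins/*": "./dist/plugins/*.js" }
//   expandGlobIds('my-lib/plugins/*', config)
//   // -> ['my-lib', 'my-lib/plugins/a', 'my-lib/plugins/b', ...]
//   // i.e. the bare package name plus every concrete subpath matched on disk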
function getFirstExportStringValue(obj) {
if (typeof obj === 'string') {
return obj;
}
else if (Array.isArray(obj)) {
return obj[0];
}
else {
for (const key in obj) {
return getFirstExportStringValue(obj[key]);
}
}
}
/**
* Continuously resolve the basedir of packages separated by '>'
*/
function nestedResolveBasedir(id, basedir, preserveSymlinks = false) {
const pkgs = id.split('>').map((pkg) => pkg.trim());
for (const pkg of pkgs) {
basedir = resolvePackageData(pkg, basedir, preserveSymlinks)?.dir || basedir;
}
return basedir;
}
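// Illustrative sketch (not part of the bundle): walking basedirs for a nested id.
// Package names are hypothetical.
//
//   nestedResolveBasedir('a > b', '/project')
//   // resolves package 'a' from /project, then package 'b' from a's directory,
//   // returning b's directory (or the last directory that resolved successfully)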
const debug$8 = createDebugger('vite:deps');
/**
* The amount of time to wait for requests to register newly found dependencies before triggering
* a re-bundle + page reload
*/
const debounceMs = 100;
const depsOptimizerMap = new WeakMap();
const devSsrDepsOptimizerMap = new WeakMap();
function getDepsOptimizer(config, ssr) {
// Workers compilation shares the DepsOptimizer from the main build
const isDevSsr = ssr && config.command !== 'build';
return (isDevSsr ? devSsrDepsOptimizerMap : depsOptimizerMap).get(config.mainConfig || config);
}
async function initDepsOptimizer(config, server) {
// Non Dev SSR Optimizer
const ssr = config.command === 'build' && !!config.build.ssr;
if (!getDepsOptimizer(config, ssr)) {
await createDepsOptimizer(config, server);
}
}
let creatingDevSsrOptimizer;
async function initDevSsrDepsOptimizer(config, server) {
if (getDepsOptimizer(config, true)) {
// ssr
return;
}
if (creatingDevSsrOptimizer) {
return creatingDevSsrOptimizer;
}
creatingDevSsrOptimizer = (async function () {
// Important: scanning needs to be done before starting the SSR dev optimizer
// If ssrLoadModule is called before server.listen(), the main deps optimizer
// will not have been created yet
const ssr = false;
if (!getDepsOptimizer(config, ssr)) {
await initDepsOptimizer(config, server);
}
await getDepsOptimizer(config, ssr).scanProcessing;
await createDevSsrDepsOptimizer(config);
creatingDevSsrOptimizer = undefined;
})();
return await creatingDevSsrOptimizer;
}
async function createDepsOptimizer(config, server) {
const { logger } = config;
const isBuild = config.command === 'build';
const ssr = isBuild && !!config.build.ssr; // safe as Dev SSR doesn't use this optimizer
const sessionTimestamp = Date.now().toString();
const cachedMetadata = await loadCachedDepOptimizationMetadata(config, ssr);
let debounceProcessingHandle;
let closed = false;
let metadata = cachedMetadata || initDepsOptimizerMetadata(config, ssr, sessionTimestamp);
const depsOptimizer = {
metadata,
registerMissingImport,
run: () => debouncedProcessing(0),
isOptimizedDepFile: createIsOptimizedDepFile(config),
isOptimizedDepUrl: createIsOptimizedDepUrl(config),
getOptimizedDepId: (depInfo) => isBuild ? depInfo.file : `${depInfo.file}?v=${depInfo.browserHash}`,
registerWorkersSource,
delayDepsOptimizerUntil,
resetRegisteredIds,
ensureFirstRun,
close,
options: getDepOptimizationConfig(config, ssr),
};
depsOptimizerMap.set(config, depsOptimizer);
let newDepsDiscovered = false;
let newDepsToLog = [];
let newDepsToLogHandle;
const logNewlyDiscoveredDeps = () => {
if (newDepsToLog.length) {
config.logger.info(colors$1.green(`✨ new dependencies optimized: ${depsLogString(newDepsToLog)}`), {
timestamp: true,
});
newDepsToLog = [];
}
};
let depOptimizationProcessing = newDepOptimizationProcessing();
let depOptimizationProcessingQueue = [];
const resolveEnqueuedProcessingPromises = () => {
// Resolve all the processings (including the ones which were delayed)
for (const processing of depOptimizationProcessingQueue) {
processing.resolve();
}
depOptimizationProcessingQueue = [];
};
let enqueuedRerun;
let currentlyProcessing = false;
let firstRunCalled = !!cachedMetadata;
// During build, we wait for every module to be scanned before resolving
// optimized deps loading for rollup on each rebuild. It will be recreated
// after each buildStart.
// During dev, if this is a cold run, we wait for static imports discovered
// from the first request before resolving to minimize full page reloads.
// On warm start or after the first optimization is run, we use a simpler
// debounce strategy each time a new dep is discovered.
let crawlEndFinder;
if (isBuild || !cachedMetadata) {
crawlEndFinder = setupOnCrawlEnd(onCrawlEnd);
}
let optimizationResult;
let discover;
async function close() {
closed = true;
crawlEndFinder?.cancel();
await Promise.allSettled([
discover?.cancel(),
depsOptimizer.scanProcessing,
optimizationResult?.cancel(),
]);
}
if (!cachedMetadata) {
// Enter processing state until crawl of static imports ends
currentlyProcessing = true;
// Initialize discovered deps with manually added optimizeDeps.include info
const deps = {};
await addManuallyIncludedOptimizeDeps(deps, config, ssr);
const discovered = toDiscoveredDependencies(config, deps, ssr, sessionTimestamp);
for (const depInfo of Object.values(discovered)) {
addOptimizedDepInfo(metadata, 'discovered', {
...depInfo,
processing: depOptimizationProcessing.promise,
});
newDepsDiscovered = true;
}
if (config.optimizeDeps.noDiscovery) {
// We don't need to scan for dependencies or wait for the static crawl to end
// Run the first optimization run immediately
runOptimizer();
}
else if (!isBuild) {
// Important, the scanner is dev only
depsOptimizer.scanProcessing = new Promise((resolve) => {
(async () => {
try {
debug$8?.(colors$1.green(`scanning for dependencies...`));
discover = discoverProjectDependencies(config);
const deps = await discover.result;
discover = undefined;
// Add these dependencies to the discovered list, as these are currently
// used by the preAliasPlugin to support aliased and optimized deps.
// This is also used by the CJS externalization heuristics in legacy mode
for (const id of Object.keys(deps)) {
if (!metadata.discovered[id]) {
addMissingDep(id, deps[id]);
}
}
const knownDeps = prepareKnownDeps();
// For dev, we run the scanner and the first optimization
// run on the background, but we wait until crawling has ended
// to decide if we send this result to the browser or we need to
// do another optimize step
optimizationResult = runOptimizeDeps(config, knownDeps);
}
catch (e) {
logger.error(e.stack || e.message);
}
finally {
resolve();
depsOptimizer.scanProcessing = undefined;
}
})();
});
}
}
function startNextDiscoveredBatch() {
newDepsDiscovered = false;
// Add the current depOptimizationProcessing to the queue, these
// promises are going to be resolved once a rerun is committed
depOptimizationProcessingQueue.push(depOptimizationProcessing);
// Create a new promise for the next rerun, discovered missing
// dependencies will be assigned this promise from this point
depOptimizationProcessing = newDepOptimizationProcessing();
}
function prepareKnownDeps() {
const knownDeps = {};
// Clone optimized info objects, fileHash, browserHash may be changed for them
for (const dep of Object.keys(metadata.optimized)) {
knownDeps[dep] = { ...metadata.optimized[dep] };
}
for (const dep of Object.keys(metadata.discovered)) {
// Clone the discovered info discarding its processing promise
const { processing, ...info } = metadata.discovered[dep];
knownDeps[dep] = info;
}
return knownDeps;
}
async function runOptimizer(preRunResult) {
// A successful completion of the optimizeDeps rerun will end up
// creating a new bundled version of all current and discovered deps
// in the cache dir and a new metadata info object assigned
// to _metadata. A fullReload is only issued if the previously bundled
// dependencies have changed.
// If the rerun fails, _metadata remains untouched, currently discovered
// deps are cleaned, and a fullReload is issued.
// All deps, previously known and newly discovered, are rebundled,
// respecting insertion order to keep the metadata file stable.
const isRerun = firstRunCalled;
firstRunCalled = true;
// Ensure that rerun is called sequentially
enqueuedRerun = undefined;
// Ensure that a rerun will not be issued for current discovered deps
if (debounceProcessingHandle)
clearTimeout(debounceProcessingHandle);
if (closed || Object.keys(metadata.discovered).length === 0) {
currentlyProcessing = false;
return;
}
currentlyProcessing = true;
try {
let processingResult;
if (preRunResult) {
processingResult = preRunResult;
}
else {
const knownDeps = prepareKnownDeps();
startNextDiscoveredBatch();
optimizationResult = runOptimizeDeps(config, knownDeps);
processingResult = await optimizationResult.result;
optimizationResult = undefined;
}
if (closed) {
currentlyProcessing = false;
processingResult.cancel();
resolveEnqueuedProcessingPromises();
return;
}
const newData = processingResult.metadata;
const needsInteropMismatch = findInteropMismatches(metadata.discovered, newData.optimized);
// After a re-optimization, if the internal bundled chunks change a full page reload
// is required. If the files are stable, we can avoid the reload that is expensive
// for large applications. Comparing their fileHash we can find out if it is safe to
// keep the current browser state.
const needsReload = needsInteropMismatch.length > 0 ||
metadata.hash !== newData.hash ||
Object.keys(metadata.optimized).some((dep) => {
return (metadata.optimized[dep].fileHash !== newData.optimized[dep].fileHash);
});
const commitProcessing = async () => {
await processingResult.commit();
// While optimizeDeps is running, new missing deps may be discovered,
// in which case they will keep being added to metadata.discovered
for (const id in metadata.discovered) {
if (!newData.optimized[id]) {
addOptimizedDepInfo(newData, 'discovered', metadata.discovered[id]);
}
}
// If we don't reload the page, we need to keep browserHash stable
if (!needsReload) {
newData.browserHash = metadata.browserHash;
for (const dep in newData.chunks) {
newData.chunks[dep].browserHash = metadata.browserHash;
}
for (const dep in newData.optimized) {
newData.optimized[dep].browserHash = (metadata.optimized[dep] || metadata.discovered[dep]).browserHash;
}
}
// Commit hash and needsInterop changes to the discovered deps info
// object. Allow code to await the discovered processing promise
// and use the information in the same object
for (const o in newData.optimized) {
const discovered = metadata.discovered[o];
if (discovered) {
const optimized = newData.optimized[o];
discovered.browserHash = optimized.browserHash;
discovered.fileHash = optimized.fileHash;
discovered.needsInterop = optimized.needsInterop;
discovered.processing = undefined;
}
}
if (isRerun) {
newDepsToLog.push(...Object.keys(newData.optimized).filter((dep) => !metadata.optimized[dep]));
}
metadata = depsOptimizer.metadata = newData;
resolveEnqueuedProcessingPromises();
};
if (!needsReload) {
await commitProcessing();
if (!debug$8) {
if (newDepsToLogHandle)
clearTimeout(newDepsToLogHandle);
newDepsToLogHandle = setTimeout(() => {
newDepsToLogHandle = undefined;
logNewlyDiscoveredDeps();
}, 2 * debounceMs);
}
else {
debug$8(colors$1.green(`✨ ${!isRerun
? `dependencies optimized`
: `optimized dependencies unchanged`}`));
}
}
else {
if (newDepsDiscovered) {
// There are newly discovered deps, and another rerun is about to be
// executed. Avoid the current full reload discarding this rerun result.
// We don't resolve the processing promises, as they will be resolved
// once a rerun is committed
processingResult.cancel();
debug$8?.(colors$1.green(`✨ delaying reload as new dependencies have been found...`));
}
else {
await commitProcessing();
if (!debug$8) {
if (newDepsToLogHandle)
clearTimeout(newDepsToLogHandle);
newDepsToLogHandle = undefined;
logNewlyDiscoveredDeps();
}
logger.info(colors$1.green(`✨ optimized dependencies changed. reloading`), {
timestamp: true,
});
if (needsInteropMismatch.length > 0) {
config.logger.warn(`Mixed ESM and CJS detected in ${colors$1.yellow(needsInteropMismatch.join(', '))}, add ${needsInteropMismatch.length === 1 ? 'it' : 'them'} to optimizeDeps.needsInterop to speed up cold start`, {
timestamp: true,
});
}
fullReload();
}
}
}
catch (e) {
logger.error(colors$1.red(`error while updating dependencies:\n${e.stack}`), { timestamp: true, error: e });
resolveEnqueuedProcessingPromises();
// Reset missing deps, let the server rediscover the dependencies
metadata.discovered = {};
}
currentlyProcessing = false;
// @ts-expect-error `enqueuedRerun` could exist because `debouncedProcessing` may run while awaited
enqueuedRerun?.();
}
function fullReload() {
if (server) {
// Cached transform results have stale imports (resolved to
// old locations) so they need to be invalidated before the page is
// reloaded.
server.moduleGraph.invalidateAll();
server.ws.send({
type: 'full-reload',
path: '*',
});
}
}
async function rerun() {
// debounce to wait for new missing deps to be discovered, then issue a new
// optimization of deps (both old and newly found) once the previous
// optimizeDeps processing is finished
const deps = Object.keys(metadata.discovered);
const depsString = depsLogString(deps);
debug$8?.(colors$1.green(`new dependencies found: ${depsString}`));
runOptimizer();
}
function getDiscoveredBrowserHash(hash, deps, missing) {
return getHash(hash + JSON.stringify(deps) + JSON.stringify(missing) + sessionTimestamp);
}
function registerMissingImport(id, resolved) {
const optimized = metadata.optimized[id];
if (optimized) {
return optimized;
}
const chunk = metadata.chunks[id];
if (chunk) {
return chunk;
}
let missing = metadata.discovered[id];
if (missing) {
// We have already discovered this dependency
// It will be processed in the next rerun call
return missing;
}
missing = addMissingDep(id, resolved);
// Until the first optimize run is called, avoid triggering processing
// We'll wait until the user codebase is eagerly processed by Vite so
// we can get a list of every missing dependency before handing the
// browser a dependency that may be outdated, thus avoiding full page reloads
if (!crawlEndFinder) {
if (isBuild) {
logger.error('Vite Internal Error: Missing dependency found after crawling ended');
}
// Debounced rerun, let other missing dependencies be discovered before
// running the next optimizeDeps
debouncedProcessing();
}
// Return the path for the optimized bundle, this path is known before
// esbuild is run to generate the pre-bundle
return missing;
}
function addMissingDep(id, resolved) {
newDepsDiscovered = true;
return addOptimizedDepInfo(metadata, 'discovered', {
id,
file: getOptimizedDepPath(id, config, ssr),
src: resolved,
// Adding a browserHash to this missing dependency that is unique to
// the current state of known + missing deps. If its optimizeDeps run
// doesn't alter the bundled files of previously known dependencies,
// we don't need a full reload and this browserHash will be kept
browserHash: getDiscoveredBrowserHash(metadata.hash, depsFromOptimizedDepInfo(metadata.optimized), depsFromOptimizedDepInfo(metadata.discovered)),
// loading of this pre-bundled dep needs to await for its processing
// promise to be resolved
processing: depOptimizationProcessing.promise,
exportsData: extractExportsData(resolved, config, ssr),
});
}
function debouncedProcessing(timeout = debounceMs) {
if (!newDepsDiscovered) {
return;
}
// Debounced rerun, let other missing dependencies be discovered before
// running the next optimizeDeps
enqueuedRerun = undefined;
if (debounceProcessingHandle)
clearTimeout(debounceProcessingHandle);
if (newDepsToLogHandle)
clearTimeout(newDepsToLogHandle);
newDepsToLogHandle = undefined;
debounceProcessingHandle = setTimeout(() => {
debounceProcessingHandle = undefined;
enqueuedRerun = rerun;
if (!currentlyProcessing) {
enqueuedRerun();
}
}, timeout);
}
// During dev, onCrawlEnd is called once when the server starts and all static
// imports after the first request have been crawled (dynamic imports may also
// be crawled if the browser requests them right away).
// During build, onCrawlEnd will be called once after each buildStart (so in
// watch mode it will be called after each rebuild has processed every module).
// All modules are transformed first in this case (both static and dynamic).
async function onCrawlEnd() {
// At build time, a missing dep appearing after onCrawlEnd is an internal error
// In dev, switch after this point to a simple debounce strategy
crawlEndFinder = undefined;
debug$8?.(colors$1.green(`✨ static imports crawl ended`));
if (closed) {
return;
}
// Await the scan+optimize step running in the background
// It should normally be over by the time crawling of user code has ended
await depsOptimizer.scanProcessing;
if (!isBuild && optimizationResult && !config.optimizeDeps.noDiscovery) {
const result = await optimizationResult.result;
optimizationResult = undefined;
currentlyProcessing = false;
const crawlDeps = Object.keys(metadata.discovered);
const scanDeps = Object.keys(result.metadata.optimized);
if (scanDeps.length === 0 && crawlDeps.length === 0) {
debug$8?.(colors$1.green(`✨ no dependencies found by the scanner or crawling static imports`));
result.cancel();
firstRunCalled = true;
return;
}
const needsInteropMismatch = findInteropMismatches(metadata.discovered, result.metadata.optimized);
const scannerMissedDeps = crawlDeps.some((dep) => !scanDeps.includes(dep));
const outdatedResult = needsInteropMismatch.length > 0 || scannerMissedDeps;
if (outdatedResult) {
// Drop this scan result, and perform a new optimization to avoid a full reload
result.cancel();
// Add deps found by the scanner to the discovered deps while crawling
for (const dep of scanDeps) {
if (!crawlDeps.includes(dep)) {
addMissingDep(dep, result.metadata.optimized[dep].src);
}
}
if (scannerMissedDeps) {
debug$8?.(colors$1.yellow(`✨ new dependencies were found while crawling that weren't detected by the scanner`));
}
debug$8?.(colors$1.green(`✨ re-running optimizer`));
debouncedProcessing(0);
}
else {
debug$8?.(colors$1.green(`✨ using post-scan optimizer result, the scanner found every used dependency`));
startNextDiscoveredBatch();
runOptimizer(result);
}
}
else {
const crawlDeps = Object.keys(metadata.discovered);
currentlyProcessing = false;
if (crawlDeps.length === 0) {
debug$8?.(colors$1.green(`✨ no dependencies found while crawling the static imports`));
firstRunCalled = true;
}
else {
// queue the first optimizer run
debouncedProcessing(0);
}
}
}
// Called during buildStart at build time, when build --watch is used.
function resetRegisteredIds() {
crawlEndFinder?.cancel();
crawlEndFinder = setupOnCrawlEnd(onCrawlEnd);
}
function registerWorkersSource(id) {
crawlEndFinder?.registerWorkersSource(id);
}
function delayDepsOptimizerUntil(id, done) {
if (crawlEndFinder && !depsOptimizer.isOptimizedDepFile(id)) {
crawlEndFinder.delayDepsOptimizerUntil(id, done);
}
}
function ensureFirstRun() {
crawlEndFinder?.ensureFirstRun();
}
}
const callCrawlEndIfIdleAfterMs = 50;
function setupOnCrawlEnd(onCrawlEnd) {
const registeredIds = new Set();
const seenIds = new Set();
const workersSources = new Set();
let timeoutHandle;
let cancelled = false;
function cancel() {
cancelled = true;
}
let crawlEndCalled = false;
function callOnCrawlEnd() {
if (!cancelled && !crawlEndCalled) {
crawlEndCalled = true;
onCrawlEnd();
}
}
// If all the inputs are dependencies, we aren't going to get any
// delayDepsOptimizerUntil(id) calls. We need to guard against this
// by forcing a rerun if no deps have been registered
let firstRunEnsured = false;
function ensureFirstRun() {
if (!firstRunEnsured && seenIds.size === 0) {
setTimeout(() => {
if (seenIds.size === 0) {
callOnCrawlEnd();
}
}, 200);
}
firstRunEnsured = true;
}
function registerWorkersSource(id) {
workersSources.add(id);
// Avoid waiting for this id, as it may be blocked by the rollup
// bundling process of the worker that also depends on the optimizer
registeredIds.delete(id);
checkIfCrawlEndAfterTimeout();
}
function delayDepsOptimizerUntil(id, done) {
if (!seenIds.has(id)) {
seenIds.add(id);
if (!workersSources.has(id)) {
registeredIds.add(id);
done()
.catch(() => { })
.finally(() => markIdAsDone(id));
}
}
}
function markIdAsDone(id) {
registeredIds.delete(id);
checkIfCrawlEndAfterTimeout();
}
function checkIfCrawlEndAfterTimeout() {
if (cancelled || registeredIds.size > 0)
return;
if (timeoutHandle)
clearTimeout(timeoutHandle);
timeoutHandle = setTimeout(callOnCrawlEndWhenIdle, callCrawlEndIfIdleAfterMs);
}
async function callOnCrawlEndWhenIdle() {
if (cancelled || registeredIds.size > 0)
return;
callOnCrawlEnd();
}
return {
ensureFirstRun,
registerWorkersSource,
delayDepsOptimizerUntil,
cancel,
};
}
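// Illustrative sketch (not part of the bundle): how the crawl-end finder is driven.
// The id and the done() promise here are hypothetical.
//
//   const finder = setupOnCrawlEnd(() => debug?.('static import crawl ended'))
//   finder.delayDepsOptimizerUntil('/src/main.ts', () => transformPromise)
//   // once every registered id has settled and 50ms pass with no new ids,
//   // the onCrawlEnd callback fires exactly once (unless cancel() was called)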
async function createDevSsrDepsOptimizer(config) {
const metadata = await optimizeServerSsrDeps(config);
const depsOptimizer = {
metadata,
isOptimizedDepFile: createIsOptimizedDepFile(config),
isOptimizedDepUrl: createIsOptimizedDepUrl(config),
getOptimizedDepId: (depInfo) => `${depInfo.file}?v=${depInfo.browserHash}`,
registerMissingImport: () => {
throw new Error('Vite Internal Error: registerMissingImport is not supported in dev SSR');
},
// noop, there is no scanning during dev SSR
// the optimizer blocks the server start
run: () => { },
registerWorkersSource: (id) => { },
delayDepsOptimizerUntil: (id, done) => { },
resetRegisteredIds: () => { },
ensureFirstRun: () => { },
close: async () => { },
options: config.ssr.optimizeDeps,
};
devSsrDepsOptimizerMap.set(config, depsOptimizer);
}
function findInteropMismatches(discovered, optimized) {
const needsInteropMismatch = [];
for (const dep in discovered) {
const discoveredDepInfo = discovered[dep];
const depInfo = optimized[dep];
if (depInfo) {
if (discoveredDepInfo.needsInterop !== undefined &&
depInfo.needsInterop !== discoveredDepInfo.needsInterop) {
// This only happens when a discovered dependency has mixed ESM and CJS syntax
// and it hasn't been manually added to optimizeDeps.needsInterop
needsInteropMismatch.push(dep);
debug$8?.(colors$1.cyan(`✨ needsInterop mismatch detected for ${dep}`));
}
}
}
return needsInteropMismatch;
}
const debug$7 = createDebugger('vite:deps');
const jsExtensionRE = /\.js$/i;
const jsMapExtensionRE = /\.js\.map$/i;
/**
* Scan and optimize dependencies within a project.
* Used by Vite CLI when running `vite optimize`.
*/
async function optimizeDeps(config, force = config.optimizeDeps.force, asCommand = false) {
const log = asCommand ? config.logger.info : debug$7;
const ssr = config.command === 'build' && !!config.build.ssr;
const cachedMetadata = await loadCachedDepOptimizationMetadata(config, ssr, force, asCommand);
if (cachedMetadata) {
return cachedMetadata;
}
const deps = await discoverProjectDependencies(config).result;
const depsString = depsLogString(Object.keys(deps));
log?.(colors$1.green(`Optimizing dependencies:\n ${depsString}`));
await addManuallyIncludedOptimizeDeps(deps, config, ssr);
const depsInfo = toDiscoveredDependencies(config, deps, ssr);
const result = await runOptimizeDeps(config, depsInfo).result;
await result.commit();
return result.metadata;
}
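// Illustrative usage sketch (not part of the bundle), roughly what `vite optimize` does:
// resolve a config, then run the standalone optimizer against it.
//
//   const config = await resolveConfig({}, 'serve')   // resolveConfig comes from this package
//   const metadata = await optimizeDeps(config, true /* force */, true /* asCommand */)
//   // metadata.optimized lists every pre-bundled dependency and its cache file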
async function optimizeServerSsrDeps(config) {
const ssr = true;
const cachedMetadata = await loadCachedDepOptimizationMetadata(config, ssr, config.optimizeDeps.force, false);
if (cachedMetadata) {
return cachedMetadata;
}
let alsoInclude;
let noExternalFilter;
const { exclude } = getDepOptimizationConfig(config, ssr);
const noExternal = config.ssr?.noExternal;
if (noExternal) {
alsoInclude = arraify(noExternal).filter((ne) => typeof ne === 'string');
noExternalFilter =
noExternal === true
? (dep) => true
: createFilter$1(undefined, exclude, {
resolve: false,
});
}
const deps = {};
await addManuallyIncludedOptimizeDeps(deps, config, ssr, alsoInclude, noExternalFilter);
const depsInfo = toDiscoveredDependencies(config, deps, true);
const result = await runOptimizeDeps(config, depsInfo, true).result;
await result.commit();
return result.metadata;
}
function initDepsOptimizerMetadata(config, ssr, timestamp) {
const hash = getDepHash(config, ssr);
return {
hash,
browserHash: getOptimizedBrowserHash(hash, {}, timestamp),
optimized: {},
chunks: {},
discovered: {},
depInfoList: [],
};
}
function addOptimizedDepInfo(metadata, type, depInfo) {
metadata[type][depInfo.id] = depInfo;
metadata.depInfoList.push(depInfo);
return depInfo;
}
let firstLoadCachedDepOptimizationMetadata = true;
/**
* Creates the initial dep optimization metadata, loading it from the deps cache
* if it exists and pre-bundling isn't forced
*/
async function loadCachedDepOptimizationMetadata(config, ssr, force = config.optimizeDeps.force, asCommand = false) {
const log = asCommand ? config.logger.info : debug$7;
if (firstLoadCachedDepOptimizationMetadata) {
firstLoadCachedDepOptimizationMetadata = false;
// Fire up a cleanup of stale processing deps dirs in case an older process exited early
setTimeout(() => cleanupDepsCacheStaleDirs(config), 0);
}
const depsCacheDir = getDepsCacheDir(config, ssr);
if (!force) {
let cachedMetadata;
try {
const cachedMetadataPath = path$o.join(depsCacheDir, '_metadata.json');
cachedMetadata = parseDepsOptimizerMetadata(await fsp.readFile(cachedMetadataPath, 'utf-8'), depsCacheDir);
}
catch (e) { }
// hash is consistent, no need to re-bundle
if (cachedMetadata && cachedMetadata.hash === getDepHash(config, ssr)) {
log?.('Hash is consistent. Skipping. Use --force to override.');
// Nothing to commit or cancel as we are using the cache, we only
// need to resolve the processing promise so requests can move on
return cachedMetadata;
}
}
else {
config.logger.info('Forced re-optimization of dependencies');
}
// Start with a fresh cache
await fsp.rm(depsCacheDir, { recursive: true, force: true });
}
/**
* Initial optimizeDeps at server start. Perform a fast scan using esbuild to
* find deps to pre-bundle and include user hard-coded dependencies
*/
function discoverProjectDependencies(config) {
const { cancel, result } = scanImports(config);
return {
cancel,
result: result.then(({ deps, missing }) => {
const missingIds = Object.keys(missing);
if (missingIds.length) {
throw new Error(`The following dependencies are imported but could not be resolved:\n\n ${missingIds
.map((id) => `${colors$1.cyan(id)} ${colors$1.white(colors$1.dim(`(imported by ${missing[id]})`))}`)
.join(`\n `)}\n\nAre they installed?`);
}
return deps;
}),
};
}
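// Illustrative sketch (not part of the bundle): the scanner returns a cancel handle and a
// promise with the discovered bare-import map; unresolved imports reject with a readable error.
//
//   const { cancel, result } = discoverProjectDependencies(config)
//   const deps = await result   // e.g. { vue: '/abs/path/node_modules/vue/dist/vue.runtime.esm-bundler.js' }
//   // call cancel() instead if the server is shutting down before the scan finishes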
function toDiscoveredDependencies(config, deps, ssr, timestamp) {
const browserHash = getOptimizedBrowserHash(getDepHash(config, ssr), deps, timestamp);
const discovered = {};
for (const id in deps) {
const src = deps[id];
discovered[id] = {
id,
file: getOptimizedDepPath(id, config, ssr),
src,
browserHash: browserHash,
exportsData: extractExportsData(src, config, ssr),
};
}
return discovered;
}
function depsLogString(qualifiedIds) {
return colors$1.yellow(qualifiedIds.join(`, `));
}
/**
* Internally, Vite uses this function to prepare an optimizeDeps run. When Vite starts, we can get
* the metadata and start the server without waiting for the optimizeDeps processing to be completed
*/
function runOptimizeDeps(resolvedConfig, depsInfo, ssr = resolvedConfig.command === 'build' &&
!!resolvedConfig.build.ssr) {
const optimizerContext = { cancelled: false };
const config = {
...resolvedConfig,
command: 'build',
};
const depsCacheDir = getDepsCacheDir(resolvedConfig, ssr);
const processingCacheDir = getProcessingDepsCacheDir(resolvedConfig, ssr);
// Create a temporary directory so we don't need to delete optimized deps
// until they have been processed. This also avoids leaving the deps cache
// directory in a corrupted state if there is an error
fs$l.mkdirSync(processingCacheDir, { recursive: true });
// a hint for Node.js
// all files in the cache directory should be recognized as ES modules
fs$l.writeFileSync(path$o.resolve(processingCacheDir, 'package.json'), `{\n "type": "module"\n}\n`);
const metadata = initDepsOptimizerMetadata(config, ssr);
metadata.browserHash = getOptimizedBrowserHash(metadata.hash, depsFromOptimizedDepInfo(depsInfo));
// We prebundle dependencies with esbuild and cache them, but there is no need
// to wait here. Code that needs to access the cached deps needs to await
// the optimizedDepInfo.processing promise for each dep
const qualifiedIds = Object.keys(depsInfo);
let cleaned = false;
let committed = false;
const cleanUp = () => {
// If commit was already called, ignore the clean up even if a cancel was requested
// This minimizes the chances of leaving the deps cache in a corrupted state
if (!cleaned && !committed) {
cleaned = true;
// No need to wait, we can clean up in the background because temp folders
// are unique per run
fsp.rm(processingCacheDir, { recursive: true, force: true }).catch(() => {
// Ignore errors
});
}
};
const successfulResult = {
metadata,
cancel: cleanUp,
commit: async () => {
if (cleaned) {
throw new Error('Can not commit a Deps Optimization run as it was cancelled');
}
// Ignore clean up requests after this point so the temp folder isn't deleted before
// we finish committing the new deps cache files to the deps folder
committed = true;
// Write metadata file, then commit the processing folder to the global deps cache
// Rewire the file paths from the temporary processing dir to the final deps cache dir
const dataPath = path$o.join(processingCacheDir, '_metadata.json');
fs$l.writeFileSync(dataPath, stringifyDepsOptimizerMetadata(metadata, depsCacheDir));
// In order to minimize the time where the deps folder isn't in a consistent state,
// we first rename the old depsCacheDir to a temporary path, then we rename the
// new processing cache dir to the depsCacheDir. In systems where doing so in sync
// is safe, we do an atomic operation (at least for this thread). For Windows, we
// found there are cases where the rename operation may return before it's done,
// so we do a graceful rename checking that the folder has been properly renamed.
// We found that the rename-rename (then delete the old folder in the background)
// is safer than a delete-rename operation.
const temporalPath = depsCacheDir + getTempSuffix();
const depsCacheDirPresent = fs$l.existsSync(depsCacheDir);
if (isWindows$4) {
if (depsCacheDirPresent)
await safeRename(depsCacheDir, temporalPath);
await safeRename(processingCacheDir, depsCacheDir);
}
else {
if (depsCacheDirPresent)
fs$l.renameSync(depsCacheDir, temporalPath);
fs$l.renameSync(processingCacheDir, depsCacheDir);
}
// Delete the temporary path in the background
if (depsCacheDirPresent)
fsp.rm(temporalPath, { recursive: true, force: true });
},
};
if (!qualifiedIds.length) {
// No deps to optimize, we still commit the processing cache dir to remove
// the previous optimized deps if they exist, and let the next server start
// skip the scanner step if the lockfile hasn't changed
return {
cancel: async () => cleanUp(),
result: Promise.resolve(successfulResult),
};
}
const cancelledResult = {
metadata,
commit: async () => cleanUp(),
cancel: cleanUp,
};
const start = performance.now();
const preparedRun = prepareEsbuildOptimizerRun(resolvedConfig, depsInfo, ssr, processingCacheDir, optimizerContext);
const runResult = preparedRun.then(({ context, idToExports }) => {
function disposeContext() {
return context?.dispose().catch((e) => {
config.logger.error('Failed to dispose esbuild context', { error: e });
});
}
if (!context || optimizerContext.cancelled) {
disposeContext();
return cancelledResult;
}
return context
.rebuild()
.then((result) => {
const meta = result.metafile;
// the paths in `meta.outputs` are relative to `process.cwd()`
const processingCacheDirOutputPath = path$o.relative(process.cwd(), processingCacheDir);
for (const id in depsInfo) {
const output = esbuildOutputFromId(meta.outputs, id, processingCacheDir);
const { exportsData, ...info } = depsInfo[id];
addOptimizedDepInfo(metadata, 'optimized', {
...info,
// We only need to hash the output.imports to check for stability, but adding the hash
// and file path gives us a unique hash that may be useful for other things in the future
fileHash: getHash(metadata.hash +
depsInfo[id].file +
JSON.stringify(output.imports)),
browserHash: metadata.browserHash,
// After bundling we have more information and can warn the user about legacy packages
// that require manual configuration
needsInterop: needsInterop(config, ssr, id, idToExports[id], output),
});
}
for (const o of Object.keys(meta.outputs)) {
if (!o.match(jsMapExtensionRE)) {
const id = path$o
.relative(processingCacheDirOutputPath, o)
.replace(jsExtensionRE, '');
const file = getOptimizedDepPath(id, resolvedConfig, ssr);
if (!findOptimizedDepInfoInRecord(metadata.optimized, (depInfo) => depInfo.file === file)) {
addOptimizedDepInfo(metadata, 'chunks', {
id,
file,
needsInterop: false,
browserHash: metadata.browserHash,
});
}
}
}
debug$7?.(`Dependencies bundled in ${(performance.now() - start).toFixed(2)}ms`);
return successfulResult;
})
.catch((e) => {
if (e.errors && e.message.includes('The build was canceled')) {
// esbuild logs an error when cancelling, but this is expected so
// return an empty result instead
return cancelledResult;
}
throw e;
})
.finally(() => {
return disposeContext();
});
});
runResult.catch(() => {
cleanUp();
});
return {
async cancel() {
optimizerContext.cancelled = true;
const { context } = await preparedRun;
await context?.cancel();
cleanUp();
},
result: runResult,
};
}
async function prepareEsbuildOptimizerRun(resolvedConfig, depsInfo, ssr, processingCacheDir, optimizerContext) {
const isBuild = resolvedConfig.command === 'build';
const config = {
...resolvedConfig,
command: 'build',
};
// esbuild generates nested directory output with the lowest common ancestor as base.
// This is unpredictable and makes it difficult to analyze entry / output
// mapping. So what we do here is:
// 1. flatten all ids to eliminate slashes
// 2. in the plugin, read the entry ourselves as virtual files to retain the
//    path.
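// Illustrative note (not executed): flattenId (defined elsewhere in this bundle) collapses a
// dep id into a single path segment, e.g. something like 'react/jsx-runtime' becoming
// 'react_jsx-runtime', so every esbuild entry lands directly in the processing dir.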
const flatIdDeps = {};
const idToExports = {};
const optimizeDeps = getDepOptimizationConfig(config, ssr);
const { plugins: pluginsFromConfig = [], tsconfig, tsconfigRaw, ...esbuildOptions } = optimizeDeps?.esbuildOptions ?? {};
await Promise.all(Object.keys(depsInfo).map(async (id) => {
const src = depsInfo[id].src;
const exportsData = await (depsInfo[id].exportsData ??
extractExportsData(src, config, ssr));
if (exportsData.jsxLoader && !esbuildOptions.loader?.['.js']) {
// Ensure that optimization won't fail by defaulting '.js' to the JSX parser.
// This is useful for packages such as Gatsby.
esbuildOptions.loader = {
'.js': 'jsx',
...esbuildOptions.loader,
};
}
const flatId = flattenId(id);
flatIdDeps[flatId] = src;
idToExports[id] = exportsData;
}));
if (optimizerContext.cancelled)
return { context: undefined, idToExports };
// esbuild automatically replaces process.env.NODE_ENV for platform 'browser'
// In lib mode, we need to keep process.env.NODE_ENV untouched, so at build
// time we replace it with __vite_process_env_NODE_ENV. This placeholder will
// later be replaced by the define plugin
const define = {
'process.env.NODE_ENV': isBuild
? '__vite_process_env_NODE_ENV'
: JSON.stringify(process.env.NODE_ENV || config.mode),
};
const platform = ssr && config.ssr?.target !== 'webworker' ? 'node' : 'browser';
const external = [...(optimizeDeps?.exclude ?? [])];
if (isBuild) {
let rollupOptionsExternal = config?.build?.rollupOptions?.external;
if (rollupOptionsExternal) {
if (typeof rollupOptionsExternal === 'string') {
rollupOptionsExternal = [rollupOptionsExternal];
}
// TODO: decide whether to support RegExp and function options
// They're not supported yet because `optimizeDeps.exclude` currently only accepts strings
if (!Array.isArray(rollupOptionsExternal) ||
rollupOptionsExternal.some((ext) => typeof ext !== 'string')) {
throw new Error(`[vite] 'build.rollupOptions.external' can only be an array of strings or a string when using esbuild optimization at build time.`);
}
external.push(...rollupOptionsExternal);
}
}
const plugins = [...pluginsFromConfig];
if (external.length) {
plugins.push(esbuildCjsExternalPlugin(external, platform));
}
plugins.push(esbuildDepPlugin(flatIdDeps, external, config, ssr));
const context = await esbuild.context({
absWorkingDir: process.cwd(),
entryPoints: Object.keys(flatIdDeps),
bundle: true,
// We can't use platform 'neutral', as esbuild has custom handling
// when the platform is 'node' or 'browser' that can't be emulated
// by using mainFields and conditions
platform,
define,
format: 'esm',
// See https://github.com/evanw/esbuild/issues/1921#issuecomment-1152991694
banner: platform === 'node'
? {
js: `import { createRequire } from 'module';const require = createRequire(import.meta.url);`,
}
: undefined,
target: isBuild ? config.build.target || undefined : ESBUILD_MODULES_TARGET,
external,
logLevel: 'error',
splitting: true,
sourcemap: true,
outdir: processingCacheDir,
ignoreAnnotations: !isBuild,
metafile: true,
plugins,
charset: 'utf8',
tsconfig,
tsconfigRaw: resolveTsconfigRaw(tsconfig, tsconfigRaw),
...esbuildOptions,
supported: {
'dynamic-import': true,
'import-meta': true,
...esbuildOptions.supported,
},
});
return { context, idToExports };
}
async function findKnownImports(config, ssr) {
const { deps } = await scanImports(config).result;
await addManuallyIncludedOptimizeDeps(deps, config, ssr);
return Object.keys(deps);
}
async function addManuallyIncludedOptimizeDeps(deps, config, ssr, extra = [], filter) {
const { logger } = config;
const optimizeDeps = getDepOptimizationConfig(config, ssr);
const optimizeDepsInclude = optimizeDeps?.include ?? [];
if (optimizeDepsInclude.length || extra.length) {
const unableToOptimize = (id, msg) => {
if (optimizeDepsInclude.includes(id)) {
logger.warn(`${msg}: ${colors$1.cyan(id)}, present in '${ssr ? 'ssr.' : ''}optimizeDeps.include'`);
}
};
const includes = [...optimizeDepsInclude, ...extra];
for (let i = 0; i < includes.length; i++) {
const id = includes[i];
if (glob.isDynamicPattern(id)) {
const globIds = expandGlobIds(id, config);
includes.splice(i, 1, ...globIds);
i += globIds.length - 1;
}
}
const resolve = createOptimizeDepsIncludeResolver(config, ssr);
for (const id of includes) {
// normalize 'foo >bar' as 'foo > bar' to prevent the same id being added twice
// and for pretty printing
const normalizedId = normalizeId(id);
if (!deps[normalizedId] && filter?.(normalizedId) !== false) {
const entry = await resolve(id);
if (entry) {
if (isOptimizable(entry, optimizeDeps)) {
if (!entry.endsWith('?__vite_skip_optimization')) {
deps[normalizedId] = entry;
}
}
else {
unableToOptimize(id, 'Cannot optimize dependency');
}
}
else {
unableToOptimize(id, 'Failed to resolve dependency');
}
}
}
}
}
function newDepOptimizationProcessing() {
let resolve;
const promise = new Promise((_resolve) => {
resolve = _resolve;
});
return { promise, resolve: resolve };
}
// Convert to { id: src }
function depsFromOptimizedDepInfo(depsInfo) {
return Object.fromEntries(Object.entries(depsInfo).map((d) => [d[0], d[1].src]));
}
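// Illustrative sketch (not part of the bundle): the conversion performed above.
//
//   depsFromOptimizedDepInfo({
//     vue: { id: 'vue', src: '/n_m/vue/index.mjs', file: '/cache/deps/vue.js' },
//   })
//   // -> { vue: '/n_m/vue/index.mjs' }   (paths shortened here for readability)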
function getOptimizedDepPath(id, config, ssr) {
return normalizePath$3(path$o.resolve(getDepsCacheDir(config, ssr), flattenId(id) + '.js'));
}
function getDepsCacheSuffix(config, ssr) {
let suffix = '';
if (config.command === 'build') {
// Differentiate build caches depending on outDir to allow parallel builds
const { outDir } = config.build;
const buildId = outDir.length > 8 || outDir.includes('/') ? getHash(outDir) : outDir;
suffix += `_build-${buildId}`;
}
if (ssr) {
suffix += '_ssr';
}
return suffix;
}
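// Illustrative sketch (not part of the bundle): suffixes produced for different modes,
// using minimal config objects for readability.
//
//   getDepsCacheSuffix({ command: 'serve' }, false)                            // -> ''
//   getDepsCacheSuffix({ command: 'serve' }, true)                             // -> '_ssr'
//   getDepsCacheSuffix({ command: 'build', build: { outDir: 'dist' } }, false) // -> '_build-dist'
//   // outDir values longer than 8 chars or containing '/' are hashed instead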
function getDepsCacheDir(config, ssr) {
return getDepsCacheDirPrefix(config) + getDepsCacheSuffix(config, ssr);
}
function getProcessingDepsCacheDir(config, ssr) {
return (getDepsCacheDirPrefix(config) +
getDepsCacheSuffix(config, ssr) +
getTempSuffix());
}
function getTempSuffix() {
return ('_temp_' +
getHash(`${process.pid}:${Date.now().toString()}:${Math.random()
.toString(16)
.slice(2)}`));
}
function getDepsCacheDirPrefix(config) {
return normalizePath$3(path$o.resolve(config.cacheDir, 'deps'));
}
function createIsOptimizedDepFile(config) {
const depsCacheDirPrefix = getDepsCacheDirPrefix(config);
return (id) => id.startsWith(depsCacheDirPrefix);
}
function createIsOptimizedDepUrl(config) {
const { root } = config;
const depsCacheDir = getDepsCacheDirPrefix(config);
// determine the url prefix of files inside cache directory
const depsCacheDirRelative = normalizePath$3(path$o.relative(root, depsCacheDir));
const depsCacheDirPrefix = depsCacheDirRelative.startsWith('../')
? // if the cache directory is outside root, the url prefix would be something
// like '/@fs/absolute/path/to/node_modules/.vite'
`/@fs/${removeLeadingSlash(normalizePath$3(depsCacheDir))}`
: // if the cache directory is inside root, the url prefix would be something
// like '/node_modules/.vite'
`/${depsCacheDirRelative}`;
return function isOptimizedDepUrl(url) {
return url.startsWith(depsCacheDirPrefix);
};
}
function parseDepsOptimizerMetadata(jsonMetadata, depsCacheDir) {
const { hash, browserHash, optimized, chunks } = JSON.parse(jsonMetadata, (key, value) => {
// Paths can be absolute or relative to the deps cache dir where
// the _metadata.json is located
if (key === 'file' || key === 'src') {
return normalizePath$3(path$o.resolve(depsCacheDir, value));
}
return value;
});
if (!chunks ||
Object.values(optimized).some((depInfo) => !depInfo.fileHash)) {
// outdated _metadata.json version, ignore
return;
}
const metadata = {
hash,
browserHash,
optimized: {},
discovered: {},
chunks: {},
depInfoList: [],
};
for (const id of Object.keys(optimized)) {
addOptimizedDepInfo(metadata, 'optimized', {
...optimized[id],
id,
browserHash,
});
}
for (const id of Object.keys(chunks)) {
addOptimizedDepInfo(metadata, 'chunks', {
...chunks[id],
id,
browserHash,
needsInterop: false,
});
}
return metadata;
}
/**
* Stringify metadata for deps cache. Remove processing promises
* and individual dep info browserHash. Once the cache is reloaded
* the next time the server starts, we need to use the global
* browserHash to allow long-term caching
*/
function stringifyDepsOptimizerMetadata(metadata, depsCacheDir) {
const { hash, browserHash, optimized, chunks } = metadata;
return JSON.stringify({
hash,
browserHash,
optimized: Object.fromEntries(Object.values(optimized).map(({ id, src, file, fileHash, needsInterop }) => [
id,
{
src,
file,
fileHash,
needsInterop,
},
])),
chunks: Object.fromEntries(Object.values(chunks).map(({ id, file }) => [id, { file }])),
}, (key, value) => {
// Paths can be absolute or relative to the deps cache dir where
// the _metadata.json is located
if (key === 'file' || key === 'src') {
return normalizePath$3(path$o.relative(depsCacheDir, value));
}
return value;
}, 2);
}
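// Illustrative sketch (not part of the bundle): rough shape of the persisted _metadata.json.
// File paths are rewritten relative to the deps cache dir by the replacer above; the hashes
// and package shown here are hypothetical.
//
//   {
//     "hash": "abc123",
//     "browserHash": "def456",
//     "optimized": {
//       "vue": { "src": "../../vue/dist/vue.runtime.esm-bundler.js",
//                "file": "vue.js", "fileHash": "0a1b2c", "needsInterop": false }
//     },
//     "chunks": { "chunk-ABCD1234": { "file": "chunk-ABCD1234.js" } }
//   }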
function esbuildOutputFromId(outputs, id, cacheDirOutputPath) {
const cwd = process.cwd();
const flatId = flattenId(id) + '.js';
const normalizedOutputPath = normalizePath$3(path$o.relative(cwd, path$o.join(cacheDirOutputPath, flatId)));
const output = outputs[normalizedOutputPath];
if (output) {
return output;
}
// If the root dir was symlinked, esbuild could return output keys as `../cwd/`
// Normalize keys to support this case too
for (const [key, value] of Object.entries(outputs)) {
if (normalizePath$3(path$o.relative(cwd, key)) === normalizedOutputPath) {
return value;
}
}
}
async function extractExportsData(filePath, config, ssr) {
await init;
const optimizeDeps = getDepOptimizationConfig(config, ssr);
const esbuildOptions = optimizeDeps?.esbuildOptions ?? {};
if (optimizeDeps.extensions?.some((ext) => filePath.endsWith(ext))) {
// For custom supported extensions, build the entry file to transform it into JS,
// and then parse with es-module-lexer. Note that the `bundle` option is not `true`,
// so only the entry file is being transformed.
const result = await build$3({
...esbuildOptions,
entryPoints: [filePath],
write: false,
format: 'esm',
});
const [imports, exports] = parse$e(result.outputFiles[0].text);
return {
hasImports: imports.length > 0,
exports: exports.map((e) => e.n),
};
}
let parseResult;
let usedJsxLoader = false;
const entryContent = await fsp.readFile(filePath, 'utf-8');
try {
parseResult = parse$e(entryContent);
}
catch {
const loader = esbuildOptions.loader?.[path$o.extname(filePath)] || 'jsx';
debug$7?.(`Unable to parse: ${filePath}.\n Trying again with a ${loader} transform.`);
const transformed = await transformWithEsbuild(entryContent, filePath, {
loader,
});
parseResult = parse$e(transformed.code);
usedJsxLoader = true;
}
const [imports, exports] = parseResult;
const exportsData = {
hasImports: imports.length > 0,
exports: exports.map((e) => e.n),
jsxLoader: usedJsxLoader,
};
return exportsData;
}
function needsInterop(config, ssr, id, exportsData, output) {
if (getDepOptimizationConfig(config, ssr)?.needsInterop?.includes(id)) {
return true;
}
const { hasImports, exports } = exportsData;
// entry has no ESM syntax - likely CJS or UMD
if (!exports.length && !hasImports) {
return true;
}
if (output) {
// if a peer dependency used require() on an ESM dependency, esbuild turns the
// ESM dependency's entry chunk into a single default export... detect
// such cases by checking exports mismatch, and force interop.
const generatedExports = output.exports;
if (!generatedExports ||
(isSingleDefaultExport(generatedExports) &&
!isSingleDefaultExport(exports))) {
return true;
}
}
return false;
}
function isSingleDefaultExport(exports) {
return exports.length === 1 && exports[0] === 'default';
}
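// Illustrative sketch (not part of the bundle): the interop heuristic in practice,
// with a placeholder resolved config and hypothetical exportsData.
//
//   needsInterop(config, false, 'lodash', { hasImports: false, exports: [] })
//   // -> true   (no ESM syntax at all, so it's treated as CJS/UMD)
//   needsInterop(config, false, 'vue', { hasImports: true, exports: ['ref', 'computed'] })
//   // -> false  (real ESM entry, unless listed in optimizeDeps.needsInterop)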
const lockfileFormats = [
{ name: 'package-lock.json', checkPatches: true, manager: 'npm' },
{ name: 'yarn.lock', checkPatches: true, manager: 'yarn' },
{ name: 'pnpm-lock.yaml', checkPatches: false, manager: 'pnpm' },
{ name: 'bun.lockb', checkPatches: true, manager: 'bun' },
].sort((_, { manager }) => {
return process.env.npm_config_user_agent?.startsWith(manager) ? 1 : -1;
});
const lockfileNames = lockfileFormats.map((l) => l.name);
function getDepHash(config, ssr) {
const lockfilePath = lookupFile(config.root, lockfileNames);
let content = lockfilePath ? fs$l.readFileSync(lockfilePath, 'utf-8') : '';
if (lockfilePath) {
const lockfileName = path$o.basename(lockfilePath);
const { checkPatches } = lockfileFormats.find((f) => f.name === lockfileName);
if (checkPatches) {
// Default of https://github.com/ds300/patch-package
const fullPath = path$o.join(path$o.dirname(lockfilePath), 'patches');
const stat = tryStatSync(fullPath);
if (stat?.isDirectory()) {
content += stat.mtimeMs.toString();
}
}
}
// also take config into account
// only a subset of config options that can affect dep optimization
const optimizeDeps = getDepOptimizationConfig(config, ssr);
content += JSON.stringify({
mode: process.env.NODE_ENV || config.mode,
root: config.root,
resolve: config.resolve,
buildTarget: config.build.target,
assetsInclude: config.assetsInclude,
plugins: config.plugins.map((p) => p.name),
optimizeDeps: {
include: optimizeDeps?.include,
exclude: optimizeDeps?.exclude,
esbuildOptions: {
...optimizeDeps?.esbuildOptions,
plugins: optimizeDeps?.esbuildOptions?.plugins?.map((p) => p.name),
},
},
}, (_, value) => {
if (typeof value === 'function' || value instanceof RegExp) {
return value.toString();
}
return value;
});
return getHash(content);
}
function getOptimizedBrowserHash(hash, deps, timestamp = '') {
return getHash(hash + JSON.stringify(deps) + timestamp);
}
function optimizedDepInfoFromId(metadata, id) {
return (metadata.optimized[id] || metadata.discovered[id] || metadata.chunks[id]);
}
function optimizedDepInfoFromFile(metadata, file) {
return metadata.depInfoList.find((depInfo) => depInfo.file === file);
}
function findOptimizedDepInfoInRecord(dependenciesInfo, callbackFn) {
for (const o of Object.keys(dependenciesInfo)) {
const info = dependenciesInfo[o];
if (callbackFn(info, o)) {
return info;
}
}
}
async function optimizedDepNeedsInterop(metadata, file, config, ssr) {
const depInfo = optimizedDepInfoFromFile(metadata, file);
if (depInfo?.src && depInfo.needsInterop === undefined) {
depInfo.exportsData ?? (depInfo.exportsData = extractExportsData(depInfo.src, config, ssr));
depInfo.needsInterop = needsInterop(config, ssr, depInfo.id, await depInfo.exportsData);
}
return depInfo?.needsInterop;
}
const MAX_TEMP_DIR_AGE_MS = 24 * 60 * 60 * 1000;
async function cleanupDepsCacheStaleDirs(config) {
try {
const cacheDir = path$o.resolve(config.cacheDir);
if (fs$l.existsSync(cacheDir)) {
const dirents = await fsp.readdir(cacheDir, { withFileTypes: true });
for (const dirent of dirents) {
if (dirent.isDirectory() && dirent.name.includes('_temp_')) {
const tempDirPath = path$o.resolve(config.cacheDir, dirent.name);
const stats = await fsp.stat(tempDirPath).catch((_) => null);
if (stats?.mtime &&
Date.now() - stats.mtime.getTime() > MAX_TEMP_DIR_AGE_MS) {
await fsp.rm(tempDirPath, { recursive: true, force: true });
}
}
}
}
}
catch (err) {
config.logger.error(err);
}
}
// We found issues with renaming folders in some systems. This is a custom
// implementation for the optimizer. It isn't intended to be a general utility
// Based on node-graceful-fs
// The ISC License
// Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors
// https://github.com/isaacs/node-graceful-fs/blob/main/LICENSE
// On Windows, A/V software can lock the directory, causing this
// to fail with an EACCES or EPERM if the directory contains newly
// created files. The original tried for up to 60 seconds; we only
// wait for 5 seconds, as a longer time would be seen as an error
const GRACEFUL_RENAME_TIMEOUT = 5000;
const safeRename = promisify$4(function gracefulRename(from, to, cb) {
const start = Date.now();
let backoff = 0;
fs$l.rename(from, to, function CB(er) {
if (er &&
(er.code === 'EACCES' || er.code === 'EPERM') &&
Date.now() - start < GRACEFUL_RENAME_TIMEOUT) {
setTimeout(function () {
fs$l.stat(to, function (stater, st) {
if (stater && stater.code === 'ENOENT')
fs$l.rename(from, to, CB);
else
CB(er);
});
}, backoff);
if (backoff < 100)
backoff += 10;
return;
}
if (cb)
cb(er);
});
});
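// Illustrative usage sketch (assumed caller, not part of the original source): the optimizer
// writes new deps into a temp folder and then swaps it into place, e.g.
//
//   // hypothetical paths
//   await safeRename('/project/node_modules/.vite/deps_temp_abc123', '/project/node_modules/.vite/deps');
//
// On Windows, if antivirus software briefly locks the target, the rename is retried with a
// small growing backoff for up to GRACEFUL_RENAME_TIMEOUT (5s) before the error is surfaced.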
var index$1 = {
__proto__: null,
addManuallyIncludedOptimizeDeps: addManuallyIncludedOptimizeDeps,
addOptimizedDepInfo: addOptimizedDepInfo,
cleanupDepsCacheStaleDirs: cleanupDepsCacheStaleDirs,
createIsOptimizedDepFile: createIsOptimizedDepFile,
createIsOptimizedDepUrl: createIsOptimizedDepUrl,
depsFromOptimizedDepInfo: depsFromOptimizedDepInfo,
depsLogString: depsLogString,
discoverProjectDependencies: discoverProjectDependencies,
extractExportsData: extractExportsData,
findKnownImports: findKnownImports,
getDepHash: getDepHash,
getDepsCacheDir: getDepsCacheDir,
getDepsOptimizer: getDepsOptimizer,
getOptimizedDepPath: getOptimizedDepPath,
initDepsOptimizer: initDepsOptimizer,
initDepsOptimizerMetadata: initDepsOptimizerMetadata,
initDevSsrDepsOptimizer: initDevSsrDepsOptimizer,
loadCachedDepOptimizationMetadata: loadCachedDepOptimizationMetadata,
newDepOptimizationProcessing: newDepOptimizationProcessing,
optimizeDeps: optimizeDeps,
optimizeServerSsrDeps: optimizeServerSsrDeps,
optimizedDepInfoFromFile: optimizedDepInfoFromFile,
optimizedDepInfoFromId: optimizedDepInfoFromId,
optimizedDepNeedsInterop: optimizedDepNeedsInterop,
runOptimizeDeps: runOptimizeDeps,
toDiscoveredDependencies: toDiscoveredDependencies
};
/**
* A flag for injected helpers. This flag will be set to `false` if the output
* target is not native es - so that injected helper logic can be conditionally
* dropped.
*/
const isModernFlag = `__VITE_IS_MODERN__`;
const preloadMethod = `__vitePreload`;
const preloadMarker = `__VITE_PRELOAD__`;
const preloadHelperId = '\0vite/preload-helper';
const preloadMarkerWithQuote = new RegExp(`['"]${preloadMarker}['"]`);
const dynamicImportPrefixRE = /import\s*\(/;
// TODO: abstract
const optimizedDepChunkRE = /\/chunk-[A-Z\d]{8}\.js/;
const optimizedDepDynamicRE = /-[A-Z\d]{8}\.js/;
function toRelativePath(filename, importer) {
const relPath = path$o.relative(path$o.dirname(importer), filename);
return relPath[0] === '.' ? relPath : `./${relPath}`;
}
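// Example (illustrative posix paths): the filename is resolved against the importer's directory
// and the result is always an explicitly relative specifier.
//
//   toRelativePath('assets/chunk-B.js', 'assets/index.js')   // -> './chunk-B.js'
//   toRelativePath('other/x.js', 'assets/index.js')          // -> '../other/x.js'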
function indexOfMatchInSlice(str, reg, pos = 0) {
if (pos !== 0) {
str = str.slice(pos);
}
const matcher = str.match(reg);
return matcher?.index !== undefined ? matcher.index + pos : -1;
}
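// Example (illustrative): the search starts at `pos`, but the returned index is relative to the
// full string.
//
//   indexOfMatchInSlice('abcabc', /b/, 2)   // -> 4
//   indexOfMatchInSlice('abcabc', /z/)      // -> -1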
/**
* Helper for preloading CSS and direct imports of async chunks in parallel to
* the async chunk itself.
*/
function detectScriptRel() {
const relList = typeof document !== 'undefined' && document.createElement('link').relList;
return relList && relList.supports && relList.supports('modulepreload')
? 'modulepreload'
: 'preload';
}
function preload(baseModule, deps, importerUrl) {
// @ts-expect-error __VITE_IS_MODERN__ will be replaced with boolean later
if (!__VITE_IS_MODERN__ || !deps || deps.length === 0) {
return baseModule();
}
const links = document.getElementsByTagName('link');
return Promise.all(deps.map((dep) => {
// @ts-expect-error assetsURL is declared before preload.toString()
dep = assetsURL(dep, importerUrl);
if (dep in seen)
return;
seen[dep] = true;
const isCss = dep.endsWith('.css');
const cssSelector = isCss ? '[rel="stylesheet"]' : '';
const isBaseRelative = !!importerUrl;
// check if the file is already preloaded by SSR markup
if (isBaseRelative) {
// When isBaseRelative is true then we have `importerUrl` and `dep` is
// already converted to an absolute URL by the `assetsURL` function
for (let i = links.length - 1; i >= 0; i--) {
const link = links[i];
                // The `links[i].href` is an absolute URL thanks to the browser doing the work
// for us. See https://html.spec.whatwg.org/multipage/common-dom-interfaces.html#reflecting-content-attributes-in-idl-attributes:idl-domstring-5
if (link.href === dep && (!isCss || link.rel === 'stylesheet')) {
return;
}
}
}
else if (document.querySelector(`link[href="${dep}"]${cssSelector}`)) {
return;
}
const link = document.createElement('link');
link.rel = isCss ? 'stylesheet' : scriptRel;
if (!isCss) {
link.as = 'script';
link.crossOrigin = '';
}
link.href = dep;
document.head.appendChild(link);
if (isCss) {
return new Promise((res, rej) => {
link.addEventListener('load', res);
link.addEventListener('error', () => rej(new Error(`Unable to preload CSS for ${dep}`)));
});
}
}))
.then(() => baseModule())
.catch((err) => {
const e = new Event('vite:preloadError', { cancelable: true });
// @ts-expect-error custom payload
e.payload = err;
window.dispatchEvent(e);
if (!e.defaultPrevented) {
throw err;
}
});
}
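// Illustrative sketch (hypothetical file names, not part of the original source): during build,
// a dynamic `import('./Page.js')` is wrapped by the plugin below as
//
//   __vitePreload(() => import('./Page-abc123.js'), __VITE_IS_MODERN__ ? "__VITE_PRELOAD__" : void 0, import.meta.url)
//
// and later the "__VITE_PRELOAD__" marker is replaced with the actual dep list
// (e.g. ["assets/Page-abc123.js","assets/Page-def456.css"]) in generateBundle, while
// __VITE_IS_MODERN__ is replaced with `true` or `false` in renderChunk depending on the format.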
/**
* Build only. During serve this is performed as part of ./importAnalysis.
*/
function buildImportAnalysisPlugin(config) {
const ssr = !!config.build.ssr;
const isWorker = config.isWorker;
const insertPreload = !(ssr || !!config.build.lib || isWorker);
const resolveModulePreloadDependencies = config.build.modulePreload && config.build.modulePreload.resolveDependencies;
const renderBuiltUrl = config.experimental.renderBuiltUrl;
const customModulePreloadPaths = !!(resolveModulePreloadDependencies || renderBuiltUrl);
const isRelativeBase = config.base === './' || config.base === '';
const optimizeModulePreloadRelativePaths = isRelativeBase && !customModulePreloadPaths;
const { modulePreload } = config.build;
const scriptRel = modulePreload && modulePreload.polyfill
? `'modulepreload'`
: `(${detectScriptRel.toString()})()`;
// There are three different cases for the preload list format in __vitePreload
//
// __vitePreload(() => import(asyncChunk), [ ...deps... ])
//
    // This is maintained to keep backwards compatibility, as some users developed plugins
    // using regex over this list to work around the fact that module preload wasn't
    // configurable.
const assetsURL = customModulePreloadPaths
? // If `experimental.renderBuiltUrl` or `build.modulePreload.resolveDependencies` are used
// the dependencies are already resolved. To avoid the need for `new URL(dep, import.meta.url)`
// a helper `__vitePreloadRelativeDep` is used to resolve from relative paths which can be minimized.
`function(dep, importerUrl) { return dep[0] === '.' ? new URL(dep, importerUrl).href : dep }`
: optimizeModulePreloadRelativePaths
        ? // If there aren't custom resolvers affecting the deps list, deps in the list are relative
// to the current chunk and are resolved to absolute URL by the __vitePreload helper itself.
// The importerUrl is passed as third parameter to __vitePreload in this case
`function(dep, importerUrl) { return new URL(dep, importerUrl).href }`
        : // If the base isn't relative, then the deps are relative to the project's `outDir` and the base
// is appended inside __vitePreload too.
`function(dep) { return ${JSON.stringify(config.base)}+dep }`;
const preloadCode = `const scriptRel = ${scriptRel};const assetsURL = ${assetsURL};const seen = {};export const ${preloadMethod} = ${preload.toString()}`;
return {
name: 'vite:build-import-analysis',
resolveId(id) {
if (id === preloadHelperId) {
return id;
}
},
load(id) {
if (id === preloadHelperId) {
return preloadCode;
}
},
async transform(source, importer) {
if (isInNodeModules(importer) && !dynamicImportPrefixRE.test(source)) {
return;
}
await init;
let imports = [];
try {
imports = parse$e(source)[0];
}
catch (e) {
this.error(e, e.idx);
}
if (!imports.length) {
return null;
}
const { root } = config;
const depsOptimizer = getDepsOptimizer(config, ssr);
const normalizeUrl = async (url, pos) => {
let importerFile = importer;
const optimizeDeps = getDepOptimizationConfig(config, ssr);
if (moduleListContains(optimizeDeps?.exclude, url)) {
if (depsOptimizer) {
await depsOptimizer.scanProcessing;
                    // if the dependency encountered in the optimized file was excluded from the optimization,
                    // the dependency needs to be resolved starting from the original source location of the optimized file,
                    // because starting from node_modules/.vite will not find the dependency if it was not hoisted
                    // (that is, if it is under a node_modules directory in the package source of the optimized file)
for (const optimizedModule of depsOptimizer.metadata.depInfoList) {
if (!optimizedModule.src)
continue; // Ignore chunks
if (optimizedModule.file === importer) {
importerFile = optimizedModule.src;
}
}
}
}
const resolved = await this.resolve(url, importerFile);
if (!resolved) {
// in ssr, we should let node handle the missing modules
if (ssr) {
return [url, url];
}
return this.error(`Failed to resolve import "${url}" from "${path$o.relative(process.cwd(), importerFile)}". Does the file exist?`, pos);
}
// normalize all imports into resolved URLs
// e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'`
if (resolved.id.startsWith(withTrailingSlash(root))) {
// in root: infer short absolute path from root
url = resolved.id.slice(root.length);
}
else {
url = resolved.id;
}
if (isExternalUrl(url)) {
return [url, url];
}
return [url, resolved.id];
};
let s;
const str = () => s || (s = new MagicString(source));
let needPreloadHelper = false;
for (let index = 0; index < imports.length; index++) {
const { s: start, e: end, ss: expStart, se: expEnd, n: specifier, d: dynamicIndex, a: assertIndex, } = imports[index];
const isDynamicImport = dynamicIndex > -1;
// strip import assertions as we can process them ourselves
if (!isDynamicImport && assertIndex > -1) {
str().remove(end + 1, expEnd);
}
if (isDynamicImport && insertPreload) {
needPreloadHelper = true;
str().prependLeft(expStart, `${preloadMethod}(() => `);
str().appendRight(expEnd, `,${isModernFlag}?"${preloadMarker}":void 0${optimizeModulePreloadRelativePaths || customModulePreloadPaths
? ',import.meta.url'
: ''})`);
}
// static import or valid string in dynamic import
// If resolvable, let's resolve it
if (depsOptimizer && specifier) {
// skip external / data uri
if (isExternalUrl(specifier) || isDataUrl(specifier)) {
continue;
}
// normalize
const [url, resolvedId] = await normalizeUrl(specifier, start);
if (url !== specifier) {
if (depsOptimizer.isOptimizedDepFile(resolvedId) &&
!resolvedId.match(optimizedDepChunkRE)) {
const file = cleanUrl(resolvedId); // Remove ?v={hash}
const needsInterop = await optimizedDepNeedsInterop(depsOptimizer.metadata, file, config, ssr);
let rewriteDone = false;
if (needsInterop === undefined) {
                            // Non-entry dynamic imports from dependencies will reach here, as there isn't
                            // optimization info for them, but they don't need ES interop. If the request isn't
                            // a dynamic import, then it is an internal Vite error.
if (!file.match(optimizedDepDynamicRE)) {
config.logger.error(colors$1.red(`Vite Error, ${url} optimized info should be defined`));
}
}
else if (needsInterop) {
// config.logger.info(`${url} needs interop`)
interopNamedImports(str(), imports[index], url, index, importer, config);
rewriteDone = true;
}
if (!rewriteDone) {
const rewrittenUrl = JSON.stringify(file);
const s = isDynamicImport ? start : start - 1;
const e = isDynamicImport ? end : end + 1;
str().update(s, e, rewrittenUrl);
}
}
}
}
// Differentiate CSS imports that use the default export from those that
// do not by injecting a ?used query - this allows us to avoid including
// the CSS string when unnecessary (esbuild has trouble tree-shaking
// them)
if (specifier &&
isCSSRequest(specifier) &&
// always inject ?used query when it is a dynamic import
// because there is no way to check whether the default export is used
(source.slice(expStart, start).includes('from') || isDynamicImport) &&
// already has ?used query (by import.meta.glob)
!specifier.match(/\?used(&|$)/) &&
// don't append ?used when SPECIAL_QUERY_RE exists
!specifier.match(SPECIAL_QUERY_RE) &&
// edge case for package names ending with .css (e.g normalize.css)
!(bareImportRE.test(specifier) && !specifier.includes('/'))) {
const url = specifier.replace(/\?|$/, (m) => `?used${m ? '&' : ''}`);
str().update(start, end, isDynamicImport ? `'${url}'` : url);
}
}
if (needPreloadHelper &&
insertPreload &&
!source.includes(`const ${preloadMethod} =`)) {
str().prepend(`import { ${preloadMethod} } from "${preloadHelperId}";`);
}
if (s) {
return {
code: s.toString(),
map: config.build.sourcemap
? s.generateMap({ hires: 'boundary' })
: null,
};
}
},
renderChunk(code, _, { format }) {
// make sure we only perform the preload logic in modern builds.
if (code.indexOf(isModernFlag) > -1) {
const re = new RegExp(isModernFlag, 'g');
const isModern = String(format === 'es');
if (config.build.sourcemap) {
const s = new MagicString(code);
let match;
while ((match = re.exec(code))) {
s.update(match.index, match.index + isModernFlag.length, isModern);
}
return {
code: s.toString(),
map: s.generateMap({ hires: 'boundary' }),
};
}
else {
return code.replace(re, isModern);
}
}
return null;
},
generateBundle({ format }, bundle) {
if (format !== 'es' || ssr || isWorker) {
return;
}
for (const file in bundle) {
const chunk = bundle[file];
                // can't use chunk.dynamicImports.length here since some modules (e.g. a
                // dynamic import of constant JSON) may get inlined.
if (chunk.type === 'chunk' && chunk.code.indexOf(preloadMarker) > -1) {
const code = chunk.code;
let imports;
try {
imports = parse$e(code)[0].filter((i) => i.d > -1);
}
catch (e) {
const loc = numberToPos(code, e.idx);
this.error({
name: e.name,
message: e.message,
stack: e.stack,
cause: e.cause,
pos: e.idx,
loc: { ...loc, file: chunk.fileName },
frame: generateCodeFrame(code, loc),
});
}
const s = new MagicString(code);
const rewroteMarkerStartPos = new Set(); // position of the leading double quote
if (imports.length) {
for (let index = 0; index < imports.length; index++) {
// To handle escape sequences in specifier strings, the .n field will be provided where possible.
const { n: name, s: start, e: end, ss: expStart, se: expEnd, } = imports[index];
// check the chunk being imported
let url = name;
if (!url) {
const rawUrl = code.slice(start, end);
if (rawUrl[0] === `"` && rawUrl[rawUrl.length - 1] === `"`)
url = rawUrl.slice(1, -1);
}
const deps = new Set();
let hasRemovedPureCssChunk = false;
let normalizedFile = undefined;
if (url) {
normalizedFile = path$o.posix.join(path$o.posix.dirname(chunk.fileName), url);
const ownerFilename = chunk.fileName;
// literal import - trace direct imports and add to deps
const analyzed = new Set();
const addDeps = (filename) => {
if (filename === ownerFilename)
return;
if (analyzed.has(filename))
return;
analyzed.add(filename);
const chunk = bundle[filename];
if (chunk) {
deps.add(chunk.fileName);
chunk.imports.forEach(addDeps);
                                    // Ensure that the CSS imported by the current chunk is loaded after its dependencies,
                                    // so the styles of the current chunk won't be overwritten unexpectedly.
chunk.viteMetadata.importedCss.forEach((file) => {
deps.add(file);
});
}
else {
const removedPureCssFiles = removedPureCssFilesCache.get(config);
const chunk = removedPureCssFiles.get(filename);
if (chunk) {
if (chunk.viteMetadata.importedCss.size) {
chunk.viteMetadata.importedCss.forEach((file) => {
deps.add(file);
});
hasRemovedPureCssChunk = true;
}
s.update(expStart, expEnd, 'Promise.resolve({})');
}
}
};
addDeps(normalizedFile);
}
let markerStartPos = indexOfMatchInSlice(code, preloadMarkerWithQuote, end);
// fix issue #3051
if (markerStartPos === -1 && imports.length === 1) {
markerStartPos = indexOfMatchInSlice(code, preloadMarkerWithQuote);
}
if (markerStartPos > 0) {
                        // the dep list includes the main chunk, so we only need to preload when there are actual other deps.
const depsArray = deps.size > 1 ||
// main chunk is removed
(hasRemovedPureCssChunk && deps.size > 0)
? modulePreload === false
? // CSS deps use the same mechanism as module preloads, so even if disabled,
// we still need to pass these deps to the preload helper in dynamic imports.
[...deps].filter((d) => d.endsWith('.css'))
: [...deps]
: [];
let renderedDeps;
if (normalizedFile && customModulePreloadPaths) {
const { modulePreload } = config.build;
const resolveDependencies = modulePreload
? modulePreload.resolveDependencies
: undefined;
let resolvedDeps;
if (resolveDependencies) {
                                // We can't let the user remove CSS deps, as these aren't really preloads; they just use
                                // the same mechanism as module preloads for this chunk.
const cssDeps = [];
const otherDeps = [];
for (const dep of depsArray) {
(dep.endsWith('.css') ? cssDeps : otherDeps).push(dep);
}
resolvedDeps = [
...resolveDependencies(normalizedFile, otherDeps, {
hostId: file,
hostType: 'js',
}),
...cssDeps,
];
}
else {
resolvedDeps = depsArray;
}
renderedDeps = resolvedDeps.map((dep) => {
const replacement = toOutputFilePathInJS(dep, 'asset', chunk.fileName, 'js', config, toRelativePath);
const replacementString = typeof replacement === 'string'
? JSON.stringify(replacement)
: replacement.runtime;
return replacementString;
});
}
else {
renderedDeps = depsArray.map((d) =>
                            // Don't include the assets dir if the default asset file names
                            // are used; the path will be reconstructed by the import preload helper
JSON.stringify(optimizeModulePreloadRelativePaths
? toRelativePath(d, file)
: d));
}
s.update(markerStartPos, markerStartPos + preloadMarker.length + 2, `[${renderedDeps.join(',')}]`);
rewroteMarkerStartPos.add(markerStartPos);
}
}
}
// there may still be markers due to inlined dynamic imports, remove
// all the markers regardless
let markerStartPos = indexOfMatchInSlice(code, preloadMarkerWithQuote);
while (markerStartPos >= 0) {
if (!rewroteMarkerStartPos.has(markerStartPos)) {
s.update(markerStartPos, markerStartPos + preloadMarker.length + 2, 'void 0');
}
markerStartPos = indexOfMatchInSlice(code, preloadMarkerWithQuote, markerStartPos + preloadMarker.length + 2);
}
if (s.hasChanged()) {
chunk.code = s.toString();
if (config.build.sourcemap && chunk.map) {
const nextMap = s.generateMap({
source: chunk.fileName,
hires: 'boundary',
});
const map = combineSourcemaps(chunk.fileName, [
nextMap,
chunk.map,
]);
map.toUrl = () => genSourceMapUrl(map);
chunk.map = map;
if (config.build.sourcemap === 'inline') {
chunk.code = chunk.code.replace(convertSourceMap.mapFileCommentRegex, '');
chunk.code += `\n//# sourceMappingURL=${genSourceMapUrl(map)}`;
}
else if (config.build.sourcemap) {
const mapAsset = bundle[chunk.fileName + '.map'];
if (mapAsset && mapAsset.type === 'asset') {
mapAsset.source = map.toString();
}
}
}
}
}
}
},
};
}
function ssrManifestPlugin(config) {
// module id => preload assets mapping
const ssrManifest = {};
const base = config.base; // TODO:base
return {
name: 'vite:ssr-manifest',
generateBundle(_options, bundle) {
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === 'chunk') {
for (const id in chunk.modules) {
const normalizedId = normalizePath$3(relative$2(config.root, id));
const mappedChunks = ssrManifest[normalizedId] ?? (ssrManifest[normalizedId] = []);
if (!chunk.isEntry) {
mappedChunks.push(joinUrlSegments(base, chunk.fileName));
// <link> tags for entry chunks are already generated in static HTML,
// so we only need to record info for non-entry chunks.
chunk.viteMetadata.importedCss.forEach((file) => {
mappedChunks.push(joinUrlSegments(base, file));
});
}
chunk.viteMetadata.importedAssets.forEach((file) => {
mappedChunks.push(joinUrlSegments(base, file));
});
}
if (chunk.code.includes(preloadMethod)) {
// generate css deps map
const code = chunk.code;
let imports;
try {
imports = parse$e(code)[0].filter((i) => i.n && i.d > -1);
}
catch (e) {
const loc = numberToPos(code, e.idx);
this.error({
name: e.name,
message: e.message,
stack: e.stack,
cause: e.cause,
pos: e.idx,
loc: { ...loc, file: chunk.fileName },
frame: generateCodeFrame(code, loc),
});
}
if (imports.length) {
for (let index = 0; index < imports.length; index++) {
const { s: start, e: end, n: name } = imports[index];
// check the chunk being imported
const url = code.slice(start, end);
const deps = [];
const ownerFilename = chunk.fileName;
// literal import - trace direct imports and add to deps
const analyzed = new Set();
const addDeps = (filename) => {
if (filename === ownerFilename)
return;
if (analyzed.has(filename))
return;
analyzed.add(filename);
const chunk = bundle[filename];
if (chunk) {
chunk.viteMetadata.importedCss.forEach((file) => {
deps.push(joinUrlSegments(base, file)); // TODO:base
});
chunk.imports.forEach(addDeps);
}
};
const normalizedFile = normalizePath$3(join$2(dirname$2(chunk.fileName), url.slice(1, -1)));
addDeps(normalizedFile);
ssrManifest[basename$2(name)] = deps;
}
}
}
}
}
this.emitFile({
fileName: typeof config.build.ssrManifest === 'string'
? config.build.ssrManifest
: 'ssr-manifest.json',
type: 'asset',
source: jsonStableStringify$1(ssrManifest, { space: 2 }),
});
},
};
}
/**
* A plugin to provide build load fallback for arbitrary request with queries.
*/
function loadFallbackPlugin() {
return {
name: 'vite:load-fallback',
async load(id) {
try {
            // if we don't `await` here, we can't catch errors thrown by readFile
return await fsp.readFile(cleanUrl(id), 'utf-8');
}
catch (e) {
return fsp.readFile(id, 'utf-8');
}
},
};
}
function resolveChokidarOptions(config, options) {
const { ignored = [], ...otherOptions } = options ?? {};
const resolvedWatchOptions = {
ignored: [
'**/.git/**',
'**/node_modules/**',
'**/test-results/**',
glob.escapePath(config.cacheDir) + '/**',
...(Array.isArray(ignored) ? ignored : [ignored]),
],
ignoreInitial: true,
ignorePermissionErrors: true,
...otherOptions,
};
return resolvedWatchOptions;
}
/**
 * make sure the SystemJS register wrapper receives its complete parameter list in system format
*/
function completeSystemWrapPlugin() {
const SystemJSWrapRE = /System.register\(.*(\(exports\)|\(\))/g;
return {
name: 'vite:force-systemjs-wrap-complete',
renderChunk(code, chunk, opts) {
if (opts.format === 'system') {
return {
code: code.replace(SystemJSWrapRE, (s, s1) => s.replace(s1, '(exports, module)')),
map: null,
};
}
},
};
}
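// Example of the rewrite performed above (illustrative): when Rollup emits a System chunk whose
// register callback only declares `exports` (or no parameter at all), the parameter list is
// completed so `module` is always available:
//
//   System.register([], function (exports) { ... })
//   // becomes
//   System.register([], function (exports, module) { ... })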
/*!
* etag
* Copyright(c) 2014-2016 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
var etag_1 = etag;
/**
* Module dependencies.
* @private
*/
var crypto = require$$3$1;
var Stats = require$$0__default.Stats;
/**
* Module variables.
* @private
*/
var toString = Object.prototype.toString;
/**
* Generate an entity tag.
*
* @param {Buffer|string} entity
* @return {string}
* @private
*/
function entitytag (entity) {
if (entity.length === 0) {
// fast-path empty
return '"0-2jmj7l5rSw0yVb/vlWAYkK/YBwk"'
}
// compute hash of entity
var hash = crypto
.createHash('sha1')
.update(entity, 'utf8')
.digest('base64')
.substring(0, 27);
// compute length of entity
var len = typeof entity === 'string'
? Buffer.byteLength(entity, 'utf8')
: entity.length;
return '"' + len.toString(16) + '-' + hash + '"'
}
/**
* Create a simple ETag.
*
* @param {string|Buffer|Stats} entity
* @param {object} [options]
* @param {boolean} [options.weak]
* @return {String}
* @public
*/
function etag (entity, options) {
if (entity == null) {
throw new TypeError('argument entity is required')
}
// support fs.Stats object
var isStats = isstats(entity);
var weak = options && typeof options.weak === 'boolean'
? options.weak
: isStats;
// validate argument
if (!isStats && typeof entity !== 'string' && !Buffer.isBuffer(entity)) {
throw new TypeError('argument entity must be string, Buffer, or fs.Stats')
}
// generate entity tag
var tag = isStats
? stattag(entity)
: entitytag(entity);
return weak
? 'W/' + tag
: tag
}
/**
* Determine if object is a Stats object.
*
* @param {object} obj
* @return {boolean}
* @api private
*/
function isstats (obj) {
// genuine fs.Stats
if (typeof Stats === 'function' && obj instanceof Stats) {
return true
}
// quack quack
return obj && typeof obj === 'object' &&
'ctime' in obj && toString.call(obj.ctime) === '[object Date]' &&
'mtime' in obj && toString.call(obj.mtime) === '[object Date]' &&
'ino' in obj && typeof obj.ino === 'number' &&
'size' in obj && typeof obj.size === 'number'
}
/**
* Generate a tag for a stat.
*
* @param {object} stat
* @return {string}
* @private
*/
function stattag (stat) {
var mtime = stat.mtime.getTime().toString(16);
var size = stat.size.toString(16);
return '"' + size + '-' + mtime + '"'
}
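// Illustrative output shapes (example values, not computed from real files):
//
//   etag('hello')                     // strong tag: '"5-<27-char base64 sha1 prefix>"'
//   etag(fs.statSync('index.html'))   // weak tag by default for Stats: 'W/"<size hex>-<mtime hex>"'
//
// i.e. string/Buffer entities hash the content, while fs.Stats entities only encode size and mtime.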
var getEtag = /*@__PURE__*/getDefaultExportFromCjs(etag_1);
const alias = {
js: 'application/javascript',
css: 'text/css',
html: 'text/html',
json: 'application/json',
};
function send$2(req, res, content, type, options) {
const { etag = getEtag(content, { weak: true }), cacheControl = 'no-cache', headers, map, } = options;
if (res.writableEnded) {
return;
}
if (req.headers['if-none-match'] === etag) {
res.statusCode = 304;
res.end();
return;
}
res.setHeader('Content-Type', alias[type] || type);
res.setHeader('Cache-Control', cacheControl);
res.setHeader('Etag', etag);
if (headers) {
for (const name in headers) {
res.setHeader(name, headers[name]);
}
}
// inject source map reference
if (map && map.mappings) {
if (type === 'js' || type === 'css') {
content = getCodeWithSourcemap(type, content.toString(), map);
}
}
res.statusCode = 200;
res.end(content);
return;
}
function totalist(dir, callback, pre='') {
dir = resolve$3('.', dir);
let arr = readdirSync(dir);
let i=0, abs, stats;
for (; i < arr.length; i++) {
abs = join$1(dir, arr[i]);
stats = statSync$1(abs);
stats.isDirectory()
? totalist(abs, callback, join$1(pre, arr[i]))
: callback(join$1(pre, arr[i]), abs, stats);
}
}
/**
* @typedef ParsedURL
* @type {import('.').ParsedURL}
*/
/**
* @typedef Request
* @property {string} url
* @property {ParsedURL} _parsedUrl
*/
/**
* @param {Request} req
* @returns {ParsedURL|void}
*/
function parse$8(req) {
let raw = req.url;
if (raw == null) return;
let prev = req._parsedUrl;
if (prev && prev.raw === raw) return prev;
let pathname=raw, search='', query;
if (raw.length > 1) {
let idx = raw.indexOf('?', 1);
if (idx !== -1) {
search = raw.substring(idx);
pathname = raw.substring(0, idx);
if (search.length > 1) {
query = qs.parse(search.substring(1));
}
}
}
return req._parsedUrl = { pathname, search, query, raw };
}
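// Example (illustrative request object): the parsed result is cached on `req._parsedUrl` and
// reused as long as `req.url` stays the same.
//
//   parse$8({ url: '/assets/app.js?v=123' })
//   // -> { pathname: '/assets/app.js', search: '?v=123', query: { v: '123' }, raw: '/assets/app.js?v=123' }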
const noop$2 = () => {};
function isMatch(uri, arr) {
for (let i=0; i < arr.length; i++) {
if (arr[i].test(uri)) return true;
}
}
function toAssume(uri, extns) {
let i=0, x, len=uri.length - 1;
if (uri.charCodeAt(len) === 47) {
uri = uri.substring(0, len);
}
let arr=[], tmp=`${uri}/index`;
for (; i < extns.length; i++) {
x = extns[i] ? `.${extns[i]}` : '';
if (uri) arr.push(uri + x);
arr.push(tmp + x);
}
return arr;
}
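// Example (illustrative): for a request to '/about' with extensions ['', 'html', 'htm'],
// the candidate files are tried in this order:
//
//   toAssume('/about', ['', 'html', 'htm'])
//   // -> ['/about', '/about/index', '/about.html', '/about/index.html', '/about.htm', '/about/index.htm']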
function viaCache(cache, uri, extns) {
let i=0, data, arr=toAssume(uri, extns);
for (; i < arr.length; i++) {
if (data = cache[arr[i]]) return data;
}
}
function viaLocal(dir, isEtag, uri, extns, shouldServe) {
let i=0, arr=toAssume(uri, extns);
let abs, stats, name, headers;
for (; i < arr.length; i++) {
abs = normalize(join$1(dir, name=arr[i]));
if (abs.startsWith(dir) && require$$0$2.existsSync(abs)) {
stats = require$$0$2.statSync(abs);
if (stats.isDirectory()) continue;
if (shouldServe && !shouldServe(abs)) continue;
headers = toHeaders(name, stats, isEtag);
headers['Cache-Control'] = isEtag ? 'no-cache' : 'no-store';
return { abs, stats, headers };
}
}
}
function is404(req, res) {
return (res.statusCode=404,res.end());
}
function send$1(req, res, file, stats, headers) {
let code=200, tmp, opts={};
headers = { ...headers };
for (let key in headers) {
tmp = res.getHeader(key);
if (tmp) headers[key] = tmp;
}
if (tmp = res.getHeader('content-type')) {
headers['Content-Type'] = tmp;
}
if (req.headers.range) {
code = 206;
let [x, y] = req.headers.range.replace('bytes=', '').split('-');
let end = opts.end = parseInt(y, 10) || stats.size - 1;
let start = opts.start = parseInt(x, 10) || 0;
if (end >= stats.size) {
end = stats.size - 1;
}
if (start >= stats.size) {
res.setHeader('Content-Range', `bytes */${stats.size}`);
res.statusCode = 416;
return res.end();
}
headers['Content-Range'] = `bytes ${start}-${end}/${stats.size}`;
headers['Content-Length'] = (end - start + 1);
headers['Accept-Ranges'] = 'bytes';
}
res.writeHead(code, headers);
require$$0$2.createReadStream(file, opts).pipe(res);
}
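// Illustrative Range handling (hypothetical sizes): for a 1234-byte file and
// `Range: bytes=0-499`, the response is 206 with
//
//   Content-Range: bytes 0-499/1234
//   Content-Length: 500
//
// An open-ended `bytes=500-` serves through the end of the file, and a start beyond the file
// size yields 416 with `Content-Range: bytes */1234`.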
const ENCODING = {
'.br': 'br',
'.gz': 'gzip',
};
function toHeaders(name, stats, isEtag) {
let enc = ENCODING[name.slice(-3)];
let ctype = lookup(name.slice(0, enc && -3)) || '';
if (ctype === 'text/html') ctype += ';charset=utf-8';
let headers = {
'Content-Length': stats.size,
'Content-Type': ctype,
'Last-Modified': stats.mtime.toUTCString(),
};
if (enc) headers['Content-Encoding'] = enc;
if (isEtag) headers['ETag'] = `W/"${stats.size}-${stats.mtime.getTime()}"`;
return headers;
}
function sirv (dir, opts={}) {
dir = resolve$3(dir || '.');
let isNotFound = opts.onNoMatch || is404;
let setHeaders = opts.setHeaders || noop$2;
let extensions = opts.extensions || ['html', 'htm'];
let gzips = opts.gzip && extensions.map(x => `${x}.gz`).concat('gz');
let brots = opts.brotli && extensions.map(x => `${x}.br`).concat('br');
const FILES = {};
let fallback = '/';
let isEtag = !!opts.etag;
let isSPA = !!opts.single;
if (typeof opts.single === 'string') {
let idx = opts.single.lastIndexOf('.');
fallback += !!~idx ? opts.single.substring(0, idx) : opts.single;
}
let ignores = [];
if (opts.ignores !== false) {
ignores.push(/[/]([A-Za-z\s\d~$._-]+\.\w+){1,}$/); // any extn
if (opts.dotfiles) ignores.push(/\/\.\w/);
else ignores.push(/\/\.well-known/);
[].concat(opts.ignores || []).forEach(x => {
ignores.push(new RegExp(x, 'i'));
});
}
let cc = opts.maxAge != null && `public,max-age=${opts.maxAge}`;
if (cc && opts.immutable) cc += ',immutable';
else if (cc && opts.maxAge === 0) cc += ',must-revalidate';
if (!opts.dev) {
totalist(dir, (name, abs, stats) => {
if (/\.well-known[\\+\/]/.test(name)) ; // keep
else if (!opts.dotfiles && /(^\.|[\\+|\/+]\.)/.test(name)) return;
let headers = toHeaders(name, stats, isEtag);
if (cc) headers['Cache-Control'] = cc;
FILES['/' + name.normalize().replace(/\\+/g, '/')] = { abs, stats, headers };
});
}
let lookup = opts.dev ? viaLocal.bind(0, dir, isEtag) : viaCache.bind(0, FILES);
return function (req, res, next) {
let extns = [''];
let pathname = parse$8(req).pathname;
let val = req.headers['accept-encoding'] || '';
if (gzips && val.includes('gzip')) extns.unshift(...gzips);
if (brots && /(br|brotli)/i.test(val)) extns.unshift(...brots);
extns.push(...extensions); // [...br, ...gz, orig, ...exts]
if (pathname.indexOf('%') !== -1) {
try { pathname = decodeURI(pathname); }
            catch (err) { /* malformed uri */ }
}
let data = lookup(pathname, extns, opts.shouldServe) || isSPA && !isMatch(pathname, ignores) && lookup(fallback, extns, opts.shouldServe);
if (!data) return next ? next() : isNotFound(req, res);
if (isEtag && req.headers['if-none-match'] === data.headers['ETag']) {
res.writeHead(304);
return res.end();
}
if (gzips || brots) {
res.setHeader('Vary', 'Accept-Encoding');
}
setHeaders(res, pathname, data.stats);
send$1(req, res, data.abs, data.stats, data.headers);
};
}
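// Illustrative standalone usage sketch (assumed server setup, not part of the original source):
// Vite wires this up via the middlewares below, but on its own it behaves like
//
//   const serve = sirv('dist', { etag: true, single: true });
//   require('http').createServer((req, res) => serve(req, res)).listen(5000);
//
// In non-dev mode the file list is read once up front (totalist); with `dev: true` lookups hit
// the filesystem on every request (viaLocal).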
/*!
* escape-html
* Copyright(c) 2012-2013 TJ Holowaychuk
* Copyright(c) 2015 Andreas Lubbe
* Copyright(c) 2015 Tiancheng "Timothy" Gu
* MIT Licensed
*/
/**
* Module variables.
* @private
*/
var matchHtmlRegExp = /["'&<>]/;
/**
* Module exports.
* @public
*/
var escapeHtml_1 = escapeHtml$1;
/**
* Escape special characters in the given string of html.
*
* @param {string} string The string to escape for inserting into HTML
* @return {string}
* @public
*/
function escapeHtml$1(string) {
var str = '' + string;
var match = matchHtmlRegExp.exec(str);
if (!match) {
return str;
}
var escape;
var html = '';
var index = 0;
var lastIndex = 0;
for (index = match.index; index < str.length; index++) {
switch (str.charCodeAt(index)) {
case 34: // "
escape = '&quot;';
break;
case 38: // &
escape = '&amp;';
break;
case 39: // '
escape = '&#39;';
break;
case 60: // <
escape = '&lt;';
break;
case 62: // >
escape = '&gt;';
break;
default:
continue;
}
if (lastIndex !== index) {
html += str.substring(lastIndex, index);
}
lastIndex = index + 1;
html += escape;
}
return lastIndex !== index
? html + str.substring(lastIndex, index)
: html;
}
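// Example (illustrative):
//
//   escapeHtml$1('<a href="x">&</a>')
//   // -> '&lt;a href=&quot;x&quot;&gt;&amp;&lt;/a&gt;'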
var escapeHtml$2 = /*@__PURE__*/getDefaultExportFromCjs(escapeHtml_1);
const knownJavascriptExtensionRE = /\.[tj]sx?$/;
const sirvOptions = ({ headers, shouldServe, }) => {
return {
dev: true,
etag: true,
extensions: [],
setHeaders(res, pathname) {
            // Matches js, jsx, ts, tsx.
            // The reason this is done is that the .ts file extension is reserved
            // for the MIME type video/mp2t. In almost all cases, we can expect
            // these files to be TypeScript files, so Vite should serve them with
            // the JavaScript Content-Type.
if (knownJavascriptExtensionRE.test(pathname)) {
res.setHeader('Content-Type', 'application/javascript');
}
if (headers) {
for (const name in headers) {
res.setHeader(name, headers[name]);
}
}
},
shouldServe,
};
};
function servePublicMiddleware(dir, headers) {
const serve = sirv(dir, sirvOptions({
headers,
shouldServe: (filePath) => shouldServeFile(filePath, dir),
}));
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return function viteServePublicMiddleware(req, res, next) {
// skip import request and internal requests `/@fs/ /@vite-client` etc...
if (isImportRequest(req.url) || isInternalRequest(req.url)) {
return next();
}
serve(req, res, next);
};
}
function serveStaticMiddleware(dir, server) {
const serve = sirv(dir, sirvOptions({
headers: server.config.server.headers,
}));
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return function viteServeStaticMiddleware(req, res, next) {
// only serve the file if it's not an html request or ends with `/`
// so that html requests can fallthrough to our html middleware for
// special processing
// also skip internal requests `/@fs/ /@vite-client` etc...
const cleanedUrl = cleanUrl(req.url);
if (cleanedUrl[cleanedUrl.length - 1] === '/' ||
path$o.extname(cleanedUrl) === '.html' ||
isInternalRequest(req.url)) {
return next();
}
const url = new URL(req.url.replace(/^\/{2,}/, '/'), 'http://example.com');
const pathname = decodeURI(url.pathname);
// apply aliases to static requests as well
let redirectedPathname;
for (const { find, replacement } of server.config.resolve.alias) {
const matches = typeof find === 'string'
? pathname.startsWith(find)
: find.test(pathname);
if (matches) {
redirectedPathname = pathname.replace(find, replacement);
break;
}
}
if (redirectedPathname) {
// dir is pre-normalized to posix style
if (redirectedPathname.startsWith(withTrailingSlash(dir))) {
redirectedPathname = redirectedPathname.slice(dir.length);
}
}
const resolvedPathname = redirectedPathname || pathname;
let fileUrl = path$o.resolve(dir, removeLeadingSlash(resolvedPathname));
if (resolvedPathname[resolvedPathname.length - 1] === '/' &&
fileUrl[fileUrl.length - 1] !== '/') {
fileUrl = withTrailingSlash(fileUrl);
}
if (!ensureServingAccess(fileUrl, server, res, next)) {
return;
}
if (redirectedPathname) {
url.pathname = encodeURI(redirectedPathname);
req.url = url.href.slice(url.origin.length);
}
serve(req, res, next);
};
}
function serveRawFsMiddleware(server) {
const serveFromRoot = sirv('/', sirvOptions({ headers: server.config.server.headers }));
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return function viteServeRawFsMiddleware(req, res, next) {
const url = new URL(req.url.replace(/^\/{2,}/, '/'), 'http://example.com');
        // In some cases (e.g. linked monorepos) files outside of root will
        // reference assets that are also outside of the served root. In such cases
        // the paths are rewritten to `/@fs/` prefixed paths and must be served by
        // searching from the fs root.
if (url.pathname.startsWith(FS_PREFIX)) {
const pathname = decodeURI(url.pathname);
// restrict files outside of `fs.allow`
if (!ensureServingAccess(slash$1(path$o.resolve(fsPathFromId(pathname))), server, res, next)) {
return;
}
let newPathname = pathname.slice(FS_PREFIX.length);
if (isWindows$4)
newPathname = newPathname.replace(/^[A-Z]:/i, '');
url.pathname = encodeURI(newPathname);
req.url = url.href.slice(url.origin.length);
serveFromRoot(req, res, next);
}
else {
next();
}
};
}
/**
* Check if the url is allowed to be served, via the `server.fs` config.
*/
function isFileServingAllowed(url, server) {
if (!server.config.server.fs.strict)
return true;
const file = fsPathFromUrl(url);
if (server._fsDenyGlob(file))
return false;
if (server.moduleGraph.safeModulesPath.has(file))
return true;
if (server.config.server.fs.allow.some((uri) => isSameFileUri(uri, file) || isParentDirectory(uri, file)))
return true;
return false;
}
function ensureServingAccess(url, server, res, next) {
if (isFileServingAllowed(url, server)) {
return true;
}
if (isFileReadable(cleanUrl(url))) {
const urlMessage = `The request url "${url}" is outside of Vite serving allow list.`;
const hintMessage = `
${server.config.server.fs.allow.map((i) => `- ${i}`).join('\n')}
Refer to docs https://vitejs.dev/config/server-options.html#server-fs-allow for configurations and more details.`;
server.config.logger.error(urlMessage);
server.config.logger.warnOnce(hintMessage + '\n');
res.statusCode = 403;
res.write(renderRestrictedErrorHTML(urlMessage + '\n' + hintMessage));
res.end();
}
else {
// if the file doesn't exist, we shouldn't restrict this path as it can
// be an API call. Middlewares would issue a 404 if the file isn't handled
next();
}
return false;
}
function renderRestrictedErrorHTML(msg) {
// to have syntax highlighting and autocompletion in IDE
const html = String.raw;
return html `
<body>
<h1>403 Restricted</h1>
<p>${escapeHtml$2(msg).replace(/\n/g, '<br/>')}</p>
<style>
body {
padding: 1em 2em;
}
</style>
</body>
`;
}
function resolveBuildOptions(raw, logger, root) {
const deprecatedPolyfillModulePreload = raw?.polyfillModulePreload;
if (raw) {
const { polyfillModulePreload, ...rest } = raw;
raw = rest;
if (deprecatedPolyfillModulePreload !== undefined) {
logger.warn('polyfillModulePreload is deprecated. Use modulePreload.polyfill instead.');
}
if (deprecatedPolyfillModulePreload === false &&
raw.modulePreload === undefined) {
raw.modulePreload = { polyfill: false };
}
}
const modulePreload = raw?.modulePreload;
const defaultModulePreload = {
polyfill: true,
};
const defaultBuildOptions = {
outDir: 'dist',
assetsDir: 'assets',
assetsInlineLimit: 4096,
cssCodeSplit: !raw?.lib,
sourcemap: false,
rollupOptions: {},
minify: raw?.ssr ? false : 'esbuild',
terserOptions: {},
write: true,
emptyOutDir: null,
copyPublicDir: true,
manifest: false,
lib: false,
ssr: false,
ssrManifest: false,
ssrEmitAssets: false,
reportCompressedSize: true,
chunkSizeWarningLimit: 500,
watch: null,
};
const userBuildOptions = raw
? mergeConfig(defaultBuildOptions, raw)
: defaultBuildOptions;
// @ts-expect-error Fallback options instead of merging
const resolved = {
target: 'modules',
cssTarget: false,
...userBuildOptions,
commonjsOptions: {
include: [/node_modules/],
extensions: ['.js', '.cjs'],
...userBuildOptions.commonjsOptions,
},
dynamicImportVarsOptions: {
warnOnError: true,
exclude: [/node_modules/],
...userBuildOptions.dynamicImportVarsOptions,
},
// Resolve to false | object
modulePreload: modulePreload === false
? false
: typeof modulePreload === 'object'
? {
...defaultModulePreload,
...modulePreload,
}
: defaultModulePreload,
};
// handle special build targets
if (resolved.target === 'modules') {
resolved.target = ESBUILD_MODULES_TARGET;
}
else if (resolved.target === 'esnext' && resolved.minify === 'terser') {
try {
const terserPackageJsonPath = requireResolveFromRootWithFallback(root, 'terser/package.json');
const terserPackageJson = JSON.parse(fs$l.readFileSync(terserPackageJsonPath, 'utf-8'));
const v = terserPackageJson.version.split('.');
if (v[0] === '5' && v[1] < 16) {
                // esnext + terser <5.16: limit to es2021 so it can be minified by terser
resolved.target = 'es2021';
}
}
catch { }
}
if (!resolved.cssTarget) {
resolved.cssTarget = resolved.target;
}
// normalize false string into actual false
if (resolved.minify === 'false') {
resolved.minify = false;
}
if (resolved.minify === true) {
resolved.minify = 'esbuild';
}
if (resolved.cssMinify == null) {
resolved.cssMinify = !!resolved.minify;
}
return resolved;
}
async function resolveBuildPlugins(config) {
const options = config.build;
const { commonjsOptions } = options;
const usePluginCommonjs = !Array.isArray(commonjsOptions?.include) ||
commonjsOptions?.include.length !== 0;
const rollupOptionsPlugins = options.rollupOptions.plugins;
return {
pre: [
completeSystemWrapPlugin(),
...(options.watch ? [ensureWatchPlugin()] : []),
...(usePluginCommonjs ? [commonjs(options.commonjsOptions)] : []),
dataURIPlugin(),
...(await asyncFlatten(Array.isArray(rollupOptionsPlugins)
? rollupOptionsPlugins
: [rollupOptionsPlugins])).filter(Boolean),
...(config.isWorker ? [webWorkerPostPlugin()] : []),
],
post: [
buildImportAnalysisPlugin(config),
...(config.esbuild !== false ? [buildEsbuildPlugin(config)] : []),
...(options.minify ? [terserPlugin(config)] : []),
...(!config.isWorker
? [
...(options.manifest ? [manifestPlugin(config)] : []),
...(options.ssrManifest ? [ssrManifestPlugin(config)] : []),
buildReporterPlugin(config),
]
: []),
loadFallbackPlugin(),
],
};
}
/**
* Bundles the app for production.
* Returns a Promise containing the build result.
*/
async function build(inlineConfig = {}) {
const config = await resolveConfig(inlineConfig, 'build', 'production', 'production');
const options = config.build;
const ssr = !!options.ssr;
const libOptions = options.lib;
config.logger.info(colors$1.cyan(`vite v${VERSION$1} ${colors$1.green(`building ${ssr ? `SSR bundle ` : ``}for ${config.mode}...`)}`));
const resolve = (p) => path$o.resolve(config.root, p);
const input = libOptions
? options.rollupOptions?.input ||
(typeof libOptions.entry === 'string'
? resolve(libOptions.entry)
: Array.isArray(libOptions.entry)
? libOptions.entry.map(resolve)
: Object.fromEntries(Object.entries(libOptions.entry).map(([alias, file]) => [
alias,
resolve(file),
])))
: typeof options.ssr === 'string'
? resolve(options.ssr)
: options.rollupOptions?.input || resolve('index.html');
if (ssr && typeof input === 'string' && input.endsWith('.html')) {
throw new Error(`rollupOptions.input should not be an html file when building for SSR. ` +
`Please specify a dedicated SSR entry.`);
}
const outDir = resolve(options.outDir);
// inject ssr arg to plugin load/transform hooks
const plugins = (ssr ? config.plugins.map((p) => injectSsrFlagToHooks(p)) : config.plugins);
const userExternal = options.rollupOptions?.external;
let external = userExternal;
// In CJS, we can pass the externals to rollup as is. In ESM, we need to
// do it in the resolve plugin so we can add the resolved extension for
// deep node_modules imports
if (ssr && config.legacy?.buildSsrCjsExternalHeuristics) {
external = await cjsSsrResolveExternal(config, userExternal);
}
if (isDepsOptimizerEnabled(config, ssr)) {
await initDepsOptimizer(config);
}
const rollupOptions = {
context: 'globalThis',
preserveEntrySignatures: ssr
? 'allow-extension'
: libOptions
? 'strict'
: false,
cache: config.build.watch ? undefined : false,
...options.rollupOptions,
input,
plugins,
external,
onwarn(warning, warn) {
onRollupWarning(warning, warn, config);
},
};
const outputBuildError = (e) => {
let msg = colors$1.red((e.plugin ? `[${e.plugin}] ` : '') + e.message);
if (e.id) {
msg += `\nfile: ${colors$1.cyan(e.id + (e.loc ? `:${e.loc.line}:${e.loc.column}` : ''))}`;
}
if (e.frame) {
msg += `\n` + colors$1.yellow(e.frame);
}
config.logger.error(msg, { error: e });
};
let bundle;
try {
const buildOutputOptions = (output = {}) => {
// @ts-expect-error See https://github.com/vitejs/vite/issues/5812#issuecomment-984345618
if (output.output) {
config.logger.warn(`You've set "rollupOptions.output.output" in your config. ` +
`This is deprecated and will override all Vite.js default output options. ` +
`Please use "rollupOptions.output" instead.`);
}
const ssrNodeBuild = ssr && config.ssr.target === 'node';
const ssrWorkerBuild = ssr && config.ssr.target === 'webworker';
const cjsSsrBuild = ssr && config.ssr.format === 'cjs';
const format = output.format || (cjsSsrBuild ? 'cjs' : 'es');
const jsExt = ssrNodeBuild || libOptions
? resolveOutputJsExtension(format, findNearestPackageData(config.root, config.packageCache)?.data
.type)
: 'js';
return {
dir: outDir,
// Default format is 'es' for regular and for SSR builds
format,
exports: cjsSsrBuild ? 'named' : 'auto',
sourcemap: options.sourcemap,
name: libOptions ? libOptions.name : undefined,
// es2015 enables `generatedCode.symbols`
// - #764 add `Symbol.toStringTag` when build es module into cjs chunk
// - #1048 add `Symbol.toStringTag` for module default export
generatedCode: 'es2015',
entryFileNames: ssr
? `[name].${jsExt}`
: libOptions
? ({ name }) => resolveLibFilename(libOptions, format, name, config.root, jsExt, config.packageCache)
: path$o.posix.join(options.assetsDir, `[name]-[hash].${jsExt}`),
chunkFileNames: libOptions
? `[name]-[hash].${jsExt}`
: path$o.posix.join(options.assetsDir, `[name]-[hash].${jsExt}`),
assetFileNames: libOptions
? `[name].[ext]`
: path$o.posix.join(options.assetsDir, `[name]-[hash].[ext]`),
inlineDynamicImports: output.format === 'umd' ||
output.format === 'iife' ||
(ssrWorkerBuild &&
(typeof input === 'string' || Object.keys(input).length === 1)),
...output,
};
};
// resolve lib mode outputs
const outputs = resolveBuildOutputs(options.rollupOptions?.output, libOptions, config.logger);
const normalizedOutputs = [];
if (Array.isArray(outputs)) {
for (const resolvedOutput of outputs) {
normalizedOutputs.push(buildOutputOptions(resolvedOutput));
}
}
else {
normalizedOutputs.push(buildOutputOptions(outputs));
}
const outDirs = normalizedOutputs.map(({ dir }) => resolve(dir));
// watch file changes with rollup
if (config.build.watch) {
config.logger.info(colors$1.cyan(`\nwatching for file changes...`));
const resolvedChokidarOptions = resolveChokidarOptions(config, config.build.watch.chokidar);
const { watch } = await import('rollup');
const watcher = watch({
...rollupOptions,
output: normalizedOutputs,
watch: {
...config.build.watch,
chokidar: resolvedChokidarOptions,
},
});
watcher.on('event', (event) => {
if (event.code === 'BUNDLE_START') {
config.logger.info(colors$1.cyan(`\nbuild started...`));
if (options.write) {
prepareOutDir(outDirs, options.emptyOutDir, config);
}
}
else if (event.code === 'BUNDLE_END') {
event.result.close();
config.logger.info(colors$1.cyan(`built in ${event.duration}ms.`));
}
else if (event.code === 'ERROR') {
outputBuildError(event.error);
}
});
return watcher;
}
// write or generate files with rollup
const { rollup } = await import('rollup');
bundle = await rollup(rollupOptions);
if (options.write) {
prepareOutDir(outDirs, options.emptyOutDir, config);
}
const res = [];
for (const output of normalizedOutputs) {
res.push(await bundle[options.write ? 'write' : 'generate'](output));
}
return Array.isArray(outputs) ? res : res[0];
}
catch (e) {
outputBuildError(e);
throw e;
}
finally {
if (bundle)
await bundle.close();
}
}
function prepareOutDir(outDirs, emptyOutDir, config) {
const nonDuplicateDirs = new Set(outDirs);
let outside = false;
if (emptyOutDir == null) {
for (const outDir of nonDuplicateDirs) {
if (fs$l.existsSync(outDir) &&
!normalizePath$3(outDir).startsWith(withTrailingSlash(config.root))) {
// warn if outDir is outside of root
config.logger.warn(colors$1.yellow(`\n${colors$1.bold(`(!)`)} outDir ${colors$1.white(colors$1.dim(outDir))} is not inside project root and will not be emptied.\n` +
`Use --emptyOutDir to override.\n`));
outside = true;
break;
}
}
}
for (const outDir of nonDuplicateDirs) {
if (!outside && emptyOutDir !== false && fs$l.existsSync(outDir)) {
// skip those other outDirs which are nested in current outDir
const skipDirs = outDirs
.map((dir) => {
const relative = path$o.relative(outDir, dir);
if (relative &&
!relative.startsWith('..') &&
!path$o.isAbsolute(relative)) {
return relative;
}
return '';
})
.filter(Boolean);
emptyDir(outDir, [...skipDirs, '.git']);
}
if (config.build.copyPublicDir &&
config.publicDir &&
fs$l.existsSync(config.publicDir)) {
if (!areSeparateFolders(outDir, config.publicDir)) {
config.logger.warn(colors$1.yellow(`\n${colors$1.bold(`(!)`)} The public directory feature may not work correctly. outDir ${colors$1.white(colors$1.dim(outDir))} and publicDir ${colors$1.white(colors$1.dim(config.publicDir))} are not separate folders.\n`));
}
copyDir(config.publicDir, outDir);
}
}
}
function getPkgName(name) {
return name?.[0] === '@' ? name.split('/')[1] : name;
}
function resolveOutputJsExtension(format, type = 'commonjs') {
if (type === 'module') {
return format === 'cjs' || format === 'umd' ? 'cjs' : 'js';
}
else {
return format === 'es' ? 'mjs' : 'js';
}
}
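// Resulting extensions (derived from the branches above):
//
//   package "type"    | format 'es' | format 'cjs' | format 'umd'
//   "module"          | js          | cjs          | cjs
//   "commonjs" / none | mjs         | js           | js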
function resolveLibFilename(libOptions, format, entryName, root, extension, packageCache) {
if (typeof libOptions.fileName === 'function') {
return libOptions.fileName(format, entryName);
}
const packageJson = findNearestPackageData(root, packageCache)?.data;
const name = libOptions.fileName ||
(packageJson && typeof libOptions.entry === 'string'
? getPkgName(packageJson.name)
: entryName);
if (!name)
throw new Error('Name in package.json is required if option "build.lib.fileName" is not provided.');
extension ?? (extension = resolveOutputJsExtension(format, packageJson?.type));
if (format === 'cjs' || format === 'es') {
return `${name}.${extension}`;
}
return `${name}.${format}.${extension}`;
}
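// Example (hypothetical package named "@scope/my-lib" with a string entry and no explicit fileName):
//
//   resolveLibFilename({ entry: 'src/index.ts' }, 'es', 'index', root)    // -> 'my-lib.mjs'  (package type "commonjs")
//   resolveLibFilename({ entry: 'src/index.ts' }, 'umd', 'index', root)   // -> 'my-lib.umd.js'
//
// With `"type": "module"` in package.json the same calls produce 'my-lib.js' and 'my-lib.umd.cjs'.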
function resolveBuildOutputs(outputs, libOptions, logger) {
if (libOptions) {
const libHasMultipleEntries = typeof libOptions.entry !== 'string' &&
Object.values(libOptions.entry).length > 1;
const libFormats = libOptions.formats ||
(libHasMultipleEntries ? ['es', 'cjs'] : ['es', 'umd']);
if (!Array.isArray(outputs)) {
if (libFormats.includes('umd') || libFormats.includes('iife')) {
if (libHasMultipleEntries) {
throw new Error('Multiple entry points are not supported when output formats include "umd" or "iife".');
}
if (!libOptions.name) {
throw new Error('Option "build.lib.name" is required when output formats include "umd" or "iife".');
}
}
return libFormats.map((format) => ({ ...outputs, format }));
}
// By this point, we know "outputs" is an Array.
if (libOptions.formats) {
logger.warn(colors$1.yellow('"build.lib.formats" will be ignored because "build.rollupOptions.output" is already an array format.'));
}
outputs.forEach((output) => {
if (['umd', 'iife'].includes(output.format) && !output.name) {
throw new Error('Entries in "build.rollupOptions.output" must specify "name" when the format is "umd" or "iife".');
}
});
}
return outputs;
}
const warningIgnoreList = [`CIRCULAR_DEPENDENCY`, `THIS_IS_UNDEFINED`];
const dynamicImportWarningIgnoreList = [
`Unsupported expression`,
`statically analyzed`,
];
function onRollupWarning(warning, warn, config) {
const viteWarn = (warnLog) => {
let warning;
if (typeof warnLog === 'function') {
warning = warnLog();
}
else {
warning = warnLog;
}
if (typeof warning === 'object') {
if (warning.code === 'UNRESOLVED_IMPORT') {
const id = warning.id;
const exporter = warning.exporter;
// throw unless it's commonjs external...
if (!id || !/\?commonjs-external$/.test(id)) {
throw new Error(`[vite]: Rollup failed to resolve import "${exporter}" from "${id}".\n` +
`This is most likely unintended because it can break your application at runtime.\n` +
`If you do want to externalize this module explicitly add it to\n` +
`\`build.rollupOptions.external\``);
}
}
if (warning.plugin === 'rollup-plugin-dynamic-import-variables' &&
dynamicImportWarningIgnoreList.some((msg) =>
// @ts-expect-error warning is RollupLog
warning.message.includes(msg))) {
return;
}
if (warningIgnoreList.includes(warning.code)) {
return;
}
if (warning.code === 'PLUGIN_WARNING') {
config.logger.warn(`${colors$1.bold(colors$1.yellow(`[plugin:${warning.plugin}]`))} ${colors$1.yellow(warning.message)}`);
return;
}
}
warn(warnLog);
};
const tty = process.stdout.isTTY && !process.env.CI;
if (tty) {
process.stdout.clearLine(0);
process.stdout.cursorTo(0);
}
const userOnWarn = config.build.rollupOptions?.onwarn;
if (userOnWarn) {
userOnWarn(warning, viteWarn);
}
else {
viteWarn(warning);
}
}
async function cjsSsrResolveExternal(config, user) {
// see if we have cached deps data available
let knownImports;
const dataPath = path$o.join(getDepsCacheDir(config, false), '_metadata.json');
try {
const data = JSON.parse(fs$l.readFileSync(dataPath, 'utf-8'));
knownImports = Object.keys(data.optimized);
}
catch (e) { }
if (!knownImports) {
// no dev deps optimization data, do a fresh scan
knownImports = await findKnownImports(config, false); // needs to use non-ssr
}
const ssrExternals = cjsSsrResolveExternals(config, knownImports);
return (id, parentId, isResolved) => {
const isExternal = cjsShouldExternalizeForSSR(id, ssrExternals);
if (isExternal) {
return true;
}
if (user) {
return resolveUserExternal(user, id, parentId, isResolved);
}
};
}
function resolveUserExternal(user, id, parentId, isResolved) {
if (typeof user === 'function') {
return user(id, parentId, isResolved);
}
else if (Array.isArray(user)) {
return user.some((test) => isExternal(id, test));
}
else {
return isExternal(id, user);
}
}
function isExternal(id, test) {
if (typeof test === 'string') {
return id === test;
}
else {
return test.test(id);
}
}
function injectSsrFlagToHooks(plugin) {
const { resolveId, load, transform } = plugin;
return {
...plugin,
resolveId: wrapSsrResolveId(resolveId),
load: wrapSsrLoad(load),
transform: wrapSsrTransform(transform),
};
}
function wrapSsrResolveId(hook) {
if (!hook)
return;
const fn = 'handler' in hook ? hook.handler : hook;
const handler = function (id, importer, options) {
return fn.call(this, id, importer, injectSsrFlag(options));
};
if ('handler' in hook) {
return {
...hook,
handler,
};
}
else {
return handler;
}
}
function wrapSsrLoad(hook) {
if (!hook)
return;
const fn = 'handler' in hook ? hook.handler : hook;
const handler = function (id, ...args) {
// @ts-expect-error: Receiving options param to be future-proof if Rollup adds it
return fn.call(this, id, injectSsrFlag(args[0]));
};
if ('handler' in hook) {
return {
...hook,
handler,
};
}
else {
return handler;
}
}
function wrapSsrTransform(hook) {
if (!hook)
return;
const fn = 'handler' in hook ? hook.handler : hook;
const handler = function (code, importer, ...args) {
// @ts-expect-error: Receiving options param to be future-proof if Rollup adds it
return fn.call(this, code, importer, injectSsrFlag(args[0]));
};
if ('handler' in hook) {
return {
...hook,
handler,
};
}
else {
return handler;
}
}
function injectSsrFlag(options) {
return { ...(options ?? {}), ssr: true };
}
/*
The following functions are copied from rollup
https://github.com/rollup/rollup/blob/0bcf0a672ac087ff2eb88fbba45ec62389a4f45f/src/ast/nodes/MetaProperty.ts#L145-L193
https://github.com/rollup/rollup
The MIT License (MIT)
Copyright (c) 2017 [these people](https://github.com/rollup/rollup/graphs/contributors)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
const needsEscapeRegEx = /[\n\r'\\\u2028\u2029]/;
const quoteNewlineRegEx = /([\n\r'\u2028\u2029])/g;
const backSlashRegEx = /\\/g;
function escapeId(id) {
if (!needsEscapeRegEx.test(id))
return id;
return id.replace(backSlashRegEx, '\\\\').replace(quoteNewlineRegEx, '\\$1');
}
const getResolveUrl = (path, URL = 'URL') => `new ${URL}(${path}).href`;
const getRelativeUrlFromDocument = (relativePath, umd = false) => getResolveUrl(`'${escapeId(relativePath)}', ${umd ? `typeof document === 'undefined' ? location.href : ` : ''}document.currentScript && document.currentScript.src || document.baseURI`);
const getFileUrlFromFullPath = (path) => `require('u' + 'rl').pathToFileURL(${path}).href`;
const getFileUrlFromRelativePath = (path) => getFileUrlFromFullPath(`__dirname + '/${path}'`);
const relativeUrlMechanisms = {
amd: (relativePath) => {
if (relativePath[0] !== '.')
relativePath = './' + relativePath;
return getResolveUrl(`require.toUrl('${relativePath}'), document.baseURI`);
},
cjs: (relativePath) => `(typeof document === 'undefined' ? ${getFileUrlFromRelativePath(relativePath)} : ${getRelativeUrlFromDocument(relativePath)})`,
es: (relativePath) => getResolveUrl(`'${relativePath}', import.meta.url`),
iife: (relativePath) => getRelativeUrlFromDocument(relativePath),
// NOTE: make sure rollup generates the `module` param
system: (relativePath) => getResolveUrl(`'${relativePath}', module.meta.url`),
umd: (relativePath) => `(typeof document === 'undefined' && typeof location === 'undefined' ? ${getFileUrlFromRelativePath(relativePath)} : ${getRelativeUrlFromDocument(relativePath, true)})`,
};
/* end of copy */
const customRelativeUrlMechanisms = {
...relativeUrlMechanisms,
'worker-iife': (relativePath) => getResolveUrl(`'${relativePath}', self.location.href`),
};
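/*
 * Illustrative sketch (not part of the original bundle): the per-format
 * mechanisms above produce small runtime expressions that resolve an emitted
 * asset path relative to the importing module. Values follow directly from
 * `getResolveUrl` above:
 *
 *   relativeUrlMechanisms.es('assets/logo.svg')
 *   // => "new URL('assets/logo.svg', import.meta.url).href"
 *
 *   customRelativeUrlMechanisms['worker-iife']('assets/logo.svg')
 *   // => "new URL('assets/logo.svg', self.location.href).href"
 */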
function toOutputFilePathInJS(filename, type, hostId, hostType, config, toRelative) {
const { renderBuiltUrl } = config.experimental;
let relative = config.base === '' || config.base === './';
if (renderBuiltUrl) {
const result = renderBuiltUrl(filename, {
hostId,
hostType,
type,
ssr: !!config.build.ssr,
});
if (typeof result === 'object') {
if (result.runtime) {
return { runtime: result.runtime };
}
if (typeof result.relative === 'boolean') {
relative = result.relative;
}
}
else if (result) {
return result;
}
}
if (relative && !config.build.ssr) {
return toRelative(filename, hostId);
}
return joinUrlSegments(config.base, filename);
}
function createToImportMetaURLBasedRelativeRuntime(format, isWorker) {
const formatLong = isWorker && format === 'iife' ? 'worker-iife' : format;
const toRelativePath = customRelativeUrlMechanisms[formatLong];
return (filename, importer) => ({
runtime: toRelativePath(path$o.posix.relative(path$o.dirname(importer), filename)),
});
}
function toOutputFilePathWithoutRuntime(filename, type, hostId, hostType, config, toRelative) {
const { renderBuiltUrl } = config.experimental;
let relative = config.base === '' || config.base === './';
if (renderBuiltUrl) {
const result = renderBuiltUrl(filename, {
hostId,
hostType,
type,
ssr: !!config.build.ssr,
});
if (typeof result === 'object') {
if (result.runtime) {
throw new Error(`{ runtime: "${result.runtime}" } is not supported for assets in ${hostType} files: ${filename}`);
}
if (typeof result.relative === 'boolean') {
relative = result.relative;
}
}
else if (result) {
return result;
}
}
if (relative && !config.build.ssr) {
return toRelative(filename, hostId);
}
else {
return joinUrlSegments(config.base, filename);
}
}
const toOutputFilePathInCss = toOutputFilePathWithoutRuntime;
const toOutputFilePathInHtml = toOutputFilePathWithoutRuntime;
function areSeparateFolders(a, b) {
const na = normalizePath$3(a);
const nb = normalizePath$3(b);
return (na !== nb &&
!na.startsWith(withTrailingSlash(nb)) &&
!nb.startsWith(withTrailingSlash(na)));
}
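/*
 * Illustrative sketch (not part of the original bundle): `areSeparateFolders`
 * treats two paths as separate only when neither contains the other.
 *
 *   areSeparateFolders('/a/b', '/a/c')   // => true
 *   areSeparateFolders('/a/b', '/a/b/c') // => false (one is nested in the other)
 */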
var build$1 = {
__proto__: null,
build: build,
createToImportMetaURLBasedRelativeRuntime: createToImportMetaURLBasedRelativeRuntime,
onRollupWarning: onRollupWarning,
resolveBuildOptions: resolveBuildOptions,
resolveBuildOutputs: resolveBuildOutputs,
resolveBuildPlugins: resolveBuildPlugins,
resolveLibFilename: resolveLibFilename,
resolveUserExternal: resolveUserExternal,
toOutputFilePathInCss: toOutputFilePathInCss,
toOutputFilePathInHtml: toOutputFilePathInHtml,
toOutputFilePathInJS: toOutputFilePathInJS,
toOutputFilePathWithoutRuntime: toOutputFilePathWithoutRuntime
};
var src = {exports: {}};
var browser = {exports: {}};
var debug$6 = {exports: {}};
/**
* Helpers.
*/
var ms;
var hasRequiredMs;
function requireMs () {
if (hasRequiredMs) return ms;
hasRequiredMs = 1;
var s = 1000;
var m = s * 60;
var h = m * 60;
var d = h * 24;
var y = d * 365.25;
/**
* Parse or format the given `val`.
*
* Options:
*
* - `long` verbose formatting [false]
*
* @param {String|Number} val
* @param {Object} [options]
* @throws {Error} throw an error if val is not a non-empty string or a number
* @return {String|Number}
* @api public
*/
ms = function(val, options) {
options = options || {};
var type = typeof val;
if (type === 'string' && val.length > 0) {
return parse(val);
} else if (type === 'number' && isNaN(val) === false) {
return options.long ? fmtLong(val) : fmtShort(val);
}
throw new Error(
'val is not a non-empty string or a valid number. val=' +
JSON.stringify(val)
);
};
/**
* Parse the given `str` and return milliseconds.
*
* @param {String} str
* @return {Number}
* @api private
*/
function parse(str) {
str = String(str);
if (str.length > 100) {
return;
}
var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(
str
);
if (!match) {
return;
}
var n = parseFloat(match[1]);
var type = (match[2] || 'ms').toLowerCase();
switch (type) {
case 'years':
case 'year':
case 'yrs':
case 'yr':
case 'y':
return n * y;
case 'days':
case 'day':
case 'd':
return n * d;
case 'hours':
case 'hour':
case 'hrs':
case 'hr':
case 'h':
return n * h;
case 'minutes':
case 'minute':
case 'mins':
case 'min':
case 'm':
return n * m;
case 'seconds':
case 'second':
case 'secs':
case 'sec':
case 's':
return n * s;
case 'milliseconds':
case 'millisecond':
case 'msecs':
case 'msec':
case 'ms':
return n;
default:
return undefined;
}
}
/**
* Short format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtShort(ms) {
if (ms >= d) {
return Math.round(ms / d) + 'd';
}
if (ms >= h) {
return Math.round(ms / h) + 'h';
}
if (ms >= m) {
return Math.round(ms / m) + 'm';
}
if (ms >= s) {
return Math.round(ms / s) + 's';
}
return ms + 'ms';
}
/**
* Long format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtLong(ms) {
return plural(ms, d, 'day') ||
plural(ms, h, 'hour') ||
plural(ms, m, 'minute') ||
plural(ms, s, 'second') ||
ms + ' ms';
}
/**
* Pluralization helper.
*/
function plural(ms, n, name) {
if (ms < n) {
return;
}
if (ms < n * 1.5) {
return Math.floor(ms / n) + ' ' + name;
}
return Math.ceil(ms / n) + ' ' + name + 's';
}
return ms;
}
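/*
 * Illustrative sketch (not part of the original bundle) of the `ms` helper
 * returned above:
 *
 *   ms('2 days')              // => 172800000
 *   ms('1h')                  // => 3600000
 *   ms(60000)                 // => '1m'
 *   ms(60000, { long: true }) // => '1 minute'
 */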
var hasRequiredDebug;
function requireDebug () {
if (hasRequiredDebug) return debug$6.exports;
hasRequiredDebug = 1;
(function (module, exports) {
/**
* This is the common logic for both the Node.js and web browser
* implementations of `debug()`.
*
* Expose `debug()` as the module.
*/
exports = module.exports = createDebug.debug = createDebug['default'] = createDebug;
exports.coerce = coerce;
exports.disable = disable;
exports.enable = enable;
exports.enabled = enabled;
exports.humanize = requireMs();
/**
* The currently active debug mode names, and names to skip.
*/
exports.names = [];
exports.skips = [];
/**
* Map of special "%n" handling functions, for the debug "format" argument.
*
* Valid key names are a single lower- or upper-case letter, e.g. "n" and "N".
*/
exports.formatters = {};
/**
* Previous log timestamp.
*/
var prevTime;
/**
* Select a color.
* @param {String} namespace
* @return {Number}
* @api private
*/
function selectColor(namespace) {
var hash = 0, i;
for (i in namespace) {
hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
hash |= 0; // Convert to 32bit integer
}
return exports.colors[Math.abs(hash) % exports.colors.length];
}
/**
* Create a debugger with the given `namespace`.
*
* @param {String} namespace
* @return {Function}
* @api public
*/
function createDebug(namespace) {
function debug() {
// disabled?
if (!debug.enabled) return;
var self = debug;
// set `diff` timestamp
var curr = +new Date();
var ms = curr - (prevTime || curr);
self.diff = ms;
self.prev = prevTime;
self.curr = curr;
prevTime = curr;
// turn the `arguments` into a proper Array
var args = new Array(arguments.length);
for (var i = 0; i < args.length; i++) {
args[i] = arguments[i];
}
args[0] = exports.coerce(args[0]);
if ('string' !== typeof args[0]) {
// anything else let's inspect with %O
args.unshift('%O');
}
// apply any `formatters` transformations
var index = 0;
args[0] = args[0].replace(/%([a-zA-Z%])/g, function(match, format) {
// if we encounter an escaped % then don't increase the array index
if (match === '%%') return match;
index++;
var formatter = exports.formatters[format];
if ('function' === typeof formatter) {
var val = args[index];
match = formatter.call(self, val);
// now we need to remove `args[index]` since it's inlined in the `format`
args.splice(index, 1);
index--;
}
return match;
});
// apply env-specific formatting (colors, etc.)
exports.formatArgs.call(self, args);
var logFn = debug.log || exports.log || console.log.bind(console);
logFn.apply(self, args);
}
debug.namespace = namespace;
debug.enabled = exports.enabled(namespace);
debug.useColors = exports.useColors();
debug.color = selectColor(namespace);
// env-specific initialization logic for debug instances
if ('function' === typeof exports.init) {
exports.init(debug);
}
return debug;
}
/**
* Enables a debug mode by namespaces. This can include modes
* separated by a colon and wildcards.
*
* @param {String} namespaces
* @api public
*/
function enable(namespaces) {
exports.save(namespaces);
exports.names = [];
exports.skips = [];
var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
var len = split.length;
for (var i = 0; i < len; i++) {
if (!split[i]) continue; // ignore empty strings
namespaces = split[i].replace(/\*/g, '.*?');
if (namespaces[0] === '-') {
exports.skips.push(new RegExp('^' + namespaces.substr(1) + '$'));
} else {
exports.names.push(new RegExp('^' + namespaces + '$'));
}
}
}
/**
* Disable debug output.
*
* @api public
*/
function disable() {
exports.enable('');
}
/**
* Returns true if the given mode name is enabled, false otherwise.
*
* @param {String} name
* @return {Boolean}
* @api public
*/
function enabled(name) {
var i, len;
for (i = 0, len = exports.skips.length; i < len; i++) {
if (exports.skips[i].test(name)) {
return false;
}
}
for (i = 0, len = exports.names.length; i < len; i++) {
if (exports.names[i].test(name)) {
return true;
}
}
return false;
}
/**
* Coerce `val`.
*
* @param {Mixed} val
* @return {Mixed}
* @api private
*/
function coerce(val) {
if (val instanceof Error) return val.stack || val.message;
return val;
}
} (debug$6, debug$6.exports));
return debug$6.exports;
}
/**
* This is the web browser implementation of `debug()`.
*
* Expose `debug()` as the module.
*/
var hasRequiredBrowser;
function requireBrowser () {
if (hasRequiredBrowser) return browser.exports;
hasRequiredBrowser = 1;
(function (module, exports) {
exports = module.exports = requireDebug();
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.storage = 'undefined' != typeof chrome
&& 'undefined' != typeof chrome.storage
? chrome.storage.local
: localstorage();
/**
* Colors.
*/
exports.colors = [
'lightseagreen',
'forestgreen',
'goldenrod',
'dodgerblue',
'darkorchid',
'crimson'
];
/**
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
* and the Firebug extension (any Firefox version) are known
* to support "%c" CSS customizations.
*
* TODO: add a `localStorage` variable to explicitly enable/disable colors
*/
function useColors() {
// NB: In an Electron preload script, document will be defined but not fully
// initialized. Since we know we're in Chrome, we'll just detect this case
// explicitly
if (typeof window !== 'undefined' && window.process && window.process.type === 'renderer') {
return true;
}
// is webkit? http://stackoverflow.com/a/16459606/376773
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
// is firebug? http://stackoverflow.com/a/398120/376773
(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
// is firefox >= v31?
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||
// double check webkit in userAgent just in case we are in a worker
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/));
}
/**
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
*/
exports.formatters.j = function(v) {
try {
return JSON.stringify(v);
} catch (err) {
return '[UnexpectedJSONParseError]: ' + err.message;
}
};
/**
* Colorize log arguments if enabled.
*
* @api public
*/
function formatArgs(args) {
var useColors = this.useColors;
args[0] = (useColors ? '%c' : '')
+ this.namespace
+ (useColors ? ' %c' : ' ')
+ args[0]
+ (useColors ? '%c ' : ' ')
+ '+' + exports.humanize(this.diff);
if (!useColors) return;
var c = 'color: ' + this.color;
args.splice(1, 0, c, 'color: inherit');
// the final "%c" is somewhat tricky, because there could be other
// arguments passed either before or after the %c, so we need to
// figure out the correct index to insert the CSS into
var index = 0;
var lastC = 0;
args[0].replace(/%[a-zA-Z%]/g, function(match) {
if ('%%' === match) return;
index++;
if ('%c' === match) {
// we only are interested in the *last* %c
// (the user may have provided their own)
lastC = index;
}
});
args.splice(lastC, 0, c);
}
/**
* Invokes `console.log()` when available.
* No-op when `console.log` is not a "function".
*
* @api public
*/
function log() {
// this hackery is required for IE8/9, where
// the `console.log` function doesn't have 'apply'
return 'object' === typeof console
&& console.log
&& Function.prototype.apply.call(console.log, console, arguments);
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
try {
if (null == namespaces) {
exports.storage.removeItem('debug');
} else {
exports.storage.debug = namespaces;
}
} catch(e) {}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
var r;
try {
r = exports.storage.debug;
} catch(e) {}
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
if (!r && typeof process !== 'undefined' && 'env' in process) {
r = process.env.DEBUG;
}
return r;
}
/**
* Enable namespaces listed in `localStorage.debug` initially.
*/
exports.enable(load());
/**
* localstorage() attempts to return window.localStorage.
*
* This is necessary because safari throws
* when a user disables cookies/localstorage
* and you attempt to access it.
*
* @return {LocalStorage}
* @api private
*/
function localstorage() {
try {
return window.localStorage;
} catch (e) {}
}
} (browser, browser.exports));
return browser.exports;
}
var node = {exports: {}};
/**
* Module dependencies.
*/
var hasRequiredNode;
function requireNode () {
if (hasRequiredNode) return node.exports;
hasRequiredNode = 1;
(function (module, exports) {
var tty = require$$0$3;
var util = require$$0$6;
/**
* This is the Node.js implementation of `debug()`.
*
* Expose `debug()` as the module.
*/
exports = module.exports = requireDebug();
exports.init = init;
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
/**
* Colors.
*/
exports.colors = [6, 2, 3, 4, 5, 1];
/**
* Build up the default `inspectOpts` object from the environment variables.
*
* $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
*/
exports.inspectOpts = Object.keys(process.env).filter(function (key) {
return /^debug_/i.test(key);
}).reduce(function (obj, key) {
// camel-case
var prop = key
.substring(6)
.toLowerCase()
.replace(/_([a-z])/g, function (_, k) { return k.toUpperCase() });
// coerce string value into JS value
var val = process.env[key];
if (/^(yes|on|true|enabled)$/i.test(val)) val = true;
else if (/^(no|off|false|disabled)$/i.test(val)) val = false;
else if (val === 'null') val = null;
else val = Number(val);
obj[prop] = val;
return obj;
}, {});
/**
* The file descriptor to write the `debug()` calls to.
* Set the `DEBUG_FD` env variable to override with another value. i.e.:
*
* $ DEBUG_FD=3 node script.js 3>debug.log
*/
var fd = parseInt(process.env.DEBUG_FD, 10) || 2;
if (1 !== fd && 2 !== fd) {
util.deprecate(function(){}, 'except for stderr(2) and stdout(1), any other usage of DEBUG_FD is deprecated. Override debug.log if you want to use a different log function (https://git.io/debug_fd)')();
}
var stream = 1 === fd ? process.stdout :
2 === fd ? process.stderr :
createWritableStdioStream(fd);
/**
* Is stdout a TTY? Colored output is enabled when `true`.
*/
function useColors() {
return 'colors' in exports.inspectOpts
? Boolean(exports.inspectOpts.colors)
: tty.isatty(fd);
}
/**
* Map %o to `util.inspect()`, all on a single line.
*/
exports.formatters.o = function(v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts)
.split('\n').map(function(str) {
return str.trim()
}).join(' ');
};
/**
* Map %o to `util.inspect()`, allowing multiple lines if needed.
*/
exports.formatters.O = function(v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts);
};
/**
* Adds ANSI color escape codes if enabled.
*
* @api public
*/
function formatArgs(args) {
var name = this.namespace;
var useColors = this.useColors;
if (useColors) {
var c = this.color;
var prefix = ' \u001b[3' + c + ';1m' + name + ' ' + '\u001b[0m';
args[0] = prefix + args[0].split('\n').join('\n' + prefix);
args.push('\u001b[3' + c + 'm+' + exports.humanize(this.diff) + '\u001b[0m');
} else {
args[0] = new Date().toUTCString()
+ ' ' + name + ' ' + args[0];
}
}
/**
* Invokes `util.format()` with the specified arguments and writes to `stream`.
*/
function log() {
return stream.write(util.format.apply(util, arguments) + '\n');
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
if (null == namespaces) {
// If you set a process.env field to null or undefined, it gets cast to the
// string 'null' or 'undefined'. Just delete instead.
delete process.env.DEBUG;
} else {
process.env.DEBUG = namespaces;
}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
return process.env.DEBUG;
}
/**
* Copied from `node/src/node.js`.
*
* XXX: It's lame that node doesn't expose this API out-of-the-box. It also
* relies on the undocumented `tty_wrap.guessHandleType()` which is also lame.
*/
function createWritableStdioStream (fd) {
var stream;
var tty_wrap = process.binding('tty_wrap');
// Note stream._type is used for test-module-load-list.js
switch (tty_wrap.guessHandleType(fd)) {
case 'TTY':
stream = new tty.WriteStream(fd);
stream._type = 'tty';
// Hack to have stream not keep the event loop alive.
// See https://github.com/joyent/node/issues/1726
if (stream._handle && stream._handle.unref) {
stream._handle.unref();
}
break;
case 'FILE':
var fs = require$$0__default;
stream = new fs.SyncWriteStream(fd, { autoClose: false });
stream._type = 'fs';
break;
case 'PIPE':
case 'TCP':
var net = require$$3$2;
stream = new net.Socket({
fd: fd,
readable: false,
writable: true
});
// FIXME Should probably have an option in net.Socket to create a
// stream from an existing fd which is writable only. But for now
// we'll just add this hack and set the `readable` member to false.
// Test: ./node test/fixtures/echo.js < /etc/passwd
stream.readable = false;
stream.read = null;
stream._type = 'pipe';
// FIXME Hack to have stream not keep the event loop alive.
// See https://github.com/joyent/node/issues/1726
if (stream._handle && stream._handle.unref) {
stream._handle.unref();
}
break;
default:
// Probably an error in uv_guess_handle()
throw new Error('Implement me. Unknown stream file type!');
}
// For supporting legacy API we put the FD here.
stream.fd = fd;
stream._isStdio = true;
return stream;
}
/**
* Init logic for `debug` instances.
*
* Create a new `inspectOpts` object in case `useColors` is set
* differently for a particular `debug` instance.
*/
function init (debug) {
debug.inspectOpts = {};
var keys = Object.keys(exports.inspectOpts);
for (var i = 0; i < keys.length; i++) {
debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
}
}
/**
* Enable namespaces listed in `process.env.DEBUG` initially.
*/
exports.enable(load());
} (node, node.exports));
return node.exports;
}
/**
* Detect Electron renderer process, which is node, but we should
* treat as a browser.
*/
if (typeof process !== 'undefined' && process.type === 'renderer') {
src.exports = requireBrowser();
} else {
src.exports = requireNode();
}
var srcExports = src.exports;
/*!
* encodeurl
* Copyright(c) 2016 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
var encodeurl = encodeUrl$1;
/**
* RegExp to match non-URL code points, *after* encoding (i.e. not including "%")
* and including invalid escape sequences.
* @private
*/
var ENCODE_CHARS_REGEXP = /(?:[^\x21\x25\x26-\x3B\x3D\x3F-\x5B\x5D\x5F\x61-\x7A\x7E]|%(?:[^0-9A-Fa-f]|[0-9A-Fa-f][^0-9A-Fa-f]|$))+/g;
/**
* RegExp to match unmatched surrogate pair.
* @private
*/
var UNMATCHED_SURROGATE_PAIR_REGEXP = /(^|[^\uD800-\uDBFF])[\uDC00-\uDFFF]|[\uD800-\uDBFF]([^\uDC00-\uDFFF]|$)/g;
/**
* String to replace unmatched surrogate pair with.
* @private
*/
var UNMATCHED_SURROGATE_PAIR_REPLACE = '$1\uFFFD$2';
/**
* Encode a URL to a percent-encoded form, excluding already-encoded sequences.
*
* This function will take an already-encoded URL and encode all the non-URL
* code points. This function will not encode the "%" character unless it is
* not part of a valid sequence (`%20` will be left as-is, but `%foo` will
* be encoded as `%25foo`).
*
* This encode is meant to be "safe" and does not throw errors. It will try as
* hard as it can to properly encode the given URL, including replacing any raw,
* unpaired surrogate pairs with the Unicode replacement character prior to
* encoding.
*
* @param {string} url
* @return {string}
* @public
*/
function encodeUrl$1 (url) {
return String(url)
.replace(UNMATCHED_SURROGATE_PAIR_REGEXP, UNMATCHED_SURROGATE_PAIR_REPLACE)
.replace(ENCODE_CHARS_REGEXP, encodeURI)
}
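/*
 * Illustrative sketch (not part of the original bundle) of `encodeUrl$1`,
 * matching the behaviour described in the comment above: valid escape
 * sequences are preserved, invalid ones are escaped, and raw non-URL
 * characters are percent-encoded.
 *
 *   encodeUrl$1('/search?q=hello world') // => '/search?q=hello%20world'
 *   encodeUrl$1('/path%20name')          // => '/path%20name'  (left as-is)
 *   encodeUrl$1('/path%foo')             // => '/path%25foo'   (invalid sequence escaped)
 */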
var onFinished$2 = {exports: {}};
/*!
* ee-first
* Copyright(c) 2014 Jonathan Ong
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
var eeFirst = first$1;
/**
* Get the first event in a set of event emitters and event pairs.
*
* @param {array} stuff
* @param {function} done
* @public
*/
function first$1(stuff, done) {
if (!Array.isArray(stuff))
throw new TypeError('arg must be an array of [ee, events...] arrays')
var cleanups = [];
for (var i = 0; i < stuff.length; i++) {
var arr = stuff[i];
if (!Array.isArray(arr) || arr.length < 2)
throw new TypeError('each array member must be [ee, events...]')
var ee = arr[0];
for (var j = 1; j < arr.length; j++) {
var event = arr[j];
var fn = listener(event, callback);
// listen to the event
ee.on(event, fn);
// push this listener to the list of cleanups
cleanups.push({
ee: ee,
event: event,
fn: fn,
});
}
}
function callback() {
cleanup();
done.apply(null, arguments);
}
function cleanup() {
var x;
for (var i = 0; i < cleanups.length; i++) {
x = cleanups[i];
x.ee.removeListener(x.event, x.fn);
}
}
function thunk(fn) {
done = fn;
}
thunk.cancel = cleanup;
return thunk
}
/**
* Create the event listener.
* @private
*/
function listener(event, done) {
return function onevent(arg1) {
var args = new Array(arguments.length);
var ee = this;
var err = event === 'error'
? arg1
: null;
// copy args to prevent arguments escaping scope
for (var i = 0; i < args.length; i++) {
args[i] = arguments[i];
}
done(err, ee, event, args);
}
}
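/*
 * Illustrative sketch (not part of the original bundle) of `first$1` (ee-first):
 * given [emitter, ...events] pairs, it calls `done` once, for whichever event
 * fires first, then removes every listener it attached.
 *
 *   var thunk = first$1([[res, 'finish'], [req, 'error', 'close']],
 *     function (err, ee, event, args) {
 *       // err is non-null only when the winning event was 'error'
 *     });
 *   // thunk.cancel() detaches all listeners without firing the callback
 */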
/*!
* on-finished
* Copyright(c) 2013 Jonathan Ong
* Copyright(c) 2014 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
onFinished$2.exports = onFinished$1;
onFinished$2.exports.isFinished = isFinished$1;
/**
* Module dependencies.
* @private
*/
var first = eeFirst;
/**
* Variables.
* @private
*/
/* istanbul ignore next */
var defer$2 = typeof setImmediate === 'function'
? setImmediate
: function(fn){ process.nextTick(fn.bind.apply(fn, arguments)); };
/**
* Invoke callback when the response has finished, useful for
* cleaning up resources afterwards.
*
* @param {object} msg
* @param {function} listener
* @return {object}
* @public
*/
function onFinished$1(msg, listener) {
if (isFinished$1(msg) !== false) {
defer$2(listener, null, msg);
return msg
}
// attach the listener to the message
attachListener(msg, listener);
return msg
}
/**
* Determine if message is already finished.
*
* @param {object} msg
* @return {boolean}
* @public
*/
function isFinished$1(msg) {
var socket = msg.socket;
if (typeof msg.finished === 'boolean') {
// OutgoingMessage
return Boolean(msg.finished || (socket && !socket.writable))
}
if (typeof msg.complete === 'boolean') {
// IncomingMessage
return Boolean(msg.upgrade || !socket || !socket.readable || (msg.complete && !msg.readable))
}
// don't know
return undefined
}
/**
* Attach a finished listener to the message.
*
* @param {object} msg
* @param {function} callback
* @private
*/
function attachFinishedListener(msg, callback) {
var eeMsg;
var eeSocket;
var finished = false;
function onFinish(error) {
eeMsg.cancel();
eeSocket.cancel();
finished = true;
callback(error);
}
// finished on first message event
eeMsg = eeSocket = first([[msg, 'end', 'finish']], onFinish);
function onSocket(socket) {
// remove listener
msg.removeListener('socket', onSocket);
if (finished) return
if (eeMsg !== eeSocket) return
// finished on first socket event
eeSocket = first([[socket, 'error', 'close']], onFinish);
}
if (msg.socket) {
// socket already assigned
onSocket(msg.socket);
return
}
// wait for socket to be assigned
msg.on('socket', onSocket);
if (msg.socket === undefined) {
// node.js 0.8 patch
patchAssignSocket(msg, onSocket);
}
}
/**
* Attach the listener to the message.
*
* @param {object} msg
* @return {function}
* @private
*/
function attachListener(msg, listener) {
var attached = msg.__onFinished;
// create a private single listener with queue
if (!attached || !attached.queue) {
attached = msg.__onFinished = createListener(msg);
attachFinishedListener(msg, attached);
}
attached.queue.push(listener);
}
/**
* Create listener on message.
*
* @param {object} msg
* @return {function}
* @private
*/
function createListener(msg) {
function listener(err) {
if (msg.__onFinished === listener) msg.__onFinished = null;
if (!listener.queue) return
var queue = listener.queue;
listener.queue = null;
for (var i = 0; i < queue.length; i++) {
queue[i](err, msg);
}
}
listener.queue = [];
return listener
}
/**
* Patch ServerResponse.prototype.assignSocket for node.js 0.8.
*
* @param {ServerResponse} res
* @param {function} callback
* @private
*/
function patchAssignSocket(res, callback) {
var assignSocket = res.assignSocket;
if (typeof assignSocket !== 'function') return
// res.on('socket', callback) is broken in 0.8
res.assignSocket = function _assignSocket(socket) {
assignSocket.call(this, socket);
callback(socket);
};
}
var onFinishedExports = onFinished$2.exports;
var parseurl$1 = {exports: {}};
/*!
* parseurl
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2014-2017 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module dependencies.
* @private
*/
var url$3 = require$$0$9;
var parse$7 = url$3.parse;
var Url = url$3.Url;
/**
* Module exports.
* @public
*/
parseurl$1.exports = parseurl;
parseurl$1.exports.original = originalurl;
/**
* Parse the `req` url with memoization.
*
* @param {ServerRequest} req
* @return {Object}
* @public
*/
function parseurl (req) {
var url = req.url;
if (url === undefined) {
// URL is undefined
return undefined
}
var parsed = req._parsedUrl;
if (fresh(url, parsed)) {
// Return cached URL parse
return parsed
}
// Parse the URL
parsed = fastparse(url);
parsed._raw = url;
return (req._parsedUrl = parsed)
}
/**
* Parse the `req` original url with fallback and memoization.
*
* @param {ServerRequest} req
* @return {Object}
* @public
*/
function originalurl (req) {
var url = req.originalUrl;
if (typeof url !== 'string') {
// Fallback
return parseurl(req)
}
var parsed = req._parsedOriginalUrl;
if (fresh(url, parsed)) {
// Return cached URL parse
return parsed
}
// Parse the URL
parsed = fastparse(url);
parsed._raw = url;
return (req._parsedOriginalUrl = parsed)
}
/**
* Parse the `str` url with fast-path short-cut.
*
* @param {string} str
* @return {Object}
* @private
*/
function fastparse (str) {
if (typeof str !== 'string' || str.charCodeAt(0) !== 0x2f /* / */) {
return parse$7(str)
}
var pathname = str;
var query = null;
var search = null;
// This takes the regexp from https://github.com/joyent/node/pull/7878
// Which is /^(\/[^?#\s]*)(\?[^#\s]*)?$/
// And unrolls it into a for loop
for (var i = 1; i < str.length; i++) {
switch (str.charCodeAt(i)) {
case 0x3f: /* ? */
if (search === null) {
pathname = str.substring(0, i);
query = str.substring(i + 1);
search = str.substring(i);
}
break
case 0x09: /* \t */
case 0x0a: /* \n */
case 0x0c: /* \f */
case 0x0d: /* \r */
case 0x20: /* */
case 0x23: /* # */
case 0xa0:
case 0xfeff:
return parse$7(str)
}
}
var url = Url !== undefined
? new Url()
: {};
url.path = str;
url.href = str;
url.pathname = pathname;
if (search !== null) {
url.query = query;
url.search = search;
}
return url
}
/**
* Determine if parsed is still fresh for url.
*
* @param {string} url
* @param {object} parsedUrl
* @return {boolean}
* @private
*/
function fresh (url, parsedUrl) {
return typeof parsedUrl === 'object' &&
parsedUrl !== null &&
(Url === undefined || parsedUrl instanceof Url) &&
parsedUrl._raw === url
}
var parseurlExports = parseurl$1.exports;
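/*
 * Illustrative sketch (not part of the original bundle) of `parseurl`: the
 * parse result is memoized on the request, so repeated calls with an unchanged
 * req.url return the same object.
 *
 *   var a = parseurl(req); // parses req.url, caches it on req._parsedUrl
 *   var b = parseurl(req); // cache hit: a === b
 *   req.url = '/other';
 *   var c = parseurl(req); // re-parsed, c !== a
 */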
var require$$0$1 = {
"100": "Continue",
"101": "Switching Protocols",
"102": "Processing",
"103": "Early Hints",
"200": "OK",
"201": "Created",
"202": "Accepted",
"203": "Non-Authoritative Information",
"204": "No Content",
"205": "Reset Content",
"206": "Partial Content",
"207": "Multi-Status",
"208": "Already Reported",
"226": "IM Used",
"300": "Multiple Choices",
"301": "Moved Permanently",
"302": "Found",
"303": "See Other",
"304": "Not Modified",
"305": "Use Proxy",
"306": "(Unused)",
"307": "Temporary Redirect",
"308": "Permanent Redirect",
"400": "Bad Request",
"401": "Unauthorized",
"402": "Payment Required",
"403": "Forbidden",
"404": "Not Found",
"405": "Method Not Allowed",
"406": "Not Acceptable",
"407": "Proxy Authentication Required",
"408": "Request Timeout",
"409": "Conflict",
"410": "Gone",
"411": "Length Required",
"412": "Precondition Failed",
"413": "Payload Too Large",
"414": "URI Too Long",
"415": "Unsupported Media Type",
"416": "Range Not Satisfiable",
"417": "Expectation Failed",
"418": "I'm a teapot",
"421": "Misdirected Request",
"422": "Unprocessable Entity",
"423": "Locked",
"424": "Failed Dependency",
"425": "Unordered Collection",
"426": "Upgrade Required",
"428": "Precondition Required",
"429": "Too Many Requests",
"431": "Request Header Fields Too Large",
"451": "Unavailable For Legal Reasons",
"500": "Internal Server Error",
"501": "Not Implemented",
"502": "Bad Gateway",
"503": "Service Unavailable",
"504": "Gateway Timeout",
"505": "HTTP Version Not Supported",
"506": "Variant Also Negotiates",
"507": "Insufficient Storage",
"508": "Loop Detected",
"509": "Bandwidth Limit Exceeded",
"510": "Not Extended",
"511": "Network Authentication Required"
};
/*!
* statuses
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2016 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module dependencies.
* @private
*/
var codes = require$$0$1;
/**
* Module exports.
* @public
*/
var statuses$1 = status;
// status code to message map
status.STATUS_CODES = codes;
// array of status codes
status.codes = populateStatusesMap(status, codes);
// status codes for redirects
status.redirect = {
300: true,
301: true,
302: true,
303: true,
305: true,
307: true,
308: true
};
// status codes for empty bodies
status.empty = {
204: true,
205: true,
304: true
};
// status codes for when you should retry the request
status.retry = {
502: true,
503: true,
504: true
};
/**
* Populate the statuses map for given codes.
* @private
*/
function populateStatusesMap (statuses, codes) {
var arr = [];
Object.keys(codes).forEach(function forEachCode (code) {
var message = codes[code];
var status = Number(code);
// Populate properties
statuses[status] = message;
statuses[message] = status;
statuses[message.toLowerCase()] = status;
// Add to array
arr.push(status);
});
return arr
}
/**
* Get the status code.
*
* Given a number, this will throw if it is not a known status
* code, otherwise the code will be returned. Given a string,
* the string will be parsed for a number and return the code
* if valid, otherwise will look up the code assuming this is
* the status message.
*
* @param {string|number} code
* @returns {number}
* @public
*/
function status (code) {
if (typeof code === 'number') {
if (!status[code]) throw new Error('invalid status code: ' + code)
return code
}
if (typeof code !== 'string') {
throw new TypeError('code must be a number or string')
}
// '403'
var n = parseInt(code, 10);
if (!isNaN(n)) {
if (!status[n]) throw new Error('invalid status code: ' + n)
return n
}
n = status[code.toLowerCase()];
if (!n) throw new Error('invalid status message: "' + code + '"')
return n
}
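/*
 * Illustrative sketch (not part of the original bundle) of the `status` export
 * above:
 *
 *   statuses$1(403)         // => 403
 *   statuses$1('403')       // => 403
 *   statuses$1('forbidden') // => 403
 *   statuses$1[404]         // => 'Not Found'
 *   statuses$1(999)         // throws: invalid status code
 */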
/*!
* unpipe
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
var unpipe_1 = unpipe$1;
/**
* Determine if there are Node.js pipe-like data listeners.
* @private
*/
function hasPipeDataListeners(stream) {
var listeners = stream.listeners('data');
for (var i = 0; i < listeners.length; i++) {
if (listeners[i].name === 'ondata') {
return true
}
}
return false
}
/**
* Unpipe a stream from all destinations.
*
* @param {object} stream
* @public
*/
function unpipe$1(stream) {
if (!stream) {
throw new TypeError('argument stream is required')
}
if (typeof stream.unpipe === 'function') {
// new-style
stream.unpipe();
return
}
// Node.js 0.8 hack
if (!hasPipeDataListeners(stream)) {
return
}
var listener;
var listeners = stream.listeners('close');
for (var i = 0; i < listeners.length; i++) {
listener = listeners[i];
if (listener.name !== 'cleanup' && listener.name !== 'onclose') {
continue
}
// invoke the listener
listener.call(stream);
}
}
/*!
* finalhandler
* Copyright(c) 2014-2017 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module dependencies.
* @private
*/
var debug$5 = srcExports('finalhandler');
var encodeUrl = encodeurl;
var escapeHtml = escapeHtml_1;
var onFinished = onFinishedExports;
var parseUrl$1 = parseurlExports;
var statuses = statuses$1;
var unpipe = unpipe_1;
/**
* Module variables.
* @private
*/
var DOUBLE_SPACE_REGEXP = /\x20{2}/g;
var NEWLINE_REGEXP = /\n/g;
/* istanbul ignore next */
var defer$1 = typeof setImmediate === 'function'
? setImmediate
: function (fn) { process.nextTick(fn.bind.apply(fn, arguments)); };
var isFinished = onFinished.isFinished;
/**
* Create a minimal HTML document.
*
* @param {string} message
* @private
*/
function createHtmlDocument (message) {
var body = escapeHtml(message)
.replace(NEWLINE_REGEXP, '<br>')
.replace(DOUBLE_SPACE_REGEXP, ' &nbsp;');
return '<!DOCTYPE html>\n' +
'<html lang="en">\n' +
'<head>\n' +
'<meta charset="utf-8">\n' +
'<title>Error</title>\n' +
'</head>\n' +
'<body>\n' +
'<pre>' + body + '</pre>\n' +
'</body>\n' +
'</html>\n'
}
/**
* Module exports.
* @public
*/
var finalhandler_1 = finalhandler$1;
/**
* Create a function to handle the final response.
*
* @param {Request} req
* @param {Response} res
* @param {Object} [options]
* @return {Function}
* @public
*/
function finalhandler$1 (req, res, options) {
var opts = options || {};
// get environment
var env = opts.env || process.env.NODE_ENV || 'development';
// get error callback
var onerror = opts.onerror;
return function (err) {
var headers;
var msg;
var status;
// ignore 404 on in-flight response
if (!err && headersSent(res)) {
debug$5('cannot 404 after headers sent');
return
}
// unhandled error
if (err) {
// respect status code from error
status = getErrorStatusCode(err);
if (status === undefined) {
// fallback to status code on response
status = getResponseStatusCode(res);
} else {
// respect headers from error
headers = getErrorHeaders(err);
}
// get error message
msg = getErrorMessage(err, status, env);
} else {
// not found
status = 404;
msg = 'Cannot ' + req.method + ' ' + encodeUrl(getResourceName(req));
}
debug$5('default %s', status);
// schedule onerror callback
if (err && onerror) {
defer$1(onerror, err, req, res);
}
// cannot actually respond
if (headersSent(res)) {
debug$5('cannot %d after headers sent', status);
req.socket.destroy();
return
}
// send response
send(req, res, status, headers, msg);
}
}
/**
* Get headers from Error object.
*
* @param {Error} err
* @return {object}
* @private
*/
function getErrorHeaders (err) {
if (!err.headers || typeof err.headers !== 'object') {
return undefined
}
var headers = Object.create(null);
var keys = Object.keys(err.headers);
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
headers[key] = err.headers[key];
}
return headers
}
/**
* Get message from Error object, fallback to status message.
*
* @param {Error} err
* @param {number} status
* @param {string} env
* @return {string}
* @private
*/
function getErrorMessage (err, status, env) {
var msg;
if (env !== 'production') {
// use err.stack, which typically includes err.message
msg = err.stack;
// fallback to err.toString() when possible
if (!msg && typeof err.toString === 'function') {
msg = err.toString();
}
}
return msg || statuses[status]
}
/**
* Get status code from Error object.
*
* @param {Error} err
* @return {number}
* @private
*/
function getErrorStatusCode (err) {
// check err.status
if (typeof err.status === 'number' && err.status >= 400 && err.status < 600) {
return err.status
}
// check err.statusCode
if (typeof err.statusCode === 'number' && err.statusCode >= 400 && err.statusCode < 600) {
return err.statusCode
}
return undefined
}
/**
* Get resource name for the request.
*
* This is typically just the original pathname of the request
* but will fall back to "resource" if that cannot be determined.
*
* @param {IncomingMessage} req
* @return {string}
* @private
*/
function getResourceName (req) {
try {
return parseUrl$1.original(req).pathname
} catch (e) {
return 'resource'
}
}
/**
* Get status code from response.
*
* @param {OutgoingMessage} res
* @return {number}
* @private
*/
function getResponseStatusCode (res) {
var status = res.statusCode;
// default status code to 500 if outside valid range
if (typeof status !== 'number' || status < 400 || status > 599) {
status = 500;
}
return status
}
/**
* Determine if the response headers have been sent.
*
* @param {object} res
* @returns {boolean}
* @private
*/
function headersSent (res) {
return typeof res.headersSent !== 'boolean'
? Boolean(res._header)
: res.headersSent
}
/**
* Send response.
*
* @param {IncomingMessage} req
* @param {OutgoingMessage} res
* @param {number} status
* @param {object} headers
* @param {string} message
* @private
*/
function send (req, res, status, headers, message) {
function write () {
// response body
var body = createHtmlDocument(message);
// response status
res.statusCode = status;
res.statusMessage = statuses[status];
// response headers
setHeaders(res, headers);
// security headers
res.setHeader('Content-Security-Policy', "default-src 'none'");
res.setHeader('X-Content-Type-Options', 'nosniff');
// standard headers
res.setHeader('Content-Type', 'text/html; charset=utf-8');
res.setHeader('Content-Length', Buffer.byteLength(body, 'utf8'));
if (req.method === 'HEAD') {
res.end();
return
}
res.end(body, 'utf8');
}
if (isFinished(req)) {
write();
return
}
// unpipe everything from the request
unpipe(req);
// flush the request
onFinished(req, write);
req.resume();
}
/**
* Set response headers from an object.
*
* @param {OutgoingMessage} res
* @param {object} headers
* @private
*/
function setHeaders (res, headers) {
if (!headers) {
return
}
var keys = Object.keys(headers);
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
res.setHeader(key, headers[key]);
}
}
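/*
 * Illustrative sketch (not part of the original bundle) of how the exported
 * finalhandler is typically used as the last step of a request pipeline; the
 * name follows the local binding above:
 *
 *   var http = require('http');
 *   var server = http.createServer(function (req, res) {
 *     var done = finalhandler_1(req, res);
 *     // try to handle the request, then:
 *     done();       // no handler matched -> 404 "Cannot GET /..."
 *     // done(err); // an error occurred -> 4xx/5xx error page
 *   });
 */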
var utilsMerge = {exports: {}};
/**
* Merge object b with object a.
*
* var a = { foo: 'bar' }
* , b = { bar: 'baz' };
*
* merge(a, b);
* // => { foo: 'bar', bar: 'baz' }
*
* @param {Object} a
* @param {Object} b
* @return {Object}
* @api public
*/
(function (module, exports) {
module.exports = function(a, b){
if (a && b) {
for (var key in b) {
a[key] = b[key];
}
}
return a;
};
} (utilsMerge));
var utilsMergeExports = utilsMerge.exports;
/*!
* connect
* Copyright(c) 2010 Sencha Inc.
* Copyright(c) 2011 TJ Holowaychuk
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module dependencies.
* @private
*/
var debug$4 = srcExports('connect:dispatcher');
var EventEmitter$3 = require$$0$5.EventEmitter;
var finalhandler = finalhandler_1;
var http$4 = require$$1$1;
var merge = utilsMergeExports;
var parseUrl = parseurlExports;
/**
* Module exports.
* @public
*/
var connect = createServer$1;
/**
* Module variables.
* @private
*/
var env = process.env.NODE_ENV || 'development';
var proto = {};
/* istanbul ignore next */
var defer = typeof setImmediate === 'function'
? setImmediate
: function(fn){ process.nextTick(fn.bind.apply(fn, arguments)); };
/**
* Create a new connect server.
*
* @return {function}
* @public
*/
function createServer$1() {
function app(req, res, next){ app.handle(req, res, next); }
merge(app, proto);
merge(app, EventEmitter$3.prototype);
app.route = '/';
app.stack = [];
return app;
}
/**
* Mount the given middleware `handle` at the given `route`,
* defaulting to _/_. This "route" is the mount-point for the
* middleware, when given a value other than _/_ the middleware
* is only effective when that segment is present in the request's
* pathname.
*
* For example if we were to mount a function at _/admin_, it would
* be invoked on _/admin_, and _/admin/settings_, however it would
* not be invoked for _/_, or _/posts_.
*
* @param {String|Function|Server} route, callback or server
* @param {Function|Server} callback or server
* @return {Server} for chaining
* @public
*/
proto.use = function use(route, fn) {
var handle = fn;
var path = route;
// default route to '/'
if (typeof route !== 'string') {
handle = route;
path = '/';
}
// wrap sub-apps
if (typeof handle.handle === 'function') {
var server = handle;
server.route = path;
handle = function (req, res, next) {
server.handle(req, res, next);
};
}
// wrap vanilla http.Servers
if (handle instanceof http$4.Server) {
handle = handle.listeners('request')[0];
}
// strip trailing slash
if (path[path.length - 1] === '/') {
path = path.slice(0, -1);
}
// add the middleware
debug$4('use %s %s', path || '/', handle.name || 'anonymous');
this.stack.push({ route: path, handle: handle });
return this;
};
/**
* Handle server requests, punting them down
* the middleware stack.
*
* @private
*/
proto.handle = function handle(req, res, out) {
var index = 0;
var protohost = getProtohost(req.url) || '';
var removed = '';
var slashAdded = false;
var stack = this.stack;
// final function handler
var done = out || finalhandler(req, res, {
env: env,
onerror: logerror
});
// store the original URL
req.originalUrl = req.originalUrl || req.url;
function next(err) {
if (slashAdded) {
req.url = req.url.substr(1);
slashAdded = false;
}
if (removed.length !== 0) {
req.url = protohost + removed + req.url.substr(protohost.length);
removed = '';
}
// next callback
var layer = stack[index++];
// all done
if (!layer) {
defer(done, err);
return;
}
// route data
var path = parseUrl(req).pathname || '/';
var route = layer.route;
// skip this layer if the route doesn't match
if (path.toLowerCase().substr(0, route.length) !== route.toLowerCase()) {
return next(err);
}
// skip if route match does not border "/", ".", or end
var c = path.length > route.length && path[route.length];
if (c && c !== '/' && c !== '.') {
return next(err);
}
// trim off the part of the url that matches the route
if (route.length !== 0 && route !== '/') {
removed = route;
req.url = protohost + req.url.substr(protohost.length + removed.length);
// ensure leading slash
if (!protohost && req.url[0] !== '/') {
req.url = '/' + req.url;
slashAdded = true;
}
}
// call the layer handle
call(layer.handle, route, err, req, res, next);
}
next();
};
/**
* Listen for connections.
*
* This method takes the same arguments
* as node's `http.Server#listen()`.
*
* HTTP and HTTPS:
*
* If you run your application both as HTTP
* and HTTPS you may wrap them individually,
* since your Connect "server" is really just
* a JavaScript `Function`.
*
* var connect = require('connect')
* , http = require('http')
* , https = require('https');
*
* var app = connect();
*
* http.createServer(app).listen(80);
* https.createServer(options, app).listen(443);
*
* @return {http.Server}
* @api public
*/
proto.listen = function listen() {
var server = http$4.createServer(this);
return server.listen.apply(server, arguments);
};
/**
* Invoke a route handle.
* @private
*/
function call(handle, route, err, req, res, next) {
var arity = handle.length;
var error = err;
var hasError = Boolean(err);
debug$4('%s %s : %s', handle.name || '<anonymous>', route, req.originalUrl);
try {
if (hasError && arity === 4) {
// error-handling middleware
handle(err, req, res, next);
return;
} else if (!hasError && arity < 4) {
// request-handling middleware
handle(req, res, next);
return;
}
} catch (e) {
// replace the error
error = e;
}
// continue
next(error);
}
/**
* Log error using console.error.
*
* @param {Error} err
* @private
*/
function logerror(err) {
if (env !== 'test') console.error(err.stack || err.toString());
}
/**
* Get the protocol + host for a URL.
*
* @param {string} url
* @private
*/
function getProtohost(url) {
if (url.length === 0 || url[0] === '/') {
return undefined;
}
var fqdnIndex = url.indexOf('://');
return fqdnIndex !== -1 && url.lastIndexOf('?', fqdnIndex) === -1
? url.substr(0, url.indexOf('/', 3 + fqdnIndex))
: undefined;
}
var connect$1 = /*@__PURE__*/getDefaultExportFromCjs(connect);
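/*
 * Illustrative sketch (not part of the original bundle) of the connect app
 * created above: middleware mounted with `use` runs only for requests whose
 * pathname begins at that mount point.
 *
 *   var app = connect$1();
 *   app.use(function (req, res, next) { next(); });               // every request
 *   app.use('/admin', function (req, res) { res.end('admin'); }); // '/admin', '/admin/settings'; not '/' or '/posts'
 *   require('http').createServer(app).listen(3000);
 */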
var lib$1 = {exports: {}};
/*
object-assign
(c) Sindre Sorhus
@license MIT
*/
/* eslint-disable no-unused-vars */
var getOwnPropertySymbols = Object.getOwnPropertySymbols;
var hasOwnProperty = Object.prototype.hasOwnProperty;
var propIsEnumerable = Object.prototype.propertyIsEnumerable;
function toObject(val) {
if (val === null || val === undefined) {
throw new TypeError('Object.assign cannot be called with null or undefined');
}
return Object(val);
}
function shouldUseNative() {
try {
if (!Object.assign) {
return false;
}
// Detect buggy property enumeration order in older V8 versions.
// https://bugs.chromium.org/p/v8/issues/detail?id=4118
var test1 = new String('abc'); // eslint-disable-line no-new-wrappers
test1[5] = 'de';
if (Object.getOwnPropertyNames(test1)[0] === '5') {
return false;
}
// https://bugs.chromium.org/p/v8/issues/detail?id=3056
var test2 = {};
for (var i = 0; i < 10; i++) {
test2['_' + String.fromCharCode(i)] = i;
}
var order2 = Object.getOwnPropertyNames(test2).map(function (n) {
return test2[n];
});
if (order2.join('') !== '0123456789') {
return false;
}
// https://bugs.chromium.org/p/v8/issues/detail?id=3056
var test3 = {};
'abcdefghijklmnopqrst'.split('').forEach(function (letter) {
test3[letter] = letter;
});
if (Object.keys(Object.assign({}, test3)).join('') !==
'abcdefghijklmnopqrst') {
return false;
}
return true;
} catch (err) {
// We don't expect any of the above to throw, but better to be safe.
return false;
}
}
var objectAssign = shouldUseNative() ? Object.assign : function (target, source) {
var from;
var to = toObject(target);
var symbols;
for (var s = 1; s < arguments.length; s++) {
from = Object(arguments[s]);
for (var key in from) {
if (hasOwnProperty.call(from, key)) {
to[key] = from[key];
}
}
if (getOwnPropertySymbols) {
symbols = getOwnPropertySymbols(from);
for (var i = 0; i < symbols.length; i++) {
if (propIsEnumerable.call(from, symbols[i])) {
to[symbols[i]] = from[symbols[i]];
}
}
}
}
return to;
};
var vary$1 = {exports: {}};
/*!
* vary
* Copyright(c) 2014-2017 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
*/
vary$1.exports = vary;
vary$1.exports.append = append;
/**
* RegExp to match field-name in RFC 7230 sec 3.2
*
* field-name = token
* token = 1*tchar
* tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
* / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
* / DIGIT / ALPHA
* ; any VCHAR, except delimiters
*/
var FIELD_NAME_REGEXP = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/;
/**
* Append a field to a vary header.
*
* @param {String} header
* @param {String|Array} field
* @return {String}
* @public
*/
function append (header, field) {
if (typeof header !== 'string') {
throw new TypeError('header argument is required')
}
if (!field) {
throw new TypeError('field argument is required')
}
// get fields array
var fields = !Array.isArray(field)
? parse$6(String(field))
: field;
// assert on invalid field names
for (var j = 0; j < fields.length; j++) {
if (!FIELD_NAME_REGEXP.test(fields[j])) {
throw new TypeError('field argument contains an invalid header name')
}
}
// existing, unspecified vary
if (header === '*') {
return header
}
// enumerate current values
var val = header;
var vals = parse$6(header.toLowerCase());
// unspecified vary
if (fields.indexOf('*') !== -1 || vals.indexOf('*') !== -1) {
return '*'
}
for (var i = 0; i < fields.length; i++) {
var fld = fields[i].toLowerCase();
// append value (case-preserving)
if (vals.indexOf(fld) === -1) {
vals.push(fld);
val = val
? val + ', ' + fields[i]
: fields[i];
}
}
return val
}
/**
* Parse a vary header into an array.
*
* @param {String} header
* @return {Array}
* @private
*/
function parse$6 (header) {
var end = 0;
var list = [];
var start = 0;
// gather tokens
for (var i = 0, len = header.length; i < len; i++) {
switch (header.charCodeAt(i)) {
case 0x20: /* */
if (start === end) {
start = end = i + 1;
}
break
case 0x2c: /* , */
list.push(header.substring(start, end));
start = end = i + 1;
break
default:
end = i + 1;
break
}
}
// final token
list.push(header.substring(start, end));
return list
}
/**
* Mark that a request is varied on a header field.
*
* @param {Object} res
* @param {String|Array} field
* @public
*/
function vary (res, field) {
if (!res || !res.getHeader || !res.setHeader) {
// quack quack
throw new TypeError('res argument is required')
}
// get existing header
var val = res.getHeader('Vary') || '';
var header = Array.isArray(val)
? val.join(', ')
: String(val);
// set new header
if ((val = append(header, field))) {
res.setHeader('Vary', val);
}
}
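/*
 * Illustrative sketch (not part of the original bundle) of the vary helpers
 * above:
 *
 *   append('Accept', 'Origin')         // => 'Accept, Origin'
 *   append('Accept, Origin', 'Origin') // => 'Accept, Origin' (no duplicate)
 *   append('', '*')                    // => '*'
 *   vary(res, 'Origin')                // merges 'Origin' into res's Vary header
 */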
var varyExports = vary$1.exports;
(function () {
var assign = objectAssign;
var vary = varyExports;
var defaults = {
origin: '*',
methods: 'GET,HEAD,PUT,PATCH,POST,DELETE',
preflightContinue: false,
optionsSuccessStatus: 204
};
function isString(s) {
return typeof s === 'string' || s instanceof String;
}
function isOriginAllowed(origin, allowedOrigin) {
if (Array.isArray(allowedOrigin)) {
for (var i = 0; i < allowedOrigin.length; ++i) {
if (isOriginAllowed(origin, allowedOrigin[i])) {
return true;
}
}
return false;
} else if (isString(allowedOrigin)) {
return origin === allowedOrigin;
} else if (allowedOrigin instanceof RegExp) {
return allowedOrigin.test(origin);
} else {
return !!allowedOrigin;
}
}
function configureOrigin(options, req) {
var requestOrigin = req.headers.origin,
headers = [],
isAllowed;
if (!options.origin || options.origin === '*') {
// allow any origin
headers.push([{
key: 'Access-Control-Allow-Origin',
value: '*'
}]);
} else if (isString(options.origin)) {
// fixed origin
headers.push([{
key: 'Access-Control-Allow-Origin',
value: options.origin
}]);
headers.push([{
key: 'Vary',
value: 'Origin'
}]);
} else {
isAllowed = isOriginAllowed(requestOrigin, options.origin);
// reflect origin
headers.push([{
key: 'Access-Control-Allow-Origin',
value: isAllowed ? requestOrigin : false
}]);
headers.push([{
key: 'Vary',
value: 'Origin'
}]);
}
return headers;
}
function configureMethods(options) {
var methods = options.methods;
if (methods.join) {
methods = options.methods.join(','); // .methods is an array, so turn it into a string
}
return {
key: 'Access-Control-Allow-Methods',
value: methods
};
}
function configureCredentials(options) {
if (options.credentials === true) {
return {
key: 'Access-Control-Allow-Credentials',
value: 'true'
};
}
return null;
}
function configureAllowedHeaders(options, req) {
var allowedHeaders = options.allowedHeaders || options.headers;
var headers = [];
if (!allowedHeaders) {
allowedHeaders = req.headers['access-control-request-headers']; // .headers wasn't specified, so reflect the request headers
headers.push([{
key: 'Vary',
value: 'Access-Control-Request-Headers'
}]);
} else if (allowedHeaders.join) {
allowedHeaders = allowedHeaders.join(','); // .headers is an array, so turn it into a string
}
if (allowedHeaders && allowedHeaders.length) {
headers.push([{
key: 'Access-Control-Allow-Headers',
value: allowedHeaders
}]);
}
return headers;
}
function configureExposedHeaders(options) {
var headers = options.exposedHeaders;
if (!headers) {
return null;
} else if (headers.join) {
headers = headers.join(','); // .headers is an array, so turn it into a string
}
if (headers && headers.length) {
return {
key: 'Access-Control-Expose-Headers',
value: headers
};
}
return null;
}
function configureMaxAge(options) {
var maxAge = (typeof options.maxAge === 'number' || options.maxAge) && options.maxAge.toString();
if (maxAge && maxAge.length) {
return {
key: 'Access-Control-Max-Age',
value: maxAge
};
}
return null;
}
function applyHeaders(headers, res) {
for (var i = 0, n = headers.length; i < n; i++) {
var header = headers[i];
if (header) {
if (Array.isArray(header)) {
applyHeaders(header, res);
} else if (header.key === 'Vary' && header.value) {
vary(res, header.value);
} else if (header.value) {
res.setHeader(header.key, header.value);
}
}
}
}
function cors(options, req, res, next) {
var headers = [],
method = req.method && req.method.toUpperCase && req.method.toUpperCase();
if (method === 'OPTIONS') {
// preflight
headers.push(configureOrigin(options, req));
headers.push(configureCredentials(options));
headers.push(configureMethods(options));
headers.push(configureAllowedHeaders(options, req));
headers.push(configureMaxAge(options));
headers.push(configureExposedHeaders(options));
applyHeaders(headers, res);
if (options.preflightContinue) {
next();
} else {
// Safari (and potentially other browsers) need Content-Length: 0
// for a 204, or they just hang waiting for a body
res.statusCode = options.optionsSuccessStatus;
res.setHeader('Content-Length', '0');
res.end();
}
} else {
// actual response
headers.push(configureOrigin(options, req));
headers.push(configureCredentials(options));
headers.push(configureExposedHeaders(options));
applyHeaders(headers, res);
next();
}
}
function middlewareWrapper(o) {
// if options are static (either via defaults or custom options passed in), wrap in a function
var optionsCallback = null;
if (typeof o === 'function') {
optionsCallback = o;
} else {
optionsCallback = function (req, cb) {
cb(null, o);
};
}
return function corsMiddleware(req, res, next) {
optionsCallback(req, function (err, options) {
if (err) {
next(err);
} else {
var corsOptions = assign({}, defaults, options);
var originCallback = null;
if (corsOptions.origin && typeof corsOptions.origin === 'function') {
originCallback = corsOptions.origin;
} else if (corsOptions.origin) {
originCallback = function (origin, cb) {
cb(null, corsOptions.origin);
};
}
if (originCallback) {
originCallback(req.headers.origin, function (err2, origin) {
if (err2 || !origin) {
next(err2);
} else {
corsOptions.origin = origin;
cors(corsOptions, req, res, next);
}
});
} else {
next();
}
}
});
};
}
// can pass either an options hash, an options delegate, or nothing
lib$1.exports = middlewareWrapper;
}());
var libExports$1 = lib$1.exports;
var corsMiddleware = /*@__PURE__*/getDefaultExportFromCjs(libExports$1);
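/*
 * Illustrative sketch (not part of the original bundle) of using the cors
 * middleware wrapper exported above; the option values shown are assumptions
 * for the example, not values from this file:
 *
 *   const cors = corsMiddleware({
 *     origin: 'https://example.com', // string, RegExp, array, or function
 *     credentials: true,
 *   });
 *   app.use(cors); // handles OPTIONS preflight and sets the CORS headers
 */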
var chokidar = {};
const fs$8 = require$$0__default;
const { Readable } = require$$0$7;
const sysPath$3 = require$$0$4;
const { promisify: promisify$3 } = require$$0$6;
const picomatch$1 = picomatch$3;
const readdir$1 = promisify$3(fs$8.readdir);
const stat$3 = promisify$3(fs$8.stat);
const lstat$2 = promisify$3(fs$8.lstat);
const realpath$1 = promisify$3(fs$8.realpath);
/**
* @typedef {Object} EntryInfo
* @property {String} path
* @property {String} fullPath
* @property {fs.Stats=} stats
* @property {fs.Dirent=} dirent
* @property {String} basename
*/
const BANG$2 = '!';
const RECURSIVE_ERROR_CODE = 'READDIRP_RECURSIVE_ERROR';
const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP', RECURSIVE_ERROR_CODE]);
const FILE_TYPE = 'files';
const DIR_TYPE = 'directories';
const FILE_DIR_TYPE = 'files_directories';
const EVERYTHING_TYPE = 'all';
const ALL_TYPES = [FILE_TYPE, DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE];
const isNormalFlowError = error => NORMAL_FLOW_ERRORS.has(error.code);
const [maj, min] = process.versions.node.split('.').slice(0, 2).map(n => Number.parseInt(n, 10));
const wantBigintFsStats = process.platform === 'win32' && (maj > 10 || (maj === 10 && min >= 5));
const normalizeFilter = filter => {
if (filter === undefined) return;
if (typeof filter === 'function') return filter;
if (typeof filter === 'string') {
const glob = picomatch$1(filter.trim());
return entry => glob(entry.basename);
}
if (Array.isArray(filter)) {
const positive = [];
const negative = [];
for (const item of filter) {
const trimmed = item.trim();
if (trimmed.charAt(0) === BANG$2) {
negative.push(picomatch$1(trimmed.slice(1)));
} else {
positive.push(picomatch$1(trimmed));
}
}
if (negative.length > 0) {
if (positive.length > 0) {
return entry =>
positive.some(f => f(entry.basename)) && !negative.some(f => f(entry.basename));
}
return entry => !negative.some(f => f(entry.basename));
}
return entry => positive.some(f => f(entry.basename));
}
};
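// normalizeFilter (above) turns the user-supplied fileFilter/directoryFilter into a
// predicate over entries. Illustrative examples (patterns and names are hypothetical):
//   normalizeFilter('*.js')                 // entry => glob(entry.basename)
//   normalizeFilter(['*.js', '!*.min.js'])  // positive and negated ("!") globs combined
//   normalizeFilter(entry => entry.basename !== 'node_modules') // functions pass through unchanged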
class ReaddirpStream extends Readable {
static get defaultOptions() {
return {
root: '.',
/* eslint-disable no-unused-vars */
fileFilter: (path) => true,
directoryFilter: (path) => true,
/* eslint-enable no-unused-vars */
type: FILE_TYPE,
lstat: false,
depth: 2147483648,
alwaysStat: false
};
}
constructor(options = {}) {
super({
objectMode: true,
autoDestroy: true,
highWaterMark: options.highWaterMark || 4096
});
const opts = { ...ReaddirpStream.defaultOptions, ...options };
const { root, type } = opts;
this._fileFilter = normalizeFilter(opts.fileFilter);
this._directoryFilter = normalizeFilter(opts.directoryFilter);
const statMethod = opts.lstat ? lstat$2 : stat$3;
// Use bigint stats if it's windows and stat() supports options (node 10+).
if (wantBigintFsStats) {
this._stat = path => statMethod(path, { bigint: true });
} else {
this._stat = statMethod;
}
this._maxDepth = opts.depth;
this._wantsDir = [DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type);
this._wantsFile = [FILE_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type);
this._wantsEverything = type === EVERYTHING_TYPE;
this._root = sysPath$3.resolve(root);
this._isDirent = ('Dirent' in fs$8) && !opts.alwaysStat;
this._statsProp = this._isDirent ? 'dirent' : 'stats';
this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent };
// Launch stream with one parent, the root dir.
this.parents = [this._exploreDir(root, 1)];
this.reading = false;
this.parent = undefined;
}
async _read(batch) {
if (this.reading) return;
this.reading = true;
try {
while (!this.destroyed && batch > 0) {
const { path, depth, files = [] } = this.parent || {};
if (files.length > 0) {
const slice = files.splice(0, batch).map(dirent => this._formatEntry(dirent, path));
for (const entry of await Promise.all(slice)) {
if (this.destroyed) return;
const entryType = await this._getEntryType(entry);
if (entryType === 'directory' && this._directoryFilter(entry)) {
if (depth <= this._maxDepth) {
this.parents.push(this._exploreDir(entry.fullPath, depth + 1));
}
if (this._wantsDir) {
this.push(entry);
batch--;
}
} else if ((entryType === 'file' || this._includeAsFile(entry)) && this._fileFilter(entry)) {
if (this._wantsFile) {
this.push(entry);
batch--;
}
}
}
} else {
const parent = this.parents.pop();
if (!parent) {
this.push(null);
break;
}
this.parent = await parent;
if (this.destroyed) return;
}
}
} catch (error) {
this.destroy(error);
} finally {
this.reading = false;
}
}
async _exploreDir(path, depth) {
let files;
try {
files = await readdir$1(path, this._rdOptions);
} catch (error) {
this._onError(error);
}
return { files, depth, path };
}
async _formatEntry(dirent, path) {
let entry;
try {
const basename = this._isDirent ? dirent.name : dirent;
const fullPath = sysPath$3.resolve(sysPath$3.join(path, basename));
entry = { path: sysPath$3.relative(this._root, fullPath), fullPath, basename };
entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath);
} catch (err) {
this._onError(err);
}
return entry;
}
_onError(err) {
if (isNormalFlowError(err) && !this.destroyed) {
this.emit('warn', err);
} else {
this.destroy(err);
}
}
async _getEntryType(entry) {
// entry may be undefined because a warning or an error was emitted
// and the statsProp is therefore undefined
const stats = entry && entry[this._statsProp];
if (!stats) {
return;
}
if (stats.isFile()) {
return 'file';
}
if (stats.isDirectory()) {
return 'directory';
}
if (stats && stats.isSymbolicLink()) {
const full = entry.fullPath;
try {
const entryRealPath = await realpath$1(full);
const entryRealPathStats = await lstat$2(entryRealPath);
if (entryRealPathStats.isFile()) {
return 'file';
}
if (entryRealPathStats.isDirectory()) {
const len = entryRealPath.length;
if (full.startsWith(entryRealPath) && full.substr(len, 1) === sysPath$3.sep) {
const recursiveError = new Error(
`Circular symlink detected: "${full}" points to "${entryRealPath}"`
);
recursiveError.code = RECURSIVE_ERROR_CODE;
return this._onError(recursiveError);
}
return 'directory';
}
} catch (error) {
this._onError(error);
}
}
}
_includeAsFile(entry) {
const stats = entry && entry[this._statsProp];
return stats && this._wantsEverything && !stats.isDirectory();
}
}
/**
* @typedef {Object} ReaddirpArguments
* @property {Function=} fileFilter
* @property {Function=} directoryFilter
* @property {String=} type
* @property {Number=} depth
* @property {String=} root
* @property {Boolean=} lstat
* @property {Boolean=} bigint
*/
/**
* Main function which ends up calling readdirRec and reads all files and directories in given root recursively.
* @param {String} root Root directory
* @param {ReaddirpArguments=} options Options to specify root (start directory), filters and recursion depth
*/
const readdirp$1 = (root, options = {}) => {
let type = options.entryType || options.type;
if (type === 'both') type = FILE_DIR_TYPE; // backwards-compatibility
if (type) options.type = type;
if (!root) {
throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)');
} else if (typeof root !== 'string') {
throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)');
} else if (type && !ALL_TYPES.includes(type)) {
throw new Error(`readdirp: Invalid type passed. Use one of ${ALL_TYPES.join(', ')}`);
}
options.root = root;
return new ReaddirpStream(options);
};
const readdirpPromise = (root, options = {}) => {
return new Promise((resolve, reject) => {
const files = [];
readdirp$1(root, options)
.on('data', entry => files.push(entry))
.on('end', () => resolve(files))
.on('error', error => reject(error));
});
};
readdirp$1.promise = readdirpPromise;
readdirp$1.ReaddirpStream = ReaddirpStream;
readdirp$1.default = readdirp$1;
var readdirp_1 = readdirp$1;
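// Illustrative usage of the readdirp API defined above (paths are hypothetical):
//   readdirp$1('/some/root', { fileFilter: '*.js', type: 'files' })
//     .on('data', entry => console.log(entry.path))
//     .on('end', () => console.log('done'));
//   // or, inside an async function, buffer everything with the promise wrapper:
//   const entries = await readdirp$1.promise('/some/root', { depth: 1 });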
var anymatch$2 = {exports: {}};
/*!
* normalize-path <https://github.com/jonschlinkert/normalize-path>
*
* Copyright (c) 2014-2018, Jon Schlinkert.
* Released under the MIT License.
*/
var normalizePath$2 = function(path, stripTrailing) {
if (typeof path !== 'string') {
throw new TypeError('expected path to be a string');
}
if (path === '\\' || path === '/') return '/';
var len = path.length;
if (len <= 1) return path;
// ensure that win32 namespaces have two leading slashes, so that the path is
// handled properly by the win32 version of path.parse() after being normalized
// https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces
var prefix = '';
if (len > 4 && path[3] === '\\') {
var ch = path[2];
if ((ch === '?' || ch === '.') && path.slice(0, 2) === '\\\\') {
path = path.slice(2);
prefix = '//';
}
}
var segs = path.split(/[/\\]+/);
if (stripTrailing !== false && segs[segs.length - 1] === '') {
segs.pop();
}
return prefix + segs.join('/');
};
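// Illustrative behavior of normalizePath$2 (inputs are hypothetical):
//   normalizePath$2('C:\\foo\\bar\\')        // => 'C:/foo/bar' (trailing slash stripped by default)
//   normalizePath$2('C:\\foo\\bar\\', false) // => 'C:/foo/bar/' (trailing slash kept)
//   normalizePath$2('\\\\?\\C:\\foo')        // => '//?/C:/foo' (win32 namespace prefix preserved)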
var anymatch_1 = anymatch$2.exports;
Object.defineProperty(anymatch_1, "__esModule", { value: true });
const picomatch = picomatch$3;
const normalizePath$1 = normalizePath$2;
/**
* @typedef {(testString: string) => boolean} AnymatchFn
* @typedef {string|RegExp|AnymatchFn} AnymatchPattern
* @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher
*/
const BANG$1 = '!';
const DEFAULT_OPTIONS = {returnIndex: false};
const arrify$1 = (item) => Array.isArray(item) ? item : [item];
/**
* @param {AnymatchPattern} matcher
* @param {object} options
* @returns {AnymatchFn}
*/
const createPattern = (matcher, options) => {
if (typeof matcher === 'function') {
return matcher;
}
if (typeof matcher === 'string') {
const glob = picomatch(matcher, options);
return (string) => matcher === string || glob(string);
}
if (matcher instanceof RegExp) {
return (string) => matcher.test(string);
}
return (string) => false;
};
/**
* @param {Array<Function>} patterns
* @param {Array<Function>} negPatterns
* @param {String|Array} args
* @param {Boolean} returnIndex
* @returns {boolean|number}
*/
const matchPatterns = (patterns, negPatterns, args, returnIndex) => {
const isList = Array.isArray(args);
const _path = isList ? args[0] : args;
if (!isList && typeof _path !== 'string') {
throw new TypeError('anymatch: second argument must be a string: got ' +
Object.prototype.toString.call(_path))
}
const path = normalizePath$1(_path);
for (let index = 0; index < negPatterns.length; index++) {
const nglob = negPatterns[index];
if (nglob(path)) {
return returnIndex ? -1 : false;
}
}
const applied = isList && [path].concat(args.slice(1));
for (let index = 0; index < patterns.length; index++) {
const pattern = patterns[index];
if (isList ? pattern(...applied) : pattern(path)) {
return returnIndex ? index : true;
}
}
return returnIndex ? -1 : false;
};
/**
* @param {AnymatchMatcher} matchers
* @param {Array|string} testString
* @param {object} options
* @returns {boolean|number|Function}
*/
const anymatch$1 = (matchers, testString, options = DEFAULT_OPTIONS) => {
if (matchers == null) {
throw new TypeError('anymatch: specify first argument');
}
const opts = typeof options === 'boolean' ? {returnIndex: options} : options;
const returnIndex = opts.returnIndex || false;
// Early cache for matchers.
const mtchers = arrify$1(matchers);
const negatedGlobs = mtchers
.filter(item => typeof item === 'string' && item.charAt(0) === BANG$1)
.map(item => item.slice(1))
.map(item => picomatch(item, opts));
const patterns = mtchers
.filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG$1))
.map(matcher => createPattern(matcher, opts));
if (testString == null) {
return (testString, ri = false) => {
const returnIndex = typeof ri === 'boolean' ? ri : false;
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
}
}
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
};
anymatch$1.default = anymatch$1;
anymatch$2.exports = anymatch$1;
var anymatchExports = anymatch$2.exports;
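// Illustrative usage of anymatch$1 (patterns and paths are hypothetical):
//   anymatch$1(['src/*.js', '!src/*.test.js'], 'src/index.js')      // => true
//   anymatch$1(['src/*.js', '!src/*.test.js'], 'src/index.test.js') // => false (negated glob wins)
//   const matcher = anymatch$1(['*.md', /\.txt$/]);                  // omit testString to get a matcher fn
//   matcher('notes.txt')                                             // => true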
var require$$0 = [
"3dm",
"3ds",
"3g2",
"3gp",
"7z",
"a",
"aac",
"adp",
"ai",
"aif",
"aiff",
"alz",
"ape",
"apk",
"appimage",
"ar",
"arj",
"asf",
"au",
"avi",
"bak",
"baml",
"bh",
"bin",
"bk",
"bmp",
"btif",
"bz2",
"bzip2",
"cab",
"caf",
"cgm",
"class",
"cmx",
"cpio",
"cr2",
"cur",
"dat",
"dcm",
"deb",
"dex",
"djvu",
"dll",
"dmg",
"dng",
"doc",
"docm",
"docx",
"dot",
"dotm",
"dra",
"DS_Store",
"dsk",
"dts",
"dtshd",
"dvb",
"dwg",
"dxf",
"ecelp4800",
"ecelp7470",
"ecelp9600",
"egg",
"eol",
"eot",
"epub",
"exe",
"f4v",
"fbs",
"fh",
"fla",
"flac",
"flatpak",
"fli",
"flv",
"fpx",
"fst",
"fvt",
"g3",
"gh",
"gif",
"graffle",
"gz",
"gzip",
"h261",
"h263",
"h264",
"icns",
"ico",
"ief",
"img",
"ipa",
"iso",
"jar",
"jpeg",
"jpg",
"jpgv",
"jpm",
"jxr",
"key",
"ktx",
"lha",
"lib",
"lvp",
"lz",
"lzh",
"lzma",
"lzo",
"m3u",
"m4a",
"m4v",
"mar",
"mdi",
"mht",
"mid",
"midi",
"mj2",
"mka",
"mkv",
"mmr",
"mng",
"mobi",
"mov",
"movie",
"mp3",
"mp4",
"mp4a",
"mpeg",
"mpg",
"mpga",
"mxu",
"nef",
"npx",
"numbers",
"nupkg",
"o",
"odp",
"ods",
"odt",
"oga",
"ogg",
"ogv",
"otf",
"ott",
"pages",
"pbm",
"pcx",
"pdb",
"pdf",
"pea",
"pgm",
"pic",
"png",
"pnm",
"pot",
"potm",
"potx",
"ppa",
"ppam",
"ppm",
"pps",
"ppsm",
"ppsx",
"ppt",
"pptm",
"pptx",
"psd",
"pya",
"pyc",
"pyo",
"pyv",
"qt",
"rar",
"ras",
"raw",
"resources",
"rgb",
"rip",
"rlc",
"rmf",
"rmvb",
"rpm",
"rtf",
"rz",
"s3m",
"s7z",
"scpt",
"sgi",
"shar",
"snap",
"sil",
"sketch",
"slk",
"smv",
"snk",
"so",
"stl",
"suo",
"sub",
"swf",
"tar",
"tbz",
"tbz2",
"tga",
"tgz",
"thmx",
"tif",
"tiff",
"tlz",
"ttc",
"ttf",
"txz",
"udf",
"uvh",
"uvi",
"uvm",
"uvp",
"uvs",
"uvu",
"viv",
"vob",
"war",
"wav",
"wax",
"wbmp",
"wdp",
"weba",
"webm",
"webp",
"whl",
"wim",
"wm",
"wma",
"wmv",
"wmx",
"woff",
"woff2",
"wrm",
"wvx",
"xbm",
"xif",
"xla",
"xlam",
"xls",
"xlsb",
"xlsm",
"xlsx",
"xlt",
"xltm",
"xltx",
"xm",
"xmind",
"xpi",
"xpm",
"xwd",
"xz",
"z",
"zip",
"zipx"
];
var binaryExtensions$1 = require$$0;
const path$8 = require$$0$4;
const binaryExtensions = binaryExtensions$1;
const extensions = new Set(binaryExtensions);
var isBinaryPath$1 = filePath => extensions.has(path$8.extname(filePath).slice(1).toLowerCase());
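// Illustrative behavior of isBinaryPath$1 (file names are hypothetical):
//   isBinaryPath$1('photo.JPG') // => true  (the extension is lower-cased before lookup)
//   isBinaryPath$1('notes.txt') // => false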
var constants$1 = {};
(function (exports) {
const {sep} = require$$0$4;
const {platform} = process;
const os = require$$2;
exports.EV_ALL = 'all';
exports.EV_READY = 'ready';
exports.EV_ADD = 'add';
exports.EV_CHANGE = 'change';
exports.EV_ADD_DIR = 'addDir';
exports.EV_UNLINK = 'unlink';
exports.EV_UNLINK_DIR = 'unlinkDir';
exports.EV_RAW = 'raw';
exports.EV_ERROR = 'error';
exports.STR_DATA = 'data';
exports.STR_END = 'end';
exports.STR_CLOSE = 'close';
exports.FSEVENT_CREATED = 'created';
exports.FSEVENT_MODIFIED = 'modified';
exports.FSEVENT_DELETED = 'deleted';
exports.FSEVENT_MOVED = 'moved';
exports.FSEVENT_CLONED = 'cloned';
exports.FSEVENT_UNKNOWN = 'unknown';
exports.FSEVENT_TYPE_FILE = 'file';
exports.FSEVENT_TYPE_DIRECTORY = 'directory';
exports.FSEVENT_TYPE_SYMLINK = 'symlink';
exports.KEY_LISTENERS = 'listeners';
exports.KEY_ERR = 'errHandlers';
exports.KEY_RAW = 'rawEmitters';
exports.HANDLER_KEYS = [exports.KEY_LISTENERS, exports.KEY_ERR, exports.KEY_RAW];
exports.DOT_SLASH = `.${sep}`;
exports.BACK_SLASH_RE = /\\/g;
exports.DOUBLE_SLASH_RE = /\/\//;
exports.SLASH_OR_BACK_SLASH_RE = /[/\\]/;
exports.DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/;
exports.REPLACER_RE = /^\.[/\\]/;
exports.SLASH = '/';
exports.SLASH_SLASH = '//';
exports.BRACE_START = '{';
exports.BANG = '!';
exports.ONE_DOT = '.';
exports.TWO_DOTS = '..';
exports.STAR = '*';
exports.GLOBSTAR = '**';
exports.ROOT_GLOBSTAR = '/**/*';
exports.SLASH_GLOBSTAR = '/**';
exports.DIR_SUFFIX = 'Dir';
exports.ANYMATCH_OPTS = {dot: true};
exports.STRING_TYPE = 'string';
exports.FUNCTION_TYPE = 'function';
exports.EMPTY_STR = '';
exports.EMPTY_FN = () => {};
exports.IDENTITY_FN = val => val;
exports.isWindows = platform === 'win32';
exports.isMacos = platform === 'darwin';
exports.isLinux = platform === 'linux';
exports.isIBMi = os.type() === 'OS400';
} (constants$1));
const fs$7 = require$$0__default;
const sysPath$2 = require$$0$4;
const { promisify: promisify$2 } = require$$0$6;
const isBinaryPath = isBinaryPath$1;
const {
isWindows: isWindows$2,
isLinux,
EMPTY_FN: EMPTY_FN$2,
EMPTY_STR: EMPTY_STR$1,
KEY_LISTENERS,
KEY_ERR,
KEY_RAW,
HANDLER_KEYS,
EV_CHANGE: EV_CHANGE$2,
EV_ADD: EV_ADD$2,
EV_ADD_DIR: EV_ADD_DIR$2,
EV_ERROR: EV_ERROR$2,
STR_DATA: STR_DATA$1,
STR_END: STR_END$2,
BRACE_START: BRACE_START$1,
STAR
} = constants$1;
const THROTTLE_MODE_WATCH = 'watch';
const open$2 = promisify$2(fs$7.open);
const stat$2 = promisify$2(fs$7.stat);
const lstat$1 = promisify$2(fs$7.lstat);
const close = promisify$2(fs$7.close);
const fsrealpath = promisify$2(fs$7.realpath);
const statMethods$1 = { lstat: lstat$1, stat: stat$2 };
// TODO: emit errors properly. Example: EMFILE on macOS.
const foreach = (val, fn) => {
if (val instanceof Set) {
val.forEach(fn);
} else {
fn(val);
}
};
const addAndConvert = (main, prop, item) => {
let container = main[prop];
if (!(container instanceof Set)) {
main[prop] = container = new Set([container]);
}
container.add(item);
};
const clearItem = cont => key => {
const set = cont[key];
if (set instanceof Set) {
set.clear();
} else {
delete cont[key];
}
};
const delFromSet = (main, prop, item) => {
const container = main[prop];
if (container instanceof Set) {
container.delete(item);
} else if (container === item) {
delete main[prop];
}
};
const isEmptySet = (val) => val instanceof Set ? val.size === 0 : !val;
/**
* @typedef {String} Path
*/
// fs_watch helpers
// object to hold per-process fs_watch instances
// (may be shared across chokidar FSWatcher instances)
/**
* @typedef {Object} FsWatchContainer
* @property {Set} listeners
* @property {Set} errHandlers
* @property {Set} rawEmitters
* @property {fs.FSWatcher=} watcher
* @property {Boolean=} watcherUnusable
*/
/**
* @type {Map<String,FsWatchContainer>}
*/
const FsWatchInstances = new Map();
/**
* Instantiates the fs_watch interface
* @param {String} path to be watched
* @param {Object} options to be passed to fs_watch
* @param {Function} listener main event handler
* @param {Function} errHandler emits info about errors
* @param {Function} emitRaw emits raw event data
* @returns {fs.FSWatcher} new fsevents instance
*/
function createFsWatchInstance(path, options, listener, errHandler, emitRaw) {
const handleEvent = (rawEvent, evPath) => {
listener(path);
emitRaw(rawEvent, evPath, {watchedPath: path});
// emit based on events occurring for files from a directory's watcher in
// case the file's watcher misses it (and rely on throttling to de-dupe)
if (evPath && path !== evPath) {
fsWatchBroadcast(
sysPath$2.resolve(path, evPath), KEY_LISTENERS, sysPath$2.join(path, evPath)
);
}
};
try {
return fs$7.watch(path, options, handleEvent);
} catch (error) {
errHandler(error);
}
}
/**
* Helper for passing fs_watch event data to a collection of listeners
* @param {Path} fullPath absolute path bound to fs_watch instance
* @param {String} type listener type
* @param {*=} val1 arguments to be passed to listeners
* @param {*=} val2
* @param {*=} val3
*/
const fsWatchBroadcast = (fullPath, type, val1, val2, val3) => {
const cont = FsWatchInstances.get(fullPath);
if (!cont) return;
foreach(cont[type], (listener) => {
listener(val1, val2, val3);
});
};
/**
* Instantiates the fs_watch interface or binds listeners
* to an existing one covering the same file system entry
* @param {String} path
* @param {String} fullPath absolute path
* @param {Object} options to be passed to fs_watch
* @param {Object} handlers container for event listener functions
*/
const setFsWatchListener = (path, fullPath, options, handlers) => {
const {listener, errHandler, rawEmitter} = handlers;
let cont = FsWatchInstances.get(fullPath);
/** @type {fs.FSWatcher=} */
let watcher;
if (!options.persistent) {
watcher = createFsWatchInstance(
path, options, listener, errHandler, rawEmitter
);
return watcher.close.bind(watcher);
}
if (cont) {
addAndConvert(cont, KEY_LISTENERS, listener);
addAndConvert(cont, KEY_ERR, errHandler);
addAndConvert(cont, KEY_RAW, rawEmitter);
} else {
watcher = createFsWatchInstance(
path,
options,
fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS),
errHandler, // no need to use broadcast here
fsWatchBroadcast.bind(null, fullPath, KEY_RAW)
);
if (!watcher) return;
watcher.on(EV_ERROR$2, async (error) => {
const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR);
cont.watcherUnusable = true; // documented since Node 10.4.1
// Workaround for https://github.com/joyent/node/issues/4337
if (isWindows$2 && error.code === 'EPERM') {
try {
const fd = await open$2(path, 'r');
await close(fd);
broadcastErr(error);
} catch (err) {}
} else {
broadcastErr(error);
}
});
cont = {
listeners: listener,
errHandlers: errHandler,
rawEmitters: rawEmitter,
watcher
};
FsWatchInstances.set(fullPath, cont);
}
// const index = cont.listeners.indexOf(listener);
// removes this instance's listeners and closes the underlying fs_watch
// instance if there are no more listeners left
return () => {
delFromSet(cont, KEY_LISTENERS, listener);
delFromSet(cont, KEY_ERR, errHandler);
delFromSet(cont, KEY_RAW, rawEmitter);
if (isEmptySet(cont.listeners)) {
// Check to protect against issue gh-730.
// if (cont.watcherUnusable) {
cont.watcher.close();
// }
FsWatchInstances.delete(fullPath);
HANDLER_KEYS.forEach(clearItem(cont));
cont.watcher = undefined;
Object.freeze(cont);
}
};
};
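// Note: setFsWatchListener (above) reference-counts a single fs.watch instance per
// absolute path via FsWatchInstances; the returned closer removes this caller's
// listeners and only closes the underlying watcher once no listeners remain.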
// fs_watchFile helpers
// object to hold per-process fs_watchFile instances
// (may be shared across chokidar FSWatcher instances)
const FsWatchFileInstances = new Map();
/**
* Instantiates the fs_watchFile interface or binds listeners
* to an existing one covering the same file system entry
* @param {String} path to be watched
* @param {String} fullPath absolute path
* @param {Object} options options to be passed to fs_watchFile
* @param {Object} handlers container for event listener functions
* @returns {Function} closer
*/
const setFsWatchFileListener = (path, fullPath, options, handlers) => {
const {listener, rawEmitter} = handlers;
let cont = FsWatchFileInstances.get(fullPath);
const copts = cont && cont.options;
if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) {
fs$7.unwatchFile(fullPath);
cont = undefined;
}
/* eslint-enable no-unused-vars, prefer-destructuring */
if (cont) {
addAndConvert(cont, KEY_LISTENERS, listener);
addAndConvert(cont, KEY_RAW, rawEmitter);
} else {
// TODO
// listeners.add(listener);
// rawEmitters.add(rawEmitter);
cont = {
listeners: listener,
rawEmitters: rawEmitter,
options,
watcher: fs$7.watchFile(fullPath, options, (curr, prev) => {
foreach(cont.rawEmitters, (rawEmitter) => {
rawEmitter(EV_CHANGE$2, fullPath, {curr, prev});
});
const currmtime = curr.mtimeMs;
if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) {
foreach(cont.listeners, (listener) => listener(path, curr));
}
})
};
FsWatchFileInstances.set(fullPath, cont);
}
// const index = cont.listeners.indexOf(listener);
// Removes this instance's listeners and closes the underlying fs_watchFile
// instance if there are no more listeners left.
return () => {
delFromSet(cont, KEY_LISTENERS, listener);
delFromSet(cont, KEY_RAW, rawEmitter);
if (isEmptySet(cont.listeners)) {
FsWatchFileInstances.delete(fullPath);
fs$7.unwatchFile(fullPath);
cont.options = cont.watcher = undefined;
Object.freeze(cont);
}
};
};
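// Note: setFsWatchFileListener (above) mirrors the fs.watch sharing for fs.watchFile
// (polling); an existing shared poller is discarded and recreated when a new caller
// needs persistence or a shorter polling interval than the one currently in use.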
/**
* @mixin
*/
let NodeFsHandler$1 = class NodeFsHandler {
/**
* @param {import("../index").FSWatcher} fsW
*/
constructor(fsW) {
this.fsw = fsW;
this._boundHandleError = (error) => fsW._handleError(error);
}
/**
* Watch file for changes with fs_watchFile or fs_watch.
* @param {String} path to file or dir
* @param {Function} listener on fs change
* @returns {Function} closer for the watcher instance
*/
_watchWithNodeFs(path, listener) {
const opts = this.fsw.options;
const directory = sysPath$2.dirname(path);
const basename = sysPath$2.basename(path);
const parent = this.fsw._getWatchedDir(directory);
parent.add(basename);
const absolutePath = sysPath$2.resolve(path);
const options = {persistent: opts.persistent};
if (!listener) listener = EMPTY_FN$2;
let closer;
if (opts.usePolling) {
options.interval = opts.enableBinaryInterval && isBinaryPath(basename) ?
opts.binaryInterval : opts.interval;
closer = setFsWatchFileListener(path, absolutePath, options, {
listener,
rawEmitter: this.fsw._emitRaw
});
} else {
closer = setFsWatchListener(path, absolutePath, options, {
listener,
errHandler: this._boundHandleError,
rawEmitter: this.fsw._emitRaw
});
}
return closer;
}
/**
* Watch a file and emit add event if warranted.
* @param {Path} file Path
* @param {fs.Stats} stats result of fs_stat
* @param {Boolean} initialAdd was the file added at watch instantiation?
* @returns {Function} closer for the watcher instance
*/
_handleFile(file, stats, initialAdd) {
if (this.fsw.closed) {
return;
}
const dirname = sysPath$2.dirname(file);
const basename = sysPath$2.basename(file);
const parent = this.fsw._getWatchedDir(dirname);
// stats is always present
let prevStats = stats;
// if the file is already being watched, do nothing
if (parent.has(basename)) return;
const listener = async (path, newStats) => {
if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5)) return;
if (!newStats || newStats.mtimeMs === 0) {
try {
const newStats = await stat$2(file);
if (this.fsw.closed) return;
// Check that change event was not fired because of changed only accessTime.
const at = newStats.atimeMs;
const mt = newStats.mtimeMs;
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
this.fsw._emit(EV_CHANGE$2, file, newStats);
}
if (isLinux && prevStats.ino !== newStats.ino) {
this.fsw._closeFile(path);
prevStats = newStats;
this.fsw._addPathCloser(path, this._watchWithNodeFs(file, listener));
} else {
prevStats = newStats;
}
} catch (error) {
// Fix issues where mtime is null but file is still present
this.fsw._remove(dirname, basename);
}
// add is about to be emitted if the file is not already tracked in parent
} else if (parent.has(basename)) {
// Check that change event was not fired because of changed only accessTime.
const at = newStats.atimeMs;
const mt = newStats.mtimeMs;
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
this.fsw._emit(EV_CHANGE$2, file, newStats);
}
prevStats = newStats;
}
};
// kick off the watcher
const closer = this._watchWithNodeFs(file, listener);
// emit an add event if we're supposed to
if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) {
if (!this.fsw._throttle(EV_ADD$2, file, 0)) return;
this.fsw._emit(EV_ADD$2, file, stats);
}
return closer;
}
/**
* Handle symlinks encountered while reading a dir.
* @param {Object} entry returned by readdirp
* @param {String} directory path of dir being read
* @param {String} path of this item
* @param {String} item basename of this item
* @returns {Promise<Boolean>} true if no more processing is needed for this entry.
*/
async _handleSymlink(entry, directory, path, item) {
if (this.fsw.closed) {
return;
}
const full = entry.fullPath;
const dir = this.fsw._getWatchedDir(directory);
if (!this.fsw.options.followSymlinks) {
// watch symlink directly (don't follow) and detect changes
this.fsw._incrReadyCount();
let linkPath;
try {
linkPath = await fsrealpath(path);
} catch (e) {
this.fsw._emitReady();
return true;
}
if (this.fsw.closed) return;
if (dir.has(item)) {
if (this.fsw._symlinkPaths.get(full) !== linkPath) {
this.fsw._symlinkPaths.set(full, linkPath);
this.fsw._emit(EV_CHANGE$2, path, entry.stats);
}
} else {
dir.add(item);
this.fsw._symlinkPaths.set(full, linkPath);
this.fsw._emit(EV_ADD$2, path, entry.stats);
}
this.fsw._emitReady();
return true;
}
// don't follow the same symlink more than once
if (this.fsw._symlinkPaths.has(full)) {
return true;
}
this.fsw._symlinkPaths.set(full, true);
}
_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) {
// Normalize the directory name on Windows
directory = sysPath$2.join(directory, EMPTY_STR$1);
if (!wh.hasGlob) {
throttler = this.fsw._throttle('readdir', directory, 1000);
if (!throttler) return;
}
const previous = this.fsw._getWatchedDir(wh.path);
const current = new Set();
let stream = this.fsw._readdirp(directory, {
fileFilter: entry => wh.filterPath(entry),
directoryFilter: entry => wh.filterDir(entry),
depth: 0
}).on(STR_DATA$1, async (entry) => {
if (this.fsw.closed) {
stream = undefined;
return;
}
const item = entry.path;
let path = sysPath$2.join(directory, item);
current.add(item);
if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path, item)) {
return;
}
if (this.fsw.closed) {
stream = undefined;
return;
}
// Files that are present in the current directory snapshot
// but absent from the previous one are added to the watch list
// and emit an `add` event.
if (item === target || !target && !previous.has(item)) {
this.fsw._incrReadyCount();
// ensure the relative form of the path is preserved in case of watcher reuse
path = sysPath$2.join(dir, sysPath$2.relative(dir, path));
this._addToNodeFs(path, initialAdd, wh, depth + 1);
}
}).on(EV_ERROR$2, this._boundHandleError);
return new Promise(resolve =>
stream.once(STR_END$2, () => {
if (this.fsw.closed) {
stream = undefined;
return;
}
const wasThrottled = throttler ? throttler.clear() : false;
resolve();
// Files that are absent from the current directory snapshot
// but present in the previous one emit a `remove` event
// and are removed from @watched[directory].
previous.getChildren().filter((item) => {
return item !== directory &&
!current.has(item) &&
// in case of intersecting globs;
// a path may have been filtered out of this readdir, but
// shouldn't be removed because it matches a different glob
(!wh.hasGlob || wh.filterPath({
fullPath: sysPath$2.resolve(directory, item)
}));
}).forEach((item) => {
this.fsw._remove(directory, item);
});
stream = undefined;
// one more time for any missed in case changes came in extremely quickly
if (wasThrottled) this._handleRead(directory, false, wh, target, dir, depth, throttler);
})
);
}
/**
* Read directory to add / remove files from `@watched` list and re-read it on change.
* @param {String} dir fs path
* @param {fs.Stats} stats
* @param {Boolean} initialAdd
* @param {Number} depth relative to user-supplied path
* @param {String} target child path targeted for watch
* @param {Object} wh Common watch helpers for this path
* @param {String} realpath
* @returns {Promise<Function>} closer for the watcher instance.
*/
async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) {
const parentDir = this.fsw._getWatchedDir(sysPath$2.dirname(dir));
const tracked = parentDir.has(sysPath$2.basename(dir));
if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) {
if (!wh.hasGlob || wh.globFilter(dir)) this.fsw._emit(EV_ADD_DIR$2, dir, stats);
}
// ensure dir is tracked (harmless if redundant)
parentDir.add(sysPath$2.basename(dir));
this.fsw._getWatchedDir(dir);
let throttler;
let closer;
const oDepth = this.fsw.options.depth;
if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) {
if (!target) {
await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler);
if (this.fsw.closed) return;
}
closer = this._watchWithNodeFs(dir, (dirPath, stats) => {
// if current directory is removed, do nothing
if (stats && stats.mtimeMs === 0) return;
this._handleRead(dirPath, false, wh, target, dir, depth, throttler);
});
}
return closer;
}
/**
* Handle added file, directory, or glob pattern.
* Delegates call to _handleFile / _handleDir after checks.
* @param {String} path to file or dir
* @param {Boolean} initialAdd was the file added at watch instantiation?
* @param {Object} priorWh watch helpers inherited from the parent directory's read
* @param {Number} depth relative to user-supplied path
* @param {String=} target Child path actually targeted for watch
* @returns {Promise}
*/
async _addToNodeFs(path, initialAdd, priorWh, depth, target) {
const ready = this.fsw._emitReady;
if (this.fsw._isIgnored(path) || this.fsw.closed) {
ready();
return false;
}
const wh = this.fsw._getWatchHelpers(path, depth);
if (!wh.hasGlob && priorWh) {
wh.hasGlob = priorWh.hasGlob;
wh.globFilter = priorWh.globFilter;
wh.filterPath = entry => priorWh.filterPath(entry);
wh.filterDir = entry => priorWh.filterDir(entry);
}
// evaluate what is at the path we're being asked to watch
try {
const stats = await statMethods$1[wh.statMethod](wh.watchPath);
if (this.fsw.closed) return;
if (this.fsw._isIgnored(wh.watchPath, stats)) {
ready();
return false;
}
const follow = this.fsw.options.followSymlinks && !path.includes(STAR) && !path.includes(BRACE_START$1);
let closer;
if (stats.isDirectory()) {
const absPath = sysPath$2.resolve(path);
const targetPath = follow ? await fsrealpath(path) : path;
if (this.fsw.closed) return;
closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath);
if (this.fsw.closed) return;
// preserve this symlink's target path
if (absPath !== targetPath && targetPath !== undefined) {
this.fsw._symlinkPaths.set(absPath, targetPath);
}
} else if (stats.isSymbolicLink()) {
const targetPath = follow ? await fsrealpath(path) : path;
if (this.fsw.closed) return;
const parent = sysPath$2.dirname(wh.watchPath);
this.fsw._getWatchedDir(parent).add(wh.watchPath);
this.fsw._emit(EV_ADD$2, wh.watchPath, stats);
closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath);
if (this.fsw.closed) return;
// preserve this symlink's target path
if (targetPath !== undefined) {
this.fsw._symlinkPaths.set(sysPath$2.resolve(path), targetPath);
}
} else {
closer = this._handleFile(wh.watchPath, stats, initialAdd);
}
ready();
this.fsw._addPathCloser(path, closer);
return false;
} catch (error) {
if (this.fsw._handleError(error)) {
ready();
return path;
}
}
}
};
var nodefsHandler = NodeFsHandler$1;
var fseventsHandler = {exports: {}};
const fs$6 = require$$0__default;
const sysPath$1 = require$$0$4;
const { promisify: promisify$1 } = require$$0$6;
let fsevents;
try {
fsevents = __require('fsevents');
} catch (error) {
if (process.env.CHOKIDAR_PRINT_FSEVENTS_REQUIRE_ERROR) console.error(error);
}
if (fsevents) {
// TODO: real check
const mtch = process.version.match(/v(\d+)\.(\d+)/);
if (mtch && mtch[1] && mtch[2]) {
const maj = Number.parseInt(mtch[1], 10);
const min = Number.parseInt(mtch[2], 10);
if (maj === 8 && min < 16) {
fsevents = undefined;
}
}
}
const {
EV_ADD: EV_ADD$1,
EV_CHANGE: EV_CHANGE$1,
EV_ADD_DIR: EV_ADD_DIR$1,
EV_UNLINK: EV_UNLINK$1,
EV_ERROR: EV_ERROR$1,
STR_DATA,
STR_END: STR_END$1,
FSEVENT_CREATED,
FSEVENT_MODIFIED,
FSEVENT_DELETED,
FSEVENT_MOVED,
// FSEVENT_CLONED,
FSEVENT_UNKNOWN,
FSEVENT_TYPE_FILE,
FSEVENT_TYPE_DIRECTORY,
FSEVENT_TYPE_SYMLINK,
ROOT_GLOBSTAR,
DIR_SUFFIX,
DOT_SLASH,
FUNCTION_TYPE: FUNCTION_TYPE$1,
EMPTY_FN: EMPTY_FN$1,
IDENTITY_FN
} = constants$1;
const Depth = (value) => isNaN(value) ? {} : {depth: value};
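// Illustrative behavior of the Depth helper above: Depth(2) => {depth: 2},
// Depth(undefined) => {} (so spreading it leaves readdirp's default depth in place).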
const stat$1 = promisify$1(fs$6.stat);
const lstat = promisify$1(fs$6.lstat);
const realpath = promisify$1(fs$6.realpath);
const statMethods = { stat: stat$1, lstat };
/**
* @typedef {String} Path
*/
/**
* @typedef {Object} FsEventsWatchContainer
* @property {Set<Function>} listeners
* @property {Function} rawEmitter
* @property {{stop: Function}} watcher
*/
// fsevents instance helper functions
/**
* Object to hold per-process fsevents instances (may be shared across chokidar FSWatcher instances)
* @type {Map<Path,FsEventsWatchContainer>}
*/
const FSEventsWatchers = new Map();
// Threshold of duplicate path prefixes at which to start
// consolidating going forward
const consolidateThreshhold = 10;
const wrongEventFlags = new Set([
69888, 70400, 71424, 72704, 73472, 131328, 131840, 262912
]);
/**
* Instantiates the fsevents interface
* @param {Path} path path to be watched
* @param {Function} callback called when fsevents is bound and ready
* @returns {{stop: Function}} new fsevents instance
*/
const createFSEventsInstance = (path, callback) => {
const stop = fsevents.watch(path, callback);
return {stop};
};
/**
* Instantiates the fsevents interface or binds listeners to an existing one covering
* the same file tree.
* @param {Path} path - to be watched
* @param {Path} realPath - real path for symlinks
* @param {Function} listener - called when fsevents emits events
* @param {Function} rawEmitter - passes data to listeners of the 'raw' event
* @returns {Function} closer
*/
function setFSEventsListener(path, realPath, listener, rawEmitter) {
let watchPath = sysPath$1.extname(realPath) ? sysPath$1.dirname(realPath) : realPath;
const parentPath = sysPath$1.dirname(watchPath);
let cont = FSEventsWatchers.get(watchPath);
// If we've accumulated a substantial number of paths that
// could have been consolidated by watching one directory
// above the current one, create a watcher on the parent
// path instead, so that we do consolidate going forward.
if (couldConsolidate(parentPath)) {
watchPath = parentPath;
}
const resolvedPath = sysPath$1.resolve(path);
const hasSymlink = resolvedPath !== realPath;
const filteredListener = (fullPath, flags, info) => {
if (hasSymlink) fullPath = fullPath.replace(realPath, resolvedPath);
if (
fullPath === resolvedPath ||
!fullPath.indexOf(resolvedPath + sysPath$1.sep)
) listener(fullPath, flags, info);
};
// check if there is already a watcher on a parent path
// modifies `watchPath` to the parent path when it finds a match
let watchedParent = false;
for (const watchedPath of FSEventsWatchers.keys()) {
if (realPath.indexOf(sysPath$1.resolve(watchedPath) + sysPath$1.sep) === 0) {
watchPath = watchedPath;
cont = FSEventsWatchers.get(watchPath);
watchedParent = true;
break;
}
}
if (cont || watchedParent) {
cont.listeners.add(filteredListener);
} else {
cont = {
listeners: new Set([filteredListener]),
rawEmitter,
watcher: createFSEventsInstance(watchPath, (fullPath, flags) => {
if (!cont.listeners.size) return;
const info = fsevents.getInfo(fullPath, flags);
cont.listeners.forEach(list => {
list(fullPath, flags, info);
});
cont.rawEmitter(info.event, fullPath, info);
})
};
FSEventsWatchers.set(watchPath, cont);
}
// removes this instance's listeners and closes the underlying fsevents
// instance if there are no more listeners left
return () => {
const lst = cont.listeners;
lst.delete(filteredListener);
if (!lst.size) {
FSEventsWatchers.delete(watchPath);
if (cont.watcher) return cont.watcher.stop().then(() => {
cont.rawEmitter = cont.watcher = undefined;
Object.freeze(cont);
});
}
};
}
// Decide whether or not we should start a new higher-level
// parent watcher
const couldConsolidate = (path) => {
let count = 0;
for (const watchPath of FSEventsWatchers.keys()) {
if (watchPath.indexOf(path) === 0) {
count++;
if (count >= consolidateThreshhold) {
return true;
}
}
}
return false;
};
// returns boolean indicating whether fsevents can be used
const canUse = () => fsevents && FSEventsWatchers.size < 128;
// determines subdirectory traversal levels from root to path
const calcDepth = (path, root) => {
let i = 0;
while (!path.indexOf(root) && (path = sysPath$1.dirname(path)) !== root) i++;
return i;
};
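// Illustrative behavior of calcDepth (paths are hypothetical):
//   calcDepth('/watch/a/b/file.txt', '/watch') // => 2 (subdirectory levels below root)
//   calcDepth('/watch/file.txt', '/watch')     // => 0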
// returns boolean indicating whether the fsevents' event info has the same type
// as the one returned by fs.stat
const sameTypes = (info, stats) => (
info.type === FSEVENT_TYPE_DIRECTORY && stats.isDirectory() ||
info.type === FSEVENT_TYPE_SYMLINK && stats.isSymbolicLink() ||
info.type === FSEVENT_TYPE_FILE && stats.isFile()
);
/**
* @mixin
*/
let FsEventsHandler$1 = class FsEventsHandler {
/**
* @param {import('../index').FSWatcher} fsw
*/
constructor(fsw) {
this.fsw = fsw;
}
checkIgnored(path, stats) {
const ipaths = this.fsw._ignoredPaths;
if (this.fsw._isIgnored(path, stats)) {
ipaths.add(path);
if (stats && stats.isDirectory()) {
ipaths.add(path + ROOT_GLOBSTAR);
}
return true;
}
ipaths.delete(path);
ipaths.delete(path + ROOT_GLOBSTAR);
}
addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts) {
const event = watchedDir.has(item) ? EV_CHANGE$1 : EV_ADD$1;
this.handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
async checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts) {
try {
const stats = await stat$1(path);
if (this.fsw.closed) return;
if (sameTypes(info, stats)) {
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
} else {
this.handleEvent(EV_UNLINK$1, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
} catch (error) {
if (error.code === 'EACCES') {
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
} else {
this.handleEvent(EV_UNLINK$1, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
}
}
handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts) {
if (this.fsw.closed || this.checkIgnored(path)) return;
if (event === EV_UNLINK$1) {
const isDirectory = info.type === FSEVENT_TYPE_DIRECTORY;
// suppress unlink events on never before seen files
if (isDirectory || watchedDir.has(item)) {
this.fsw._remove(parent, item, isDirectory);
}
} else {
if (event === EV_ADD$1) {
// track new directories
if (info.type === FSEVENT_TYPE_DIRECTORY) this.fsw._getWatchedDir(path);
if (info.type === FSEVENT_TYPE_SYMLINK && opts.followSymlinks) {
// push symlinks back to the top of the stack to get handled
const curDepth = opts.depth === undefined ?
undefined : calcDepth(fullPath, realPath) + 1;
return this._addToFsEvents(path, false, true, curDepth);
}
// track new paths
// (other than symlinks being followed, which will be tracked soon)
this.fsw._getWatchedDir(parent).add(item);
}
/**
* @type {'add'|'addDir'|'unlink'|'unlinkDir'}
*/
const eventName = info.type === FSEVENT_TYPE_DIRECTORY ? event + DIR_SUFFIX : event;
this.fsw._emit(eventName, path);
if (eventName === EV_ADD_DIR$1) this._addToFsEvents(path, false, true);
}
}
/**
* Handle symlinks encountered during directory scan
* @param {String} watchPath - file/dir path to be watched with fsevents
* @param {String} realPath - real path (in case of symlinks)
* @param {Function} transform - path transformer
* @param {Function} globFilter - path filter in case a glob pattern was provided
* @returns {Function} closer for the watcher instance
*/
_watchWithFsEvents(watchPath, realPath, transform, globFilter) {
if (this.fsw.closed || this.fsw._isIgnored(watchPath)) return;
const opts = this.fsw.options;
const watchCallback = async (fullPath, flags, info) => {
// PATCH: bypass the callback for better perf when fullPath hit the ignored file list
if (this.fsw.closed || this.fsw._isIgnored(fullPath)) return;
if (
opts.depth !== undefined &&
calcDepth(fullPath, realPath) > opts.depth
) return;
const path = transform(sysPath$1.join(
watchPath, sysPath$1.relative(watchPath, fullPath)
));
if (globFilter && !globFilter(path)) return;
// ensure directories are tracked
const parent = sysPath$1.dirname(path);
const item = sysPath$1.basename(path);
const watchedDir = this.fsw._getWatchedDir(
info.type === FSEVENT_TYPE_DIRECTORY ? path : parent
);
// correct for wrong events emitted
if (wrongEventFlags.has(flags) || info.event === FSEVENT_UNKNOWN) {
if (typeof opts.ignored === FUNCTION_TYPE$1) {
let stats;
try {
stats = await stat$1(path);
} catch (error) {}
if (this.fsw.closed) return;
if (this.checkIgnored(path, stats)) return;
if (sameTypes(info, stats)) {
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
} else {
this.handleEvent(EV_UNLINK$1, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
} else {
this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
} else {
switch (info.event) {
case FSEVENT_CREATED:
case FSEVENT_MODIFIED:
return this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
case FSEVENT_DELETED:
case FSEVENT_MOVED:
return this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
}
};
const closer = setFSEventsListener(
watchPath,
realPath,
watchCallback,
this.fsw._emitRaw
);
this.fsw._emitReady();
return closer;
}
/**
* Handle symlinks encountered during directory scan
* @param {String} linkPath path to symlink
* @param {String} fullPath absolute path to the symlink
* @param {Function} transform pre-existing path transformer
* @param {Number} curDepth level of subdirectories traversed to where symlink is
* @returns {Promise<void>}
*/
async _handleFsEventsSymlink(linkPath, fullPath, transform, curDepth) {
// don't follow the same symlink more than once
if (this.fsw.closed || this.fsw._symlinkPaths.has(fullPath)) return;
this.fsw._symlinkPaths.set(fullPath, true);
this.fsw._incrReadyCount();
try {
const linkTarget = await realpath(linkPath);
if (this.fsw.closed) return;
if (this.fsw._isIgnored(linkTarget)) {
return this.fsw._emitReady();
}
this.fsw._incrReadyCount();
// add the linkTarget for watching with a wrapper for transform
// that causes emitted paths to incorporate the link's path
this._addToFsEvents(linkTarget || linkPath, (path) => {
let aliasedPath = linkPath;
if (linkTarget && linkTarget !== DOT_SLASH) {
aliasedPath = path.replace(linkTarget, linkPath);
} else if (path !== DOT_SLASH) {
aliasedPath = sysPath$1.join(linkPath, path);
}
return transform(aliasedPath);
}, false, curDepth);
} catch(error) {
if (this.fsw._handleError(error)) {
return this.fsw._emitReady();
}
}
}
/**
*
* @param {Path} newPath
* @param {fs.Stats} stats
*/
emitAdd(newPath, stats, processPath, opts, forceAdd) {
const pp = processPath(newPath);
const isDir = stats.isDirectory();
const dirObj = this.fsw._getWatchedDir(sysPath$1.dirname(pp));
const base = sysPath$1.basename(pp);
// ensure empty dirs get tracked
if (isDir) this.fsw._getWatchedDir(pp);
if (dirObj.has(base)) return;
dirObj.add(base);
if (!opts.ignoreInitial || forceAdd === true) {
this.fsw._emit(isDir ? EV_ADD_DIR$1 : EV_ADD$1, pp, stats);
}
}
initWatch(realPath, path, wh, processPath) {
if (this.fsw.closed) return;
const closer = this._watchWithFsEvents(
wh.watchPath,
sysPath$1.resolve(realPath || wh.watchPath),
processPath,
wh.globFilter
);
this.fsw._addPathCloser(path, closer);
}
/**
* Handle added path with fsevents
* @param {String} path file/dir path or glob pattern
* @param {Function|Boolean=} transform converts working path to what the user expects
* @param {Boolean=} forceAdd ensure add is emitted
* @param {Number=} priorDepth Level of subdirectories already traversed.
* @returns {Promise<void>}
*/
async _addToFsEvents(path, transform, forceAdd, priorDepth) {
if (this.fsw.closed) {
return;
}
const opts = this.fsw.options;
const processPath = typeof transform === FUNCTION_TYPE$1 ? transform : IDENTITY_FN;
const wh = this.fsw._getWatchHelpers(path);
// evaluate what is at the path we're being asked to watch
try {
const stats = await statMethods[wh.statMethod](wh.watchPath);
if (this.fsw.closed) return;
if (this.fsw._isIgnored(wh.watchPath, stats)) {
throw null;
}
if (stats.isDirectory()) {
// emit addDir unless this is a glob parent
if (!wh.globFilter) this.emitAdd(processPath(path), stats, processPath, opts, forceAdd);
// don't recurse further if it would exceed depth setting
if (priorDepth && priorDepth > opts.depth) return;
// scan the contents of the dir
this.fsw._readdirp(wh.watchPath, {
fileFilter: entry => wh.filterPath(entry),
directoryFilter: entry => wh.filterDir(entry),
...Depth(opts.depth - (priorDepth || 0))
}).on(STR_DATA, (entry) => {
// need to check filterPath on dirs b/c filterDir is less restrictive
if (this.fsw.closed) {
return;
}
if (entry.stats.isDirectory() && !wh.filterPath(entry)) return;
const joinedPath = sysPath$1.join(wh.watchPath, entry.path);
const {fullPath} = entry;
if (wh.followSymlinks && entry.stats.isSymbolicLink()) {
// preserve the current depth here since it can't be derived from
// real paths past the symlink
const curDepth = opts.depth === undefined ?
undefined : calcDepth(joinedPath, sysPath$1.resolve(wh.watchPath)) + 1;
this._handleFsEventsSymlink(joinedPath, fullPath, processPath, curDepth);
} else {
this.emitAdd(joinedPath, entry.stats, processPath, opts, forceAdd);
}
}).on(EV_ERROR$1, EMPTY_FN$1).on(STR_END$1, () => {
this.fsw._emitReady();
});
} else {
this.emitAdd(wh.watchPath, stats, processPath, opts, forceAdd);
this.fsw._emitReady();
}
} catch (error) {
if (!error || this.fsw._handleError(error)) {
// TODO: Strange thing: "should not choke on an ignored watch path" fails without 2 ready calls -__-
this.fsw._emitReady();
this.fsw._emitReady();
}
}
if (opts.persistent && forceAdd !== true) {
if (typeof transform === FUNCTION_TYPE$1) {
// realpath has already been resolved
this.initWatch(undefined, path, wh, processPath);
} else {
let realPath;
try {
realPath = await realpath(wh.watchPath);
} catch (e) {}
this.initWatch(realPath, path, wh, processPath);
}
}
}
};
fseventsHandler.exports = FsEventsHandler$1;
fseventsHandler.exports.canUse = canUse;
var fseventsHandlerExports = fseventsHandler.exports;
const { EventEmitter: EventEmitter$2 } = require$$0$5;
const fs$5 = require$$0__default;
const sysPath = require$$0$4;
const { promisify } = require$$0$6;
const readdirp = readdirp_1;
const anymatch = anymatchExports.default;
const globParent = globParent$2;
const isGlob = isGlob$2;
const braces = braces_1;
const normalizePath = normalizePath$2;
const NodeFsHandler = nodefsHandler;
const FsEventsHandler = fseventsHandlerExports;
const {
EV_ALL,
EV_READY,
EV_ADD,
EV_CHANGE,
EV_UNLINK,
EV_ADD_DIR,
EV_UNLINK_DIR,
EV_RAW,
EV_ERROR,
STR_CLOSE,
STR_END,
BACK_SLASH_RE,
DOUBLE_SLASH_RE,
SLASH_OR_BACK_SLASH_RE,
DOT_RE,
REPLACER_RE,
SLASH,
SLASH_SLASH,
BRACE_START,
BANG,
ONE_DOT,
TWO_DOTS,
GLOBSTAR,
SLASH_GLOBSTAR,
ANYMATCH_OPTS,
STRING_TYPE,
FUNCTION_TYPE,
EMPTY_STR,
EMPTY_FN,
isWindows: isWindows$1,
isMacos,
isIBMi
} = constants$1;
const stat = promisify(fs$5.stat);
const readdir = promisify(fs$5.readdir);
/**
* @typedef {String} Path
* @typedef {'all'|'add'|'addDir'|'change'|'unlink'|'unlinkDir'|'raw'|'error'|'ready'} EventName
* @typedef {'readdir'|'watch'|'add'|'remove'|'change'} ThrottleType
*/
/**
*
* @typedef {Object} WatchHelpers
* @property {Boolean} followSymlinks
* @property {'stat'|'lstat'} statMethod
* @property {Path} path
* @property {Path} watchPath
* @property {Function} entryPath
* @property {Boolean} hasGlob
* @property {Object} globFilter
* @property {Function} filterPath
* @property {Function} filterDir
*/
const arrify = (value = []) => Array.isArray(value) ? value : [value];
const flatten = (list, result = []) => {
list.forEach(item => {
if (Array.isArray(item)) {
flatten(item, result);
} else {
result.push(item);
}
});
return result;
};
const unifyPaths = (paths_) => {
/**
* @type {Array<String>}
*/
const paths = flatten(arrify(paths_));
if (!paths.every(p => typeof p === STRING_TYPE)) {
throw new TypeError(`Non-string provided as watch path: ${paths}`);
}
return paths.map(normalizePathToUnix);
};
// If SLASH_SLASH occurs at the beginning of path, it is not replaced
// because "//StoragePC/DrivePool/Movies" is a valid network path
const toUnix = (string) => {
let str = string.replace(BACK_SLASH_RE, SLASH);
let prepend = false;
if (str.startsWith(SLASH_SLASH)) {
prepend = true;
}
while (str.match(DOUBLE_SLASH_RE)) {
str = str.replace(DOUBLE_SLASH_RE, SLASH);
}
if (prepend) {
str = SLASH + str;
}
return str;
};
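// Illustrative behavior of toUnix (inputs are hypothetical):
//   toUnix('C:\\foo\\bar')             // => 'C:/foo/bar'
//   toUnix('\\\\StoragePC\\DrivePool') // => '//StoragePC/DrivePool' (leading // of network paths kept)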
// Our version of upath.normalize
// TODO: this is not equal to path-normalize module - investigate why
const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path)));
const normalizeIgnored = (cwd = EMPTY_STR) => (path) => {
if (typeof path !== STRING_TYPE) return path;
return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path));
};
const getAbsolutePath = (path, cwd) => {
if (sysPath.isAbsolute(path)) {
return path;
}
if (path.startsWith(BANG)) {
return BANG + sysPath.join(cwd, path.slice(1));
}
return sysPath.join(cwd, path);
};
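// Illustrative behavior of getAbsolutePath (cwd and paths are hypothetical, posix-style join shown):
//   getAbsolutePath('src/**/*.js', '/project') // => '/project/src/**/*.js'
//   getAbsolutePath('!dist/**', '/project')    // => '!/project/dist/**' (the "!" prefix is preserved)
//   getAbsolutePath('/abs/path', '/project')   // => '/abs/path' (already absolute)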
const undef = (opts, key) => opts[key] === undefined;
/**
* Directory entry.
* @property {Path} path
* @property {Set<Path>} items
*/
class DirEntry {
/**
* @param {Path} dir
* @param {Function} removeWatcher
*/
constructor(dir, removeWatcher) {
this.path = dir;
this._removeWatcher = removeWatcher;
/** @type {Set<Path>} */
this.items = new Set();
}
add(item) {
const {items} = this;
if (!items) return;
if (item !== ONE_DOT && item !== TWO_DOTS) items.add(item);
}
async remove(item) {
const {items} = this;
if (!items) return;
items.delete(item);
if (items.size > 0) return;
const dir = this.path;
try {
await readdir(dir);
} catch (err) {
if (this._removeWatcher) {
this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir));
}
}
}
has(item) {
const {items} = this;
if (!items) return;
return items.has(item);
}
/**
* @returns {Array<String>}
*/
getChildren() {
const {items} = this;
if (!items) return;
return [...items.values()];
}
dispose() {
this.items.clear();
delete this.path;
delete this._removeWatcher;
delete this.items;
Object.freeze(this);
}
}
const STAT_METHOD_F = 'stat';
const STAT_METHOD_L = 'lstat';
class WatchHelper {
constructor(path, watchPath, follow, fsw) {
this.fsw = fsw;
this.path = path = path.replace(REPLACER_RE, EMPTY_STR);
this.watchPath = watchPath;
this.fullWatchPath = sysPath.resolve(watchPath);
this.hasGlob = watchPath !== path;
/** @type {object|boolean} */
if (path === EMPTY_STR) this.hasGlob = false;
this.globSymlink = this.hasGlob && follow ? undefined : false;
this.globFilter = this.hasGlob ? anymatch(path, undefined, ANYMATCH_OPTS) : false;
this.dirParts = this.getDirParts(path);
this.dirParts.forEach((parts) => {
if (parts.length > 1) parts.pop();
});
this.followSymlinks = follow;
this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L;
}
checkGlobSymlink(entry) {
// only need to resolve once
// first entry should always have entry.parentDir === EMPTY_STR
if (this.globSymlink === undefined) {
this.globSymlink = entry.fullParentDir === this.fullWatchPath ?
false : {realPath: entry.fullParentDir, linkPath: this.fullWatchPath};
}
if (this.globSymlink) {
return entry.fullPath.replace(this.globSymlink.realPath, this.globSymlink.linkPath);
}
return entry.fullPath;
}
entryPath(entry) {
return sysPath.join(this.watchPath,
sysPath.relative(this.watchPath, this.checkGlobSymlink(entry))
);
}
filterPath(entry) {
const {stats} = entry;
if (stats && stats.isSymbolicLink()) return this.filterDir(entry);
const resolvedPath = this.entryPath(entry);
const matchesGlob = this.hasGlob && typeof this.globFilter === FUNCTION_TYPE ?
this.globFilter(resolvedPath) : true;
return matchesGlob &&
this.fsw._isntIgnored(resolvedPath, stats) &&
this.fsw._hasReadPermissions(stats);
}
getDirParts(path) {
if (!this.hasGlob) return [];
const parts = [];
const expandedPath = path.includes(BRACE_START) ? braces.expand(path) : [path];
expandedPath.forEach((path) => {
parts.push(sysPath.relative(this.watchPath, path).split(SLASH_OR_BACK_SLASH_RE));
});
return parts;
}
filterDir(entry) {
if (this.hasGlob) {
const entryParts = this.getDirParts(this.checkGlobSymlink(entry));
let globstar = false;
this.unmatchedGlob = !this.dirParts.some((parts) => {
return parts.every((part, i) => {
if (part === GLOBSTAR) globstar = true;
return globstar || !entryParts[0][i] || anymatch(part, entryParts[0][i], ANYMATCH_OPTS);
});
});
}
return !this.unmatchedGlob && this.fsw._isntIgnored(this.entryPath(entry), entry.stats);
}
}
/**
* Watches files & directories for changes. Emitted events:
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
*
* new FSWatcher()
* .add(directories)
* .on('add', path => log('File', path, 'was added'))
*/
class FSWatcher extends EventEmitter$2 {
// Not indenting methods for history's sake; for now.
constructor(_opts) {
super();
const opts = {};
if (_opts) Object.assign(opts, _opts); // for frozen objects
/** @type {Map<String, DirEntry>} */
this._watched = new Map();
/** @type {Map<String, Array>} */
this._closers = new Map();
/** @type {Set<String>} */
this._ignoredPaths = new Set();
/** @type {Map<ThrottleType, Map>} */
this._throttled = new Map();
/** @type {Map<Path, String|Boolean>} */
this._symlinkPaths = new Map();
this._streams = new Set();
this.closed = false;
// Set up default options.
if (undef(opts, 'persistent')) opts.persistent = true;
if (undef(opts, 'ignoreInitial')) opts.ignoreInitial = false;
if (undef(opts, 'ignorePermissionErrors')) opts.ignorePermissionErrors = false;
if (undef(opts, 'interval')) opts.interval = 100;
if (undef(opts, 'binaryInterval')) opts.binaryInterval = 300;
if (undef(opts, 'disableGlobbing')) opts.disableGlobbing = false;
opts.enableBinaryInterval = opts.binaryInterval !== opts.interval;
// Enable fsevents on OS X when polling isn't explicitly enabled.
if (undef(opts, 'useFsEvents')) opts.useFsEvents = !opts.usePolling;
// If we can't use fsevents, ensure the options reflect that it's disabled.
const canUseFsEvents = FsEventsHandler.canUse();
if (!canUseFsEvents) opts.useFsEvents = false;
// Use polling on Mac if not using fsevents.
// Other platforms use non-polling fs_watch.
if (undef(opts, 'usePolling') && !opts.useFsEvents) {
opts.usePolling = isMacos;
}
// Always default to polling on IBM i because fs.watch() is not available on IBM i.
if(isIBMi) {
opts.usePolling = true;
}
// Global override (useful for end-developers that need to force polling for all
// instances of chokidar, regardless of usage/dependency depth)
const envPoll = process.env.CHOKIDAR_USEPOLLING;
if (envPoll !== undefined) {
const envLower = envPoll.toLowerCase();
if (envLower === 'false' || envLower === '0') {
opts.usePolling = false;
} else if (envLower === 'true' || envLower === '1') {
opts.usePolling = true;
} else {
opts.usePolling = !!envLower;
}
}
const envInterval = process.env.CHOKIDAR_INTERVAL;
if (envInterval) {
opts.interval = Number.parseInt(envInterval, 10);
}
// Editor atomic write normalization enabled by default with fs.watch
if (undef(opts, 'atomic')) opts.atomic = !opts.usePolling && !opts.useFsEvents;
if (opts.atomic) this._pendingUnlinks = new Map();
if (undef(opts, 'followSymlinks')) opts.followSymlinks = true;
if (undef(opts, 'awaitWriteFinish')) opts.awaitWriteFinish = false;
if (opts.awaitWriteFinish === true) opts.awaitWriteFinish = {};
const awf = opts.awaitWriteFinish;
if (awf) {
if (!awf.stabilityThreshold) awf.stabilityThreshold = 2000;
if (!awf.pollInterval) awf.pollInterval = 100;
this._pendingWrites = new Map();
}
if (opts.ignored) opts.ignored = arrify(opts.ignored);
let readyCalls = 0;
this._emitReady = () => {
readyCalls++;
if (readyCalls >= this._readyCount) {
this._emitReady = EMPTY_FN;
this._readyEmitted = true;
// use process.nextTick to allow time for listener to be bound
process.nextTick(() => this.emit(EV_READY));
}
};
this._emitRaw = (...args) => this.emit(EV_RAW, ...args);
this._readyEmitted = false;
this.options = opts;
// Initialize with proper watcher.
if (opts.useFsEvents) {
this._fsEventsHandler = new FsEventsHandler(this);
} else {
this._nodeFsHandler = new NodeFsHandler(this);
}
// You're frozen when your heart's not open.
Object.freeze(opts);
}
// Public methods
/**
* Adds paths to be watched on an existing FSWatcher instance
* @param {Path|Array<Path>} paths_
* @param {String=} _origAdd private; for handling non-existent paths to be watched
* @param {Boolean=} _internal private; indicates a non-user add
* @returns {FSWatcher} for chaining
*/
add(paths_, _origAdd, _internal) {
const {cwd, disableGlobbing} = this.options;
this.closed = false;
let paths = unifyPaths(paths_);
if (cwd) {
paths = paths.map((path) => {
const absPath = getAbsolutePath(path, cwd);
// Check `path` instead of `absPath` because the cwd portion can't be a glob
if (disableGlobbing || !isGlob(path)) {
return absPath;
}
return normalizePath(absPath);
});
}
// set aside negated glob strings
paths = paths.filter((path) => {
if (path.startsWith(BANG)) {
this._ignoredPaths.add(path.slice(1));
return false;
}
// if a path is being added that was previously ignored, stop ignoring it
this._ignoredPaths.delete(path);
this._ignoredPaths.delete(path + SLASH_GLOBSTAR);
// reset the cached userIgnored anymatch fn
// to make ignoredPaths changes effective
this._userIgnored = undefined;
return true;
});
if (this.options.useFsEvents && this._fsEventsHandler) {
if (!this._readyCount) this._readyCount = paths.length;
if (this.options.persistent) this._readyCount *= 2;
paths.forEach((path) => this._fsEventsHandler._addToFsEvents(path));
} else {
if (!this._readyCount) this._readyCount = 0;
this._readyCount += paths.length;
Promise.all(
paths.map(async path => {
const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, 0, 0, _origAdd);
if (res) this._emitReady();
return res;
})
).then(results => {
if (this.closed) return;
results.filter(item => item).forEach(item => {
this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item));
});
});
}
return this;
}
/**
* Close watchers or start ignoring events from specified paths.
* @param {Path|Array<Path>} paths_ - string or array of strings, file/directory paths and/or globs
* @returns {FSWatcher} for chaining
*/
unwatch(paths_) {
if (this.closed) return this;
const paths = unifyPaths(paths_);
const {cwd} = this.options;
paths.forEach((path) => {
// convert to absolute path unless relative path already matches
if (!sysPath.isAbsolute(path) && !this._closers.has(path)) {
if (cwd) path = sysPath.join(cwd, path);
path = sysPath.resolve(path);
}
this._closePath(path);
this._ignoredPaths.add(path);
if (this._watched.has(path)) {
this._ignoredPaths.add(path + SLASH_GLOBSTAR);
}
// reset the cached userIgnored anymatch fn
// to make ignoredPaths changes effective
this._userIgnored = undefined;
});
return this;
}
/**
* Close watchers and remove all listeners from watched paths.
* @returns {Promise<void>}.
*/
close() {
if (this.closed) return this._closePromise;
this.closed = true;
// Memory management.
this.removeAllListeners();
const closers = [];
this._closers.forEach(closerList => closerList.forEach(closer => {
const promise = closer();
if (promise instanceof Promise) closers.push(promise);
}));
this._streams.forEach(stream => stream.destroy());
this._userIgnored = undefined;
this._readyCount = 0;
this._readyEmitted = false;
this._watched.forEach(dirent => dirent.dispose());
['closers', 'watched', 'streams', 'symlinkPaths', 'throttled'].forEach(key => {
this[`_${key}`].clear();
});
this._closePromise = closers.length ? Promise.all(closers).then(() => undefined) : Promise.resolve();
return this._closePromise;
}
/**
* Expose list of watched paths
* @returns {Object} watched paths, keyed by directory, each mapping to a sorted array of contained item names
*/
getWatched() {
const watchList = {};
this._watched.forEach((entry, dir) => {
const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir;
watchList[key || ONE_DOT] = entry.getChildren().sort();
});
return watchList;
}
emitWithAll(event, args) {
this.emit(...args);
if (event !== EV_ERROR) this.emit(EV_ALL, ...args);
}
// Common helpers
// --------------
/**
* Normalize and emit events.
* Calling _emit DOES NOT MEAN emit() would be called!
* @param {EventName} event Type of event
* @param {Path} path File or directory path
* @param {*=} val1 arguments to be passed with event
* @param {*=} val2
* @param {*=} val3
* @returns the FSWatcher instance for chaining, or undefined if the watcher is closed or the event was suppressed
*/
async _emit(event, path, val1, val2, val3) {
if (this.closed) return;
const opts = this.options;
if (isWindows$1) path = sysPath.normalize(path);
if (opts.cwd) path = sysPath.relative(opts.cwd, path);
/** @type Array<any> */
const args = [event, path];
if (val3 !== undefined) args.push(val1, val2, val3);
else if (val2 !== undefined) args.push(val1, val2);
else if (val1 !== undefined) args.push(val1);
const awf = opts.awaitWriteFinish;
let pw;
if (awf && (pw = this._pendingWrites.get(path))) {
pw.lastChange = new Date();
return this;
}
if (opts.atomic) {
if (event === EV_UNLINK) {
this._pendingUnlinks.set(path, args);
setTimeout(() => {
this._pendingUnlinks.forEach((entry, path) => {
this.emit(...entry);
this.emit(EV_ALL, ...entry);
this._pendingUnlinks.delete(path);
});
}, typeof opts.atomic === 'number' ? opts.atomic : 100);
return this;
}
if (event === EV_ADD && this._pendingUnlinks.has(path)) {
event = args[0] = EV_CHANGE;
this._pendingUnlinks.delete(path);
}
}
if (awf && (event === EV_ADD || event === EV_CHANGE) && this._readyEmitted) {
const awfEmit = (err, stats) => {
if (err) {
event = args[0] = EV_ERROR;
args[1] = err;
this.emitWithAll(event, args);
} else if (stats) {
// if stats doesn't exist the file must have been deleted
if (args.length > 2) {
args[2] = stats;
} else {
args.push(stats);
}
this.emitWithAll(event, args);
}
};
this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit);
return this;
}
if (event === EV_CHANGE) {
const isThrottled = !this._throttle(EV_CHANGE, path, 50);
if (isThrottled) return this;
}
if (opts.alwaysStat && val1 === undefined &&
(event === EV_ADD || event === EV_ADD_DIR || event === EV_CHANGE)
) {
const fullPath = opts.cwd ? sysPath.join(opts.cwd, path) : path;
let stats;
try {
stats = await stat(fullPath);
} catch (err) {}
// Suppress event when fs_stat fails, to avoid sending undefined 'stat'
if (!stats || this.closed) return;
args.push(stats);
}
this.emitWithAll(event, args);
return this;
}
/**
* Common handler for errors
* @param {Error} error
* @returns {Error|Boolean} The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
*/
_handleError(error) {
const code = error && error.code;
if (error && code !== 'ENOENT' && code !== 'ENOTDIR' &&
(!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES'))
) {
this.emit(EV_ERROR, error);
}
return error || this.closed;
}
/**
* Helper utility for throttling
* @param {ThrottleType} actionType type being throttled
* @param {Path} path being acted upon
* @param {Number} timeout duration of time to suppress duplicate actions
* @returns {Object|false} tracking object or false if action should be suppressed
*/
_throttle(actionType, path, timeout) {
if (!this._throttled.has(actionType)) {
this._throttled.set(actionType, new Map());
}
/** @type {Map<Path, Object>} */
const action = this._throttled.get(actionType);
/** @type {Object} */
const actionPath = action.get(path);
if (actionPath) {
actionPath.count++;
return false;
}
let timeoutObject;
const clear = () => {
const item = action.get(path);
const count = item ? item.count : 0;
action.delete(path);
clearTimeout(timeoutObject);
if (item) clearTimeout(item.timeoutObject);
return count;
};
timeoutObject = setTimeout(clear, timeout);
const thr = {timeoutObject, clear, count: 0};
action.set(path, thr);
return thr;
}
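// Illustrative note (mirrors how this helper is used elsewhere in this file):
// a falsy return means an identical action for the same path already fired
// within the timeout window and should be dropped, e.g.
//   if (!this._throttle(EV_CHANGE, path, 50)) return this; // suppress duplicate 'change'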
_incrReadyCount() {
return this._readyCount++;
}
/**
* Awaits write operation to finish.
* Polls a newly created file for size variations. When the file's size does not change for 'threshold' milliseconds, the callback is called.
* @param {Path} path being acted upon
* @param {Number} threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished
* @param {EventName} event
* @param {Function} awfEmit Callback to be called when ready for event to be emitted.
*/
_awaitWriteFinish(path, threshold, event, awfEmit) {
let timeoutHandler;
let fullPath = path;
if (this.options.cwd && !sysPath.isAbsolute(path)) {
fullPath = sysPath.join(this.options.cwd, path);
}
const now = new Date();
const awaitWriteFinish = (prevStat) => {
fs$5.stat(fullPath, (err, curStat) => {
if (err || !this._pendingWrites.has(path)) {
if (err && err.code !== 'ENOENT') awfEmit(err);
return;
}
const now = Number(new Date());
if (prevStat && curStat.size !== prevStat.size) {
this._pendingWrites.get(path).lastChange = now;
}
const pw = this._pendingWrites.get(path);
const df = now - pw.lastChange;
if (df >= threshold) {
this._pendingWrites.delete(path);
awfEmit(undefined, curStat);
} else {
timeoutHandler = setTimeout(
awaitWriteFinish,
this.options.awaitWriteFinish.pollInterval,
curStat
);
}
});
};
if (!this._pendingWrites.has(path)) {
this._pendingWrites.set(path, {
lastChange: now,
cancelWait: () => {
this._pendingWrites.delete(path);
clearTimeout(timeoutHandler);
return event;
}
});
timeoutHandler = setTimeout(
awaitWriteFinish,
this.options.awaitWriteFinish.pollInterval
);
}
}
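// Hedged usage sketch (the directory and thresholds below are illustrative):
// this polling is driven by the `awaitWriteFinish` option of the public watch()
// helper defined later in this file; `true` keeps the constructor defaults
// (stabilityThreshold: 2000, pollInterval: 100), while an object overrides them:
//   watch('some/dir', { awaitWriteFinish: { stabilityThreshold: 500, pollInterval: 50 } });
// 'add'/'change' then fire only after the file size has been stable for
// `stabilityThreshold` milliseconds.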
_getGlobIgnored() {
return [...this._ignoredPaths.values()];
}
/**
* Determines whether user has asked to ignore this path.
* @param {Path} path filepath or dir
* @param {fs.Stats=} stats result of fs.stat
* @returns {Boolean}
*/
_isIgnored(path, stats) {
if (this.options.atomic && DOT_RE.test(path)) return true;
if (!this._userIgnored) {
const {cwd} = this.options;
const ign = this.options.ignored;
const ignored = ign && ign.map(normalizeIgnored(cwd));
const paths = arrify(ignored)
.filter((path) => typeof path === STRING_TYPE && !isGlob(path))
.map((path) => path + SLASH_GLOBSTAR);
const list = this._getGlobIgnored().map(normalizeIgnored(cwd)).concat(ignored, paths);
this._userIgnored = anymatch(list, undefined, ANYMATCH_OPTS);
}
return this._userIgnored([path, stats]);
}
_isntIgnored(path, stat) {
return !this._isIgnored(path, stat);
}
/**
* Provides a set of common helpers and properties relating to symlink and glob handling.
* @param {Path} path file, directory, or glob pattern being watched
* @param {Number=} depth at any depth > 0, this isn't a glob
* @returns {WatchHelper} object containing helpers for this path
*/
_getWatchHelpers(path, depth) {
const watchPath = depth || this.options.disableGlobbing || !isGlob(path) ? path : globParent(path);
const follow = this.options.followSymlinks;
return new WatchHelper(path, watchPath, follow, this);
}
// Directory helpers
// -----------------
/**
* Provides directory tracking objects
* @param {String} directory path of the directory
* @returns {DirEntry} the directory's tracking object
*/
_getWatchedDir(directory) {
if (!this._boundRemove) this._boundRemove = this._remove.bind(this);
const dir = sysPath.resolve(directory);
if (!this._watched.has(dir)) this._watched.set(dir, new DirEntry(dir, this._boundRemove));
return this._watched.get(dir);
}
// File helpers
// ------------
/**
* Check for read permissions.
* Based on this answer on SO: https://stackoverflow.com/a/11781404/1358405
* @param {fs.Stats} stats - object, result of fs_stat
* @returns {Boolean} indicates whether the file can be read
*/
_hasReadPermissions(stats) {
if (this.options.ignorePermissionErrors) return true;
// stats.mode may be bigint
const md = stats && Number.parseInt(stats.mode, 10);
const st = md & 0o777;
const it = Number.parseInt(st.toString(8)[0], 10);
return Boolean(4 & it);
}
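// Worked example of the bit math above (illustrative): for mode 0o644,
// `md & 0o777` is 0o644, its leading octal digit is 6 (owner bits rw-), and
// `4 & 6` is 4 (truthy), so the owner read bit is set; for mode 0o200 the
// leading digit is 2 and `4 & 2` is 0, so reading is not permitted.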
/**
* Handles emitting unlink events for
* files and directories, and via recursion, for
* files and directories within directories that are unlinked
* @param {String} directory within which the following item is located
* @param {String} item base path of item/directory
* @returns {void}
*/
_remove(directory, item, isDirectory) {
// if what is being deleted is a directory, get that directory's paths
// for recursive deleting and cleaning of watched object
// if it is not a directory, nestedDirectoryChildren will be empty array
const path = sysPath.join(directory, item);
const fullPath = sysPath.resolve(path);
isDirectory = isDirectory != null
? isDirectory
: this._watched.has(path) || this._watched.has(fullPath);
// prevent duplicate handling in case of arriving here nearly simultaneously
// via multiple paths (such as _handleFile and _handleDir)
if (!this._throttle('remove', path, 100)) return;
// if the only watched file is removed, watch for its return
if (!isDirectory && !this.options.useFsEvents && this._watched.size === 1) {
this.add(directory, item, true);
}
// This will create a new entry in the watched object in either case,
// so we have to do the directory check beforehand.
const wp = this._getWatchedDir(path);
const nestedDirectoryChildren = wp.getChildren();
// Recursively remove children directories / files.
nestedDirectoryChildren.forEach(nested => this._remove(path, nested));
// Check if item was on the watched list and remove it
const parent = this._getWatchedDir(directory);
const wasTracked = parent.has(item);
parent.remove(item);
// Fixes issue #1042 -> Relative paths were detected and added as symlinks
// (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612),
// but never removed from the map in case the path was deleted.
// This leads to an incorrect state if the path was recreated:
// https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553
if (this._symlinkPaths.has(fullPath)) {
this._symlinkPaths.delete(fullPath);
}
// If we wait for this file to be fully written, cancel the wait.
let relPath = path;
if (this.options.cwd) relPath = sysPath.relative(this.options.cwd, path);
if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) {
const event = this._pendingWrites.get(relPath).cancelWait();
if (event === EV_ADD) return;
}
// The Entry will either be a directory that just got removed
// or a bogus entry to a file, in either case we have to remove it
this._watched.delete(path);
this._watched.delete(fullPath);
const eventName = isDirectory ? EV_UNLINK_DIR : EV_UNLINK;
if (wasTracked && !this._isIgnored(path)) this._emit(eventName, path);
// Avoid conflicts if we later create another file with the same name
if (!this.options.useFsEvents) {
this._closePath(path);
}
}
/**
* Closes all watchers for a path
* @param {Path} path
*/
_closePath(path) {
this._closeFile(path);
const dir = sysPath.dirname(path);
this._getWatchedDir(dir).remove(sysPath.basename(path));
}
/**
* Closes only file-specific watchers
* @param {Path} path
*/
_closeFile(path) {
const closers = this._closers.get(path);
if (!closers) return;
closers.forEach(closer => closer());
this._closers.delete(path);
}
/**
*
* @param {Path} path
* @param {Function} closer
*/
_addPathCloser(path, closer) {
if (!closer) return;
let list = this._closers.get(path);
if (!list) {
list = [];
this._closers.set(path, list);
}
list.push(closer);
}
_readdirp(root, opts) {
if (this.closed) return;
const options = {type: EV_ALL, alwaysStat: true, lstat: true, ...opts};
let stream = readdirp(root, options);
this._streams.add(stream);
stream.once(STR_CLOSE, () => {
stream = undefined;
});
stream.once(STR_END, () => {
if (stream) {
this._streams.delete(stream);
stream = undefined;
}
});
return stream;
}
}
// Export FSWatcher class
chokidar.FSWatcher = FSWatcher;
/**
* Instantiates watcher with paths to be tracked.
* @param {String|Array<String>} paths file/directory paths and/or globs
* @param {Object=} options chokidar opts
* @returns an instance of FSWatcher for chaining.
*/
const watch = (paths, options) => {
const watcher = new FSWatcher(options);
watcher.add(paths);
return watcher;
};
chokidar.watch = watch;
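// Minimal usage sketch (the path, options and handlers below are illustrative,
// not part of this bundle):
//   const watcher = watch('src', { ignored: /node_modules/, ignoreInitial: true });
//   watcher
//     .on('add', (p) => console.log('added', p))
//     .on('change', (p) => console.log('changed', p))
//     .on('unlink', (p) => console.log('removed', p));
//   // later: await watcher.close();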
var shellQuote$1 = {};
shellQuote$1.quote = function (xs) {
return xs.map(function (s) {
if (s && typeof s === 'object') {
return s.op.replace(/(.)/g, '\\$1');
}
else if (/["\s]/.test(s) && !/'/.test(s)) {
return "'" + s.replace(/(['\\])/g, '\\$1') + "'";
}
else if (/["'\s]/.test(s)) {
return '"' + s.replace(/(["\\$`!])/g, '\\$1') + '"';
}
else {
return String(s).replace(/([A-Za-z]:)?([#!"$&'()*,:;<=>?@\[\\\]^`{|}])/g, '$1\\$2');
}
}).join(' ');
};
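// Illustrative example of quote() as implemented above (the argv is hypothetical):
//   shellQuote$1.quote(['echo', 'a b', "it's"])
//   // -> echo 'a b' "it's"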
// '<(' is process substitution operator and
// can be parsed the same as control operator
var CONTROL = '(?:' + [
'\\|\\|', '\\&\\&', ';;', '\\|\\&', '\\<\\(', '>>', '>\\&', '[&;()|<>]'
].join('|') + ')';
var META = '|&;()<> \\t';
var BAREWORD = '(\\\\[\'"' + META + ']|[^\\s\'"' + META + '])+';
var SINGLE_QUOTE = '"((\\\\"|[^"])*?)"';
var DOUBLE_QUOTE = '\'((\\\\\'|[^\'])*?)\'';
var TOKEN = '';
for (var i = 0; i < 4; i++) {
TOKEN += (Math.pow(16,8)*Math.random()).toString(16);
}
shellQuote$1.parse = function (s, env, opts) {
var mapped = parse$5(s, env, opts);
if (typeof env !== 'function') return mapped;
return mapped.reduce(function (acc, s) {
if (typeof s === 'object') return acc.concat(s);
var xs = s.split(RegExp('(' + TOKEN + '.*?' + TOKEN + ')', 'g'));
if (xs.length === 1) return acc.concat(xs[0]);
return acc.concat(xs.filter(Boolean).map(function (x) {
if (RegExp('^' + TOKEN).test(x)) {
return JSON.parse(x.split(TOKEN)[1]);
}
else return x;
}));
}, []);
};
function parse$5 (s, env, opts) {
var chunker = new RegExp([
'(' + CONTROL + ')', // control chars
'(' + BAREWORD + '|' + SINGLE_QUOTE + '|' + DOUBLE_QUOTE + ')*'
].join('|'), 'g');
var match = s.match(chunker).filter(Boolean);
var commented = false;
if (!match) return [];
if (!env) env = {};
if (!opts) opts = {};
return match.map(function (s, j) {
if (commented) {
return;
}
if (RegExp('^' + CONTROL + '$').test(s)) {
return { op: s };
}
// Hand-written scanner/parser for Bash quoting rules:
//
// 1. inside single quotes, all characters are printed literally.
// 2. inside double quotes, all characters are printed literally
// except variables prefixed by '$' and backslashes followed by
// either a double quote or another backslash.
// 3. outside of any quotes, backslashes are treated as escape
// characters and not printed (unless they are themselves escaped)
// 4. quote context can switch mid-token if there is no whitespace
// between the two quote contexts (e.g. all'one'"token" parses as
// "allonetoken")
var SQ = "'";
var DQ = '"';
var DS = '$';
var BS = opts.escape || '\\';
var quote = false;
var esc = false;
var out = '';
var isGlob = false;
for (var i = 0, len = s.length; i < len; i++) {
var c = s.charAt(i);
isGlob = isGlob || (!quote && (c === '*' || c === '?'));
if (esc) {
out += c;
esc = false;
}
else if (quote) {
if (c === quote) {
quote = false;
}
else if (quote == SQ) {
out += c;
}
else { // Double quote
if (c === BS) {
i += 1;
c = s.charAt(i);
if (c === DQ || c === BS || c === DS) {
out += c;
} else {
out += BS + c;
}
}
else if (c === DS) {
out += parseEnvVar();
}
else {
out += c;
}
}
}
else if (c === DQ || c === SQ) {
quote = c;
}
else if (RegExp('^' + CONTROL + '$').test(c)) {
return { op: s };
}
else if (RegExp('^#$').test(c)) {
commented = true;
if (out.length){
return [out, { comment: s.slice(i+1) + match.slice(j+1).join(' ') }];
}
return [{ comment: s.slice(i+1) + match.slice(j+1).join(' ') }];
}
else if (c === BS) {
esc = true;
}
else if (c === DS) {
out += parseEnvVar();
}
else out += c;
}
if (isGlob) return {op: 'glob', pattern: out};
return out;
function parseEnvVar() {
i += 1;
var varend, varname;
if (s.charAt(i) === '{') {
i += 1;
if (s.charAt(i) === '}') {
throw new Error("Bad substitution: " + s.substr(i - 2, 3));
}
varend = s.indexOf('}', i);
if (varend < 0) {
throw new Error("Bad substitution: " + s.substr(i));
}
varname = s.substr(i, varend - i);
i = varend;
}
else if (/[*@#?$!_\-]/.test(s.charAt(i))) {
varname = s.charAt(i);
i += 1;
}
else {
varend = s.substr(i).match(/[^\w\d_]/);
if (!varend) {
varname = s.substr(i);
i = s.length;
} else {
varname = s.substr(i, varend.index);
i += varend.index - 1;
}
}
return getVar(null, '', varname);
}
})
// finalize parsed arguments
.reduce(function(prev, arg){
if (arg === undefined){
return prev;
}
return prev.concat(arg);
},[]);
function getVar (_, pre, key) {
var r = typeof env === 'function' ? env(key) : env[key];
if (r === undefined && key != '')
r = '';
else if (r === undefined)
r = '$';
if (typeof r === 'object') {
return pre + TOKEN + JSON.stringify(r) + TOKEN;
}
else return pre + r;
}
}
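// Illustrative examples of parse() as implemented above (inputs are hypothetical):
//   shellQuote$1.parse('a "b c" d')                  // -> ['a', 'b c', 'd']
//   shellQuote$1.parse('echo $MSG', { MSG: 'hi' })   // -> ['echo', 'hi']
//   shellQuote$1.parse('ls | wc -l')                 // -> ['ls', { op: '|' }, 'wc', '-l']
//   shellQuote$1.parse('rm *.log')                   // -> ['rm', { op: 'glob', pattern: '*.log' }]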
var osx = {
'/Applications/Atom.app/Contents/MacOS/Atom': 'atom',
'/Applications/Atom Beta.app/Contents/MacOS/Atom Beta':
'/Applications/Atom Beta.app/Contents/MacOS/Atom Beta',
'/Applications/Brackets.app/Contents/MacOS/Brackets': 'brackets',
'/Applications/Sublime Text.app/Contents/MacOS/Sublime Text':
'/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl',
'/Applications/Sublime Text.app/Contents/MacOS/sublime_text':
'/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl',
'/Applications/Sublime Text 2.app/Contents/MacOS/Sublime Text 2':
'/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl',
'/Applications/Sublime Text Dev.app/Contents/MacOS/Sublime Text':
'/Applications/Sublime Text Dev.app/Contents/SharedSupport/bin/subl',
'/Applications/Visual Studio Code.app/Contents/MacOS/Electron': 'code',
'/Applications/Visual Studio Code - Insiders.app/Contents/MacOS/Electron':
'code-insiders',
'/Applications/VSCodium.app/Contents/MacOS/Electron': 'codium',
'/Applications/AppCode.app/Contents/MacOS/appcode':
'/Applications/AppCode.app/Contents/MacOS/appcode',
'/Applications/CLion.app/Contents/MacOS/clion':
'/Applications/CLion.app/Contents/MacOS/clion',
'/Applications/IntelliJ IDEA.app/Contents/MacOS/idea':
'/Applications/IntelliJ IDEA.app/Contents/MacOS/idea',
'/Applications/PhpStorm.app/Contents/MacOS/phpstorm':
'/Applications/PhpStorm.app/Contents/MacOS/phpstorm',
'/Applications/PyCharm.app/Contents/MacOS/pycharm':
'/Applications/PyCharm.app/Contents/MacOS/pycharm',
'/Applications/PyCharm CE.app/Contents/MacOS/pycharm':
'/Applications/PyCharm CE.app/Contents/MacOS/pycharm',
'/Applications/RubyMine.app/Contents/MacOS/rubymine':
'/Applications/RubyMine.app/Contents/MacOS/rubymine',
'/Applications/WebStorm.app/Contents/MacOS/webstorm':
'/Applications/WebStorm.app/Contents/MacOS/webstorm',
'/Applications/MacVim.app/Contents/MacOS/MacVim': 'mvim',
'/Applications/GoLand.app/Contents/MacOS/goland':
'/Applications/GoLand.app/Contents/MacOS/goland',
'/Applications/Rider.app/Contents/MacOS/rider':
'/Applications/Rider.app/Contents/MacOS/rider'
};
var linux = {
atom: 'atom',
Brackets: 'brackets',
code: 'code',
'code-insiders': 'code-insiders',
codium: 'codium',
vscodium: 'vscodium',
emacs: 'emacs',
gvim: 'gvim',
'idea.sh': 'idea',
'phpstorm.sh': 'phpstorm',
'pycharm.sh': 'pycharm',
'rubymine.sh': 'rubymine',
sublime_text: 'subl',
vim: 'vim',
'webstorm.sh': 'webstorm',
'goland.sh': 'goland',
'rider.sh': 'rider'
};
var windows$1 = [
'Brackets.exe',
'Code.exe',
'Code - Insiders.exe',
'VSCodium.exe',
'atom.exe',
'sublime_text.exe',
'notepad++.exe',
'clion.exe',
'clion64.exe',
'idea.exe',
'idea64.exe',
'phpstorm.exe',
'phpstorm64.exe',
'pycharm.exe',
'pycharm64.exe',
'rubymine.exe',
'rubymine64.exe',
'webstorm.exe',
'webstorm64.exe',
'goland.exe',
'goland64.exe',
'rider.exe',
'rider64.exe'
];
const path$7 = require$$0$4;
const shellQuote = shellQuote$1;
const childProcess$2 = require$$2$1;
// Map from full process name to binary that starts the process
// We can't just re-use full process name, because it will spawn a new instance
// of the app every time
const COMMON_EDITORS_OSX = osx;
const COMMON_EDITORS_LINUX = linux;
const COMMON_EDITORS_WIN = windows$1;
var guess = function guessEditor (specifiedEditor) {
if (specifiedEditor) {
return shellQuote.parse(specifiedEditor)
}
if (process.env.LAUNCH_EDITOR) {
return [process.env.LAUNCH_EDITOR]
}
if (process.versions.webcontainer) {
return [process.env.EDITOR || 'code']
}
// We can find out which editor is currently running by:
// `ps x` on macOS and Linux
// `Get-CimInstance` (queried via PowerShell) on Windows
try {
if (process.platform === 'darwin') {
const output = childProcess$2
.execSync('ps x -o comm=', {
stdio: ['pipe', 'pipe', 'ignore']
})
.toString();
const processNames = Object.keys(COMMON_EDITORS_OSX);
const processList = output.split('\n');
for (let i = 0; i < processNames.length; i++) {
const processName = processNames[i];
// Find editor by exact match.
if (output.indexOf(processName) !== -1) {
return [COMMON_EDITORS_OSX[processName]]
}
const processNameWithoutApplications = processName.replace('/Applications', '');
// Find editor installation not in /Applications.
if (output.indexOf(processNameWithoutApplications) !== -1) {
// Use the CLI command if one is specified
if (processName !== COMMON_EDITORS_OSX[processName]) {
return [COMMON_EDITORS_OSX[processName]]
}
// Use a partial match to find the running process path. If one is found, use the
// existing path since it can be running from anywhere.
const runningProcess = processList.find((procName) => procName.endsWith(processNameWithoutApplications));
if (runningProcess !== undefined) {
return [runningProcess]
}
}
}
} else if (process.platform === 'win32') {
const output = childProcess$2
.execSync(
'powershell -NoProfile -Command "Get-CimInstance -Query \\"select executablepath from win32_process where executablepath is not null\\" | % { $_.ExecutablePath }"',
{
stdio: ['pipe', 'pipe', 'ignore']
}
)
.toString();
const runningProcesses = output.split('\r\n');
for (let i = 0; i < runningProcesses.length; i++) {
const fullProcessPath = runningProcesses[i].trim();
const shortProcessName = path$7.basename(fullProcessPath);
if (COMMON_EDITORS_WIN.indexOf(shortProcessName) !== -1) {
return [fullProcessPath]
}
}
} else if (process.platform === 'linux') {
// --no-heading No header line
// x List all processes owned by you
// -o comm Need only names column
const output = childProcess$2
.execSync('ps x --no-heading -o comm --sort=comm', {
stdio: ['pipe', 'pipe', 'ignore']
})
.toString();
const processNames = Object.keys(COMMON_EDITORS_LINUX);
for (let i = 0; i < processNames.length; i++) {
const processName = processNames[i];
if (output.indexOf(processName) !== -1) {
return [COMMON_EDITORS_LINUX[processName]]
}
}
}
} catch (error) {
// Ignore...
}
// Last resort, use old skool env vars
if (process.env.VISUAL) {
return [process.env.VISUAL]
} else if (process.env.EDITOR) {
return [process.env.EDITOR]
}
return [null]
};
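// Hedged examples of the resolution above (editor commands are illustrative):
//   guessEditor('code -w')   // -> ['code', '-w']  (explicit editor, shell-parsed)
//   // with LAUNCH_EDITOR=subl and no explicit editor -> ['subl']
//   // otherwise: scan running processes, then $VISUAL / $EDITOR, finally [null]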
const path$6 = require$$0$4;
// normalize file/line numbers into command line args for specific editors
var getArgs = function getArgumentsForPosition (
editor,
fileName,
lineNumber,
columnNumber = 1
) {
const editorBasename = path$6.basename(editor).replace(/\.(exe|cmd|bat)$/i, '');
switch (editorBasename) {
case 'atom':
case 'Atom':
case 'Atom Beta':
case 'subl':
case 'sublime':
case 'sublime_text':
case 'wstorm':
case 'charm':
return [`${fileName}:${lineNumber}:${columnNumber}`]
case 'notepad++':
return ['-n' + lineNumber, '-c' + columnNumber, fileName]
case 'vim':
case 'mvim':
return [`+call cursor(${lineNumber}, ${columnNumber})`, fileName]
case 'joe':
case 'gvim':
return ['+' + `${lineNumber}`, fileName]
case 'emacs':
case 'emacsclient':
return [`+${lineNumber}:${columnNumber}`, fileName]
case 'rmate':
case 'mate':
case 'mine':
return ['--line', lineNumber, fileName]
case 'code':
case 'Code':
case 'code-insiders':
case 'Code - Insiders':
case 'codium':
case 'vscodium':
case 'VSCodium':
return ['-r', '-g', `${fileName}:${lineNumber}:${columnNumber}`]
case 'appcode':
case 'clion':
case 'clion64':
case 'idea':
case 'idea64':
case 'phpstorm':
case 'phpstorm64':
case 'pycharm':
case 'pycharm64':
case 'rubymine':
case 'rubymine64':
case 'webstorm':
case 'webstorm64':
case 'goland':
case 'goland64':
case 'rider':
case 'rider64':
return ['--line', lineNumber, '--column', columnNumber, fileName]
}
if (process.env.LAUNCH_EDITOR) {
return [fileName, lineNumber, columnNumber]
}
// For all others, drop the lineNumber until we have
// a mapping above, since providing the lineNumber incorrectly
// can result in errors or confusing behavior.
return [fileName]
};
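// Illustrative mappings produced above (file path is hypothetical, LAUNCH_EDITOR unset):
//   getArgumentsForPosition('code', '/tmp/app.js', 10, 5) // -> ['-r', '-g', '/tmp/app.js:10:5']
//   getArgumentsForPosition('vim', '/tmp/app.js', 10, 5)  // -> ['+call cursor(10, 5)', '/tmp/app.js']
//   getArgumentsForPosition('nano', '/tmp/app.js', 10)    // -> ['/tmp/app.js'] (no known mapping)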
/**
* Copyright (c) 2015-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file at
* https://github.com/facebookincubator/create-react-app/blob/master/LICENSE
*
* Modified by Yuxi Evan You
*/
const fs$4 = require$$0__default;
const os$1 = require$$2;
const path$5 = require$$0$4;
const colors = picocolorsExports;
const childProcess$1 = require$$2$1;
const guessEditor = guess;
const getArgumentsForPosition = getArgs;
function wrapErrorCallback (cb) {
return (fileName, errorMessage) => {
console.log();
console.log(
colors.red('Could not open ' + path$5.basename(fileName) + ' in the editor.')
);
if (errorMessage) {
if (errorMessage[errorMessage.length - 1] !== '.') {
errorMessage += '.';
}
console.log(
colors.red('The editor process exited with an error: ' + errorMessage)
);
}
console.log();
if (cb) cb(fileName, errorMessage);
}
}
function isTerminalEditor (editor) {
switch (editor) {
case 'vim':
case 'emacs':
case 'nano':
return true
}
return false
}
const positionRE = /:(\d+)(:(\d+))?$/;
function parseFile (file) {
const fileName = file.replace(positionRE, '');
const match = file.match(positionRE);
const lineNumber = match && match[1];
const columnNumber = match && match[3];
return {
fileName,
lineNumber,
columnNumber
}
}
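// Examples of the position parsing above (paths are illustrative):
//   parseFile('src/App.vue:12:3') // -> { fileName: 'src/App.vue', lineNumber: '12', columnNumber: '3' }
//   parseFile('src/App.vue')      // -> { fileName: 'src/App.vue', lineNumber: null, columnNumber: null }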
let _childProcess = null;
function launchEditor (file, specifiedEditor, onErrorCallback) {
const parsed = parseFile(file);
let { fileName } = parsed;
const { lineNumber, columnNumber } = parsed;
if (!fs$4.existsSync(fileName)) {
return
}
if (typeof specifiedEditor === 'function') {
onErrorCallback = specifiedEditor;
specifiedEditor = undefined;
}
onErrorCallback = wrapErrorCallback(onErrorCallback);
const [editor, ...args] = guessEditor(specifiedEditor);
if (!editor) {
onErrorCallback(fileName, null);
return
}
if (
process.platform === 'linux' &&
fileName.startsWith('/mnt/') &&
/Microsoft/i.test(os$1.release())
) {
// Assume WSL / "Bash on Ubuntu on Windows" is being used, and
// that the file exists on the Windows file system.
// `os.release()` is "4.4.0-43-Microsoft" in the current release
// build of WSL, see: https://github.com/Microsoft/BashOnWindows/issues/423#issuecomment-221627364
// When a Windows editor is specified, interop functionality can
// handle the path translation, but only if a relative path is used.
fileName = path$5.relative('', fileName);
}
if (lineNumber) {
const extraArgs = getArgumentsForPosition(editor, fileName, lineNumber, columnNumber);
args.push.apply(args, extraArgs);
} else {
args.push(fileName);
}
if (_childProcess && isTerminalEditor(editor)) {
// There's an existing editor process already and it's attached
// to the terminal, so go kill it. Otherwise two separate editor
// instances attach to the stdin/stdout which gets confusing.
_childProcess.kill('SIGKILL');
}
if (process.platform === 'win32') {
// On Windows, launch the editor in a shell because spawn can only
// launch .exe files.
_childProcess = childProcess$1.spawn(
'cmd.exe',
['/C', editor].concat(args),
{ stdio: 'inherit' }
);
} else {
_childProcess = childProcess$1.spawn(editor, args, { stdio: 'inherit' });
}
_childProcess.on('exit', function (errorCode) {
_childProcess = null;
if (errorCode) {
onErrorCallback(fileName, '(code ' + errorCode + ')');
}
});
_childProcess.on('error', function (error) {
onErrorCallback(fileName, error.message);
});
}
var launchEditor_1 = launchEditor;
const url$2 = require$$0$9;
const path$4 = require$$0$4;
const launch = launchEditor_1;
var launchEditorMiddleware = (specifiedEditor, srcRoot, onErrorCallback) => {
if (typeof specifiedEditor === 'function') {
onErrorCallback = specifiedEditor;
specifiedEditor = undefined;
}
if (typeof srcRoot === 'function') {
onErrorCallback = srcRoot;
srcRoot = undefined;
}
srcRoot = srcRoot || process.cwd();
return function launchEditorMiddleware (req, res, next) {
const { file } = url$2.parse(req.url, true).query || {};
if (!file) {
res.statusCode = 500;
res.end(`launch-editor-middleware: required query param "file" is missing.`);
} else {
launch(path$4.resolve(srcRoot, file), specifiedEditor, onErrorCallback);
res.end();
}
}
};
var launchEditorMiddleware$1 = /*@__PURE__*/getDefaultExportFromCjs(launchEditorMiddleware);
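// Hedged usage sketch (the mount point and request below are illustrative): the
// factory above returns a connect-style middleware that opens the file named in
// the `file` query parameter in the guessed editor, e.g.
//   middlewares.use('/__open-in-editor', launchEditorMiddleware$1());
//   // GET /__open-in-editor?file=src/main.ts:10:2 launches the editor at line 10, column 2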
async function resolveHttpServer({ proxy }, app, httpsOptions) {
if (!httpsOptions) {
const { createServer } = await import('node:http');
return createServer(app);
}
// #484 fallback to http1 when proxy is needed.
if (proxy) {
const { createServer } = await import('node:https');
return createServer(httpsOptions, app);
}
else {
const { createSecureServer } = await import('node:http2');
return createSecureServer({
// Manually increase the session memory to prevent 502 ENHANCE_YOUR_CALM
// errors on large numbers of requests
maxSessionMemory: 1000,
...httpsOptions,
allowHTTP1: true,
},
// @ts-expect-error TODO: is this correct?
app);
}
}
async function resolveHttpsConfig(https) {
if (!https)
return undefined;
if (!isObject$2(https))
return {};
const [ca, cert, key, pfx] = await Promise.all([
readFileIfExists(https.ca),
readFileIfExists(https.cert),
readFileIfExists(https.key),
readFileIfExists(https.pfx),
]);
return { ...https, ca, cert, key, pfx };
}
async function readFileIfExists(value) {
if (typeof value === 'string') {
return fsp.readFile(path$o.resolve(value)).catch(() => value);
}
return value;
}
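// Hedged sketch of the resolution above (paths are hypothetical): string values
// are read from disk (falling back to the original string if the read fails),
// non-string values pass through untouched:
//   await resolveHttpsConfig({ key: 'certs/dev.key', cert: 'certs/dev.crt' })
//   // -> { key: <Buffer ...>, cert: <Buffer ...>, ca: undefined, pfx: undefined }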
async function httpServerStart(httpServer, serverOptions) {
let { port, strictPort, host, logger } = serverOptions;
return new Promise((resolve, reject) => {
const onError = (e) => {
if (e.code === 'EADDRINUSE') {
if (strictPort) {
httpServer.removeListener('error', onError);
reject(new Error(`Port ${port} is already in use`));
}
else {
logger.info(`Port ${port} is in use, trying another one...`);
httpServer.listen(++port, host);
}
}
else {
httpServer.removeListener('error', onError);
reject(e);
}
};
httpServer.on('error', onError);
httpServer.listen(port, host, () => {
httpServer.removeListener('error', onError);
resolve(port);
});
});
}
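// Hedged usage sketch (port, host and logger are illustrative):
//   const port = await httpServerStart(httpServer, { port: 5173, strictPort: false, host: 'localhost', logger });
// If 5173 is busy, the 'error' handler logs "Port 5173 is in use, trying another
// one..." and retries 5174, 5175, ... resolving with the port that finally bound;
// with `strictPort: true` it rejects with "Port 5173 is already in use" instead.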
function setClientErrorHandler(server, logger) {
server.on('clientError', (err, socket) => {
let msg = '400 Bad Request';
if (err.code === 'HPE_HEADER_OVERFLOW') {
msg = '431 Request Header Fields Too Large';
logger.warn(colors$1.yellow('Server responded with status code 431. ' +
'See https://vitejs.dev/guide/troubleshooting.html#_431-request-header-fields-too-large.'));
}
if (err.code === 'ECONNRESET' || !socket.writable) {
return;
}
socket.end(`HTTP/1.1 ${msg}\r\n\r\n`);
});
}
const ERR_LOAD_URL = 'ERR_LOAD_URL';
const ERR_LOAD_PUBLIC_URL = 'ERR_LOAD_PUBLIC_URL';
const debugLoad = createDebugger('vite:load');
const debugTransform = createDebugger('vite:transform');
const debugCache$1 = createDebugger('vite:cache');
function transformRequest(url, server, options = {}) {
if (server._restartPromise && !options.ssr)
throwClosedServerError();
const cacheKey = (options.ssr ? 'ssr:' : options.html ? 'html:' : '') + url;
// This module may get invalidated while we are processing it. For example
// when a full page reload is needed after the re-processing of pre-bundled
// dependencies when a missing dep is discovered. We save the current time
// to compare it to the last invalidation performed to know if we should
// cache the result of the transformation or we should discard it as stale.
//
// A module can be invalidated due to:
// 1. A full reload because of pre-bundling newly discovered deps
// 2. A full reload after a config change
// 3. The file that generated the module changed
// 4. Invalidation for a virtual module
//
// For 1 and 2, a new request for this module will be issued after
// the invalidation as part of the browser reloading the page. For 3 and 4
// there may not be a new request right away because of HMR handling.
// In all cases, the next time this module is requested, it should be
// re-processed.
//
// We save the timestamp when we start processing and compare it with the
// last time this module was invalidated.
const timestamp = Date.now();
const pending = server._pendingRequests.get(cacheKey);
if (pending) {
return server.moduleGraph
.getModuleByUrl(removeTimestampQuery(url), options.ssr)
.then((module) => {
if (!module || pending.timestamp > module.lastInvalidationTimestamp) {
// The pending request is still valid, we can safely reuse its result
return pending.request;
}
else {
// Request 1 for module A (pending.timestamp)
// Invalidate module A (module.lastInvalidationTimestamp)
// Request 2 for module A (timestamp)
// First request has been invalidated, abort it to clear the cache,
// then perform a new doTransform.
pending.abort();
return transformRequest(url, server, options);
}
});
}
const request = doTransform(url, server, options, timestamp);
// Avoid clearing the cache of future requests if aborted
let cleared = false;
const clearCache = () => {
if (!cleared) {
server._pendingRequests.delete(cacheKey);
cleared = true;
}
};
// Cache the request and clear it once processing is done
server._pendingRequests.set(cacheKey, {
request,
timestamp,
abort: clearCache,
});
return request.finally(clearCache);
}
async function doTransform(url, server, options, timestamp) {
url = removeTimestampQuery(url);
const { config, pluginContainer } = server;
const prettyUrl = debugCache$1 ? prettifyUrl(url, config.root) : '';
const ssr = !!options.ssr;
const module = await server.moduleGraph.getModuleByUrl(url, ssr);
// check if we have a fresh cache
const cached = module && (ssr ? module.ssrTransformResult : module.transformResult);
if (cached) {
// TODO: check if the module is "partially invalidated" - i.e. an import
// down the chain has been fully invalidated, but this current module's
// content has not changed.
// in this case, we can reuse its previous cached result and only update
// its import timestamps.
debugCache$1?.(`[memory] ${prettyUrl}`);
return cached;
}
const resolved = module
? undefined
: (await pluginContainer.resolveId(url, undefined, { ssr })) ?? undefined;
// resolve
const id = module?.id ?? resolved?.id ?? url;
const result = loadAndTransform(id, url, server, options, timestamp, module, resolved);
getDepsOptimizer(config, ssr)?.delayDepsOptimizerUntil(id, () => result);
return result;
}
async function loadAndTransform(id, url, server, options, timestamp, mod, resolved) {
const { config, pluginContainer, moduleGraph, watcher } = server;
const { root, logger } = config;
const prettyUrl = debugLoad || debugTransform ? prettifyUrl(url, config.root) : '';
const ssr = !!options.ssr;
const file = cleanUrl(id);
let code = null;
let map = null;
// load
const loadStart = debugLoad ? performance.now() : 0;
const loadResult = await pluginContainer.load(id, { ssr });
if (loadResult == null) {
// if this is an html request and there is no load result, skip ahead to
// SPA fallback.
if (options.html && !id.endsWith('.html')) {
return null;
}
// try fallback loading it from fs as string
// if the file is a binary, there should be a plugin that already loaded it
// as string
// only try the fallback if access is allowed, skip for out of root url
// like /service-worker.js or /api/users
if (options.ssr || isFileServingAllowed(file, server)) {
try {
code = await fsp.readFile(file, 'utf-8');
debugLoad?.(`${timeFrom(loadStart)} [fs] ${prettyUrl}`);
}
catch (e) {
if (e.code !== 'ENOENT') {
if (e.code === 'EISDIR') {
e.message = `${e.message} ${file}`;
}
throw e;
}
}
}
if (code) {
try {
map = (convertSourceMap.fromSource(code) ||
(await convertSourceMap.fromMapFileSource(code, createConvertSourceMapReadMap(file))))?.toObject();
code = code.replace(convertSourceMap.mapFileCommentRegex, blankReplacer);
}
catch (e) {
logger.warn(`Failed to load source map for ${url}.`, {
timestamp: true,
});
}
}
}
else {
debugLoad?.(`${timeFrom(loadStart)} [plugin] ${prettyUrl}`);
if (isObject$2(loadResult)) {
code = loadResult.code;
map = loadResult.map;
}
else {
code = loadResult;
}
}
if (code == null) {
const isPublicFile = checkPublicFile(url, config);
const msg = isPublicFile
? `This file is in /public and will be copied as-is during build without ` +
`going through the plugin transforms, and therefore should not be ` +
`imported from source code. It can only be referenced via HTML tags.`
: `Does the file exist?`;
const importerMod = server.moduleGraph.idToModuleMap
.get(id)
?.importers.values()
.next().value;
const importer = importerMod?.file || importerMod?.url;
const err = new Error(`Failed to load url ${url} (resolved id: ${id})${importer ? ` in ${importer}` : ''}. ${msg}`);
err.code = isPublicFile ? ERR_LOAD_PUBLIC_URL : ERR_LOAD_URL;
throw err;
}
if (server._restartPromise && !ssr)
throwClosedServerError();
// ensure module in graph after successful load
mod ?? (mod = await moduleGraph._ensureEntryFromUrl(url, ssr, undefined, resolved));
ensureWatchedFile(watcher, mod.file, root);
// transform
const transformStart = debugTransform ? performance.now() : 0;
const transformResult = await pluginContainer.transform(code, id, {
inMap: map,
ssr,
});
const originalCode = code;
if (transformResult == null ||
(isObject$2(transformResult) && transformResult.code == null)) {
// no transform applied, keep code as-is
debugTransform?.(timeFrom(transformStart) + colors$1.dim(` [skipped] ${prettyUrl}`));
}
else {
debugTransform?.(`${timeFrom(transformStart)} ${prettyUrl}`);
code = transformResult.code;
map = transformResult.map;
}
if (map && mod.file) {
map = (typeof map === 'string' ? JSON.parse(map) : map);
if (map.mappings) {
await injectSourcesContent(map, mod.file, logger);
}
const sourcemapPath = `${mod.file}.map`;
applySourcemapIgnoreList(map, sourcemapPath, config.server.sourcemapIgnoreList, logger);
if (path$o.isAbsolute(mod.file)) {
for (let sourcesIndex = 0; sourcesIndex < map.sources.length; ++sourcesIndex) {
const sourcePath = map.sources[sourcesIndex];
if (sourcePath) {
// Rewrite sources to relative paths to give debuggers the chance
// to resolve and display them in a meaningful way (rather than
// with absolute paths).
if (path$o.isAbsolute(sourcePath)) {
map.sources[sourcesIndex] = path$o.relative(path$o.dirname(mod.file), sourcePath);
}
}
}
}
}
if (server._restartPromise && !ssr)
throwClosedServerError();
const result = ssr && !server.config.experimental.skipSsrTransform
? await server.ssrTransform(code, map, url, originalCode)
: {
code,
map,
etag: getEtag(code, { weak: true }),
};
// Only cache the result if the module wasn't invalidated while it was
// being processed, so it is re-processed next time if it is stale
if (timestamp > mod.lastInvalidationTimestamp) {
if (ssr)
mod.ssrTransformResult = result;
else
mod.transformResult = result;
}
return result;
}
function createConvertSourceMapReadMap(originalFileName) {
return (filename) => {
return fsp.readFile(path$o.resolve(path$o.dirname(originalFileName), filename), 'utf-8');
};
}
/**
* @typedef { import('estree').Node} Node
* @typedef {{
* skip: () => void;
* remove: () => void;
* replace: (node: Node) => void;
* }} WalkerContext
*/
class WalkerBase {
constructor() {
/** @type {boolean} */
this.should_skip = false;
/** @type {boolean} */
this.should_remove = false;
/** @type {Node | null} */
this.replacement = null;
/** @type {WalkerContext} */
this.context = {
skip: () => (this.should_skip = true),
remove: () => (this.should_remove = true),
replace: (node) => (this.replacement = node)
};
}
/**
* @template {Node} Parent
* @param {Parent | null | undefined} parent
* @param {keyof Parent | null | undefined} prop
* @param {number | null | undefined} index
* @param {Node} node
*/
replace(parent, prop, index, node) {
if (parent && prop) {
if (index != null) {
/** @type {Array<Node>} */ (parent[prop])[index] = node;
} else {
/** @type {Node} */ (parent[prop]) = node;
}
}
}
/**
* @template {Node} Parent
* @param {Parent | null | undefined} parent
* @param {keyof Parent | null | undefined} prop
* @param {number | null | undefined} index
*/
remove(parent, prop, index) {
if (parent && prop) {
if (index !== null && index !== undefined) {
/** @type {Array<Node>} */ (parent[prop]).splice(index, 1);
} else {
delete parent[prop];
}
}
}
}
/**
* @typedef { import('estree').Node} Node
* @typedef { import('./walker.js').WalkerContext} WalkerContext
* @typedef {(
* this: WalkerContext,
* node: Node,
* parent: Node | null,
* key: string | number | symbol | null | undefined,
* index: number | null | undefined
* ) => void} SyncHandler
*/
class SyncWalker extends WalkerBase {
/**
*
* @param {SyncHandler} [enter]
* @param {SyncHandler} [leave]
*/
constructor(enter, leave) {
super();
/** @type {boolean} */
this.should_skip = false;
/** @type {boolean} */
this.should_remove = false;
/** @type {Node | null} */
this.replacement = null;
/** @type {WalkerContext} */
this.context = {
skip: () => (this.should_skip = true),
remove: () => (this.should_remove = true),
replace: (node) => (this.replacement = node)
};
/** @type {SyncHandler | undefined} */
this.enter = enter;
/** @type {SyncHandler | undefined} */
this.leave = leave;
}
/**
* @template {Node} Parent
* @param {Node} node
* @param {Parent | null} parent
* @param {keyof Parent} [prop]
* @param {number | null} [index]
* @returns {Node | null}
*/
visit(node, parent, prop, index) {
if (node) {
if (this.enter) {
const _should_skip = this.should_skip;
const _should_remove = this.should_remove;
const _replacement = this.replacement;
this.should_skip = false;
this.should_remove = false;
this.replacement = null;
this.enter.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const skipped = this.should_skip;
const removed = this.should_remove;
this.should_skip = _should_skip;
this.should_remove = _should_remove;
this.replacement = _replacement;
if (skipped) return node;
if (removed) return null;
}
/** @type {keyof Node} */
let key;
for (key in node) {
/** @type {unknown} */
const value = node[key];
if (value && typeof value === 'object') {
if (Array.isArray(value)) {
const nodes = /** @type {Array<unknown>} */ (value);
for (let i = 0; i < nodes.length; i += 1) {
const item = nodes[i];
if (isNode(item)) {
if (!this.visit(item, node, key, i)) {
// removed
i--;
}
}
}
} else if (isNode(value)) {
this.visit(value, node, key, null);
}
}
}
if (this.leave) {
const _replacement = this.replacement;
const _should_remove = this.should_remove;
this.replacement = null;
this.should_remove = false;
this.leave.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const removed = this.should_remove;
this.replacement = _replacement;
this.should_remove = _should_remove;
if (removed) return null;
}
}
return node;
}
}
/**
* Ducktype a node.
*
* @param {unknown} value
* @returns {value is Node}
*/
function isNode(value) {
return (
value !== null && typeof value === 'object' && 'type' in value && typeof value.type === 'string'
);
}
/**
* @typedef {import('estree').Node} Node
* @typedef {import('./sync.js').SyncHandler} SyncHandler
* @typedef {import('./async.js').AsyncHandler} AsyncHandler
*/
/**
* @param {Node} ast
* @param {{
* enter?: SyncHandler
* leave?: SyncHandler
* }} walker
* @returns {Node | null}
*/
function walk$1(ast, { enter, leave }) {
const instance = new SyncWalker(enter, leave);
return instance.visit(ast, null);
}
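// Hedged usage sketch of walk$1 (the handlers and node shapes are illustrative):
// enter/leave callbacks run with `this` bound to the walker context, so nodes can
// be skipped, removed, or replaced in place:
//   walk$1(ast, {
//     enter(node) {
//       if (node.type === 'ImportDeclaration') this.skip();   // don't descend
//       if (node.type === 'DebuggerStatement') this.remove(); // drop the node
//     },
//     leave(node) {
//       if (node.type === 'Literal' && node.value === 1) {
//         this.replace({ type: 'Literal', value: 2 });        // swap the node
//       }
//     },
//   });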
/**
* @param {import('estree').Node} param
* @returns {string[]}
*/
function extract_names(param) {
return extract_identifiers(param).map(node => node.name);
}
/**
* @param {import('estree').Node} param
* @param {import('estree').Identifier[]} nodes
* @returns {import('estree').Identifier[]}
*/
function extract_identifiers(param, nodes = []) {
switch (param.type) {
case 'Identifier':
nodes.push(param);
break;
case 'MemberExpression':
let object = param;
while (object.type === 'MemberExpression') {
object = /** @type {any} */ (object.object);
}
nodes.push(/** @type {any} */ (object));
break;
case 'ObjectPattern':
/** @param {import('estree').Property | import('estree').RestElement} prop */
const handle_prop = (prop) => {
if (prop.type === 'RestElement') {
extract_identifiers(prop.argument, nodes);
} else {
extract_identifiers(prop.value, nodes);
}
};
param.properties.forEach(handle_prop);
break;
case 'ArrayPattern':
/** @param {import('estree').Node} element */
const handle_element = (element) => {
if (element) extract_identifiers(element, nodes);
};
param.elements.forEach((element) => {
if (element) {
handle_element(element);
}
});
break;
case 'RestElement':
extract_identifiers(param.argument, nodes);
break;
case 'AssignmentPattern':
extract_identifiers(param.left, nodes);
break;
}
return nodes;
}
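// Worked example (the pattern is illustrative): for an ObjectPattern node
// corresponding to `{ a, b: [c, ...rest] }`, extract_identifiers collects the
// Identifier nodes a, c and rest, so extract_names returns ['a', 'c', 'rest'].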
const ssrModuleExportsKey = `__vite_ssr_exports__`;
const ssrImportKey = `__vite_ssr_import__`;
const ssrDynamicImportKey = `__vite_ssr_dynamic_import__`;
const ssrExportAllKey = `__vite_ssr_exportAll__`;
const ssrImportMetaKey = `__vite_ssr_import_meta__`;
const hashbangRE = /^#!.*\n/;
async function ssrTransform(code, inMap, url, originalCode, options) {
if (options?.json?.stringify && isJSONRequest(url)) {
return ssrTransformJSON(code, inMap);
}
return ssrTransformScript(code, inMap, url, originalCode);
}
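// Hedged illustration of ssrTransformScript below (output is approximate, not
// byte-exact): a module such as
//   import { ref } from 'vue'
//   export const count = ref(0)
// is rewritten to roughly
//   const __vite_ssr_import_0__ = await __vite_ssr_import__("vue");
//   const count = __vite_ssr_import_0__.ref(0)
//   Object.defineProperty(__vite_ssr_exports__, "count", { enumerable: true, configurable: true, get(){ return count } });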
async function ssrTransformJSON(code, inMap) {
return {
code: code.replace('export default', `${ssrModuleExportsKey}.default =`),
map: inMap,
deps: [],
dynamicDeps: [],
};
}
async function ssrTransformScript(code, inMap, url, originalCode) {
const s = new MagicString(code);
let ast;
try {
ast = parser.parse(code, {
sourceType: 'module',
ecmaVersion: 'latest',
locations: true,
allowHashBang: true,
});
}
catch (err) {
if (!err.loc || !err.loc.line)
throw err;
const line = err.loc.line;
throw new Error(`Parse failure: ${err.message}\nAt file: ${url}\nContents of line ${line}: ${code.split('\n')[line - 1]}`);
}
let uid = 0;
const deps = new Set();
const dynamicDeps = new Set();
const idToImportMap = new Map();
const declaredConst = new Set();
// hoist at the start of the file, after the hashbang
const hoistIndex = code.match(hashbangRE)?.[0].length ?? 0;
function defineImport(source) {
deps.add(source);
const importId = `__vite_ssr_import_${uid++}__`;
// There will be an error if the module is called before it is imported,
// so the module import statement is hoisted to the top
s.appendLeft(hoistIndex, `const ${importId} = await ${ssrImportKey}(${JSON.stringify(source)});\n`);
return importId;
}
function defineExport(position, name, local = name) {
s.appendLeft(position, `\nObject.defineProperty(${ssrModuleExportsKey}, "${name}", ` +
`{ enumerable: true, configurable: true, get(){ return ${local} }});`);
}
// 1. check all import statements and record id -> importName map
for (const node of ast.body) {
// import foo from 'foo' --> foo -> __import_foo__.default
// import { baz } from 'foo' --> baz -> __import_foo__.baz
// import * as ok from 'foo' --> ok -> __import_foo__
if (node.type === 'ImportDeclaration') {
const importId = defineImport(node.source.value);
s.remove(node.start, node.end);
for (const spec of node.specifiers) {
if (spec.type === 'ImportSpecifier') {
idToImportMap.set(spec.local.name, `${importId}.${spec.imported.name}`);
}
else if (spec.type === 'ImportDefaultSpecifier') {
idToImportMap.set(spec.local.name, `${importId}.default`);
}
else {
// namespace specifier
idToImportMap.set(spec.local.name, importId);
}
}
}
}
// 2. check all export statements and define exports
for (const node of ast.body) {
// named exports
if (node.type === 'ExportNamedDeclaration') {
if (node.declaration) {
if (node.declaration.type === 'FunctionDeclaration' ||
node.declaration.type === 'ClassDeclaration') {
// export function foo() {}
defineExport(node.end, node.declaration.id.name);
}
else {
// export const foo = 1, bar = 2
for (const declaration of node.declaration.declarations) {
const names = extract_names(declaration.id);
for (const name of names) {
defineExport(node.end, name);
}
}
}
s.remove(node.start, node.declaration.start);
}
else {
s.remove(node.start, node.end);
if (node.source) {
// export { foo, bar } from './foo'
const importId = defineImport(node.source.value);
// hoist re-exports near the defined import so they are immediately exported
for (const spec of node.specifiers) {
defineExport(hoistIndex, spec.exported.name, `${importId}.${spec.local.name}`);
}
}
else {
// export { foo, bar }
for (const spec of node.specifiers) {
const local = spec.local.name;
const binding = idToImportMap.get(local);
defineExport(node.end, spec.exported.name, binding || local);
}
}
}
}
// default export
if (node.type === 'ExportDefaultDeclaration') {
const expressionTypes = ['FunctionExpression', 'ClassExpression'];
if ('id' in node.declaration &&
node.declaration.id &&
!expressionTypes.includes(node.declaration.type)) {
// named hoistable/class exports
// export default function foo() {}
// export default class A {}
const { name } = node.declaration.id;
s.remove(node.start, node.start + 15 /* 'export default '.length */);
s.append(`\nObject.defineProperty(${ssrModuleExportsKey}, "default", ` +
`{ enumerable: true, configurable: true, value: ${name} });`);
}
else {
// anonymous default exports
s.update(node.start, node.start + 14 /* 'export default'.length */, `${ssrModuleExportsKey}.default =`);
}
}
// export * from './foo'
if (node.type === 'ExportAllDeclaration') {
s.remove(node.start, node.end);
const importId = defineImport(node.source.value);
// hoist re-exports near the defined import so they are immediately exported
if (node.exported) {
defineExport(hoistIndex, node.exported.name, `${importId}`);
}
else {
s.appendLeft(hoistIndex, `${ssrExportAllKey}(${importId});\n`);
}
}
}
// 3. convert references to import bindings & import.meta references
walk(ast, {
onIdentifier(id, parent, parentStack) {
const grandparent = parentStack[1];
const binding = idToImportMap.get(id.name);
if (!binding) {
return;
}
if (isStaticProperty(parent) && parent.shorthand) {
// let binding used in a property shorthand
// { foo } -> { foo: __import_x__.foo }
// skip for destructuring patterns
if (!isNodeInPattern(parent) ||
isInDestructuringAssignment(parent, parentStack)) {
s.appendLeft(id.end, `: ${binding}`);
}
}
else if ((parent.type === 'PropertyDefinition' &&
grandparent?.type === 'ClassBody') ||
(parent.type === 'ClassDeclaration' && id === parent.superClass)) {
if (!declaredConst.has(id.name)) {
declaredConst.add(id.name);
// locate the top-most node containing the class declaration
const topNode = parentStack[parentStack.length - 2];
s.prependRight(topNode.start, `const ${id.name} = ${binding};\n`);
}
}
else if (
// don't transform class name identifier
!(parent.type === 'ClassExpression' && id === parent.id)) {
s.update(id.start, id.end, binding);
}
},
onImportMeta(node) {
s.update(node.start, node.end, ssrImportMetaKey);
},
onDynamicImport(node) {
s.update(node.start, node.start + 6, ssrDynamicImportKey);
if (node.type === 'ImportExpression' && node.source.type === 'Literal') {
dynamicDeps.add(node.source.value);
}
},
});
let map = s.generateMap({ hires: 'boundary' });
if (inMap && inMap.mappings && inMap.sources.length > 0) {
map = combineSourcemaps(url, [
{
...map,
sources: inMap.sources,
sourcesContent: inMap.sourcesContent,
},
inMap,
]);
}
else {
map.sources = [path$o.basename(url)];
// needs to use originalCode instead of code
// because code might be already transformed even if map is null
map.sourcesContent = [originalCode];
}
return {
code: s.toString(),
map,
deps: [...deps],
dynamicDeps: [...dynamicDeps],
};
}
const isNodeInPatternWeakSet = new WeakSet();
const setIsNodeInPattern = (node) => isNodeInPatternWeakSet.add(node);
const isNodeInPattern = (node) => isNodeInPatternWeakSet.has(node);
/**
* Same logic from \@vue/compiler-core & \@vue/compiler-sfc
* Except this is using acorn AST
*/
function walk(root, { onIdentifier, onImportMeta, onDynamicImport }) {
const parentStack = [];
const varKindStack = [];
const scopeMap = new WeakMap();
const identifiers = [];
const setScope = (node, name) => {
let scopeIds = scopeMap.get(node);
if (scopeIds && scopeIds.has(name)) {
return;
}
if (!scopeIds) {
scopeIds = new Set();
scopeMap.set(node, scopeIds);
}
scopeIds.add(name);
};
function isInScope(name, parents) {
return parents.some((node) => node && scopeMap.get(node)?.has(name));
}
function handlePattern(p, parentScope) {
if (p.type === 'Identifier') {
setScope(parentScope, p.name);
}
else if (p.type === 'RestElement') {
handlePattern(p.argument, parentScope);
}
else if (p.type === 'ObjectPattern') {
p.properties.forEach((property) => {
if (property.type === 'RestElement') {
setScope(parentScope, property.argument.name);
}
else {
handlePattern(property.value, parentScope);
}
});
}
else if (p.type === 'ArrayPattern') {
p.elements.forEach((element) => {
if (element) {
handlePattern(element, parentScope);
}
});
}
else if (p.type === 'AssignmentPattern') {
handlePattern(p.left, parentScope);
}
else {
setScope(parentScope, p.name);
}
}
walk$1(root, {
enter(node, parent) {
if (node.type === 'ImportDeclaration') {
return this.skip();
}
// track parent stack, skip for "else-if"/"else" branches as acorn nests
// the ast within "if" nodes instead of flattening them
if (parent &&
!(parent.type === 'IfStatement' && node === parent.alternate)) {
parentStack.unshift(parent);
}
// track variable declaration kind stack used by VariableDeclarator
if (node.type === 'VariableDeclaration') {
varKindStack.unshift(node.kind);
}
if (node.type === 'MetaProperty' && node.meta.name === 'import') {
onImportMeta(node);
}
else if (node.type === 'ImportExpression') {
onDynamicImport(node);
}
if (node.type === 'Identifier') {
if (!isInScope(node.name, parentStack) &&
isRefIdentifier(node, parent, parentStack)) {
// record the identifier, for DFS -> BFS
identifiers.push([node, parentStack.slice(0)]);
}
}
else if (isFunction(node)) {
// If it is a function declaration, it could be shadowing an import
// Add its name to the scope so it won't get replaced
if (node.type === 'FunctionDeclaration') {
const parentScope = findParentScope(parentStack);
if (parentScope) {
setScope(parentScope, node.id.name);
}
}
// walk function expressions and add their arguments to known identifiers
// so that we don't prefix them
node.params.forEach((p) => {
if (p.type === 'ObjectPattern' || p.type === 'ArrayPattern') {
handlePattern(p, node);
return;
}
walk$1(p.type === 'AssignmentPattern' ? p.left : p, {
enter(child, parent) {
// skip the default values of destructured params
if (parent?.type === 'AssignmentPattern' &&
parent?.right === child) {
return this.skip();
}
if (child.type !== 'Identifier')
return;
// do not record as a scope variable if it is a static destructuring key
if (isStaticPropertyKey(child, parent))
return;
// do not record if this is a default value
// assignment of a destructuring variable
if ((parent?.type === 'TemplateLiteral' &&
parent?.expressions.includes(child)) ||
(parent?.type === 'CallExpression' && parent?.callee === child)) {
return;
}
setScope(node, child.name);
},
});
});
}
else if (node.type === 'Property' && parent.type === 'ObjectPattern') {
// mark property in destructuring pattern
setIsNodeInPattern(node);
}
else if (node.type === 'VariableDeclarator') {
const parentFunction = findParentScope(parentStack, varKindStack[0] === 'var');
if (parentFunction) {
handlePattern(node.id, parentFunction);
}
}
else if (node.type === 'CatchClause' && node.param) {
handlePattern(node.param, node);
}
},
leave(node, parent) {
// untrack parent stack from above
if (parent &&
!(parent.type === 'IfStatement' && node === parent.alternate)) {
parentStack.shift();
}
if (node.type === 'VariableDeclaration') {
varKindStack.shift();
}
},
});
// emit the identifier events in BFS so the hoisted declarations
// can be captured correctly
identifiers.forEach(([node, stack]) => {
if (!isInScope(node.name, stack))
onIdentifier(node, stack[0], stack);
});
}
function isRefIdentifier(id, parent, parentStack) {
// declaration id
if (parent.type === 'CatchClause' ||
((parent.type === 'VariableDeclarator' ||
parent.type === 'ClassDeclaration') &&
parent.id === id)) {
return false;
}
if (isFunction(parent)) {
// function declaration/expression id
if (parent.id === id) {
return false;
}
// params list
if (parent.params.includes(id)) {
return false;
}
}
// class method name
if (parent.type === 'MethodDefinition' && !parent.computed) {
return false;
}
// property key
if (isStaticPropertyKey(id, parent)) {
return false;
}
// object destructuring pattern
if (isNodeInPattern(parent) && parent.value === id) {
return false;
}
// non-assignment array destructuring pattern
if (parent.type === 'ArrayPattern' &&
!isInDestructuringAssignment(parent, parentStack)) {
return false;
}
// member expression property
if (parent.type === 'MemberExpression' &&
parent.property === id &&
!parent.computed) {
return false;
}
if (parent.type === 'ExportSpecifier') {
return false;
}
// is a special keyword but parsed as identifier
if (id.name === 'arguments') {
return false;
}
return true;
}
const isStaticProperty = (node) => node && node.type === 'Property' && !node.computed;
const isStaticPropertyKey = (node, parent) => isStaticProperty(parent) && parent.key === node;
const functionNodeTypeRE = /Function(?:Expression|Declaration)$|Method$/;
function isFunction(node) {
return functionNodeTypeRE.test(node.type);
}
const blockNodeTypeRE = /^BlockStatement$|^For(?:In|Of)?Statement$/;
function isBlock(node) {
return blockNodeTypeRE.test(node.type);
}
function findParentScope(parentStack, isVar = false) {
return parentStack.find(isVar ? isFunction : isBlock);
}
function isInDestructuringAssignment(parent, parentStack) {
if (parent &&
(parent.type === 'Property' || parent.type === 'ArrayPattern')) {
return parentStack.some((i) => i.type === 'AssignmentExpression');
}
return false;
}
let offset;
function calculateOffsetOnce() {
if (offset !== undefined) {
return;
}
try {
new Function('throw new Error(1)')();
}
catch (e) {
// in Node 12, stack traces account for the function wrapper.
// in Node 13 and later, the function wrapper adds two lines,
// which must be subtracted to generate a valid mapping
const match = /:(\d+):\d+\)$/.exec(e.stack.split('\n')[1]);
offset = match ? +match[1] - 1 : 0;
}
}
function ssrRewriteStacktrace(stack, moduleGraph) {
calculateOffsetOnce();
return stack
.split('\n')
.map((line) => {
return line.replace(/^ {4}at (?:(\S.*?)\s\()?(.+?):(\d+)(?::(\d+))?\)?/, (input, varName, id, line, column) => {
if (!id)
return input;
const mod = moduleGraph.idToModuleMap.get(id);
const rawSourceMap = mod?.ssrTransformResult?.map;
if (!rawSourceMap) {
return input;
}
const traced = new TraceMap(rawSourceMap);
const pos = originalPositionFor$1(traced, {
line: Number(line) - offset,
// stacktrace's column is 1-indexed, but sourcemap's one is 0-indexed
column: Number(column) - 1,
});
if (!pos.source || pos.line == null || pos.column == null) {
return input;
}
const trimmedVarName = varName.trim();
const sourceFile = path$o.resolve(path$o.dirname(id), pos.source);
// stacktrace's column is 1-indexed, but sourcemap's one is 0-indexed
const source = `${sourceFile}:${pos.line}:${pos.column + 1}`;
if (!trimmedVarName || trimmedVarName === 'eval') {
return ` at ${source}`;
}
else {
return ` at ${trimmedVarName} (${source})`;
}
});
})
.join('\n');
}
function rebindErrorStacktrace(e, stacktrace) {
const { configurable, writable } = Object.getOwnPropertyDescriptor(e, 'stack');
if (configurable) {
Object.defineProperty(e, 'stack', {
value: stacktrace,
enumerable: true,
configurable: true,
writable: true,
});
}
else if (writable) {
e.stack = stacktrace;
}
}
const rewroteStacktraces = new WeakSet();
function ssrFixStacktrace(e, moduleGraph) {
if (!e.stack)
return;
// stacktrace shouldn't be rewritten more than once
if (rewroteStacktraces.has(e))
return;
const stacktrace = ssrRewriteStacktrace(e.stack, moduleGraph);
rebindErrorStacktrace(e, stacktrace);
rewroteStacktraces.add(e);
}
// eslint-disable-next-line @typescript-eslint/no-empty-function
const AsyncFunction = async function () { }.constructor;
let fnDeclarationLineCount = 0;
{
const body = '/*code*/';
const source = new AsyncFunction('a', 'b', body).toString();
fnDeclarationLineCount =
source.slice(0, source.indexOf(body)).split('\n').length - 1;
}
const pendingModules = new Map();
const pendingImports = new Map();
const importErrors = new WeakMap();
async function ssrLoadModule(url, server, context = { global }, urlStack = [], fixStacktrace) {
url = unwrapId(url);
// when we instantiate multiple dependency modules in parallel, they may
// point to shared modules. We need to avoid duplicate instantiation attempts
// by registering every module as pending synchronously so that all subsequent
// requests for that module simply wait on the same promise.
const pending = pendingModules.get(url);
if (pending) {
return pending;
}
const modulePromise = instantiateModule(url, server, context, urlStack, fixStacktrace);
pendingModules.set(url, modulePromise);
modulePromise
.catch(() => {
pendingImports.delete(url);
})
.finally(() => {
pendingModules.delete(url);
});
return modulePromise;
}
async function instantiateModule(url, server, context = { global }, urlStack = [], fixStacktrace) {
const { moduleGraph } = server;
const mod = await moduleGraph.ensureEntryFromUrl(url, true);
if (mod.ssrError) {
throw mod.ssrError;
}
if (mod.ssrModule) {
return mod.ssrModule;
}
const result = mod.ssrTransformResult ||
(await transformRequest(url, server, { ssr: true }));
if (!result) {
// TODO more info? is this even necessary?
throw new Error(`failed to load module for ssr: ${url}`);
}
const ssrModule = {
[Symbol.toStringTag]: 'Module',
};
Object.defineProperty(ssrModule, '__esModule', { value: true });
// Tolerate circular imports by ensuring the module can be
// referenced before it's been instantiated.
mod.ssrModule = ssrModule;
const ssrImportMeta = {
// The filesystem URL, matching native Node.js modules
url: pathToFileURL(mod.file).toString(),
};
urlStack = urlStack.concat(url);
const isCircular = (url) => urlStack.includes(url);
const { isProduction, resolve: { dedupe, preserveSymlinks }, root, ssr, } = server.config;
const overrideConditions = ssr.resolve?.externalConditions || [];
const resolveOptions = {
mainFields: ['main'],
browserField: true,
conditions: [],
overrideConditions: [...overrideConditions, 'production', 'development'],
extensions: ['.js', '.cjs', '.json'],
dedupe,
preserveSymlinks,
isBuild: false,
isProduction,
root,
ssrConfig: ssr,
};
// Since dynamic imports can happen in parallel, we need to
// account for multiple pending deps and duplicate imports.
const pendingDeps = [];
const ssrImport = async (dep) => {
try {
if (dep[0] !== '.' && dep[0] !== '/') {
return await nodeImport(dep, mod.file, resolveOptions);
}
// convert to a rollup URL because `pendingImports` and `moduleGraph.urlToModuleMap` require it
dep = unwrapId(dep);
if (!isCircular(dep) && !pendingImports.get(dep)?.some(isCircular)) {
pendingDeps.push(dep);
if (pendingDeps.length === 1) {
pendingImports.set(url, pendingDeps);
}
const mod = await ssrLoadModule(dep, server, context, urlStack, fixStacktrace);
if (pendingDeps.length === 1) {
pendingImports.delete(url);
}
else {
pendingDeps.splice(pendingDeps.indexOf(dep), 1);
}
// return local module to avoid race condition #5470
return mod;
}
return moduleGraph.urlToModuleMap.get(dep)?.ssrModule;
}
catch (err) {
// tell external error handler which mod was imported with error
importErrors.set(err, { importee: dep });
throw err;
}
};
const ssrDynamicImport = (dep) => {
// #3087: dynamic import vars are ignored when rewriting the import path,
// so relative paths need to be resolved here
if (dep[0] === '.') {
dep = path$o.posix.resolve(path$o.dirname(url), dep);
}
return ssrImport(dep);
};
function ssrExportAll(sourceModule) {
for (const key in sourceModule) {
if (key !== 'default' && key !== '__esModule') {
Object.defineProperty(ssrModule, key, {
enumerable: true,
configurable: true,
get() {
return sourceModule[key];
},
});
}
}
}
let sourceMapSuffix = '';
if (result.map) {
const moduleSourceMap = Object.assign({}, result.map, {
// currently we need to offset the line
// https://github.com/nodejs/node/issues/43047#issuecomment-1180632750
mappings: ';'.repeat(fnDeclarationLineCount) + result.map.mappings,
});
sourceMapSuffix =
'\n//# sourceMappingURL=' + genSourceMapUrl(moduleSourceMap);
}
try {
const initModule = new AsyncFunction(`global`, ssrModuleExportsKey, ssrImportMetaKey, ssrImportKey, ssrDynamicImportKey, ssrExportAllKey, '"use strict";' +
result.code +
`\n//# sourceURL=${mod.id}${sourceMapSuffix}`);
await initModule(context.global, ssrModule, ssrImportMeta, ssrImport, ssrDynamicImport, ssrExportAll);
}
catch (e) {
mod.ssrError = e;
const errorData = importErrors.get(e);
if (e.stack && fixStacktrace) {
ssrFixStacktrace(e, moduleGraph);
}
server.config.logger.error(colors$1.red(`Error when evaluating SSR module ${url}:` +
(errorData?.importee
? ` failed to import "${errorData.importee}"`
: '') +
`\n|- ${e.stack}\n`), {
timestamp: true,
clear: server.config.clearScreen,
error: e,
});
throw e;
}
return Object.freeze(ssrModule);
}
// In node@12+ we can use dynamic import to load CJS and ESM
async function nodeImport(id, importer, resolveOptions) {
let url;
if (id.startsWith('data:') || isBuiltin(id)) {
url = id;
}
else {
const resolved = tryNodeResolve(id, importer,
// Non-external modules can import ESM-only modules, but only outside
// of test runs, because we use Node `require` in Jest to avoid segfault.
// @ts-expect-error jest only exists when running Jest
typeof jest === 'undefined'
? { ...resolveOptions, tryEsmOnly: true }
: resolveOptions, false, undefined, true);
if (!resolved) {
const err = new Error(`Cannot find module '${id}' imported from '${importer}'`);
err.code = 'ERR_MODULE_NOT_FOUND';
throw err;
}
url = resolved.id;
if (usingDynamicImport) {
url = pathToFileURL(url).toString();
}
}
const mod = await dynamicImport(url);
return proxyESM(mod);
}
// rollup-style default import interop for cjs
function proxyESM(mod) {
// This is the only sensible option when the exports object is a primitive
if (isPrimitive(mod))
return { default: mod };
let defaultExport = 'default' in mod ? mod.default : mod;
if (!isPrimitive(defaultExport) && '__esModule' in defaultExport) {
mod = defaultExport;
if ('default' in defaultExport) {
defaultExport = defaultExport.default;
}
}
return new Proxy(mod, {
get(mod, prop) {
if (prop === 'default')
return defaultExport;
return mod[prop] ?? defaultExport?.[prop];
},
});
}
function isPrimitive(value) {
return !value || (typeof value !== 'object' && typeof value !== 'function');
}
var isWsl$2 = {exports: {}};
const fs$3 = require$$0__default;
let isDocker$2;
function hasDockerEnv() {
try {
fs$3.statSync('/.dockerenv');
return true;
} catch (_) {
return false;
}
}
function hasDockerCGroup() {
try {
return fs$3.readFileSync('/proc/self/cgroup', 'utf8').includes('docker');
} catch (_) {
return false;
}
}
var isDocker_1 = () => {
if (isDocker$2 === undefined) {
isDocker$2 = hasDockerEnv() || hasDockerCGroup();
}
return isDocker$2;
};
const os = require$$2;
const fs$2 = require$$0__default;
const isDocker$1 = isDocker_1;
const isWsl$1 = () => {
if (process.platform !== 'linux') {
return false;
}
if (os.release().toLowerCase().includes('microsoft')) {
if (isDocker$1()) {
return false;
}
return true;
}
try {
return fs$2.readFileSync('/proc/version', 'utf8').toLowerCase().includes('microsoft') ?
!isDocker$1() : false;
} catch (_) {
return false;
}
};
if (process.env.__IS_WSL_TEST__) {
isWsl$2.exports = isWsl$1;
} else {
isWsl$2.exports = isWsl$1();
}
var isWslExports = isWsl$2.exports;
var defineLazyProp = (object, propertyName, fn) => {
const define = value => Object.defineProperty(object, propertyName, {value, enumerable: true, writable: true});
Object.defineProperty(object, propertyName, {
configurable: true,
enumerable: true,
get() {
const result = fn();
define(result);
return result;
},
set(value) {
define(value);
}
});
return object;
};
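// Illustrative sketch (not part of the original bundle): defineLazyProp defers the
// factory call until the property is first read, then caches the result as a plain
// writable value. The target object and factory below are hypothetical.
function __exampleDefineLazyProp() {
    const target = {};
    let calls = 0;
    defineLazyProp(target, 'answer', () => {
        calls++;
        return 42;
    });
    // The factory has not run yet; the first read triggers it, later reads reuse the value.
    const first = target.answer; // calls === 1, first === 42
    const second = target.answer; // calls still 1, second === 42
    return { first, second, calls };
}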
const path$3 = require$$0$4;
const childProcess = require$$2$1;
const {promises: fs$1, constants: fsConstants} = require$$0__default;
const isWsl = isWslExports;
const isDocker = isDocker_1;
const defineLazyProperty = defineLazyProp;
// Path to included `xdg-open`.
const localXdgOpenPath = path$3.join(__dirname, 'xdg-open');
const {platform, arch} = process;
// Podman detection
const hasContainerEnv = () => {
try {
statSync$1('/run/.containerenv');
return true;
} catch {
return false;
}
};
let cachedResult;
function isInsideContainer() {
if (cachedResult === undefined) {
cachedResult = hasContainerEnv() || isDocker();
}
return cachedResult;
}
/**
Get the mount point for fixed drives in WSL.
@inner
@returns {string} The mount point.
*/
const getWslDrivesMountPoint = (() => {
// Default value for "root" param
// according to https://docs.microsoft.com/en-us/windows/wsl/wsl-config
const defaultMountPoint = '/mnt/';
let mountPoint;
return async function () {
if (mountPoint) {
// Return memoized mount point value
return mountPoint;
}
const configFilePath = '/etc/wsl.conf';
let isConfigFileExists = false;
try {
await fs$1.access(configFilePath, fsConstants.F_OK);
isConfigFileExists = true;
} catch {}
if (!isConfigFileExists) {
return defaultMountPoint;
}
const configContent = await fs$1.readFile(configFilePath, {encoding: 'utf8'});
const configMountPoint = /(?<!#.*)root\s*=\s*(?<mountPoint>.*)/g.exec(configContent);
if (!configMountPoint) {
return defaultMountPoint;
}
mountPoint = configMountPoint.groups.mountPoint.trim();
mountPoint = mountPoint.endsWith('/') ? mountPoint : `${mountPoint}/`;
return mountPoint;
};
})();
const pTryEach = async (array, mapper) => {
let latestError;
for (const item of array) {
try {
return await mapper(item); // eslint-disable-line no-await-in-loop
} catch (error) {
latestError = error;
}
}
throw latestError;
};
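// Illustrative sketch (not part of the original bundle): pTryEach tries each item in
// order and resolves with the first mapper call that does not throw, only rethrowing
// the last error when every attempt fails. The candidate list below is hypothetical.
async function __examplePTryEach() {
    const browsers = ['google-chrome', 'chromium', 'firefox'];
    // Resolves with the first name that "launches"; here only 'firefox' succeeds.
    return pTryEach(browsers, async (name) => {
        if (name !== 'firefox') {
            throw new Error(`${name} is not installed`);
        }
        return name;
    });
}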
const baseOpen = async options => {
options = {
wait: false,
background: false,
newInstance: false,
allowNonzeroExitCode: false,
...options
};
if (Array.isArray(options.app)) {
return pTryEach(options.app, singleApp => baseOpen({
...options,
app: singleApp
}));
}
let {name: app, arguments: appArguments = []} = options.app || {};
appArguments = [...appArguments];
if (Array.isArray(app)) {
return pTryEach(app, appName => baseOpen({
...options,
app: {
name: appName,
arguments: appArguments
}
}));
}
let command;
const cliArguments = [];
const childProcessOptions = {};
if (platform === 'darwin') {
command = 'open';
if (options.wait) {
cliArguments.push('--wait-apps');
}
if (options.background) {
cliArguments.push('--background');
}
if (options.newInstance) {
cliArguments.push('--new');
}
if (app) {
cliArguments.push('-a', app);
}
} else if (platform === 'win32' || (isWsl && !isInsideContainer() && !app)) {
const mountPoint = await getWslDrivesMountPoint();
command = isWsl ?
`${mountPoint}c/Windows/System32/WindowsPowerShell/v1.0/powershell.exe` :
`${process.env.SYSTEMROOT}\\System32\\WindowsPowerShell\\v1.0\\powershell`;
cliArguments.push(
'-NoProfile',
'-NonInteractive',
'ExecutionPolicy',
'Bypass',
'-EncodedCommand'
);
if (!isWsl) {
childProcessOptions.windowsVerbatimArguments = true;
}
const encodedArguments = ['Start'];
if (options.wait) {
encodedArguments.push('-Wait');
}
if (app) {
// Wrap the value in double quotes to ensure the inner quotes are passed through.
// Inner quotes are delimited for PowerShell interpretation with backticks.
encodedArguments.push(`"\`"${app}\`""`, '-ArgumentList');
if (options.target) {
appArguments.unshift(options.target);
}
} else if (options.target) {
encodedArguments.push(`"${options.target}"`);
}
if (appArguments.length > 0) {
appArguments = appArguments.map(arg => `"\`"${arg}\`""`);
encodedArguments.push(appArguments.join(','));
}
// Using Base64-encoded command, accepted by PowerShell, to allow special characters.
options.target = Buffer.from(encodedArguments.join(' '), 'utf16le').toString('base64');
} else {
if (app) {
command = app;
} else {
// When bundled by Webpack, there's no actual package file path and no local `xdg-open`.
const isBundled = !__dirname || __dirname === '/';
// Check if local `xdg-open` exists and is executable.
let exeLocalXdgOpen = false;
try {
await fs$1.access(localXdgOpenPath, fsConstants.X_OK);
exeLocalXdgOpen = true;
} catch {}
const useSystemXdgOpen = process.versions.electron ||
platform === 'android' || isBundled || !exeLocalXdgOpen;
command = useSystemXdgOpen ? 'xdg-open' : localXdgOpenPath;
}
if (appArguments.length > 0) {
cliArguments.push(...appArguments);
}
if (!options.wait) {
// `xdg-open` will block the process unless stdio is ignored
// and it's detached from the parent even if it's unref'd.
childProcessOptions.stdio = 'ignore';
childProcessOptions.detached = true;
}
}
if (options.target) {
cliArguments.push(options.target);
}
if (platform === 'darwin' && appArguments.length > 0) {
cliArguments.push('--args', ...appArguments);
}
const subprocess = childProcess.spawn(command, cliArguments, childProcessOptions);
if (options.wait) {
return new Promise((resolve, reject) => {
subprocess.once('error', reject);
subprocess.once('close', exitCode => {
if (!options.allowNonzeroExitCode && exitCode > 0) {
reject(new Error(`Exited with code ${exitCode}`));
return;
}
resolve(subprocess);
});
});
}
subprocess.unref();
return subprocess;
};
const open = (target, options) => {
if (typeof target !== 'string') {
throw new TypeError('Expected a `target`');
}
return baseOpen({
...options,
target
});
};
const openApp = (name, options) => {
if (typeof name !== 'string') {
throw new TypeError('Expected a `name`');
}
const {arguments: appArguments = []} = options || {};
if (appArguments !== undefined && appArguments !== null && !Array.isArray(appArguments)) {
throw new TypeError('Expected `appArguments` as Array type');
}
return baseOpen({
...options,
app: {
name,
arguments: appArguments
}
});
};
function detectArchBinary(binary) {
if (typeof binary === 'string' || Array.isArray(binary)) {
return binary;
}
const {[arch]: archBinary} = binary;
if (!archBinary) {
throw new Error(`${arch} is not supported`);
}
return archBinary;
}
function detectPlatformBinary({[platform]: platformBinary}, {wsl}) {
if (wsl && isWsl) {
return detectArchBinary(wsl);
}
if (!platformBinary) {
throw new Error(`${platform} is not supported`);
}
return detectArchBinary(platformBinary);
}
const apps = {};
defineLazyProperty(apps, 'chrome', () => detectPlatformBinary({
darwin: 'google chrome',
win32: 'chrome',
linux: ['google-chrome', 'google-chrome-stable', 'chromium']
}, {
wsl: {
ia32: '/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
x64: ['/mnt/c/Program Files/Google/Chrome/Application/chrome.exe', '/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe']
}
}));
defineLazyProperty(apps, 'firefox', () => detectPlatformBinary({
darwin: 'firefox',
win32: 'C:\\Program Files\\Mozilla Firefox\\firefox.exe',
linux: 'firefox'
}, {
wsl: '/mnt/c/Program Files/Mozilla Firefox/firefox.exe'
}));
defineLazyProperty(apps, 'edge', () => detectPlatformBinary({
darwin: 'microsoft edge',
win32: 'msedge',
linux: ['microsoft-edge', 'microsoft-edge-dev']
}, {
wsl: '/mnt/c/Program Files (x86)/Microsoft/Edge/Application/msedge.exe'
}));
open.apps = apps;
open.openApp = openApp;
var open_1 = open;
var open$1 = /*@__PURE__*/getDefaultExportFromCjs(open_1);
var crossSpawn = {exports: {}};
var windows;
var hasRequiredWindows;
function requireWindows () {
if (hasRequiredWindows) return windows;
hasRequiredWindows = 1;
windows = isexe;
isexe.sync = sync;
var fs = require$$0__default;
function checkPathExt (path, options) {
var pathext = options.pathExt !== undefined ?
options.pathExt : process.env.PATHEXT;
if (!pathext) {
return true
}
pathext = pathext.split(';');
if (pathext.indexOf('') !== -1) {
return true
}
for (var i = 0; i < pathext.length; i++) {
var p = pathext[i].toLowerCase();
if (p && path.substr(-p.length).toLowerCase() === p) {
return true
}
}
return false
}
function checkStat (stat, path, options) {
if (!stat.isSymbolicLink() && !stat.isFile()) {
return false
}
return checkPathExt(path, options)
}
function isexe (path, options, cb) {
fs.stat(path, function (er, stat) {
cb(er, er ? false : checkStat(stat, path, options));
});
}
function sync (path, options) {
return checkStat(fs.statSync(path), path, options)
}
return windows;
}
var mode;
var hasRequiredMode;
function requireMode () {
if (hasRequiredMode) return mode;
hasRequiredMode = 1;
mode = isexe;
isexe.sync = sync;
var fs = require$$0__default;
function isexe (path, options, cb) {
fs.stat(path, function (er, stat) {
cb(er, er ? false : checkStat(stat, options));
});
}
function sync (path, options) {
return checkStat(fs.statSync(path), options)
}
function checkStat (stat, options) {
return stat.isFile() && checkMode(stat, options)
}
function checkMode (stat, options) {
var mod = stat.mode;
var uid = stat.uid;
var gid = stat.gid;
var myUid = options.uid !== undefined ?
options.uid : process.getuid && process.getuid();
var myGid = options.gid !== undefined ?
options.gid : process.getgid && process.getgid();
var u = parseInt('100', 8);
var g = parseInt('010', 8);
var o = parseInt('001', 8);
var ug = u | g;
var ret = (mod & o) ||
(mod & g) && gid === myGid ||
(mod & u) && uid === myUid ||
(mod & ug) && myUid === 0;
return ret
}
return mode;
}
var core;
if (process.platform === 'win32' || commonjsGlobal.TESTING_WINDOWS) {
core = requireWindows();
} else {
core = requireMode();
}
var isexe_1 = isexe$1;
isexe$1.sync = sync;
function isexe$1 (path, options, cb) {
if (typeof options === 'function') {
cb = options;
options = {};
}
if (!cb) {
if (typeof Promise !== 'function') {
throw new TypeError('callback not provided')
}
return new Promise(function (resolve, reject) {
isexe$1(path, options || {}, function (er, is) {
if (er) {
reject(er);
} else {
resolve(is);
}
});
})
}
core(path, options || {}, function (er, is) {
// ignore EACCES because that just means we aren't allowed to run it
if (er) {
if (er.code === 'EACCES' || options && options.ignoreErrors) {
er = null;
is = false;
}
}
cb(er, is);
});
}
function sync (path, options) {
// my kingdom for a filtered catch
try {
return core.sync(path, options || {})
} catch (er) {
if (options && options.ignoreErrors || er.code === 'EACCES') {
return false
} else {
throw er
}
}
}
const isWindows = process.platform === 'win32' ||
process.env.OSTYPE === 'cygwin' ||
process.env.OSTYPE === 'msys';
const path$2 = require$$0$4;
const COLON = isWindows ? ';' : ':';
const isexe = isexe_1;
const getNotFoundError = (cmd) =>
Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' });
const getPathInfo = (cmd, opt) => {
const colon = opt.colon || COLON;
// If it has a slash, then we don't bother searching the pathenv;
// just check the file itself, and that's it.
const pathEnv = cmd.match(/\//) || isWindows && cmd.match(/\\/) ? ['']
: (
[
// windows always checks the cwd first
...(isWindows ? [process.cwd()] : []),
...(opt.path || process.env.PATH ||
/* istanbul ignore next: very unusual */ '').split(colon),
]
);
const pathExtExe = isWindows
? opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM'
: '';
const pathExt = isWindows ? pathExtExe.split(colon) : [''];
if (isWindows) {
if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
pathExt.unshift('');
}
return {
pathEnv,
pathExt,
pathExtExe,
}
};
const which$1 = (cmd, opt, cb) => {
if (typeof opt === 'function') {
cb = opt;
opt = {};
}
if (!opt)
opt = {};
const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
const found = [];
const step = i => new Promise((resolve, reject) => {
if (i === pathEnv.length)
return opt.all && found.length ? resolve(found)
: reject(getNotFoundError(cmd))
const ppRaw = pathEnv[i];
const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
const pCmd = path$2.join(pathPart, cmd);
const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd
: pCmd;
resolve(subStep(p, i, 0));
});
const subStep = (p, i, ii) => new Promise((resolve, reject) => {
if (ii === pathExt.length)
return resolve(step(i + 1))
const ext = pathExt[ii];
isexe(p + ext, { pathExt: pathExtExe }, (er, is) => {
if (!er && is) {
if (opt.all)
found.push(p + ext);
else
return resolve(p + ext)
}
return resolve(subStep(p, i, ii + 1))
});
});
return cb ? step(0).then(res => cb(null, res), cb) : step(0)
};
const whichSync = (cmd, opt) => {
opt = opt || {};
const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
const found = [];
for (let i = 0; i < pathEnv.length; i ++) {
const ppRaw = pathEnv[i];
const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
const pCmd = path$2.join(pathPart, cmd);
const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd
: pCmd;
for (let j = 0; j < pathExt.length; j ++) {
const cur = p + pathExt[j];
try {
const is = isexe.sync(cur, { pathExt: pathExtExe });
if (is) {
if (opt.all)
found.push(cur);
else
return cur
}
} catch (ex) {}
}
}
if (opt.all && found.length)
return found
if (opt.nothrow)
return null
throw getNotFoundError(cmd)
};
var which_1 = which$1;
which$1.sync = whichSync;
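// Illustrative sketch (not part of the original bundle): which$1 walks the PATH
// entries (and PATHEXT extensions on Windows) and resolves the first executable
// match, supporting both callback and promise styles. The lookup below is hypothetical.
function __exampleWhich() {
    // Promise style: resolves with e.g. '/usr/bin/node', or rejects with ENOENT if missing.
    return which$1('node');
}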
var pathKey$1 = {exports: {}};
const pathKey = (options = {}) => {
const environment = options.env || process.env;
const platform = options.platform || process.platform;
if (platform !== 'win32') {
return 'PATH';
}
return Object.keys(environment).reverse().find(key => key.toUpperCase() === 'PATH') || 'Path';
};
pathKey$1.exports = pathKey;
// TODO: Remove this for the next major release
pathKey$1.exports.default = pathKey;
var pathKeyExports = pathKey$1.exports;
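// Illustrative sketch (not part of the original bundle): pathKey returns the name of
// the PATH variable for the given platform; on Windows the real key may be cased as
// 'Path', elsewhere it is always 'PATH'. The environment below is hypothetical.
function __examplePathKey() {
    const key = pathKey({ platform: 'win32', env: { Path: 'C:\\Windows\\System32' } });
    // key === 'Path' here; on linux/darwin pathKey() simply returns 'PATH'.
    return key;
}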
const path$1 = require$$0$4;
const which = which_1;
const getPathKey = pathKeyExports;
function resolveCommandAttempt(parsed, withoutPathExt) {
const env = parsed.options.env || process.env;
const cwd = process.cwd();
const hasCustomCwd = parsed.options.cwd != null;
// Worker threads do not have process.chdir()
const shouldSwitchCwd = hasCustomCwd && process.chdir !== undefined && !process.chdir.disabled;
// If a custom `cwd` was specified, we need to change the process cwd
// because `which` will do stat calls but does not support a custom cwd
if (shouldSwitchCwd) {
try {
process.chdir(parsed.options.cwd);
} catch (err) {
/* Empty */
}
}
let resolved;
try {
resolved = which.sync(parsed.command, {
path: env[getPathKey({ env })],
pathExt: withoutPathExt ? path$1.delimiter : undefined,
});
} catch (e) {
/* Empty */
} finally {
if (shouldSwitchCwd) {
process.chdir(cwd);
}
}
// If we successfully resolved, ensure that an absolute path is returned
// Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
if (resolved) {
resolved = path$1.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
}
return resolved;
}
function resolveCommand$1(parsed) {
return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
}
var resolveCommand_1 = resolveCommand$1;
var _escape = {};
// See http://www.robvanderwoude.com/escapechars.php
const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;
function escapeCommand(arg) {
// Escape meta chars
arg = arg.replace(metaCharsRegExp, '^$1');
return arg;
}
function escapeArgument(arg, doubleEscapeMetaChars) {
// Convert to string
arg = `${arg}`;
// Algorithm below is based on https://qntm.org/cmd
// Sequence of backslashes followed by a double quote:
// double up all the backslashes and escape the double quote
arg = arg.replace(/(\\*)"/g, '$1$1\\"');
// Sequence of backslashes followed by the end of the string
// (which will become a double quote later):
// double up all the backslashes
arg = arg.replace(/(\\*)$/, '$1$1');
// All other backslashes occur literally
// Quote the whole thing:
arg = `"${arg}"`;
// Escape meta chars
arg = arg.replace(metaCharsRegExp, '^$1');
// Double escape meta chars if necessary
if (doubleEscapeMetaChars) {
arg = arg.replace(metaCharsRegExp, '^$1');
}
return arg;
}
_escape.command = escapeCommand;
_escape.argument = escapeArgument;
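// Illustrative sketch (not part of the original bundle): how the two escape helpers
// prepare a command and an argument for cmd.exe. The sample values are hypothetical.
function __exampleEscape() {
    const command = escapeCommand('echo');            // cmd meta chars get a ^ prefix
    const argument = escapeArgument('hello "world"'); // quoted, with " and the space escaped for cmd.exe
    return `${command} ${argument}`;
}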
var shebangRegex$1 = /^#!(.*)/;
const shebangRegex = shebangRegex$1;
var shebangCommand$1 = (string = '') => {
const match = string.match(shebangRegex);
if (!match) {
return null;
}
const [path, argument] = match[0].replace(/#! ?/, '').split(' ');
const binary = path.split('/').pop();
if (binary === 'env') {
return argument;
}
return argument ? `${binary} ${argument}` : binary;
};
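// Illustrative sketch (not part of the original bundle): how shebangCommand$1 reduces a
// shebang line to the command that should actually be spawned. The inputs are hypothetical.
function __exampleShebangCommand() {
    const direct = shebangCommand$1('#!/usr/bin/env node'); // -> 'node' ('env' is stripped)
    const withArg = shebangCommand$1('#!/bin/bash -e');     // -> 'bash -e'
    return { direct, withArg };
}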
const fs = require$$0__default;
const shebangCommand = shebangCommand$1;
function readShebang$1(command) {
// Read the first 150 bytes from the file
const size = 150;
const buffer = Buffer.alloc(size);
let fd;
try {
fd = fs.openSync(command, 'r');
fs.readSync(fd, buffer, 0, size, 0);
fs.closeSync(fd);
} catch (e) { /* Empty */ }
// Attempt to extract shebang (null is returned if not a shebang)
return shebangCommand(buffer.toString());
}
var readShebang_1 = readShebang$1;
const path = require$$0$4;
const resolveCommand = resolveCommand_1;
const escape$1 = _escape;
const readShebang = readShebang_1;
const isWin$1 = process.platform === 'win32';
const isExecutableRegExp = /\.(?:com|exe)$/i;
const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i;
function detectShebang(parsed) {
parsed.file = resolveCommand(parsed);
const shebang = parsed.file && readShebang(parsed.file);
if (shebang) {
parsed.args.unshift(parsed.file);
parsed.command = shebang;
return resolveCommand(parsed);
}
return parsed.file;
}
function parseNonShell(parsed) {
if (!isWin$1) {
return parsed;
}
// Detect & add support for shebangs
const commandFile = detectShebang(parsed);
// We don't need a shell if the command filename is an executable
const needsShell = !isExecutableRegExp.test(commandFile);
// If a shell is required, use cmd.exe and take care of escaping everything correctly
// Note that `forceShell` is a hidden option used only in tests
if (parsed.options.forceShell || needsShell) {
// Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
// The cmd-shim simply executes the package bin file with Node.js, proxying any arguments
// Because the escaping of meta chars with ^ gets interpreted when cmd.exe is first called,
// we need to double escape them
const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);
// Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
// This is necessary otherwise it will always fail with ENOENT in those cases
parsed.command = path.normalize(parsed.command);
// Escape command & arguments
parsed.command = escape$1.command(parsed.command);
parsed.args = parsed.args.map((arg) => escape$1.argument(arg, needsDoubleEscapeMetaChars));
const shellCommand = [parsed.command].concat(parsed.args).join(' ');
parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
parsed.command = process.env.comspec || 'cmd.exe';
parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
}
return parsed;
}
function parse$4(command, args, options) {
// Normalize arguments, similar to nodejs
if (args && !Array.isArray(args)) {
options = args;
args = null;
}
args = args ? args.slice(0) : []; // Clone array to avoid changing the original
options = Object.assign({}, options); // Clone object to avoid changing the original
// Build our parsed object
const parsed = {
command,
args,
options,
file: undefined,
original: {
command,
args,
},
};
// Delegate further parsing to shell or non-shell
return options.shell ? parsed : parseNonShell(parsed);
}
var parse_1 = parse$4;
const isWin = process.platform === 'win32';
function notFoundError(original, syscall) {
return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), {
code: 'ENOENT',
errno: 'ENOENT',
syscall: `${syscall} ${original.command}`,
path: original.command,
spawnargs: original.args,
});
}
function hookChildProcess(cp, parsed) {
if (!isWin) {
return;
}
const originalEmit = cp.emit;
cp.emit = function (name, arg1) {
// If emitting "exit" event and exit code is 1, we need to check if
// the command exists and emit an "error" instead
// See https://github.com/IndigoUnited/node-cross-spawn/issues/16
if (name === 'exit') {
const err = verifyENOENT(arg1, parsed);
if (err) {
return originalEmit.call(cp, 'error', err);
}
}
return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
};
}
function verifyENOENT(status, parsed) {
if (isWin && status === 1 && !parsed.file) {
return notFoundError(parsed.original, 'spawn');
}
return null;
}
function verifyENOENTSync(status, parsed) {
if (isWin && status === 1 && !parsed.file) {
return notFoundError(parsed.original, 'spawnSync');
}
return null;
}
var enoent$1 = {
hookChildProcess,
verifyENOENT,
verifyENOENTSync,
notFoundError,
};
const cp = require$$2$1;
const parse$3 = parse_1;
const enoent = enoent$1;
function spawn(command, args, options) {
// Parse the arguments
const parsed = parse$3(command, args, options);
// Spawn the child process
const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);
// Hook into child process "exit" event to emit an error if the command
// does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
enoent.hookChildProcess(spawned, parsed);
return spawned;
}
function spawnSync(command, args, options) {
// Parse the arguments
const parsed = parse$3(command, args, options);
// Spawn the child process
const result = cp.spawnSync(parsed.command, parsed.args, parsed.options);
// Analyze whether the command failed because it does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);
return result;
}
crossSpawn.exports = spawn;
crossSpawn.exports.spawn = spawn;
crossSpawn.exports.sync = spawnSync;
crossSpawn.exports._parse = parse$3;
crossSpawn.exports._enoent = enoent;
var crossSpawnExports = crossSpawn.exports;
var spawn$1 = /*@__PURE__*/getDefaultExportFromCjs(crossSpawnExports);
/**
* The following is modified based on source found in
* https://github.com/facebook/create-react-app
*
* MIT Licensed
* Copyright (c) 2015-present, Facebook, Inc.
* https://github.com/facebook/create-react-app/blob/master/LICENSE
*
*/
/**
* Reads the BROWSER environment variable and decides what to do with it.
*/
function openBrowser(url, opt, logger) {
// The browser executable to open.
// See https://github.com/sindresorhus/open#app for documentation.
const browser = typeof opt === 'string' ? opt : process.env.BROWSER || '';
if (browser.toLowerCase().endsWith('.js')) {
executeNodeScript(browser, url, logger);
}
else if (browser.toLowerCase() !== 'none') {
const browserArgs = process.env.BROWSER_ARGS
? process.env.BROWSER_ARGS.split(' ')
: [];
startBrowserProcess(browser, browserArgs, url);
}
}
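// Illustrative note (not part of the original bundle): the same decision expressed as
// example environment values. BROWSER=none skips opening, a *.js value runs that script
// via executeNodeScript, and any other value is forwarded to `open` as the app name,
// with BROWSER_ARGS split on spaces into extra arguments. The values are hypothetical:
//   BROWSER=firefox BROWSER_ARGS="--private-window" vite
//   BROWSER=./scripts/notify-browser.js vite
//   BROWSER=none vite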
function executeNodeScript(scriptPath, url, logger) {
const extraArgs = process.argv.slice(2);
const child = spawn$1(process.execPath, [scriptPath, ...extraArgs, url], {
stdio: 'inherit',
});
child.on('close', (code) => {
if (code !== 0) {
logger.error(colors$1.red(`\nThe script specified as BROWSER environment variable failed.\n\n${colors$1.cyan(scriptPath)} exited with code ${code}.`), { error: null });
}
});
}
const supportedChromiumBrowsers = [
'Google Chrome Canary',
'Google Chrome Dev',
'Google Chrome Beta',
'Google Chrome',
'Microsoft Edge',
'Brave Browser',
'Vivaldi',
'Chromium',
];
async function startBrowserProcess(browser, browserArgs, url) {
// If we're on OS X and the user hasn't specifically
// requested a different browser, we can try opening
// a Chromium browser with AppleScript. This lets us reuse an
// existing tab when possible instead of creating a new one.
const preferredOSXBrowser = browser === 'google chrome' ? 'Google Chrome' : browser;
const shouldTryOpenChromeWithAppleScript = process.platform === 'darwin' &&
(!preferredOSXBrowser ||
supportedChromiumBrowsers.includes(preferredOSXBrowser));
if (shouldTryOpenChromeWithAppleScript) {
try {
const ps = await execAsync('ps cax');
const openedBrowser = preferredOSXBrowser && ps.includes(preferredOSXBrowser)
? preferredOSXBrowser
: supportedChromiumBrowsers.find((b) => ps.includes(b));
if (openedBrowser) {
// Try our best to reuse existing tab with AppleScript
await execAsync(`osascript openChrome.applescript "${encodeURI(url)}" "${openedBrowser}"`, {
cwd: join$2(VITE_PACKAGE_DIR, 'bin'),
});
return true;
}
}
catch (err) {
// Ignore errors
}
}
// Another special case: on OS X, check if BROWSER has been set to "open".
// In this case, instead of passing the string `open` to the `open` function (which won't work),
// just ignore it (thus ensuring the intended behavior, i.e. opening the system browser):
// https://github.com/facebook/create-react-app/pull/1690#issuecomment-283518768
if (process.platform === 'darwin' && browser === 'open') {
browser = undefined;
}
// Fallback to open
// (It will always open new tab)
try {
const options = browser
? { app: { name: browser, arguments: browserArgs } }
: {};
open$1(url, options).catch(() => { }); // Prevent `unhandledRejection` error.
return true;
}
catch (err) {
return false;
}
}
function execAsync(command, options) {
return new Promise((resolve, reject) => {
exec(command, options, (error, stdout) => {
if (error) {
reject(error);
}
else {
resolve(stdout.toString());
}
});
});
}
function bindShortcuts(server, opts) {
if (!server.httpServer || !process.stdin.isTTY || process.env.CI) {
return;
}
const isDev = isDevServer(server);
if (isDev) {
server._shortcutsOptions = opts;
}
if (opts?.print) {
server.config.logger.info(colors$1.dim(colors$1.green(' ➜')) +
colors$1.dim(' press ') +
colors$1.bold('h') +
colors$1.dim(' to show help'));
}
const shortcuts = (opts?.customShortcuts ?? [])
.filter(isDefined)
// @ts-expect-error passing the right types, but typescript can't detect it
.concat(isDev ? BASE_DEV_SHORTCUTS : BASE_PREVIEW_SHORTCUTS);
let actionRunning = false;
const onInput = async (input) => {
// ctrl+c or ctrl+d
if (input === '\x03' || input === '\x04') {
try {
if (isDev) {
await server.close();
}
else {
server.httpServer.close();
}
}
finally {
process.exit(1);
}
return;
}
if (actionRunning)
return;
if (input === 'h') {
server.config.logger.info([
'',
colors$1.bold(' Shortcuts'),
...shortcuts.map((shortcut) => colors$1.dim(' press ') +
colors$1.bold(shortcut.key) +
colors$1.dim(` to ${shortcut.description}`)),
].join('\n'));
}
const shortcut = shortcuts.find((shortcut) => shortcut.key === input);
if (!shortcut)
return;
actionRunning = true;
await shortcut.action(server);
actionRunning = false;
};
process.stdin.setRawMode(true);
process.stdin.on('data', onInput).setEncoding('utf8').resume();
server.httpServer.on('close', () => {
process.stdin.off('data', onInput).pause();
});
}
function isDevServer(server) {
return 'pluginContainer' in server;
}
const BASE_DEV_SHORTCUTS = [
{
key: 'r',
description: 'restart the server',
async action(server) {
await server.restart();
},
},
{
key: 'u',
description: 'show server url',
action(server) {
server.config.logger.info('');
server.printUrls();
},
},
{
key: 'o',
description: 'open in browser',
action(server) {
server.openBrowser();
},
},
{
key: 'c',
description: 'clear console',
action(server) {
server.config.logger.clearScreen('error');
},
},
{
key: 'q',
description: 'quit',
async action(server) {
await server.close().finally(() => process.exit());
},
},
];
const BASE_PREVIEW_SHORTCUTS = [
{
key: 'o',
description: 'open in browser',
action(server) {
const url = server.resolvedUrls.local[0] ?? server.resolvedUrls.network[0];
openBrowser(url, true, server.config.logger);
},
},
{
key: 'q',
description: 'quit',
action(server) {
try {
server.httpServer.close();
}
finally {
process.exit();
}
},
},
];
var bufferUtil$1 = {exports: {}};
var constants = {
BINARY_TYPES: ['nodebuffer', 'arraybuffer', 'fragments'],
EMPTY_BUFFER: Buffer.alloc(0),
GUID: '258EAFA5-E914-47DA-95CA-C5AB0DC85B11',
kForOnEventAttribute: Symbol('kIsForOnEventAttribute'),
kListener: Symbol('kListener'),
kStatusCode: Symbol('status-code'),
kWebSocket: Symbol('websocket'),
NOOP: () => {}
};
const { EMPTY_BUFFER: EMPTY_BUFFER$3 } = constants;
const FastBuffer$2 = Buffer[Symbol.species];
/**
* Merges an array of buffers into a new buffer.
*
* @param {Buffer[]} list The array of buffers to concat
* @param {Number} totalLength The total length of buffers in the list
* @return {Buffer} The resulting buffer
* @public
*/
function concat$1(list, totalLength) {
if (list.length === 0) return EMPTY_BUFFER$3;
if (list.length === 1) return list[0];
const target = Buffer.allocUnsafe(totalLength);
let offset = 0;
for (let i = 0; i < list.length; i++) {
const buf = list[i];
target.set(buf, offset);
offset += buf.length;
}
if (offset < totalLength) {
return new FastBuffer$2(target.buffer, target.byteOffset, offset);
}
return target;
}
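// Illustrative sketch (not part of the original bundle): concat$1 copies the fragments
// into one preallocated buffer and returns a trimmed view when the fragments are shorter
// than totalLength. The fragment list below is hypothetical.
function __exampleConcat() {
    const merged = concat$1([Buffer.from('he'), Buffer.from('llo')], 5);
    // merged.toString() === 'hello'
    return merged;
}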
/**
* Masks a buffer using the given mask.
*
* @param {Buffer} source The buffer to mask
* @param {Buffer} mask The mask to use
* @param {Buffer} output The buffer where to store the result
* @param {Number} offset The offset at which to start writing
* @param {Number} length The number of bytes to mask.
* @public
*/
function _mask(source, mask, output, offset, length) {
for (let i = 0; i < length; i++) {
output[offset + i] = source[i] ^ mask[i & 3];
}
}
/**
* Unmasks a buffer using the given mask.
*
* @param {Buffer} buffer The buffer to unmask
* @param {Buffer} mask The mask to use
* @public
*/
function _unmask(buffer, mask) {
for (let i = 0; i < buffer.length; i++) {
buffer[i] ^= mask[i & 3];
}
}
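// Illustrative sketch (not part of the original bundle): masking XORs every payload byte
// with the 4-byte frame mask, so applying _unmask with the same mask restores the data.
// The payload and mask below are hypothetical.
function __exampleMaskRoundTrip() {
    const payload = Buffer.from('ping');
    const mask = Buffer.from([0x01, 0x02, 0x03, 0x04]);
    const masked = Buffer.alloc(payload.length);
    _mask(payload, mask, masked, 0, payload.length);
    _unmask(masked, mask);
    // masked.toString() === 'ping' again after unmasking in place
    return masked;
}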
/**
* Converts a buffer to an `ArrayBuffer`.
*
* @param {Buffer} buf The buffer to convert
* @return {ArrayBuffer} Converted buffer
* @public
*/
function toArrayBuffer$1(buf) {
if (buf.length === buf.buffer.byteLength) {
return buf.buffer;
}
return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.length);
}
/**
* Converts `data` to a `Buffer`.
*
* @param {*} data The data to convert
* @return {Buffer} The buffer
* @throws {TypeError}
* @public
*/
function toBuffer$2(data) {
toBuffer$2.readOnly = true;
if (Buffer.isBuffer(data)) return data;
let buf;
if (data instanceof ArrayBuffer) {
buf = new FastBuffer$2(data);
} else if (ArrayBuffer.isView(data)) {
buf = new FastBuffer$2(data.buffer, data.byteOffset, data.byteLength);
} else {
buf = Buffer.from(data);
toBuffer$2.readOnly = false;
}
return buf;
}
bufferUtil$1.exports = {
concat: concat$1,
mask: _mask,
toArrayBuffer: toArrayBuffer$1,
toBuffer: toBuffer$2,
unmask: _unmask
};
/* istanbul ignore else */
if (!process.env.WS_NO_BUFFER_UTIL) {
try {
const bufferUtil = require('bufferutil');
bufferUtil$1.exports.mask = function (source, mask, output, offset, length) {
if (length < 48) _mask(source, mask, output, offset, length);
else bufferUtil.mask(source, mask, output, offset, length);
};
bufferUtil$1.exports.unmask = function (buffer, mask) {
if (buffer.length < 32) _unmask(buffer, mask);
else bufferUtil.unmask(buffer, mask);
};
} catch (e) {
// Continue regardless of the error.
}
}
var bufferUtilExports = bufferUtil$1.exports;
const kDone = Symbol('kDone');
const kRun = Symbol('kRun');
/**
* A very simple job queue with adjustable concurrency. Adapted from
* https://github.com/STRML/async-limiter
*/
let Limiter$1 = class Limiter {
/**
* Creates a new `Limiter`.
*
* @param {Number} [concurrency=Infinity] The maximum number of jobs allowed
* to run concurrently
*/
constructor(concurrency) {
this[kDone] = () => {
this.pending--;
this[kRun]();
};
this.concurrency = concurrency || Infinity;
this.jobs = [];
this.pending = 0;
}
/**
* Adds a job to the queue.
*
* @param {Function} job The job to run
* @public
*/
add(job) {
this.jobs.push(job);
this[kRun]();
}
/**
* Removes a job from the queue and runs it if possible.
*
* @private
*/
[kRun]() {
if (this.pending === this.concurrency) return;
if (this.jobs.length) {
const job = this.jobs.shift();
this.pending++;
job(this[kDone]);
}
}
};
var limiter = Limiter$1;
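// Illustrative sketch (not part of the original bundle): Limiter$1 runs at most
// `concurrency` jobs at a time; each job receives a `done` callback that frees a slot
// and lets the next queued job start. The jobs below are hypothetical.
function __exampleLimiter() {
    const queue = new Limiter$1(2);
    for (let i = 0; i < 5; i++) {
        queue.add((done) => {
            // at most two of these timers are pending at any moment
            setTimeout(done, 10);
        });
    }
}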
const zlib = require$$0$a;
const bufferUtil = bufferUtilExports;
const Limiter = limiter;
const { kStatusCode: kStatusCode$2 } = constants;
const FastBuffer$1 = Buffer[Symbol.species];
const TRAILER = Buffer.from([0x00, 0x00, 0xff, 0xff]);
const kPerMessageDeflate = Symbol('permessage-deflate');
const kTotalLength = Symbol('total-length');
const kCallback = Symbol('callback');
const kBuffers = Symbol('buffers');
const kError$1 = Symbol('error');
//
// We limit zlib concurrency, which prevents severe memory fragmentation
// as documented in https://github.com/nodejs/node/issues/8871#issuecomment-250915913
// and https://github.com/websockets/ws/issues/1202
//
// Intentionally global; it's the global thread pool that's an issue.
//
let zlibLimiter;
/**
* permessage-deflate implementation.
*/
let PerMessageDeflate$4 = class PerMessageDeflate {
/**
* Creates a PerMessageDeflate instance.
*
* @param {Object} [options] Configuration options
* @param {(Boolean|Number)} [options.clientMaxWindowBits] Advertise support
* for, or request, a custom client window size
* @param {Boolean} [options.clientNoContextTakeover=false] Advertise/
* acknowledge disabling of client context takeover
* @param {Number} [options.concurrencyLimit=10] The number of concurrent
* calls to zlib
* @param {(Boolean|Number)} [options.serverMaxWindowBits] Request/confirm the
* use of a custom server window size
* @param {Boolean} [options.serverNoContextTakeover=false] Request/accept
* disabling of server context takeover
* @param {Number} [options.threshold=1024] Size (in bytes) below which
* messages should not be compressed if context takeover is disabled
* @param {Object} [options.zlibDeflateOptions] Options to pass to zlib on
* deflate
* @param {Object} [options.zlibInflateOptions] Options to pass to zlib on
* inflate
* @param {Boolean} [isServer=false] Create the instance in either server or
* client mode
* @param {Number} [maxPayload=0] The maximum allowed message length
*/
constructor(options, isServer, maxPayload) {
this._maxPayload = maxPayload | 0;
this._options = options || {};
this._threshold =
this._options.threshold !== undefined ? this._options.threshold : 1024;
this._isServer = !!isServer;
this._deflate = null;
this._inflate = null;
this.params = null;
if (!zlibLimiter) {
const concurrency =
this._options.concurrencyLimit !== undefined
? this._options.concurrencyLimit
: 10;
zlibLimiter = new Limiter(concurrency);
}
}
/**
* @type {String}
*/
static get extensionName() {
return 'permessage-deflate';
}
/**
* Create an extension negotiation offer.
*
* @return {Object} Extension parameters
* @public
*/
offer() {
const params = {};
if (this._options.serverNoContextTakeover) {
params.server_no_context_takeover = true;
}
if (this._options.clientNoContextTakeover) {
params.client_no_context_takeover = true;
}
if (this._options.serverMaxWindowBits) {
params.server_max_window_bits = this._options.serverMaxWindowBits;
}
if (this._options.clientMaxWindowBits) {
params.client_max_window_bits = this._options.clientMaxWindowBits;
} else if (this._options.clientMaxWindowBits == null) {
params.client_max_window_bits = true;
}
return params;
}
/**
* Accept an extension negotiation offer/response.
*
* @param {Array} configurations The extension negotiation offers/response
* @return {Object} Accepted configuration
* @public
*/
accept(configurations) {
configurations = this.normalizeParams(configurations);
this.params = this._isServer
? this.acceptAsServer(configurations)
: this.acceptAsClient(configurations);
return this.params;
}
/**
* Releases all resources used by the extension.
*
* @public
*/
cleanup() {
if (this._inflate) {
this._inflate.close();
this._inflate = null;
}
if (this._deflate) {
const callback = this._deflate[kCallback];
this._deflate.close();
this._deflate = null;
if (callback) {
callback(
new Error(
'The deflate stream was closed while data was being processed'
)
);
}
}
}
/**
* Accept an extension negotiation offer.
*
* @param {Array} offers The extension negotiation offers
* @return {Object} Accepted configuration
* @private
*/
acceptAsServer(offers) {
const opts = this._options;
const accepted = offers.find((params) => {
if (
(opts.serverNoContextTakeover === false &&
params.server_no_context_takeover) ||
(params.server_max_window_bits &&
(opts.serverMaxWindowBits === false ||
(typeof opts.serverMaxWindowBits === 'number' &&
opts.serverMaxWindowBits > params.server_max_window_bits))) ||
(typeof opts.clientMaxWindowBits === 'number' &&
!params.client_max_window_bits)
) {
return false;
}
return true;
});
if (!accepted) {
throw new Error('None of the extension offers can be accepted');
}
if (opts.serverNoContextTakeover) {
accepted.server_no_context_takeover = true;
}
if (opts.clientNoContextTakeover) {
accepted.client_no_context_takeover = true;
}
if (typeof opts.serverMaxWindowBits === 'number') {
accepted.server_max_window_bits = opts.serverMaxWindowBits;
}
if (typeof opts.clientMaxWindowBits === 'number') {
accepted.client_max_window_bits = opts.clientMaxWindowBits;
} else if (
accepted.client_max_window_bits === true ||
opts.clientMaxWindowBits === false
) {
delete accepted.client_max_window_bits;
}
return accepted;
}
/**
* Accept the extension negotiation response.
*
* @param {Array} response The extension negotiation response
* @return {Object} Accepted configuration
* @private
*/
acceptAsClient(response) {
const params = response[0];
if (
this._options.clientNoContextTakeover === false &&
params.client_no_context_takeover
) {
throw new Error('Unexpected parameter "client_no_context_takeover"');
}
if (!params.client_max_window_bits) {
if (typeof this._options.clientMaxWindowBits === 'number') {
params.client_max_window_bits = this._options.clientMaxWindowBits;
}
} else if (
this._options.clientMaxWindowBits === false ||
(typeof this._options.clientMaxWindowBits === 'number' &&
params.client_max_window_bits > this._options.clientMaxWindowBits)
) {
throw new Error(
'Unexpected or invalid parameter "client_max_window_bits"'
);
}
return params;
}
/**
* Normalize parameters.
*
* @param {Array} configurations The extension negotiation offers/response
* @return {Array} The offers/response with normalized parameters
* @private
*/
normalizeParams(configurations) {
configurations.forEach((params) => {
Object.keys(params).forEach((key) => {
let value = params[key];
if (value.length > 1) {
throw new Error(`Parameter "${key}" must have only a single value`);
}
value = value[0];
if (key === 'client_max_window_bits') {
if (value !== true) {
const num = +value;
if (!Number.isInteger(num) || num < 8 || num > 15) {
throw new TypeError(
`Invalid value for parameter "${key}": ${value}`
);
}
value = num;
} else if (!this._isServer) {
throw new TypeError(
`Invalid value for parameter "${key}": ${value}`
);
}
} else if (key === 'server_max_window_bits') {
const num = +value;
if (!Number.isInteger(num) || num < 8 || num > 15) {
throw new TypeError(
`Invalid value for parameter "${key}": ${value}`
);
}
value = num;
} else if (
key === 'client_no_context_takeover' ||
key === 'server_no_context_takeover'
) {
if (value !== true) {
throw new TypeError(
`Invalid value for parameter "${key}": ${value}`
);
}
} else {
throw new Error(`Unknown parameter "${key}"`);
}
params[key] = value;
});
});
return configurations;
}
/**
* Decompress data. Concurrency limited.
*
* @param {Buffer} data Compressed data
* @param {Boolean} fin Specifies whether or not this is the last fragment
* @param {Function} callback Callback
* @public
*/
decompress(data, fin, callback) {
zlibLimiter.add((done) => {
this._decompress(data, fin, (err, result) => {
done();
callback(err, result);
});
});
}
/**
* Compress data. Concurrency limited.
*
* @param {(Buffer|String)} data Data to compress
* @param {Boolean} fin Specifies whether or not this is the last fragment
* @param {Function} callback Callback
* @public
*/
compress(data, fin, callback) {
zlibLimiter.add((done) => {
this._compress(data, fin, (err, result) => {
done();
callback(err, result);
});
});
}
/**
* Decompress data.
*
* @param {Buffer} data Compressed data
* @param {Boolean} fin Specifies whether or not this is the last fragment
* @param {Function} callback Callback
* @private
*/
_decompress(data, fin, callback) {
const endpoint = this._isServer ? 'client' : 'server';
if (!this._inflate) {
const key = `${endpoint}_max_window_bits`;
const windowBits =
typeof this.params[key] !== 'number'
? zlib.Z_DEFAULT_WINDOWBITS
: this.params[key];
this._inflate = zlib.createInflateRaw({
...this._options.zlibInflateOptions,
windowBits
});
this._inflate[kPerMessageDeflate] = this;
this._inflate[kTotalLength] = 0;
this._inflate[kBuffers] = [];
this._inflate.on('error', inflateOnError);
this._inflate.on('data', inflateOnData);
}
this._inflate[kCallback] = callback;
this._inflate.write(data);
if (fin) this._inflate.write(TRAILER);
this._inflate.flush(() => {
const err = this._inflate[kError$1];
if (err) {
this._inflate.close();
this._inflate = null;
callback(err);
return;
}
const data = bufferUtil.concat(
this._inflate[kBuffers],
this._inflate[kTotalLength]
);
if (this._inflate._readableState.endEmitted) {
this._inflate.close();
this._inflate = null;
} else {
this._inflate[kTotalLength] = 0;
this._inflate[kBuffers] = [];
if (fin && this.params[`${endpoint}_no_context_takeover`]) {
this._inflate.reset();
}
}
callback(null, data);
});
}
/**
* Compress data.
*
* @param {(Buffer|String)} data Data to compress
* @param {Boolean} fin Specifies whether or not this is the last fragment
* @param {Function} callback Callback
* @private
*/
_compress(data, fin, callback) {
const endpoint = this._isServer ? 'server' : 'client';
if (!this._deflate) {
const key = `${endpoint}_max_window_bits`;
const windowBits =
typeof this.params[key] !== 'number'
? zlib.Z_DEFAULT_WINDOWBITS
: this.params[key];
this._deflate = zlib.createDeflateRaw({
...this._options.zlibDeflateOptions,
windowBits
});
this._deflate[kTotalLength] = 0;
this._deflate[kBuffers] = [];
this._deflate.on('data', deflateOnData);
}
this._deflate[kCallback] = callback;
this._deflate.write(data);
this._deflate.flush(zlib.Z_SYNC_FLUSH, () => {
if (!this._deflate) {
//
// The deflate stream was closed while data was being processed.
//
return;
}
let data = bufferUtil.concat(
this._deflate[kBuffers],
this._deflate[kTotalLength]
);
if (fin) {
data = new FastBuffer$1(data.buffer, data.byteOffset, data.length - 4);
}
//
// Ensure that the callback will not be called again in
// `PerMessageDeflate#cleanup()`.
//
this._deflate[kCallback] = null;
this._deflate[kTotalLength] = 0;
this._deflate[kBuffers] = [];
if (fin && this.params[`${endpoint}_no_context_takeover`]) {
this._deflate.reset();
}
callback(null, data);
});
}
};
var permessageDeflate = PerMessageDeflate$4;
/**
* The listener of the `zlib.DeflateRaw` stream `'data'` event.
*
* @param {Buffer} chunk A chunk of data
* @private
*/
function deflateOnData(chunk) {
this[kBuffers].push(chunk);
this[kTotalLength] += chunk.length;
}
/**
* The listener of the `zlib.InflateRaw` stream `'data'` event.
*
* @param {Buffer} chunk A chunk of data
* @private
*/
function inflateOnData(chunk) {
this[kTotalLength] += chunk.length;
if (
this[kPerMessageDeflate]._maxPayload < 1 ||
this[kTotalLength] <= this[kPerMessageDeflate]._maxPayload
) {
this[kBuffers].push(chunk);
return;
}
this[kError$1] = new RangeError('Max payload size exceeded');
this[kError$1].code = 'WS_ERR_UNSUPPORTED_MESSAGE_LENGTH';
this[kError$1][kStatusCode$2] = 1009;
this.removeListener('data', inflateOnData);
this.reset();
}
/**
* The listener of the `zlib.InflateRaw` stream `'error'` event.
*
* @param {Error} err The emitted error
* @private
*/
function inflateOnError(err) {
//
// There is no need to call `Zlib#close()` as the handle is automatically
// closed when an error is emitted.
//
this[kPerMessageDeflate]._inflate = null;
err[kStatusCode$2] = 1007;
this[kCallback](err);
}
var validation = {exports: {}};
const { isUtf8 } = require$$0$b;
//
// Allowed token characters:
//
// '!', '#', '$', '%', '&', ''', '*', '+', '-',
// '.', 0-9, A-Z, '^', '_', '`', a-z, '|', '~'
//
// tokenChars[32] === 0 // ' '
// tokenChars[33] === 1 // '!'
// tokenChars[34] === 0 // '"'
// ...
//
// prettier-ignore
const tokenChars$2 = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0 - 15
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31
0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, // 32 - 47
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48 - 63
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 64 - 79
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, // 80 - 95
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 96 - 111
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0 // 112 - 127
];
/**
* Checks if a status code is allowed in a close frame.
*
* @param {Number} code The status code
* @return {Boolean} `true` if the status code is valid, else `false`
* @public
*/
function isValidStatusCode$2(code) {
return (
(code >= 1000 &&
code <= 1014 &&
code !== 1004 &&
code !== 1005 &&
code !== 1006) ||
(code >= 3000 && code <= 4999)
);
}
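//
// Illustrative examples (not executed here):
//
//   isValidStatusCode$2(1000); // true  (normal closure)
//   isValidStatusCode$2(1005); // false (reserved, must not appear on the wire)
//   isValidStatusCode$2(2000); // false (1015-2999 are not allowed)
//   isValidStatusCode$2(3500); // true  (3000-4999 are allowed)
//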
/**
* Checks if a given buffer contains only correct UTF-8.
* Ported from https://www.cl.cam.ac.uk/%7Emgk25/ucs/utf8_check.c by
* Markus Kuhn.
*
* @param {Buffer} buf The buffer to check
* @return {Boolean} `true` if `buf` contains only correct UTF-8, else `false`
* @public
*/
function _isValidUTF8(buf) {
const len = buf.length;
let i = 0;
while (i < len) {
if ((buf[i] & 0x80) === 0) {
// 0xxxxxxx
i++;
} else if ((buf[i] & 0xe0) === 0xc0) {
// 110xxxxx 10xxxxxx
if (
i + 1 === len ||
(buf[i + 1] & 0xc0) !== 0x80 ||
(buf[i] & 0xfe) === 0xc0 // Overlong
) {
return false;
}
i += 2;
} else if ((buf[i] & 0xf0) === 0xe0) {
// 1110xxxx 10xxxxxx 10xxxxxx
if (
i + 2 >= len ||
(buf[i + 1] & 0xc0) !== 0x80 ||
(buf[i + 2] & 0xc0) !== 0x80 ||
(buf[i] === 0xe0 && (buf[i + 1] & 0xe0) === 0x80) || // Overlong
(buf[i] === 0xed && (buf[i + 1] & 0xe0) === 0xa0) // Surrogate (U+D800 - U+DFFF)
) {
return false;
}
i += 3;
} else if ((buf[i] & 0xf8) === 0xf0) {
// 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
if (
i + 3 >= len ||
(buf[i + 1] & 0xc0) !== 0x80 ||
(buf[i + 2] & 0xc0) !== 0x80 ||
(buf[i + 3] & 0xc0) !== 0x80 ||
(buf[i] === 0xf0 && (buf[i + 1] & 0xf0) === 0x80) || // Overlong
(buf[i] === 0xf4 && buf[i + 1] > 0x8f) ||
buf[i] > 0xf4 // > U+10FFFF
) {
return false;
}
i += 4;
} else {
return false;
}
}
return true;
}
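//
// Illustrative examples (not executed here):
//
//   _isValidUTF8(Buffer.from('héllo', 'utf8'));    // true
//   _isValidUTF8(Buffer.from([0xc0, 0xaf]));       // false (overlong encoding)
//   _isValidUTF8(Buffer.from([0xed, 0xa0, 0x80])); // false (UTF-16 surrogate)
//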
validation.exports = {
isValidStatusCode: isValidStatusCode$2,
isValidUTF8: _isValidUTF8,
tokenChars: tokenChars$2
};
if (isUtf8) {
validation.exports.isValidUTF8 = function (buf) {
return buf.length < 24 ? _isValidUTF8(buf) : isUtf8(buf);
};
} /* istanbul ignore else */ else if (!process.env.WS_NO_UTF_8_VALIDATE) {
try {
const isValidUTF8 = require('utf-8-validate');
validation.exports.isValidUTF8 = function (buf) {
return buf.length < 32 ? _isValidUTF8(buf) : isValidUTF8(buf);
};
} catch (e) {
// Continue regardless of the error.
}
}
var validationExports = validation.exports;
const { Writable: Writable$1 } = require$$0$7;
const PerMessageDeflate$3 = permessageDeflate;
const {
BINARY_TYPES: BINARY_TYPES$1,
EMPTY_BUFFER: EMPTY_BUFFER$2,
kStatusCode: kStatusCode$1,
kWebSocket: kWebSocket$2
} = constants;
const { concat, toArrayBuffer, unmask } = bufferUtilExports;
const { isValidStatusCode: isValidStatusCode$1, isValidUTF8 } = validationExports;
const FastBuffer = Buffer[Symbol.species];
const GET_INFO = 0;
const GET_PAYLOAD_LENGTH_16 = 1;
const GET_PAYLOAD_LENGTH_64 = 2;
const GET_MASK = 3;
const GET_DATA = 4;
const INFLATING = 5;
/**
* HyBi Receiver implementation.
*
* @extends Writable
*/
let Receiver$1 = class Receiver extends Writable$1 {
/**
* Creates a Receiver instance.
*
* @param {Object} [options] Options object
* @param {String} [options.binaryType=nodebuffer] The type for binary data
* @param {Object} [options.extensions] An object containing the negotiated
* extensions
* @param {Boolean} [options.isServer=false] Specifies whether to operate in
* client or server mode
* @param {Number} [options.maxPayload=0] The maximum allowed message length
* @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
* not to skip UTF-8 validation for text and close messages
*/
constructor(options = {}) {
super();
this._binaryType = options.binaryType || BINARY_TYPES$1[0];
this._extensions = options.extensions || {};
this._isServer = !!options.isServer;
this._maxPayload = options.maxPayload | 0;
this._skipUTF8Validation = !!options.skipUTF8Validation;
this[kWebSocket$2] = undefined;
this._bufferedBytes = 0;
this._buffers = [];
this._compressed = false;
this._payloadLength = 0;
this._mask = undefined;
this._fragmented = 0;
this._masked = false;
this._fin = false;
this._opcode = 0;
this._totalPayloadLength = 0;
this._messageLength = 0;
this._fragments = [];
this._state = GET_INFO;
this._loop = false;
}
/**
* Implements `Writable.prototype._write()`.
*
* @param {Buffer} chunk The chunk of data to write
* @param {String} encoding The character encoding of `chunk`
* @param {Function} cb Callback
* @private
*/
_write(chunk, encoding, cb) {
if (this._opcode === 0x08 && this._state == GET_INFO) return cb();
this._bufferedBytes += chunk.length;
this._buffers.push(chunk);
this.startLoop(cb);
}
/**
* Consumes `n` bytes from the buffered data.
*
* @param {Number} n The number of bytes to consume
* @return {Buffer} The consumed bytes
* @private
*/
consume(n) {
this._bufferedBytes -= n;
if (n === this._buffers[0].length) return this._buffers.shift();
if (n < this._buffers[0].length) {
const buf = this._buffers[0];
this._buffers[0] = new FastBuffer(
buf.buffer,
buf.byteOffset + n,
buf.length - n
);
return new FastBuffer(buf.buffer, buf.byteOffset, n);
}
const dst = Buffer.allocUnsafe(n);
do {
const buf = this._buffers[0];
const offset = dst.length - n;
if (n >= buf.length) {
dst.set(this._buffers.shift(), offset);
} else {
dst.set(new Uint8Array(buf.buffer, buf.byteOffset, n), offset);
this._buffers[0] = new FastBuffer(
buf.buffer,
buf.byteOffset + n,
buf.length - n
);
}
n -= buf.length;
} while (n > 0);
return dst;
}
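  //
  // Worked example (illustrative only): with `this._buffers` holding a 3-byte
  // chunk followed by a 5-byte chunk, `consume(4)` copies the whole first
  // chunk plus one byte of the second into a freshly allocated 4-byte buffer
  // and leaves a `FastBuffer` view over the remaining 4 bytes of the second
  // chunk. When `n` matches the first chunk exactly, no bytes are copied, and
  // when `n` is smaller than the first chunk only views are created.
  //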
/**
* Starts the parsing loop.
*
* @param {Function} cb Callback
* @private
*/
startLoop(cb) {
let err;
this._loop = true;
do {
switch (this._state) {
case GET_INFO:
err = this.getInfo();
break;
case GET_PAYLOAD_LENGTH_16:
err = this.getPayloadLength16();
break;
case GET_PAYLOAD_LENGTH_64:
err = this.getPayloadLength64();
break;
case GET_MASK:
this.getMask();
break;
case GET_DATA:
err = this.getData(cb);
break;
default:
// `INFLATING`
this._loop = false;
return;
}
} while (this._loop);
cb(err);
}
/**
* Reads the first two bytes of a frame.
*
* @return {(RangeError|undefined)} A possible error
* @private
*/
getInfo() {
if (this._bufferedBytes < 2) {
this._loop = false;
return;
}
const buf = this.consume(2);
if ((buf[0] & 0x30) !== 0x00) {
this._loop = false;
return error(
RangeError,
'RSV2 and RSV3 must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_RSV_2_3'
);
}
const compressed = (buf[0] & 0x40) === 0x40;
if (compressed && !this._extensions[PerMessageDeflate$3.extensionName]) {
this._loop = false;
return error(
RangeError,
'RSV1 must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_RSV_1'
);
}
this._fin = (buf[0] & 0x80) === 0x80;
this._opcode = buf[0] & 0x0f;
this._payloadLength = buf[1] & 0x7f;
if (this._opcode === 0x00) {
if (compressed) {
this._loop = false;
return error(
RangeError,
'RSV1 must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_RSV_1'
);
}
if (!this._fragmented) {
this._loop = false;
return error(
RangeError,
'invalid opcode 0',
true,
1002,
'WS_ERR_INVALID_OPCODE'
);
}
this._opcode = this._fragmented;
} else if (this._opcode === 0x01 || this._opcode === 0x02) {
if (this._fragmented) {
this._loop = false;
return error(
RangeError,
`invalid opcode ${this._opcode}`,
true,
1002,
'WS_ERR_INVALID_OPCODE'
);
}
this._compressed = compressed;
} else if (this._opcode > 0x07 && this._opcode < 0x0b) {
if (!this._fin) {
this._loop = false;
return error(
RangeError,
'FIN must be set',
true,
1002,
'WS_ERR_EXPECTED_FIN'
);
}
if (compressed) {
this._loop = false;
return error(
RangeError,
'RSV1 must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_RSV_1'
);
}
if (
this._payloadLength > 0x7d ||
(this._opcode === 0x08 && this._payloadLength === 1)
) {
this._loop = false;
return error(
RangeError,
`invalid payload length ${this._payloadLength}`,
true,
1002,
'WS_ERR_INVALID_CONTROL_PAYLOAD_LENGTH'
);
}
} else {
this._loop = false;
return error(
RangeError,
`invalid opcode ${this._opcode}`,
true,
1002,
'WS_ERR_INVALID_OPCODE'
);
}
if (!this._fin && !this._fragmented) this._fragmented = this._opcode;
this._masked = (buf[1] & 0x80) === 0x80;
if (this._isServer) {
if (!this._masked) {
this._loop = false;
return error(
RangeError,
'MASK must be set',
true,
1002,
'WS_ERR_EXPECTED_MASK'
);
}
} else if (this._masked) {
this._loop = false;
return error(
RangeError,
'MASK must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_MASK'
);
}
if (this._payloadLength === 126) this._state = GET_PAYLOAD_LENGTH_16;
else if (this._payloadLength === 127) this._state = GET_PAYLOAD_LENGTH_64;
else return this.haveLength();
}
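  //
  // Frame header sketch (illustrative): the two bytes consumed above decode as
  //
  //   byte 0: FIN (0x80) | RSV1 (0x40) | RSV2 (0x20) | RSV3 (0x10) | opcode (0x0f)
  //   byte 1: MASK (0x80) | payload length (0x7f)
  //
  // e.g. `0x81 0x85` is a final, unfragmented text frame (opcode 0x01) that is
  // masked and carries a 5-byte payload.
  //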
/**
* Gets extended payload length (7+16).
*
* @return {(RangeError|undefined)} A possible error
* @private
*/
getPayloadLength16() {
if (this._bufferedBytes < 2) {
this._loop = false;
return;
}
this._payloadLength = this.consume(2).readUInt16BE(0);
return this.haveLength();
}
/**
* Gets extended payload length (7+64).
*
* @return {(RangeError|undefined)} A possible error
* @private
*/
getPayloadLength64() {
if (this._bufferedBytes < 8) {
this._loop = false;
return;
}
const buf = this.consume(8);
const num = buf.readUInt32BE(0);
//
// The maximum safe integer in JavaScript is 2^53 - 1. An error is returned
// if payload length is greater than this number.
//
if (num > Math.pow(2, 53 - 32) - 1) {
this._loop = false;
return error(
RangeError,
'Unsupported WebSocket frame: payload length > 2^53 - 1',
false,
1009,
'WS_ERR_UNSUPPORTED_DATA_PAYLOAD_LENGTH'
);
}
this._payloadLength = num * Math.pow(2, 32) + buf.readUInt32BE(4);
return this.haveLength();
}
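  //
  // Illustrative note: the 8-byte extended length is read as two 32-bit
  // halves, so the guard above (high half greater than 2^21 - 1) rejects any
  // length that would exceed 2^53 - 1 once the high half is shifted left by
  // 32 bits.
  //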
/**
* Payload length has been read.
*
* @return {(RangeError|undefined)} A possible error
* @private
*/
haveLength() {
if (this._payloadLength && this._opcode < 0x08) {
this._totalPayloadLength += this._payloadLength;
if (this._totalPayloadLength > this._maxPayload && this._maxPayload > 0) {
this._loop = false;
return error(
RangeError,
'Max payload size exceeded',
false,
1009,
'WS_ERR_UNSUPPORTED_MESSAGE_LENGTH'
);
}
}
if (this._masked) this._state = GET_MASK;
else this._state = GET_DATA;
}
/**
* Reads mask bytes.
*
* @private
*/
getMask() {
if (this._bufferedBytes < 4) {
this._loop = false;
return;
}
this._mask = this.consume(4);
this._state = GET_DATA;
}
/**
* Reads data bytes.
*
* @param {Function} cb Callback
* @return {(Error|RangeError|undefined)} A possible error
* @private
*/
getData(cb) {
let data = EMPTY_BUFFER$2;
if (this._payloadLength) {
if (this._bufferedBytes < this._payloadLength) {
this._loop = false;
return;
}
data = this.consume(this._payloadLength);
if (
this._masked &&
(this._mask[0] | this._mask[1] | this._mask[2] | this._mask[3]) !== 0
) {
unmask(data, this._mask);
}
}
if (this._opcode > 0x07) return this.controlMessage(data);
if (this._compressed) {
this._state = INFLATING;
this.decompress(data, cb);
return;
}
if (data.length) {
//
// This message is not compressed so its length is the sum of the payload
// length of all fragments.
//
this._messageLength = this._totalPayloadLength;
this._fragments.push(data);
}
return this.dataMessage();
}
/**
* Decompresses data.
*
* @param {Buffer} data Compressed data
* @param {Function} cb Callback
* @private
*/
decompress(data, cb) {
const perMessageDeflate = this._extensions[PerMessageDeflate$3.extensionName];
perMessageDeflate.decompress(data, this._fin, (err, buf) => {
if (err) return cb(err);
if (buf.length) {
this._messageLength += buf.length;
if (this._messageLength > this._maxPayload && this._maxPayload > 0) {
return cb(
error(
RangeError,
'Max payload size exceeded',
false,
1009,
'WS_ERR_UNSUPPORTED_MESSAGE_LENGTH'
)
);
}
this._fragments.push(buf);
}
const er = this.dataMessage();
if (er) return cb(er);
this.startLoop(cb);
});
}
/**
* Handles a data message.
*
* @return {(Error|undefined)} A possible error
* @private
*/
dataMessage() {
if (this._fin) {
const messageLength = this._messageLength;
const fragments = this._fragments;
this._totalPayloadLength = 0;
this._messageLength = 0;
this._fragmented = 0;
this._fragments = [];
if (this._opcode === 2) {
let data;
if (this._binaryType === 'nodebuffer') {
data = concat(fragments, messageLength);
} else if (this._binaryType === 'arraybuffer') {
data = toArrayBuffer(concat(fragments, messageLength));
} else {
data = fragments;
}
this.emit('message', data, true);
} else {
const buf = concat(fragments, messageLength);
if (!this._skipUTF8Validation && !isValidUTF8(buf)) {
this._loop = false;
return error(
Error,
'invalid UTF-8 sequence',
true,
1007,
'WS_ERR_INVALID_UTF8'
);
}
this.emit('message', buf, false);
}
}
this._state = GET_INFO;
}
/**
* Handles a control message.
*
* @param {Buffer} data Data to handle
* @return {(Error|RangeError|undefined)} A possible error
* @private
*/
controlMessage(data) {
if (this._opcode === 0x08) {
this._loop = false;
if (data.length === 0) {
this.emit('conclude', 1005, EMPTY_BUFFER$2);
this.end();
} else {
const code = data.readUInt16BE(0);
if (!isValidStatusCode$1(code)) {
return error(
RangeError,
`invalid status code ${code}`,
true,
1002,
'WS_ERR_INVALID_CLOSE_CODE'
);
}
const buf = new FastBuffer(
data.buffer,
data.byteOffset + 2,
data.length - 2
);
if (!this._skipUTF8Validation && !isValidUTF8(buf)) {
return error(
Error,
'invalid UTF-8 sequence',
true,
1007,
'WS_ERR_INVALID_UTF8'
);
}
this.emit('conclude', code, buf);
this.end();
}
} else if (this._opcode === 0x09) {
this.emit('ping', data);
} else {
this.emit('pong', data);
}
this._state = GET_INFO;
}
};
var receiver = Receiver$1;
/**
* Builds an error object.
*
* @param {function(new:Error|RangeError)} ErrorCtor The error constructor
* @param {String} message The error message
* @param {Boolean} prefix Specifies whether or not to add a default prefix to
* `message`
* @param {Number} statusCode The status code
* @param {String} errorCode The exposed error code
* @return {(Error|RangeError)} The error
* @private
*/
function error(ErrorCtor, message, prefix, statusCode, errorCode) {
const err = new ErrorCtor(
prefix ? `Invalid WebSocket frame: ${message}` : message
);
Error.captureStackTrace(err, error);
err.code = errorCode;
err[kStatusCode$1] = statusCode;
return err;
}
/* eslint no-unused-vars: ["error", { "varsIgnorePattern": "^net|tls$" }] */
const { randomFillSync } = require$$3$1;
const PerMessageDeflate$2 = permessageDeflate;
const { EMPTY_BUFFER: EMPTY_BUFFER$1 } = constants;
const { isValidStatusCode } = validationExports;
const { mask: applyMask, toBuffer: toBuffer$1 } = bufferUtilExports;
const kByteLength = Symbol('kByteLength');
const maskBuffer = Buffer.alloc(4);
/**
* HyBi Sender implementation.
*/
let Sender$1 = class Sender {
/**
* Creates a Sender instance.
*
* @param {(net.Socket|tls.Socket)} socket The connection socket
* @param {Object} [extensions] An object containing the negotiated extensions
* @param {Function} [generateMask] The function used to generate the masking
* key
*/
constructor(socket, extensions, generateMask) {
this._extensions = extensions || {};
if (generateMask) {
this._generateMask = generateMask;
this._maskBuffer = Buffer.alloc(4);
}
this._socket = socket;
this._firstFragment = true;
this._compress = false;
this._bufferedBytes = 0;
this._deflating = false;
this._queue = [];
}
/**
* Frames a piece of data according to the HyBi WebSocket protocol.
*
* @param {(Buffer|String)} data The data to frame
* @param {Object} options Options object
* @param {Boolean} [options.fin=false] Specifies whether or not to set the
* FIN bit
* @param {Function} [options.generateMask] The function used to generate the
* masking key
* @param {Boolean} [options.mask=false] Specifies whether or not to mask
* `data`
* @param {Buffer} [options.maskBuffer] The buffer used to store the masking
* key
* @param {Number} options.opcode The opcode
* @param {Boolean} [options.readOnly=false] Specifies whether `data` can be
* modified
* @param {Boolean} [options.rsv1=false] Specifies whether or not to set the
* RSV1 bit
* @return {(Buffer|String)[]} The framed data
* @public
*/
static frame(data, options) {
let mask;
let merge = false;
let offset = 2;
let skipMasking = false;
if (options.mask) {
mask = options.maskBuffer || maskBuffer;
if (options.generateMask) {
options.generateMask(mask);
} else {
randomFillSync(mask, 0, 4);
}
skipMasking = (mask[0] | mask[1] | mask[2] | mask[3]) === 0;
offset = 6;
}
let dataLength;
if (typeof data === 'string') {
if (
(!options.mask || skipMasking) &&
options[kByteLength] !== undefined
) {
dataLength = options[kByteLength];
} else {
data = Buffer.from(data);
dataLength = data.length;
}
} else {
dataLength = data.length;
merge = options.mask && options.readOnly && !skipMasking;
}
let payloadLength = dataLength;
if (dataLength >= 65536) {
offset += 8;
payloadLength = 127;
} else if (dataLength > 125) {
offset += 2;
payloadLength = 126;
}
const target = Buffer.allocUnsafe(merge ? dataLength + offset : offset);
target[0] = options.fin ? options.opcode | 0x80 : options.opcode;
if (options.rsv1) target[0] |= 0x40;
target[1] = payloadLength;
if (payloadLength === 126) {
target.writeUInt16BE(dataLength, 2);
} else if (payloadLength === 127) {
target[2] = target[3] = 0;
target.writeUIntBE(dataLength, 4, 6);
}
if (!options.mask) return [target, data];
target[1] |= 0x80;
target[offset - 4] = mask[0];
target[offset - 3] = mask[1];
target[offset - 2] = mask[2];
target[offset - 1] = mask[3];
if (skipMasking) return [target, data];
if (merge) {
applyMask(data, mask, target, offset, dataLength);
return [target];
}
applyMask(data, mask, data, 0, dataLength);
return [target, data];
}
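  //
  // Illustrative sketch (not executed here): framing a tiny unmasked text
  // frame yields a 2-byte header followed by the payload, e.g.
  //
  //   Sender.frame(Buffer.from('hi'), {
  //     fin: true, opcode: 0x01, mask: false, readOnly: false, rsv1: false
  //   });
  //   // => [<Buffer 81 02>, <Buffer 68 69>]
  //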
/**
* Sends a close message to the other peer.
*
* @param {Number} [code] The status code component of the body
* @param {(String|Buffer)} [data] The message component of the body
* @param {Boolean} [mask=false] Specifies whether or not to mask the message
* @param {Function} [cb] Callback
* @public
*/
close(code, data, mask, cb) {
let buf;
if (code === undefined) {
buf = EMPTY_BUFFER$1;
} else if (typeof code !== 'number' || !isValidStatusCode(code)) {
throw new TypeError('First argument must be a valid error code number');
} else if (data === undefined || !data.length) {
buf = Buffer.allocUnsafe(2);
buf.writeUInt16BE(code, 0);
} else {
const length = Buffer.byteLength(data);
if (length > 123) {
throw new RangeError('The message must not be greater than 123 bytes');
}
buf = Buffer.allocUnsafe(2 + length);
buf.writeUInt16BE(code, 0);
if (typeof data === 'string') {
buf.write(data, 2);
} else {
buf.set(data, 2);
}
}
const options = {
[kByteLength]: buf.length,
fin: true,
generateMask: this._generateMask,
mask,
maskBuffer: this._maskBuffer,
opcode: 0x08,
readOnly: false,
rsv1: false
};
if (this._deflating) {
this.enqueue([this.dispatch, buf, false, options, cb]);
} else {
this.sendFrame(Sender.frame(buf, options), cb);
}
}
/**
* Sends a ping message to the other peer.
*
* @param {*} data The message to send
* @param {Boolean} [mask=false] Specifies whether or not to mask `data`
* @param {Function} [cb] Callback
* @public
*/
ping(data, mask, cb) {
let byteLength;
let readOnly;
if (typeof data === 'string') {
byteLength = Buffer.byteLength(data);
readOnly = false;
} else {
data = toBuffer$1(data);
byteLength = data.length;
readOnly = toBuffer$1.readOnly;
}
if (byteLength > 125) {
throw new RangeError('The data size must not be greater than 125 bytes');
}
const options = {
[kByteLength]: byteLength,
fin: true,
generateMask: this._generateMask,
mask,
maskBuffer: this._maskBuffer,
opcode: 0x09,
readOnly,
rsv1: false
};
if (this._deflating) {
this.enqueue([this.dispatch, data, false, options, cb]);
} else {
this.sendFrame(Sender.frame(data, options), cb);
}
}
/**
* Sends a pong message to the other peer.
*
* @param {*} data The message to send
* @param {Boolean} [mask=false] Specifies whether or not to mask `data`
* @param {Function} [cb] Callback
* @public
*/
pong(data, mask, cb) {
let byteLength;
let readOnly;
if (typeof data === 'string') {
byteLength = Buffer.byteLength(data);
readOnly = false;
} else {
data = toBuffer$1(data);
byteLength = data.length;
readOnly = toBuffer$1.readOnly;
}
if (byteLength > 125) {
throw new RangeError('The data size must not be greater than 125 bytes');
}
const options = {
[kByteLength]: byteLength,
fin: true,
generateMask: this._generateMask,
mask,
maskBuffer: this._maskBuffer,
opcode: 0x0a,
readOnly,
rsv1: false
};
if (this._deflating) {
this.enqueue([this.dispatch, data, false, options, cb]);
} else {
this.sendFrame(Sender.frame(data, options), cb);
}
}
/**
* Sends a data message to the other peer.
*
* @param {*} data The message to send
* @param {Object} options Options object
* @param {Boolean} [options.binary=false] Specifies whether `data` is binary
* or text
* @param {Boolean} [options.compress=false] Specifies whether or not to
* compress `data`
* @param {Boolean} [options.fin=false] Specifies whether the fragment is the
* last one
* @param {Boolean} [options.mask=false] Specifies whether or not to mask
* `data`
* @param {Function} [cb] Callback
* @public
*/
send(data, options, cb) {
const perMessageDeflate = this._extensions[PerMessageDeflate$2.extensionName];
let opcode = options.binary ? 2 : 1;
let rsv1 = options.compress;
let byteLength;
let readOnly;
if (typeof data === 'string') {
byteLength = Buffer.byteLength(data);
readOnly = false;
} else {
data = toBuffer$1(data);
byteLength = data.length;
readOnly = toBuffer$1.readOnly;
}
if (this._firstFragment) {
this._firstFragment = false;
if (
rsv1 &&
perMessageDeflate &&
perMessageDeflate.params[
perMessageDeflate._isServer
? 'server_no_context_takeover'
: 'client_no_context_takeover'
]
) {
rsv1 = byteLength >= perMessageDeflate._threshold;
}
this._compress = rsv1;
} else {
rsv1 = false;
opcode = 0;
}
if (options.fin) this._firstFragment = true;
if (perMessageDeflate) {
const opts = {
[kByteLength]: byteLength,
fin: options.fin,
generateMask: this._generateMask,
mask: options.mask,
maskBuffer: this._maskBuffer,
opcode,
readOnly,
rsv1
};
if (this._deflating) {
this.enqueue([this.dispatch, data, this._compress, opts, cb]);
} else {
this.dispatch(data, this._compress, opts, cb);
}
} else {
this.sendFrame(
Sender.frame(data, {
[kByteLength]: byteLength,
fin: options.fin,
generateMask: this._generateMask,
mask: options.mask,
maskBuffer: this._maskBuffer,
opcode,
readOnly,
rsv1: false
}),
cb
);
}
}
/**
* Dispatches a message.
*
* @param {(Buffer|String)} data The message to send
* @param {Boolean} [compress=false] Specifies whether or not to compress
* `data`
* @param {Object} options Options object
* @param {Boolean} [options.fin=false] Specifies whether or not to set the
* FIN bit
* @param {Function} [options.generateMask] The function used to generate the
* masking key
* @param {Boolean} [options.mask=false] Specifies whether or not to mask
* `data`
* @param {Buffer} [options.maskBuffer] The buffer used to store the masking
* key
* @param {Number} options.opcode The opcode
* @param {Boolean} [options.readOnly=false] Specifies whether `data` can be
* modified
* @param {Boolean} [options.rsv1=false] Specifies whether or not to set the
* RSV1 bit
* @param {Function} [cb] Callback
* @private
*/
dispatch(data, compress, options, cb) {
if (!compress) {
this.sendFrame(Sender.frame(data, options), cb);
return;
}
const perMessageDeflate = this._extensions[PerMessageDeflate$2.extensionName];
this._bufferedBytes += options[kByteLength];
this._deflating = true;
perMessageDeflate.compress(data, options.fin, (_, buf) => {
if (this._socket.destroyed) {
const err = new Error(
'The socket was closed while data was being compressed'
);
if (typeof cb === 'function') cb(err);
for (let i = 0; i < this._queue.length; i++) {
const params = this._queue[i];
const callback = params[params.length - 1];
if (typeof callback === 'function') callback(err);
}
return;
}
this._bufferedBytes -= options[kByteLength];
this._deflating = false;
options.readOnly = false;
this.sendFrame(Sender.frame(buf, options), cb);
this.dequeue();
});
}
/**
* Executes queued send operations.
*
* @private
*/
dequeue() {
while (!this._deflating && this._queue.length) {
const params = this._queue.shift();
this._bufferedBytes -= params[3][kByteLength];
Reflect.apply(params[0], this, params.slice(1));
}
}
/**
* Enqueues a send operation.
*
* @param {Array} params Send operation parameters.
* @private
*/
enqueue(params) {
this._bufferedBytes += params[3][kByteLength];
this._queue.push(params);
}
/**
* Sends a frame.
*
* @param {Buffer[]} list The frame to send
* @param {Function} [cb] Callback
* @private
*/
sendFrame(list, cb) {
if (list.length === 2) {
this._socket.cork();
this._socket.write(list[0]);
this._socket.write(list[1], cb);
this._socket.uncork();
} else {
this._socket.write(list[0], cb);
}
}
};
var sender = Sender$1;
const { kForOnEventAttribute: kForOnEventAttribute$1, kListener: kListener$1 } = constants;
const kCode = Symbol('kCode');
const kData = Symbol('kData');
const kError = Symbol('kError');
const kMessage = Symbol('kMessage');
const kReason = Symbol('kReason');
const kTarget = Symbol('kTarget');
const kType = Symbol('kType');
const kWasClean = Symbol('kWasClean');
/**
* Class representing an event.
*/
let Event$1 = class Event {
/**
* Create a new `Event`.
*
* @param {String} type The name of the event
* @throws {TypeError} If the `type` argument is not specified
*/
constructor(type) {
this[kTarget] = null;
this[kType] = type;
}
/**
* @type {*}
*/
get target() {
return this[kTarget];
}
/**
* @type {String}
*/
get type() {
return this[kType];
}
};
Object.defineProperty(Event$1.prototype, 'target', { enumerable: true });
Object.defineProperty(Event$1.prototype, 'type', { enumerable: true });
/**
* Class representing a close event.
*
* @extends Event
*/
class CloseEvent extends Event$1 {
/**
* Create a new `CloseEvent`.
*
* @param {String} type The name of the event
* @param {Object} [options] A dictionary object that allows for setting
* attributes via object members of the same name
* @param {Number} [options.code=0] The status code explaining why the
* connection was closed
* @param {String} [options.reason=''] A human-readable string explaining why
* the connection was closed
* @param {Boolean} [options.wasClean=false] Indicates whether or not the
* connection was cleanly closed
*/
constructor(type, options = {}) {
super(type);
this[kCode] = options.code === undefined ? 0 : options.code;
this[kReason] = options.reason === undefined ? '' : options.reason;
this[kWasClean] = options.wasClean === undefined ? false : options.wasClean;
}
/**
* @type {Number}
*/
get code() {
return this[kCode];
}
/**
* @type {String}
*/
get reason() {
return this[kReason];
}
/**
* @type {Boolean}
*/
get wasClean() {
return this[kWasClean];
}
}
Object.defineProperty(CloseEvent.prototype, 'code', { enumerable: true });
Object.defineProperty(CloseEvent.prototype, 'reason', { enumerable: true });
Object.defineProperty(CloseEvent.prototype, 'wasClean', { enumerable: true });
/**
* Class representing an error event.
*
* @extends Event
*/
class ErrorEvent extends Event$1 {
/**
* Create a new `ErrorEvent`.
*
* @param {String} type The name of the event
* @param {Object} [options] A dictionary object that allows for setting
* attributes via object members of the same name
* @param {*} [options.error=null] The error that generated this event
* @param {String} [options.message=''] The error message
*/
constructor(type, options = {}) {
super(type);
this[kError] = options.error === undefined ? null : options.error;
this[kMessage] = options.message === undefined ? '' : options.message;
}
/**
* @type {*}
*/
get error() {
return this[kError];
}
/**
* @type {String}
*/
get message() {
return this[kMessage];
}
}
Object.defineProperty(ErrorEvent.prototype, 'error', { enumerable: true });
Object.defineProperty(ErrorEvent.prototype, 'message', { enumerable: true });
/**
* Class representing a message event.
*
* @extends Event
*/
class MessageEvent extends Event$1 {
/**
* Create a new `MessageEvent`.
*
* @param {String} type The name of the event
* @param {Object} [options] A dictionary object that allows for setting
* attributes via object members of the same name
* @param {*} [options.data=null] The message content
*/
constructor(type, options = {}) {
super(type);
this[kData] = options.data === undefined ? null : options.data;
}
/**
* @type {*}
*/
get data() {
return this[kData];
}
}
Object.defineProperty(MessageEvent.prototype, 'data', { enumerable: true });
/**
* This provides methods for emulating the `EventTarget` interface. It's not
* meant to be used directly.
*
* @mixin
*/
const EventTarget = {
/**
* Register an event listener.
*
* @param {String} type A string representing the event type to listen for
* @param {(Function|Object)} handler The listener to add
   * @param {Object} [options] An options object specifying characteristics about
* the event listener
* @param {Boolean} [options.once=false] A `Boolean` indicating that the
* listener should be invoked at most once after being added. If `true`,
* the listener would be automatically removed when invoked.
* @public
*/
addEventListener(type, handler, options = {}) {
for (const listener of this.listeners(type)) {
if (
!options[kForOnEventAttribute$1] &&
listener[kListener$1] === handler &&
!listener[kForOnEventAttribute$1]
) {
return;
}
}
let wrapper;
if (type === 'message') {
wrapper = function onMessage(data, isBinary) {
const event = new MessageEvent('message', {
data: isBinary ? data : data.toString()
});
event[kTarget] = this;
callListener(handler, this, event);
};
} else if (type === 'close') {
wrapper = function onClose(code, message) {
const event = new CloseEvent('close', {
code,
reason: message.toString(),
wasClean: this._closeFrameReceived && this._closeFrameSent
});
event[kTarget] = this;
callListener(handler, this, event);
};
} else if (type === 'error') {
wrapper = function onError(error) {
const event = new ErrorEvent('error', {
error,
message: error.message
});
event[kTarget] = this;
callListener(handler, this, event);
};
} else if (type === 'open') {
wrapper = function onOpen() {
const event = new Event$1('open');
event[kTarget] = this;
callListener(handler, this, event);
};
} else {
return;
}
wrapper[kForOnEventAttribute$1] = !!options[kForOnEventAttribute$1];
wrapper[kListener$1] = handler;
if (options.once) {
this.once(type, wrapper);
} else {
this.on(type, wrapper);
}
},
/**
* Remove an event listener.
*
* @param {String} type A string representing the event type to remove
* @param {(Function|Object)} handler The listener to remove
* @public
*/
removeEventListener(type, handler) {
for (const listener of this.listeners(type)) {
if (listener[kListener$1] === handler && !listener[kForOnEventAttribute$1]) {
this.removeListener(type, listener);
break;
}
}
}
};
var eventTarget = {
CloseEvent,
ErrorEvent,
Event: Event$1,
EventTarget,
MessageEvent
};
/**
* Call an event listener
*
* @param {(Function|Object)} listener The listener to call
 * @param {*} thisArg The value to use as `this` when calling the listener
* @param {Event} event The event to pass to the listener
* @private
*/
function callListener(listener, thisArg, event) {
if (typeof listener === 'object' && listener.handleEvent) {
listener.handleEvent.call(listener, event);
} else {
listener.call(thisArg, event);
}
}
const { tokenChars: tokenChars$1 } = validationExports;
/**
* Adds an offer to the map of extension offers or a parameter to the map of
* parameters.
*
* @param {Object} dest The map of extension offers or parameters
* @param {String} name The extension or parameter name
* @param {(Object|Boolean|String)} elem The extension parameters or the
* parameter value
* @private
*/
function push(dest, name, elem) {
if (dest[name] === undefined) dest[name] = [elem];
else dest[name].push(elem);
}
/**
* Parses the `Sec-WebSocket-Extensions` header into an object.
*
* @param {String} header The field value of the header
* @return {Object} The parsed object
* @public
*/
function parse$2(header) {
const offers = Object.create(null);
let params = Object.create(null);
let mustUnescape = false;
let isEscaping = false;
let inQuotes = false;
let extensionName;
let paramName;
let start = -1;
let code = -1;
let end = -1;
let i = 0;
for (; i < header.length; i++) {
code = header.charCodeAt(i);
if (extensionName === undefined) {
if (end === -1 && tokenChars$1[code] === 1) {
if (start === -1) start = i;
} else if (
i !== 0 &&
(code === 0x20 /* ' ' */ || code === 0x09) /* '\t' */
) {
if (end === -1 && start !== -1) end = i;
} else if (code === 0x3b /* ';' */ || code === 0x2c /* ',' */) {
if (start === -1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (end === -1) end = i;
const name = header.slice(start, end);
if (code === 0x2c) {
push(offers, name, params);
params = Object.create(null);
} else {
extensionName = name;
}
start = end = -1;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
} else if (paramName === undefined) {
if (end === -1 && tokenChars$1[code] === 1) {
if (start === -1) start = i;
} else if (code === 0x20 || code === 0x09) {
if (end === -1 && start !== -1) end = i;
} else if (code === 0x3b || code === 0x2c) {
if (start === -1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (end === -1) end = i;
push(params, header.slice(start, end), true);
if (code === 0x2c) {
push(offers, extensionName, params);
params = Object.create(null);
extensionName = undefined;
}
start = end = -1;
} else if (code === 0x3d /* '=' */ && start !== -1 && end === -1) {
paramName = header.slice(start, i);
start = end = -1;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
} else {
//
// The value of a quoted-string after unescaping must conform to the
// token ABNF, so only token characters are valid.
// Ref: https://tools.ietf.org/html/rfc6455#section-9.1
//
if (isEscaping) {
if (tokenChars$1[code] !== 1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (start === -1) start = i;
else if (!mustUnescape) mustUnescape = true;
isEscaping = false;
} else if (inQuotes) {
if (tokenChars$1[code] === 1) {
if (start === -1) start = i;
} else if (code === 0x22 /* '"' */ && start !== -1) {
inQuotes = false;
end = i;
} else if (code === 0x5c /* '\' */) {
isEscaping = true;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
} else if (code === 0x22 && header.charCodeAt(i - 1) === 0x3d) {
inQuotes = true;
} else if (end === -1 && tokenChars$1[code] === 1) {
if (start === -1) start = i;
} else if (start !== -1 && (code === 0x20 || code === 0x09)) {
if (end === -1) end = i;
} else if (code === 0x3b || code === 0x2c) {
if (start === -1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (end === -1) end = i;
let value = header.slice(start, end);
if (mustUnescape) {
value = value.replace(/\\/g, '');
mustUnescape = false;
}
push(params, paramName, value);
if (code === 0x2c) {
push(offers, extensionName, params);
params = Object.create(null);
extensionName = undefined;
}
paramName = undefined;
start = end = -1;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
}
}
if (start === -1 || inQuotes || code === 0x20 || code === 0x09) {
throw new SyntaxError('Unexpected end of input');
}
if (end === -1) end = i;
const token = header.slice(start, end);
if (extensionName === undefined) {
push(offers, token, params);
} else {
if (paramName === undefined) {
push(params, token, true);
} else if (mustUnescape) {
push(params, paramName, token.replace(/\\/g, ''));
} else {
push(params, paramName, token);
}
push(offers, extensionName, params);
}
return offers;
}
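//
// Illustrative example (not executed here):
//
//   parse$2('permessage-deflate; client_max_window_bits, permessage-deflate');
//   // => {
//   //   'permessage-deflate': [
//   //     { client_max_window_bits: [true] },
//   //     {}
//   //   ]
//   // }
//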
/**
* Builds the `Sec-WebSocket-Extensions` header field value.
*
* @param {Object} extensions The map of extensions and parameters to format
* @return {String} A string representing the given object
* @public
*/
function format$1(extensions) {
return Object.keys(extensions)
.map((extension) => {
let configurations = extensions[extension];
if (!Array.isArray(configurations)) configurations = [configurations];
return configurations
.map((params) => {
return [extension]
.concat(
Object.keys(params).map((k) => {
let values = params[k];
if (!Array.isArray(values)) values = [values];
return values
.map((v) => (v === true ? k : `${k}=${v}`))
.join('; ');
})
)
.join('; ');
})
.join(', ');
})
.join(', ');
}
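//
// Illustrative example (not executed here):
//
//   format$1({
//     'permessage-deflate': {
//       client_max_window_bits: true,
//       server_max_window_bits: 10
//     }
//   });
//   // => 'permessage-deflate; client_max_window_bits; server_max_window_bits=10'
//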
var extension$1 = { format: format$1, parse: parse$2 };
/* eslint no-unused-vars: ["error", { "varsIgnorePattern": "^Readable$" }] */
const EventEmitter$1 = require$$0$5;
const https$2 = require$$1$2;
const http$3 = require$$1$1;
const net = require$$3$2;
const tls = require$$4$1;
const { randomBytes, createHash: createHash$1 } = require$$3$1;
const { URL: URL$2 } = require$$0$9;
const PerMessageDeflate$1 = permessageDeflate;
const Receiver = receiver;
const Sender = sender;
const {
BINARY_TYPES,
EMPTY_BUFFER,
GUID: GUID$1,
kForOnEventAttribute,
kListener,
kStatusCode,
kWebSocket: kWebSocket$1,
NOOP
} = constants;
const {
EventTarget: { addEventListener, removeEventListener }
} = eventTarget;
const { format, parse: parse$1 } = extension$1;
const { toBuffer } = bufferUtilExports;
const closeTimeout = 30 * 1000;
const kAborted = Symbol('kAborted');
const protocolVersions = [8, 13];
const readyStates = ['CONNECTING', 'OPEN', 'CLOSING', 'CLOSED'];
const subprotocolRegex = /^[!#$%&'*+\-.0-9A-Z^_`|a-z~]+$/;
/**
* Class representing a WebSocket.
*
* @extends EventEmitter
*/
let WebSocket$1 = class WebSocket extends EventEmitter$1 {
/**
* Create a new `WebSocket`.
*
* @param {(String|URL)} address The URL to which to connect
* @param {(String|String[])} [protocols] The subprotocols
* @param {Object} [options] Connection options
*/
constructor(address, protocols, options) {
super();
this._binaryType = BINARY_TYPES[0];
this._closeCode = 1006;
this._closeFrameReceived = false;
this._closeFrameSent = false;
this._closeMessage = EMPTY_BUFFER;
this._closeTimer = null;
this._extensions = {};
this._paused = false;
this._protocol = '';
this._readyState = WebSocket.CONNECTING;
this._receiver = null;
this._sender = null;
this._socket = null;
if (address !== null) {
this._bufferedAmount = 0;
this._isServer = false;
this._redirects = 0;
if (protocols === undefined) {
protocols = [];
} else if (!Array.isArray(protocols)) {
if (typeof protocols === 'object' && protocols !== null) {
options = protocols;
protocols = [];
} else {
protocols = [protocols];
}
}
initAsClient(this, address, protocols, options);
} else {
this._isServer = true;
}
}
/**
* This deviates from the WHATWG interface since ws doesn't support the
* required default "blob" type (instead we define a custom "nodebuffer"
* type).
*
* @type {String}
*/
get binaryType() {
return this._binaryType;
}
set binaryType(type) {
if (!BINARY_TYPES.includes(type)) return;
this._binaryType = type;
//
// Allow to change `binaryType` on the fly.
//
if (this._receiver) this._receiver._binaryType = type;
}
/**
* @type {Number}
*/
get bufferedAmount() {
if (!this._socket) return this._bufferedAmount;
return this._socket._writableState.length + this._sender._bufferedBytes;
}
/**
* @type {String}
*/
get extensions() {
return Object.keys(this._extensions).join();
}
/**
* @type {Boolean}
*/
get isPaused() {
return this._paused;
}
/**
* @type {Function}
*/
/* istanbul ignore next */
get onclose() {
return null;
}
/**
* @type {Function}
*/
/* istanbul ignore next */
get onerror() {
return null;
}
/**
* @type {Function}
*/
/* istanbul ignore next */
get onopen() {
return null;
}
/**
* @type {Function}
*/
/* istanbul ignore next */
get onmessage() {
return null;
}
/**
* @type {String}
*/
get protocol() {
return this._protocol;
}
/**
* @type {Number}
*/
get readyState() {
return this._readyState;
}
/**
* @type {String}
*/
get url() {
return this._url;
}
/**
* Set up the socket and the internal resources.
*
* @param {(net.Socket|tls.Socket)} socket The network socket between the
* server and client
* @param {Buffer} head The first packet of the upgraded stream
* @param {Object} options Options object
* @param {Function} [options.generateMask] The function used to generate the
* masking key
* @param {Number} [options.maxPayload=0] The maximum allowed message size
* @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
* not to skip UTF-8 validation for text and close messages
* @private
*/
setSocket(socket, head, options) {
const receiver = new Receiver({
binaryType: this.binaryType,
extensions: this._extensions,
isServer: this._isServer,
maxPayload: options.maxPayload,
skipUTF8Validation: options.skipUTF8Validation
});
this._sender = new Sender(socket, this._extensions, options.generateMask);
this._receiver = receiver;
this._socket = socket;
receiver[kWebSocket$1] = this;
socket[kWebSocket$1] = this;
receiver.on('conclude', receiverOnConclude);
receiver.on('drain', receiverOnDrain);
receiver.on('error', receiverOnError);
receiver.on('message', receiverOnMessage);
receiver.on('ping', receiverOnPing);
receiver.on('pong', receiverOnPong);
socket.setTimeout(0);
socket.setNoDelay();
if (head.length > 0) socket.unshift(head);
socket.on('close', socketOnClose);
socket.on('data', socketOnData);
socket.on('end', socketOnEnd);
socket.on('error', socketOnError$1);
this._readyState = WebSocket.OPEN;
this.emit('open');
}
/**
* Emit the `'close'` event.
*
* @private
*/
emitClose() {
if (!this._socket) {
this._readyState = WebSocket.CLOSED;
this.emit('close', this._closeCode, this._closeMessage);
return;
}
if (this._extensions[PerMessageDeflate$1.extensionName]) {
this._extensions[PerMessageDeflate$1.extensionName].cleanup();
}
this._receiver.removeAllListeners();
this._readyState = WebSocket.CLOSED;
this.emit('close', this._closeCode, this._closeMessage);
}
/**
* Start a closing handshake.
*
* +----------+ +-----------+ +----------+
* - - -|ws.close()|-->|close frame|-->|ws.close()|- - -
* | +----------+ +-----------+ +----------+ |
* +----------+ +-----------+ |
* CLOSING |ws.close()|<--|close frame|<--+-----+ CLOSING
* +----------+ +-----------+ |
* | | | +---+ |
* +------------------------+-->|fin| - - - -
* | +---+ | +---+
* - - - - -|fin|<---------------------+
* +---+
*
* @param {Number} [code] Status code explaining why the connection is closing
* @param {(String|Buffer)} [data] The reason why the connection is
* closing
* @public
*/
close(code, data) {
if (this.readyState === WebSocket.CLOSED) return;
if (this.readyState === WebSocket.CONNECTING) {
const msg = 'WebSocket was closed before the connection was established';
abortHandshake$1(this, this._req, msg);
return;
}
if (this.readyState === WebSocket.CLOSING) {
if (
this._closeFrameSent &&
(this._closeFrameReceived || this._receiver._writableState.errorEmitted)
) {
this._socket.end();
}
return;
}
this._readyState = WebSocket.CLOSING;
this._sender.close(code, data, !this._isServer, (err) => {
//
// This error is handled by the `'error'` listener on the socket. We only
// want to know if the close frame has been sent here.
//
if (err) return;
this._closeFrameSent = true;
if (
this._closeFrameReceived ||
this._receiver._writableState.errorEmitted
) {
this._socket.end();
}
});
//
// Specify a timeout for the closing handshake to complete.
//
this._closeTimer = setTimeout(
this._socket.destroy.bind(this._socket),
closeTimeout
);
}
/**
* Pause the socket.
*
* @public
*/
pause() {
if (
this.readyState === WebSocket.CONNECTING ||
this.readyState === WebSocket.CLOSED
) {
return;
}
this._paused = true;
this._socket.pause();
}
/**
* Send a ping.
*
* @param {*} [data] The data to send
* @param {Boolean} [mask] Indicates whether or not to mask `data`
* @param {Function} [cb] Callback which is executed when the ping is sent
* @public
*/
ping(data, mask, cb) {
if (this.readyState === WebSocket.CONNECTING) {
throw new Error('WebSocket is not open: readyState 0 (CONNECTING)');
}
if (typeof data === 'function') {
cb = data;
data = mask = undefined;
} else if (typeof mask === 'function') {
cb = mask;
mask = undefined;
}
if (typeof data === 'number') data = data.toString();
if (this.readyState !== WebSocket.OPEN) {
sendAfterClose(this, data, cb);
return;
}
if (mask === undefined) mask = !this._isServer;
this._sender.ping(data || EMPTY_BUFFER, mask, cb);
}
/**
* Send a pong.
*
* @param {*} [data] The data to send
* @param {Boolean} [mask] Indicates whether or not to mask `data`
* @param {Function} [cb] Callback which is executed when the pong is sent
* @public
*/
pong(data, mask, cb) {
if (this.readyState === WebSocket.CONNECTING) {
throw new Error('WebSocket is not open: readyState 0 (CONNECTING)');
}
if (typeof data === 'function') {
cb = data;
data = mask = undefined;
} else if (typeof mask === 'function') {
cb = mask;
mask = undefined;
}
if (typeof data === 'number') data = data.toString();
if (this.readyState !== WebSocket.OPEN) {
sendAfterClose(this, data, cb);
return;
}
if (mask === undefined) mask = !this._isServer;
this._sender.pong(data || EMPTY_BUFFER, mask, cb);
}
/**
* Resume the socket.
*
* @public
*/
resume() {
if (
this.readyState === WebSocket.CONNECTING ||
this.readyState === WebSocket.CLOSED
) {
return;
}
this._paused = false;
if (!this._receiver._writableState.needDrain) this._socket.resume();
}
/**
* Send a data message.
*
* @param {*} data The message to send
* @param {Object} [options] Options object
* @param {Boolean} [options.binary] Specifies whether `data` is binary or
* text
* @param {Boolean} [options.compress] Specifies whether or not to compress
* `data`
* @param {Boolean} [options.fin=true] Specifies whether the fragment is the
* last one
* @param {Boolean} [options.mask] Specifies whether or not to mask `data`
* @param {Function} [cb] Callback which is executed when data is written out
* @public
*/
send(data, options, cb) {
if (this.readyState === WebSocket.CONNECTING) {
throw new Error('WebSocket is not open: readyState 0 (CONNECTING)');
}
if (typeof options === 'function') {
cb = options;
options = {};
}
if (typeof data === 'number') data = data.toString();
if (this.readyState !== WebSocket.OPEN) {
sendAfterClose(this, data, cb);
return;
}
const opts = {
binary: typeof data !== 'string',
mask: !this._isServer,
compress: true,
fin: true,
...options
};
if (!this._extensions[PerMessageDeflate$1.extensionName]) {
opts.compress = false;
}
this._sender.send(data || EMPTY_BUFFER, opts, cb);
}
/**
* Forcibly close the connection.
*
* @public
*/
terminate() {
if (this.readyState === WebSocket.CLOSED) return;
if (this.readyState === WebSocket.CONNECTING) {
const msg = 'WebSocket was closed before the connection was established';
abortHandshake$1(this, this._req, msg);
return;
}
if (this._socket) {
this._readyState = WebSocket.CLOSING;
this._socket.destroy();
}
}
};
/**
* @constant {Number} CONNECTING
* @memberof WebSocket
*/
Object.defineProperty(WebSocket$1, 'CONNECTING', {
enumerable: true,
value: readyStates.indexOf('CONNECTING')
});
/**
* @constant {Number} CONNECTING
* @memberof WebSocket.prototype
*/
Object.defineProperty(WebSocket$1.prototype, 'CONNECTING', {
enumerable: true,
value: readyStates.indexOf('CONNECTING')
});
/**
* @constant {Number} OPEN
* @memberof WebSocket
*/
Object.defineProperty(WebSocket$1, 'OPEN', {
enumerable: true,
value: readyStates.indexOf('OPEN')
});
/**
* @constant {Number} OPEN
* @memberof WebSocket.prototype
*/
Object.defineProperty(WebSocket$1.prototype, 'OPEN', {
enumerable: true,
value: readyStates.indexOf('OPEN')
});
/**
* @constant {Number} CLOSING
* @memberof WebSocket
*/
Object.defineProperty(WebSocket$1, 'CLOSING', {
enumerable: true,
value: readyStates.indexOf('CLOSING')
});
/**
* @constant {Number} CLOSING
* @memberof WebSocket.prototype
*/
Object.defineProperty(WebSocket$1.prototype, 'CLOSING', {
enumerable: true,
value: readyStates.indexOf('CLOSING')
});
/**
* @constant {Number} CLOSED
* @memberof WebSocket
*/
Object.defineProperty(WebSocket$1, 'CLOSED', {
enumerable: true,
value: readyStates.indexOf('CLOSED')
});
/**
* @constant {Number} CLOSED
* @memberof WebSocket.prototype
*/
Object.defineProperty(WebSocket$1.prototype, 'CLOSED', {
enumerable: true,
value: readyStates.indexOf('CLOSED')
});
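//
// Illustrative note: because these constants are derived from `readyStates`
// above, `WebSocket.CONNECTING === 0`, `WebSocket.OPEN === 1`,
// `WebSocket.CLOSING === 2` and `WebSocket.CLOSED === 3`, matching the WHATWG
// WebSocket interface.
//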
[
'binaryType',
'bufferedAmount',
'extensions',
'isPaused',
'protocol',
'readyState',
'url'
].forEach((property) => {
Object.defineProperty(WebSocket$1.prototype, property, { enumerable: true });
});
//
// Add the `onopen`, `onerror`, `onclose`, and `onmessage` attributes.
// See https://html.spec.whatwg.org/multipage/comms.html#the-websocket-interface
//
['open', 'error', 'close', 'message'].forEach((method) => {
Object.defineProperty(WebSocket$1.prototype, `on${method}`, {
enumerable: true,
get() {
for (const listener of this.listeners(method)) {
if (listener[kForOnEventAttribute]) return listener[kListener];
}
return null;
},
set(handler) {
for (const listener of this.listeners(method)) {
if (listener[kForOnEventAttribute]) {
this.removeListener(method, listener);
break;
}
}
if (typeof handler !== 'function') return;
this.addEventListener(method, handler, {
[kForOnEventAttribute]: true
});
}
});
});
WebSocket$1.prototype.addEventListener = addEventListener;
WebSocket$1.prototype.removeEventListener = removeEventListener;
var websocket = WebSocket$1;
/**
* Initialize a WebSocket client.
*
* @param {WebSocket} websocket The client to initialize
* @param {(String|URL)} address The URL to which to connect
* @param {Array} protocols The subprotocols
* @param {Object} [options] Connection options
* @param {Boolean} [options.followRedirects=false] Whether or not to follow
* redirects
* @param {Function} [options.generateMask] The function used to generate the
* masking key
* @param {Number} [options.handshakeTimeout] Timeout in milliseconds for the
* handshake request
* @param {Number} [options.maxPayload=104857600] The maximum allowed message
* size
* @param {Number} [options.maxRedirects=10] The maximum number of redirects
* allowed
* @param {String} [options.origin] Value of the `Origin` or
* `Sec-WebSocket-Origin` header
* @param {(Boolean|Object)} [options.perMessageDeflate=true] Enable/disable
* permessage-deflate
* @param {Number} [options.protocolVersion=13] Value of the
* `Sec-WebSocket-Version` header
* @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
* not to skip UTF-8 validation for text and close messages
* @private
*/
function initAsClient(websocket, address, protocols, options) {
const opts = {
protocolVersion: protocolVersions[1],
maxPayload: 100 * 1024 * 1024,
skipUTF8Validation: false,
perMessageDeflate: true,
followRedirects: false,
maxRedirects: 10,
...options,
createConnection: undefined,
socketPath: undefined,
hostname: undefined,
protocol: undefined,
timeout: undefined,
method: 'GET',
host: undefined,
path: undefined,
port: undefined
};
if (!protocolVersions.includes(opts.protocolVersion)) {
throw new RangeError(
`Unsupported protocol version: ${opts.protocolVersion} ` +
`(supported versions: ${protocolVersions.join(', ')})`
);
}
let parsedUrl;
if (address instanceof URL$2) {
parsedUrl = address;
websocket._url = address.href;
} else {
try {
parsedUrl = new URL$2(address);
} catch (e) {
throw new SyntaxError(`Invalid URL: ${address}`);
}
websocket._url = address;
}
const isSecure = parsedUrl.protocol === 'wss:';
const isIpcUrl = parsedUrl.protocol === 'ws+unix:';
let invalidUrlMessage;
if (parsedUrl.protocol !== 'ws:' && !isSecure && !isIpcUrl) {
invalidUrlMessage =
'The URL\'s protocol must be one of "ws:", "wss:", or "ws+unix:"';
} else if (isIpcUrl && !parsedUrl.pathname) {
invalidUrlMessage = "The URL's pathname is empty";
} else if (parsedUrl.hash) {
invalidUrlMessage = 'The URL contains a fragment identifier';
}
if (invalidUrlMessage) {
const err = new SyntaxError(invalidUrlMessage);
if (websocket._redirects === 0) {
throw err;
} else {
emitErrorAndClose(websocket, err);
return;
}
}
const defaultPort = isSecure ? 443 : 80;
const key = randomBytes(16).toString('base64');
const request = isSecure ? https$2.request : http$3.request;
const protocolSet = new Set();
let perMessageDeflate;
opts.createConnection = isSecure ? tlsConnect : netConnect;
opts.defaultPort = opts.defaultPort || defaultPort;
opts.port = parsedUrl.port || defaultPort;
opts.host = parsedUrl.hostname.startsWith('[')
? parsedUrl.hostname.slice(1, -1)
: parsedUrl.hostname;
opts.headers = {
...opts.headers,
'Sec-WebSocket-Version': opts.protocolVersion,
'Sec-WebSocket-Key': key,
Connection: 'Upgrade',
Upgrade: 'websocket'
};
opts.path = parsedUrl.pathname + parsedUrl.search;
opts.timeout = opts.handshakeTimeout;
if (opts.perMessageDeflate) {
perMessageDeflate = new PerMessageDeflate$1(
opts.perMessageDeflate !== true ? opts.perMessageDeflate : {},
false,
opts.maxPayload
);
opts.headers['Sec-WebSocket-Extensions'] = format({
[PerMessageDeflate$1.extensionName]: perMessageDeflate.offer()
});
}
if (protocols.length) {
for (const protocol of protocols) {
if (
typeof protocol !== 'string' ||
!subprotocolRegex.test(protocol) ||
protocolSet.has(protocol)
) {
throw new SyntaxError(
'An invalid or duplicated subprotocol was specified'
);
}
protocolSet.add(protocol);
}
opts.headers['Sec-WebSocket-Protocol'] = protocols.join(',');
}
if (opts.origin) {
if (opts.protocolVersion < 13) {
opts.headers['Sec-WebSocket-Origin'] = opts.origin;
} else {
opts.headers.Origin = opts.origin;
}
}
if (parsedUrl.username || parsedUrl.password) {
opts.auth = `${parsedUrl.username}:${parsedUrl.password}`;
}
if (isIpcUrl) {
const parts = opts.path.split(':');
opts.socketPath = parts[0];
opts.path = parts[1];
}
let req;
if (opts.followRedirects) {
if (websocket._redirects === 0) {
websocket._originalIpc = isIpcUrl;
websocket._originalSecure = isSecure;
websocket._originalHostOrSocketPath = isIpcUrl
? opts.socketPath
: parsedUrl.host;
const headers = options && options.headers;
//
// Shallow copy the user provided options so that headers can be changed
// without mutating the original object.
//
options = { ...options, headers: {} };
if (headers) {
for (const [key, value] of Object.entries(headers)) {
options.headers[key.toLowerCase()] = value;
}
}
} else if (websocket.listenerCount('redirect') === 0) {
const isSameHost = isIpcUrl
? websocket._originalIpc
? opts.socketPath === websocket._originalHostOrSocketPath
: false
: websocket._originalIpc
? false
: parsedUrl.host === websocket._originalHostOrSocketPath;
if (!isSameHost || (websocket._originalSecure && !isSecure)) {
//
// Match curl 7.77.0 behavior and drop the following headers. These
// headers are also dropped when following a redirect to a subdomain.
//
delete opts.headers.authorization;
delete opts.headers.cookie;
if (!isSameHost) delete opts.headers.host;
opts.auth = undefined;
}
}
//
// Match curl 7.77.0 behavior and make the first `Authorization` header win.
// If the `Authorization` header is set, then there is nothing to do as it
// will take precedence.
//
if (opts.auth && !options.headers.authorization) {
options.headers.authorization =
'Basic ' + Buffer.from(opts.auth).toString('base64');
}
req = websocket._req = request(opts);
if (websocket._redirects) {
//
// Unlike what is done for the `'upgrade'` event, no early exit is
// triggered here if the user calls `websocket.close()` or
// `websocket.terminate()` from a listener of the `'redirect'` event. This
// is because the user can also call `request.destroy()` with an error
// before calling `websocket.close()` or `websocket.terminate()` and this
// would result in an error being emitted on the `request` object with no
// `'error'` event listeners attached.
//
websocket.emit('redirect', websocket.url, req);
}
} else {
req = websocket._req = request(opts);
}
if (opts.timeout) {
req.on('timeout', () => {
abortHandshake$1(websocket, req, 'Opening handshake has timed out');
});
}
req.on('error', (err) => {
if (req === null || req[kAborted]) return;
req = websocket._req = null;
emitErrorAndClose(websocket, err);
});
req.on('response', (res) => {
const location = res.headers.location;
const statusCode = res.statusCode;
if (
location &&
opts.followRedirects &&
statusCode >= 300 &&
statusCode < 400
) {
if (++websocket._redirects > opts.maxRedirects) {
abortHandshake$1(websocket, req, 'Maximum redirects exceeded');
return;
}
req.abort();
let addr;
try {
addr = new URL$2(location, address);
} catch (e) {
const err = new SyntaxError(`Invalid URL: ${location}`);
emitErrorAndClose(websocket, err);
return;
}
initAsClient(websocket, addr, protocols, options);
} else if (!websocket.emit('unexpected-response', req, res)) {
abortHandshake$1(
websocket,
req,
`Unexpected server response: ${res.statusCode}`
);
}
});
req.on('upgrade', (res, socket, head) => {
websocket.emit('upgrade', res);
//
// The user may have closed the connection from a listener of the
// `'upgrade'` event.
//
if (websocket.readyState !== WebSocket$1.CONNECTING) return;
req = websocket._req = null;
if (res.headers.upgrade.toLowerCase() !== 'websocket') {
abortHandshake$1(websocket, socket, 'Invalid Upgrade header');
return;
}
const digest = createHash$1('sha1')
.update(key + GUID$1)
.digest('base64');
if (res.headers['sec-websocket-accept'] !== digest) {
abortHandshake$1(websocket, socket, 'Invalid Sec-WebSocket-Accept header');
return;
}
const serverProt = res.headers['sec-websocket-protocol'];
let protError;
if (serverProt !== undefined) {
if (!protocolSet.size) {
protError = 'Server sent a subprotocol but none was requested';
} else if (!protocolSet.has(serverProt)) {
protError = 'Server sent an invalid subprotocol';
}
} else if (protocolSet.size) {
protError = 'Server sent no subprotocol';
}
if (protError) {
abortHandshake$1(websocket, socket, protError);
return;
}
if (serverProt) websocket._protocol = serverProt;
const secWebSocketExtensions = res.headers['sec-websocket-extensions'];
if (secWebSocketExtensions !== undefined) {
if (!perMessageDeflate) {
const message =
'Server sent a Sec-WebSocket-Extensions header but no extension ' +
'was requested';
abortHandshake$1(websocket, socket, message);
return;
}
let extensions;
try {
extensions = parse$1(secWebSocketExtensions);
} catch (err) {
const message = 'Invalid Sec-WebSocket-Extensions header';
abortHandshake$1(websocket, socket, message);
return;
}
const extensionNames = Object.keys(extensions);
if (
extensionNames.length !== 1 ||
extensionNames[0] !== PerMessageDeflate$1.extensionName
) {
const message = 'Server indicated an extension that was not requested';
abortHandshake$1(websocket, socket, message);
return;
}
try {
perMessageDeflate.accept(extensions[PerMessageDeflate$1.extensionName]);
} catch (err) {
const message = 'Invalid Sec-WebSocket-Extensions header';
abortHandshake$1(websocket, socket, message);
return;
}
websocket._extensions[PerMessageDeflate$1.extensionName] =
perMessageDeflate;
}
websocket.setSocket(socket, head, {
generateMask: opts.generateMask,
maxPayload: opts.maxPayload,
skipUTF8Validation: opts.skipUTF8Validation
});
});
if (opts.finishRequest) {
opts.finishRequest(req, websocket);
} else {
req.end();
}
}
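//
// For illustration only (kept as a comment, never executed): the options
// documented above are what the public `ws` client API that wraps this
// function forwards here. A hypothetical consumer might open a connection
// with
//
//   const ws = new WebSocket('wss://example.com/socket', ['chat'], {
//     followRedirects: true,
//     maxRedirects: 5,
//     handshakeTimeout: 10000
//   });
//
// `initAsClient` then validates the URL, builds the upgrade request headers
// (`Sec-WebSocket-Key`, `Sec-WebSocket-Version`, optional subprotocols and the
// permessage-deflate offer) and wires up redirect, error, and upgrade handling.
//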
/**
* Emit the `'error'` and `'close'` events.
*
* @param {WebSocket} websocket The WebSocket instance
* @param {Error} err The error to emit
* @private
*/
function emitErrorAndClose(websocket, err) {
websocket._readyState = WebSocket$1.CLOSING;
websocket.emit('error', err);
websocket.emitClose();
}
/**
* Create a `net.Socket` and initiate a connection.
*
* @param {Object} options Connection options
* @return {net.Socket} The newly created socket used to start the connection
* @private
*/
function netConnect(options) {
options.path = options.socketPath;
return net.connect(options);
}
/**
* Create a `tls.TLSSocket` and initiate a connection.
*
* @param {Object} options Connection options
* @return {tls.TLSSocket} The newly created socket used to start the connection
* @private
*/
function tlsConnect(options) {
options.path = undefined;
if (!options.servername && options.servername !== '') {
options.servername = net.isIP(options.host) ? '' : options.host;
}
return tls.connect(options);
}
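//
// For illustration only: `servername` defaults to the target host when it is
// a DNS name and is left empty for IP literals, so SNI is only sent when it
// is meaningful.
//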
/**
* Abort the handshake and emit an error.
*
* @param {WebSocket} websocket The WebSocket instance
* @param {(http.ClientRequest|net.Socket|tls.Socket)} stream The request to
* abort or the socket to destroy
* @param {String} message The error message
* @private
*/
function abortHandshake$1(websocket, stream, message) {
websocket._readyState = WebSocket$1.CLOSING;
const err = new Error(message);
Error.captureStackTrace(err, abortHandshake$1);
if (stream.setHeader) {
stream[kAborted] = true;
stream.abort();
if (stream.socket && !stream.socket.destroyed) {
//
// On Node.js >= 14.3.0 `request.abort()` does not destroy the socket if
// called after the request completed. See
// https://github.com/websockets/ws/issues/1869.
//
stream.socket.destroy();
}
process.nextTick(emitErrorAndClose, websocket, err);
} else {
stream.destroy(err);
stream.once('error', websocket.emit.bind(websocket, 'error'));
stream.once('close', websocket.emitClose.bind(websocket));
}
}
/**
* Handle cases where the `ping()`, `pong()`, or `send()` methods are called
* when the `readyState` attribute is `CLOSING` or `CLOSED`.
*
* @param {WebSocket} websocket The WebSocket instance
* @param {*} [data] The data to send
* @param {Function} [cb] Callback
* @private
*/
function sendAfterClose(websocket, data, cb) {
if (data) {
const length = toBuffer(data).length;
//
// The `_bufferedAmount` property is used only when the peer is a client and
// the opening handshake fails. Under these circumstances, in fact, the
// `setSocket()` method is not called, so the `_socket` and `_sender`
// properties are set to `null`.
//
if (websocket._socket) websocket._sender._bufferedBytes += length;
else websocket._bufferedAmount += length;
}
if (cb) {
const err = new Error(
`WebSocket is not open: readyState ${websocket.readyState} ` +
`(${readyStates[websocket.readyState]})`
);
process.nextTick(cb, err);
}
}
/**
* The listener of the `Receiver` `'conclude'` event.
*
* @param {Number} code The status code
* @param {Buffer} reason The reason for closing
* @private
*/
function receiverOnConclude(code, reason) {
const websocket = this[kWebSocket$1];
websocket._closeFrameReceived = true;
websocket._closeMessage = reason;
websocket._closeCode = code;
if (websocket._socket[kWebSocket$1] === undefined) return;
websocket._socket.removeListener('data', socketOnData);
process.nextTick(resume, websocket._socket);
if (code === 1005) websocket.close();
else websocket.close(code, reason);
}
/**
* The listener of the `Receiver` `'drain'` event.
*
* @private
*/
function receiverOnDrain() {
const websocket = this[kWebSocket$1];
if (!websocket.isPaused) websocket._socket.resume();
}
/**
* The listener of the `Receiver` `'error'` event.
*
* @param {(RangeError|Error)} err The emitted error
* @private
*/
function receiverOnError(err) {
const websocket = this[kWebSocket$1];
if (websocket._socket[kWebSocket$1] !== undefined) {
websocket._socket.removeListener('data', socketOnData);
//
// On Node.js < 14.0.0 the `'error'` event is emitted synchronously. See
// https://github.com/websockets/ws/issues/1940.
//
process.nextTick(resume, websocket._socket);
websocket.close(err[kStatusCode]);
}
websocket.emit('error', err);
}
/**
* The listener of the `Receiver` `'finish'` event.
*
* @private
*/
function receiverOnFinish() {
this[kWebSocket$1].emitClose();
}
/**
* The listener of the `Receiver` `'message'` event.
*
* @param {(Buffer|ArrayBuffer|Buffer[])} data The message
* @param {Boolean} isBinary Specifies whether the message is binary or not
* @private
*/
function receiverOnMessage(data, isBinary) {
this[kWebSocket$1].emit('message', data, isBinary);
}
/**
* The listener of the `Receiver` `'ping'` event.
*
* @param {Buffer} data The data included in the ping frame
* @private
*/
function receiverOnPing(data) {
const websocket = this[kWebSocket$1];
websocket.pong(data, !websocket._isServer, NOOP);
websocket.emit('ping', data);
}
/**
* The listener of the `Receiver` `'pong'` event.
*
* @param {Buffer} data The data included in the pong frame
* @private
*/
function receiverOnPong(data) {
this[kWebSocket$1].emit('pong', data);
}
/**
* Resume a readable stream
*
* @param {Readable} stream The readable stream
* @private
*/
function resume(stream) {
stream.resume();
}
/**
* The listener of the `net.Socket` `'close'` event.
*
* @private
*/
function socketOnClose() {
const websocket = this[kWebSocket$1];
this.removeListener('close', socketOnClose);
this.removeListener('data', socketOnData);
this.removeListener('end', socketOnEnd);
websocket._readyState = WebSocket$1.CLOSING;
let chunk;
//
// The close frame might not have been received or the `'end'` event emitted,
// for example, if the socket was destroyed due to an error. Ensure that the
// `receiver` stream is closed after writing any remaining buffered data to
// it. If the readable side of the socket is in flowing mode then there is no
// buffered data as everything has been already written and `readable.read()`
// will return `null`. If instead, the socket is paused, any possible buffered
// data will be read as a single chunk.
//
if (
!this._readableState.endEmitted &&
!websocket._closeFrameReceived &&
!websocket._receiver._writableState.errorEmitted &&
(chunk = websocket._socket.read()) !== null
) {
websocket._receiver.write(chunk);
}
websocket._receiver.end();
this[kWebSocket$1] = undefined;
clearTimeout(websocket._closeTimer);
if (
websocket._receiver._writableState.finished ||
websocket._receiver._writableState.errorEmitted
) {
websocket.emitClose();
} else {
websocket._receiver.on('error', receiverOnFinish);
websocket._receiver.on('finish', receiverOnFinish);
}
}
/**
* The listener of the `net.Socket` `'data'` event.
*
* @param {Buffer} chunk A chunk of data
* @private
*/
function socketOnData(chunk) {
if (!this[kWebSocket$1]._receiver.write(chunk)) {
this.pause();
}
}
/**
* The listener of the `net.Socket` `'end'` event.
*
* @private
*/
function socketOnEnd() {
const websocket = this[kWebSocket$1];
websocket._readyState = WebSocket$1.CLOSING;
websocket._receiver.end();
this.end();
}
/**
* The listener of the `net.Socket` `'error'` event.
*
* @private
*/
function socketOnError$1() {
const websocket = this[kWebSocket$1];
this.removeListener('error', socketOnError$1);
this.on('error', NOOP);
if (websocket) {
websocket._readyState = WebSocket$1.CLOSING;
this.destroy();
}
}
const { tokenChars } = validationExports;
/**
* Parses the `Sec-WebSocket-Protocol` header into a set of subprotocol names.
*
* @param {String} header The field value of the header
* @return {Set} The subprotocol names
* @public
*/
function parse(header) {
const protocols = new Set();
let start = -1;
let end = -1;
let i = 0;
for (i; i < header.length; i++) {
const code = header.charCodeAt(i);
if (end === -1 && tokenChars[code] === 1) {
if (start === -1) start = i;
} else if (
i !== 0 &&
(code === 0x20 /* ' ' */ || code === 0x09) /* '\t' */
) {
if (end === -1 && start !== -1) end = i;
} else if (code === 0x2c /* ',' */) {
if (start === -1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (end === -1) end = i;
const protocol = header.slice(start, end);
if (protocols.has(protocol)) {
throw new SyntaxError(`The "${protocol}" subprotocol is duplicated`);
}
protocols.add(protocol);
start = end = -1;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
}
if (start === -1 || end !== -1) {
throw new SyntaxError('Unexpected end of input');
}
const protocol = header.slice(start, i);
if (protocols.has(protocol)) {
throw new SyntaxError(`The "${protocol}" subprotocol is duplicated`);
}
protocols.add(protocol);
return protocols;
}
var subprotocol$1 = { parse };
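//
// For illustration only: `parse('chat, superchat')` returns a Set containing
// 'chat' and 'superchat', while a duplicated token ('chat, chat') or a
// malformed character throws a `SyntaxError`.
//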
/* eslint no-unused-vars: ["error", { "varsIgnorePattern": "^net|tls|https$" }] */
const EventEmitter = require$$0$5;
const http$2 = require$$1$1;
const { createHash } = require$$3$1;
const extension = extension$1;
const PerMessageDeflate = permessageDeflate;
const subprotocol = subprotocol$1;
const WebSocket = websocket;
const { GUID, kWebSocket } = constants;
const keyRegex = /^[+/0-9A-Za-z]{22}==$/;
const RUNNING = 0;
const CLOSING = 1;
const CLOSED = 2;
/**
* Class representing a WebSocket server.
*
* @extends EventEmitter
*/
class WebSocketServer extends EventEmitter {
/**
* Create a `WebSocketServer` instance.
*
* @param {Object} options Configuration options
* @param {Number} [options.backlog=511] The maximum length of the queue of
* pending connections
* @param {Boolean} [options.clientTracking=true] Specifies whether or not to
* track clients
* @param {Function} [options.handleProtocols] A hook to handle protocols
* @param {String} [options.host] The hostname where to bind the server
* @param {Number} [options.maxPayload=104857600] The maximum allowed message
* size
* @param {Boolean} [options.noServer=false] Enable no server mode
* @param {String} [options.path] Accept only connections matching this path
* @param {(Boolean|Object)} [options.perMessageDeflate=false] Enable/disable
* permessage-deflate
* @param {Number} [options.port] The port where to bind the server
* @param {(http.Server|https.Server)} [options.server] A pre-created HTTP/S
* server to use
* @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
* not to skip UTF-8 validation for text and close messages
* @param {Function} [options.verifyClient] A hook to reject connections
* @param {Function} [options.WebSocket=WebSocket] Specifies the `WebSocket`
* class to use. It must be the `WebSocket` class or class that extends it
* @param {Function} [callback] A listener for the `listening` event
*/
constructor(options, callback) {
super();
options = {
maxPayload: 100 * 1024 * 1024,
skipUTF8Validation: false,
perMessageDeflate: false,
handleProtocols: null,
clientTracking: true,
verifyClient: null,
noServer: false,
backlog: null, // use default (511 as implemented in net.js)
server: null,
host: null,
path: null,
port: null,
WebSocket,
...options
};
if (
(options.port == null && !options.server && !options.noServer) ||
(options.port != null && (options.server || options.noServer)) ||
(options.server && options.noServer)
) {
throw new TypeError(
'One and only one of the "port", "server", or "noServer" options ' +
'must be specified'
);
}
if (options.port != null) {
this._server = http$2.createServer((req, res) => {
const body = http$2.STATUS_CODES[426];
res.writeHead(426, {
'Content-Length': body.length,
'Content-Type': 'text/plain'
});
res.end(body);
});
this._server.listen(
options.port,
options.host,
options.backlog,
callback
);
} else if (options.server) {
this._server = options.server;
}
if (this._server) {
const emitConnection = this.emit.bind(this, 'connection');
this._removeListeners = addListeners(this._server, {
listening: this.emit.bind(this, 'listening'),
error: this.emit.bind(this, 'error'),
upgrade: (req, socket, head) => {
this.handleUpgrade(req, socket, head, emitConnection);
}
});
}
if (options.perMessageDeflate === true) options.perMessageDeflate = {};
if (options.clientTracking) {
this.clients = new Set();
this._shouldEmitClose = false;
}
this.options = options;
this._state = RUNNING;
}
/**
* Returns the bound address, the address family name, and port of the server
* as reported by the operating system if listening on an IP socket.
* If the server is listening on a pipe or UNIX domain socket, the name is
* returned as a string.
*
* @return {(Object|String|null)} The address of the server
* @public
*/
address() {
if (this.options.noServer) {
throw new Error('The server is operating in "noServer" mode');
}
if (!this._server) return null;
return this._server.address();
}
/**
* Stop the server from accepting new connections and emit the `'close'` event
* when all existing connections are closed.
*
* @param {Function} [cb] A one-time listener for the `'close'` event
* @public
*/
close(cb) {
if (this._state === CLOSED) {
if (cb) {
this.once('close', () => {
cb(new Error('The server is not running'));
});
}
process.nextTick(emitClose, this);
return;
}
if (cb) this.once('close', cb);
if (this._state === CLOSING) return;
this._state = CLOSING;
if (this.options.noServer || this.options.server) {
if (this._server) {
this._removeListeners();
this._removeListeners = this._server = null;
}
if (this.clients) {
if (!this.clients.size) {
process.nextTick(emitClose, this);
} else {
this._shouldEmitClose = true;
}
} else {
process.nextTick(emitClose, this);
}
} else {
const server = this._server;
this._removeListeners();
this._removeListeners = this._server = null;
//
// The HTTP/S server was created internally. Close it, and rely on its
// `'close'` event.
//
server.close(() => {
emitClose(this);
});
}
}
/**
* See if a given request should be handled by this server instance.
*
* @param {http.IncomingMessage} req Request object to inspect
* @return {Boolean} `true` if the request is valid, else `false`
* @public
*/
shouldHandle(req) {
if (this.options.path) {
const index = req.url.indexOf('?');
const pathname = index !== -1 ? req.url.slice(0, index) : req.url;
if (pathname !== this.options.path) return false;
}
return true;
}
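//
// For illustration only: with `path: '/ws'` in the server options, a request
// for '/ws?token=abc' is handled (the query string is ignored) while a
// request for '/other' is rejected by `handleUpgrade()` with a 400 response.
//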
/**
* Handle a HTTP Upgrade request.
*
* @param {http.IncomingMessage} req The request object
* @param {(net.Socket|tls.Socket)} socket The network socket between the
* server and client
* @param {Buffer} head The first packet of the upgraded stream
* @param {Function} cb Callback
* @public
*/
handleUpgrade(req, socket, head, cb) {
socket.on('error', socketOnError);
const key = req.headers['sec-websocket-key'];
const version = +req.headers['sec-websocket-version'];
if (req.method !== 'GET') {
const message = 'Invalid HTTP method';
abortHandshakeOrEmitwsClientError(this, req, socket, 405, message);
return;
}
if (req.headers.upgrade.toLowerCase() !== 'websocket') {
const message = 'Invalid Upgrade header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
if (!key || !keyRegex.test(key)) {
const message = 'Missing or invalid Sec-WebSocket-Key header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
if (version !== 8 && version !== 13) {
const message = 'Missing or invalid Sec-WebSocket-Version header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
if (!this.shouldHandle(req)) {
abortHandshake(socket, 400);
return;
}
const secWebSocketProtocol = req.headers['sec-websocket-protocol'];
let protocols = new Set();
if (secWebSocketProtocol !== undefined) {
try {
protocols = subprotocol.parse(secWebSocketProtocol);
} catch (err) {
const message = 'Invalid Sec-WebSocket-Protocol header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
}
const secWebSocketExtensions = req.headers['sec-websocket-extensions'];
const extensions = {};
if (
this.options.perMessageDeflate &&
secWebSocketExtensions !== undefined
) {
const perMessageDeflate = new PerMessageDeflate(
this.options.perMessageDeflate,
true,
this.options.maxPayload
);
try {
const offers = extension.parse(secWebSocketExtensions);
if (offers[PerMessageDeflate.extensionName]) {
perMessageDeflate.accept(offers[PerMessageDeflate.extensionName]);
extensions[PerMessageDeflate.extensionName] = perMessageDeflate;
}
} catch (err) {
const message =
'Invalid or unacceptable Sec-WebSocket-Extensions header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
}
//
// Optionally call external client verification handler.
//
if (this.options.verifyClient) {
const info = {
origin:
req.headers[`${version === 8 ? 'sec-websocket-origin' : 'origin'}`],
secure: !!(req.socket.authorized || req.socket.encrypted),
req
};
if (this.options.verifyClient.length === 2) {
this.options.verifyClient(info, (verified, code, message, headers) => {
if (!verified) {
return abortHandshake(socket, code || 401, message, headers);
}
this.completeUpgrade(
extensions,
key,
protocols,
req,
socket,
head,
cb
);
});
return;
}
if (!this.options.verifyClient(info)) return abortHandshake(socket, 401);
}
this.completeUpgrade(extensions, key, protocols, req, socket, head, cb);
}
/**
* Upgrade the connection to WebSocket.
*
* @param {Object} extensions The accepted extensions
* @param {String} key The value of the `Sec-WebSocket-Key` header
* @param {Set} protocols The subprotocols
* @param {http.IncomingMessage} req The request object
* @param {(net.Socket|tls.Socket)} socket The network socket between the
* server and client
* @param {Buffer} head The first packet of the upgraded stream
* @param {Function} cb Callback
* @throws {Error} If called more than once with the same socket
* @private
*/
completeUpgrade(extensions, key, protocols, req, socket, head, cb) {
//
// Destroy the socket if the client has already sent a FIN packet.
//
if (!socket.readable || !socket.writable) return socket.destroy();
if (socket[kWebSocket]) {
throw new Error(
'server.handleUpgrade() was called more than once with the same ' +
'socket, possibly due to a misconfiguration'
);
}
if (this._state > RUNNING) return abortHandshake(socket, 503);
const digest = createHash('sha1')
.update(key + GUID)
.digest('base64');
const headers = [
'HTTP/1.1 101 Switching Protocols',
'Upgrade: websocket',
'Connection: Upgrade',
`Sec-WebSocket-Accept: ${digest}`
];
const ws = new this.options.WebSocket(null);
if (protocols.size) {
//
// Optionally call external protocol selection handler.
//
const protocol = this.options.handleProtocols
? this.options.handleProtocols(protocols, req)
: protocols.values().next().value;
if (protocol) {
headers.push(`Sec-WebSocket-Protocol: ${protocol}`);
ws._protocol = protocol;
}
}
if (extensions[PerMessageDeflate.extensionName]) {
const params = extensions[PerMessageDeflate.extensionName].params;
const value = extension.format({
[PerMessageDeflate.extensionName]: [params]
});
headers.push(`Sec-WebSocket-Extensions: ${value}`);
ws._extensions = extensions;
}
//
// Allow external modification/inspection of handshake headers.
//
this.emit('headers', headers, req);
socket.write(headers.concat('\r\n').join('\r\n'));
socket.removeListener('error', socketOnError);
ws.setSocket(socket, head, {
maxPayload: this.options.maxPayload,
skipUTF8Validation: this.options.skipUTF8Validation
});
if (this.clients) {
this.clients.add(ws);
ws.on('close', () => {
this.clients.delete(ws);
if (this._shouldEmitClose && !this.clients.size) {
process.nextTick(emitClose, this);
}
});
}
cb(ws, req);
}
}
var websocketServer = WebSocketServer;
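//
// For illustration only (not executed): exactly one of the `port`, `server`,
// or `noServer` options must be provided when constructing the server, e.g.
//
//   const wss = new WebSocketServer({ noServer: true });
//   httpServer.on('upgrade', (req, socket, head) => {
//     wss.handleUpgrade(req, socket, head, (ws) => wss.emit('connection', ws, req));
//   });
//
// (`httpServer` is a placeholder for an existing `http.Server` instance; this
// is the same pattern used by `createWebSocketServer` further below.)
//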
/**
* Add event listeners on an `EventEmitter` using a map of <event, listener>
* pairs.
*
* @param {EventEmitter} server The event emitter
* @param {Object.<String, Function>} map The listeners to add
* @return {Function} A function that will remove the added listeners when
* called
* @private
*/
function addListeners(server, map) {
for (const event of Object.keys(map)) server.on(event, map[event]);
return function removeListeners() {
for (const event of Object.keys(map)) {
server.removeListener(event, map[event]);
}
};
}
/**
* Emit a `'close'` event on an `EventEmitter`.
*
* @param {EventEmitter} server The event emitter
* @private
*/
function emitClose(server) {
server._state = CLOSED;
server.emit('close');
}
/**
* Handle socket errors.
*
* @private
*/
function socketOnError() {
this.destroy();
}
/**
* Close the connection when preconditions are not fulfilled.
*
* @param {(net.Socket|tls.Socket)} socket The socket of the upgrade request
* @param {Number} code The HTTP response status code
* @param {String} [message] The HTTP response body
* @param {Object} [headers] Additional HTTP response headers
* @private
*/
function abortHandshake(socket, code, message, headers) {
//
// The socket is writable unless the user destroyed or ended it before calling
// `server.handleUpgrade()` or in the `verifyClient` function, which is a user
// error. Handling this does not make much sense as the worst that can happen
// is that some of the data written by the user might be discarded due to the
// call to `socket.end()` below, which triggers an `'error'` event that in
// turn causes the socket to be destroyed.
//
message = message || http$2.STATUS_CODES[code];
headers = {
Connection: 'close',
'Content-Type': 'text/html',
'Content-Length': Buffer.byteLength(message),
...headers
};
socket.once('finish', socket.destroy);
socket.end(
`HTTP/1.1 ${code} ${http$2.STATUS_CODES[code]}\r\n` +
Object.keys(headers)
.map((h) => `${h}: ${headers[h]}`)
.join('\r\n') +
'\r\n\r\n' +
message
);
}
/**
* Emit a `'wsClientError'` event on a `WebSocketServer` if there is at least
* one listener for it, otherwise call `abortHandshake()`.
*
* @param {WebSocketServer} server The WebSocket server
* @param {http.IncomingMessage} req The request object
* @param {(net.Socket|tls.Socket)} socket The socket of the upgrade request
* @param {Number} code The HTTP response status code
* @param {String} message The HTTP response body
* @private
*/
function abortHandshakeOrEmitwsClientError(server, req, socket, code, message) {
if (server.listenerCount('wsClientError')) {
const err = new Error(message);
Error.captureStackTrace(err, abortHandshakeOrEmitwsClientError);
server.emit('wsClientError', err, socket, req);
} else {
abortHandshake(socket, code, message);
}
}
var WebSocketServerRaw_ = /*@__PURE__*/getDefaultExportFromCjs(websocketServer);
/* In Bun, the `ws` module is overridden to hook into the native code. Using the bundled `js` version
* of `ws` will not work as Bun's req.socket does not allow reading/writing to the underlying socket.
*/
const WebSocketServerRaw = process.versions.bun
? // @ts-expect-error: Bun defines `import.meta.require`
import.meta.require('ws').WebSocketServer
: WebSocketServerRaw_;
const HMR_HEADER = 'vite-hmr';
const wsServerEvents = [
'connection',
'error',
'headers',
'listening',
'message',
];
function createWebSocketServer(server, config, httpsOptions) {
let wss;
let wsHttpServer = undefined;
const hmr = isObject$2(config.server.hmr) && config.server.hmr;
const hmrServer = hmr && hmr.server;
const hmrPort = hmr && hmr.port;
// TODO: the main server port may not have been chosen yet as it may use the next available port
const portsAreCompatible = !hmrPort || hmrPort === config.server.port;
const wsServer = hmrServer || (portsAreCompatible && server);
let hmrServerWsListener;
const customListeners = new Map();
const clientsMap = new WeakMap();
const port = hmrPort || 24678;
const host = (hmr && hmr.host) || undefined;
if (wsServer) {
let hmrBase = config.base;
const hmrPath = hmr ? hmr.path : undefined;
if (hmrPath) {
hmrBase = path$o.posix.join(hmrBase, hmrPath);
}
wss = new WebSocketServerRaw({ noServer: true });
hmrServerWsListener = (req, socket, head) => {
if (req.headers['sec-websocket-protocol'] === HMR_HEADER &&
req.url === hmrBase) {
wss.handleUpgrade(req, socket, head, (ws) => {
wss.emit('connection', ws, req);
});
}
};
wsServer.on('upgrade', hmrServerWsListener);
}
else {
// the http server request handler is kept the same as
// https://github.com/websockets/ws/blob/45e17acea791d865df6b255a55182e9c42e5877a/lib/websocket-server.js#L88-L96
const route = ((_, res) => {
const statusCode = 426;
const body = STATUS_CODES[statusCode];
if (!body)
throw new Error(`No body text found for the ${statusCode} status code`);
res.writeHead(statusCode, {
'Content-Length': body.length,
'Content-Type': 'text/plain',
});
res.end(body);
});
if (httpsOptions) {
wsHttpServer = createServer$2(httpsOptions, route);
}
else {
wsHttpServer = createServer$3(route);
}
// vite dev server in middleware mode
// need to call ws listen manually
wss = new WebSocketServerRaw({ server: wsHttpServer });
}
wss.on('connection', (socket) => {
socket.on('message', (raw) => {
if (!customListeners.size)
return;
let parsed;
try {
parsed = JSON.parse(String(raw));
}
catch { }
if (!parsed || parsed.type !== 'custom' || !parsed.event)
return;
const listeners = customListeners.get(parsed.event);
if (!listeners?.size)
return;
const client = getSocketClient(socket);
listeners.forEach((listener) => listener(parsed.data, client));
});
socket.on('error', (err) => {
config.logger.error(`${colors$1.red(`ws error:`)}\n${err.stack}`, {
timestamp: true,
error: err,
});
});
socket.send(JSON.stringify({ type: 'connected' }));
if (bufferedError) {
socket.send(JSON.stringify(bufferedError));
bufferedError = null;
}
});
wss.on('error', (e) => {
if (e.code === 'EADDRINUSE') {
config.logger.error(colors$1.red(`WebSocket server error: Port is already in use`), { error: e });
}
else {
config.logger.error(colors$1.red(`WebSocket server error:\n${e.stack || e.message}`), { error: e });
}
});
// Provide a wrapper to the ws client so we can send messages in JSON format
// To be consistent with server.ws.send
function getSocketClient(socket) {
if (!clientsMap.has(socket)) {
clientsMap.set(socket, {
send: (...args) => {
let payload;
if (typeof args[0] === 'string') {
payload = {
type: 'custom',
event: args[0],
data: args[1],
};
}
else {
payload = args[0];
}
socket.send(JSON.stringify(payload));
},
socket,
});
}
return clientsMap.get(socket);
}
// On page reloads, if a file fails to compile and returns 500, the server
// sends the error payload before the client connection is established.
// If we have no open clients, buffer the error and send it to the next
// connected client.
let bufferedError = null;
return {
listen: () => {
wsHttpServer?.listen(port, host);
},
on: ((event, fn) => {
if (wsServerEvents.includes(event))
wss.on(event, fn);
else {
if (!customListeners.has(event)) {
customListeners.set(event, new Set());
}
customListeners.get(event).add(fn);
}
}),
off: ((event, fn) => {
if (wsServerEvents.includes(event)) {
wss.off(event, fn);
}
else {
customListeners.get(event)?.delete(fn);
}
}),
get clients() {
return new Set(Array.from(wss.clients).map(getSocketClient));
},
send(...args) {
let payload;
if (typeof args[0] === 'string') {
payload = {
type: 'custom',
event: args[0],
data: args[1],
};
}
else {
payload = args[0];
}
if (payload.type === 'error' && !wss.clients.size) {
bufferedError = payload;
return;
}
const stringified = JSON.stringify(payload);
wss.clients.forEach((client) => {
// readyState 1 means the connection is open
if (client.readyState === 1) {
client.send(stringified);
}
});
},
close() {
// should remove listener if hmr.server is set
// otherwise the old listener swallows all WebSocket connections
if (hmrServerWsListener && wsServer) {
wsServer.off('upgrade', hmrServerWsListener);
}
return new Promise((resolve, reject) => {
wss.clients.forEach((client) => {
client.terminate();
});
wss.close((err) => {
if (err) {
reject(err);
}
else {
if (wsHttpServer) {
wsHttpServer.close((err) => {
if (err) {
reject(err);
}
else {
resolve();
}
});
}
else {
resolve();
}
}
});
});
},
};
}
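//
// For illustration only: the object returned above is Vite's HMR WebSocket
// wrapper. `on('some-event', handler)` registers a listener for custom
// events, and `send('some-event', data)` broadcasts
// `{ type: 'custom', event: 'some-event', data }` to every client whose
// `readyState` is 1 (open); an `'error'`-type payload sent before any client
// has connected is buffered and delivered to the next connection.
//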
// this middleware is only active when (base !== '/')
function baseMiddleware({ config, }) {
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return function viteBaseMiddleware(req, res, next) {
const url = req.url;
const parsed = new URL(url, 'http://vitejs.dev');
const path = parsed.pathname || '/';
const base = config.rawBase;
if (path.startsWith(base)) {
// rewrite url to remove base. this ensures that other middleware does
// not need to consider base being prepended or not
req.url = stripBase(url, base);
return next();
}
// skip redirect and error fallback on middleware mode, #4057
if (config.server.middlewareMode) {
return next();
}
if (path === '/' || path === '/index.html') {
// redirect root visit to based url with search and hash
res.writeHead(302, {
Location: base + (parsed.search || '') + (parsed.hash || ''),
});
res.end();
return;
}
else if (req.headers.accept?.includes('text/html')) {
// non-based page visit
const redirectPath = withTrailingSlash(url) !== base ? joinUrlSegments(base, url) : base;
res.writeHead(404, {
'Content-Type': 'text/html',
});
res.end(`The server is configured with a public base URL of ${base} - ` +
`did you mean to visit <a href="${redirectPath}">${redirectPath}</a> instead?`);
return;
}
next();
};
}
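//
// For illustration only: with `base` set to '/app/', a request for
// '/app/src/main.ts' is rewritten to '/src/main.ts' before reaching later
// middleware, a bare '/' visit is redirected (302) to '/app/', and an HTML
// request outside the base receives a 404 page pointing at the configured base.
//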
var httpProxy$3 = {exports: {}};
var eventemitter3 = {exports: {}};
(function (module) {
var has = Object.prototype.hasOwnProperty
, prefix = '~';
/**
* Constructor to create a storage for our `EE` objects.
* An `Events` instance is a plain object whose properties are event names.
*
* @constructor
* @private
*/
function Events() {}
//
// We try to not inherit from `Object.prototype`. In some engines creating an
// instance in this way is faster than calling `Object.create(null)` directly.
// If `Object.create(null)` is not supported we prefix the event names with a
// character to make sure that the built-in object properties are not
// overridden or used as an attack vector.
//
if (Object.create) {
Events.prototype = Object.create(null);
//
// This hack is needed because the `__proto__` property is still inherited in
// some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.
//
if (!new Events().__proto__) prefix = false;
}
/**
* Representation of a single event listener.
*
* @param {Function} fn The listener function.
* @param {*} context The context to invoke the listener with.
* @param {Boolean} [once=false] Specify if the listener is a one-time listener.
* @constructor
* @private
*/
function EE(fn, context, once) {
this.fn = fn;
this.context = context;
this.once = once || false;
}
/**
* Add a listener for a given event.
*
* @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
* @param {(String|Symbol)} event The event name.
* @param {Function} fn The listener function.
* @param {*} context The context to invoke the listener with.
* @param {Boolean} once Specify if the listener is a one-time listener.
* @returns {EventEmitter}
* @private
*/
function addListener(emitter, event, fn, context, once) {
if (typeof fn !== 'function') {
throw new TypeError('The listener must be a function');
}
var listener = new EE(fn, context || emitter, once)
, evt = prefix ? prefix + event : event;
if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++;
else if (!emitter._events[evt].fn) emitter._events[evt].push(listener);
else emitter._events[evt] = [emitter._events[evt], listener];
return emitter;
}
/**
* Clear event by name.
*
* @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
* @param {(String|Symbol)} evt The Event name.
* @private
*/
function clearEvent(emitter, evt) {
if (--emitter._eventsCount === 0) emitter._events = new Events();
else delete emitter._events[evt];
}
/**
* Minimal `EventEmitter` interface that is molded against the Node.js
* `EventEmitter` interface.
*
* @constructor
* @public
*/
function EventEmitter() {
this._events = new Events();
this._eventsCount = 0;
}
/**
* Return an array listing the events for which the emitter has registered
* listeners.
*
* @returns {Array}
* @public
*/
EventEmitter.prototype.eventNames = function eventNames() {
var names = []
, events
, name;
if (this._eventsCount === 0) return names;
for (name in (events = this._events)) {
if (has.call(events, name)) names.push(prefix ? name.slice(1) : name);
}
if (Object.getOwnPropertySymbols) {
return names.concat(Object.getOwnPropertySymbols(events));
}
return names;
};
/**
* Return the listeners registered for a given event.
*
* @param {(String|Symbol)} event The event name.
* @returns {Array} The registered listeners.
* @public
*/
EventEmitter.prototype.listeners = function listeners(event) {
var evt = prefix ? prefix + event : event
, handlers = this._events[evt];
if (!handlers) return [];
if (handlers.fn) return [handlers.fn];
for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {
ee[i] = handlers[i].fn;
}
return ee;
};
/**
* Return the number of listeners listening to a given event.
*
* @param {(String|Symbol)} event The event name.
* @returns {Number} The number of listeners.
* @public
*/
EventEmitter.prototype.listenerCount = function listenerCount(event) {
var evt = prefix ? prefix + event : event
, listeners = this._events[evt];
if (!listeners) return 0;
if (listeners.fn) return 1;
return listeners.length;
};
/**
* Calls each of the listeners registered for a given event.
*
* @param {(String|Symbol)} event The event name.
* @returns {Boolean} `true` if the event had listeners, else `false`.
* @public
*/
EventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {
var evt = prefix ? prefix + event : event;
if (!this._events[evt]) return false;
var listeners = this._events[evt]
, len = arguments.length
, args
, i;
if (listeners.fn) {
if (listeners.once) this.removeListener(event, listeners.fn, undefined, true);
switch (len) {
case 1: return listeners.fn.call(listeners.context), true;
case 2: return listeners.fn.call(listeners.context, a1), true;
case 3: return listeners.fn.call(listeners.context, a1, a2), true;
case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true;
case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;
case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;
}
for (i = 1, args = new Array(len -1); i < len; i++) {
args[i - 1] = arguments[i];
}
listeners.fn.apply(listeners.context, args);
} else {
var length = listeners.length
, j;
for (i = 0; i < length; i++) {
if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true);
switch (len) {
case 1: listeners[i].fn.call(listeners[i].context); break;
case 2: listeners[i].fn.call(listeners[i].context, a1); break;
case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break;
case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break;
default:
if (!args) for (j = 1, args = new Array(len -1); j < len; j++) {
args[j - 1] = arguments[j];
}
listeners[i].fn.apply(listeners[i].context, args);
}
}
}
return true;
};
/**
* Add a listener for a given event.
*
* @param {(String|Symbol)} event The event name.
* @param {Function} fn The listener function.
* @param {*} [context=this] The context to invoke the listener with.
* @returns {EventEmitter} `this`.
* @public
*/
EventEmitter.prototype.on = function on(event, fn, context) {
return addListener(this, event, fn, context, false);
};
/**
* Add a one-time listener for a given event.
*
* @param {(String|Symbol)} event The event name.
* @param {Function} fn The listener function.
* @param {*} [context=this] The context to invoke the listener with.
* @returns {EventEmitter} `this`.
* @public
*/
EventEmitter.prototype.once = function once(event, fn, context) {
return addListener(this, event, fn, context, true);
};
/**
* Remove the listeners of a given event.
*
* @param {(String|Symbol)} event The event name.
* @param {Function} fn Only remove the listeners that match this function.
* @param {*} context Only remove the listeners that have this context.
* @param {Boolean} once Only remove one-time listeners.
* @returns {EventEmitter} `this`.
* @public
*/
EventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) {
var evt = prefix ? prefix + event : event;
if (!this._events[evt]) return this;
if (!fn) {
clearEvent(this, evt);
return this;
}
var listeners = this._events[evt];
if (listeners.fn) {
if (
listeners.fn === fn &&
(!once || listeners.once) &&
(!context || listeners.context === context)
) {
clearEvent(this, evt);
}
} else {
for (var i = 0, events = [], length = listeners.length; i < length; i++) {
if (
listeners[i].fn !== fn ||
(once && !listeners[i].once) ||
(context && listeners[i].context !== context)
) {
events.push(listeners[i]);
}
}
//
// Reset the array, or remove it completely if we have no more listeners.
//
if (events.length) this._events[evt] = events.length === 1 ? events[0] : events;
else clearEvent(this, evt);
}
return this;
};
/**
* Remove all listeners, or those of the specified event.
*
* @param {(String|Symbol)} [event] The event name.
* @returns {EventEmitter} `this`.
* @public
*/
EventEmitter.prototype.removeAllListeners = function removeAllListeners(event) {
var evt;
if (event) {
evt = prefix ? prefix + event : event;
if (this._events[evt]) clearEvent(this, evt);
} else {
this._events = new Events();
this._eventsCount = 0;
}
return this;
};
//
// Alias method names because people roll like that.
//
EventEmitter.prototype.off = EventEmitter.prototype.removeListener;
EventEmitter.prototype.addListener = EventEmitter.prototype.on;
//
// Expose the prefix.
//
EventEmitter.prefixed = prefix;
//
// Allow `EventEmitter` to be imported as module namespace.
//
EventEmitter.EventEmitter = EventEmitter;
//
// Expose the module.
//
{
module.exports = EventEmitter;
}
} (eventemitter3));
var eventemitter3Exports = eventemitter3.exports;
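//
// For illustration only: this bundled `eventemitter3` behaves like a minimal
// Node-style emitter, e.g.
//
//   const ee = new eventemitter3Exports();
//   ee.once('ready', (value) => console.log(value));
//   ee.emit('ready', 42); // logs 42; the one-time listener is then removed
//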
var common$3 = {};
/**
* Check if we're required to add a port number.
*
* @see https://url.spec.whatwg.org/#default-port
* @param {Number|String} port Port number we need to check
* @param {String} protocol Protocol we need to check against.
* @returns {Boolean} Is it a default port for the given protocol
* @api private
*/
var requiresPort = function required(port, protocol) {
protocol = protocol.split(':')[0];
port = +port;
if (!port) return false;
switch (protocol) {
case 'http':
case 'ws':
return port !== 80;
case 'https':
case 'wss':
return port !== 443;
case 'ftp':
return port !== 21;
case 'gopher':
return port !== 70;
case 'file':
return false;
}
return port !== 0;
};
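//
// For illustration only: `requiresPort(8080, 'http')` is true (non-default
// port), `requiresPort(80, 'http:')` and `requiresPort(443, 'wss')` are false
// (default ports), and `file:` URLs never require a port.
//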
(function (exports) {
var common = exports,
url = require$$0$9,
extend = require$$0$6._extend,
required = requiresPort;
var upgradeHeader = /(^|,)\s*upgrade\s*($|,)/i,
isSSL = /^https|wss/;
/**
* Simple Regex for testing if protocol is https
*/
common.isSSL = isSSL;
/**
* Copies the right headers from `options` and `req` to
* `outgoing` which is then used to fire the proxied
* request.
*
* Examples:
*
* common.setupOutgoing(outgoing, options, req)
* // => { host: ..., hostname: ...}
*
* @param {Object} Outgoing Base object to be filled with required properties
* @param {Object} Options Config object passed to the proxy
* @param {ClientRequest} Req Request Object
* @param {String} Forward String to select forward or target
*
* @return {Object} Outgoing Object with all required properties set
*
* @api private
*/
common.setupOutgoing = function(outgoing, options, req, forward) {
outgoing.port = options[forward || 'target'].port ||
(isSSL.test(options[forward || 'target'].protocol) ? 443 : 80);
['host', 'hostname', 'socketPath', 'pfx', 'key',
'passphrase', 'cert', 'ca', 'ciphers', 'secureProtocol'].forEach(
function(e) { outgoing[e] = options[forward || 'target'][e]; }
);
outgoing.method = options.method || req.method;
outgoing.headers = extend({}, req.headers);
if (options.headers){
extend(outgoing.headers, options.headers);
}
if (options.auth) {
outgoing.auth = options.auth;
}
if (options.ca) {
outgoing.ca = options.ca;
}
if (isSSL.test(options[forward || 'target'].protocol)) {
outgoing.rejectUnauthorized = (typeof options.secure === "undefined") ? true : options.secure;
}
outgoing.agent = options.agent || false;
outgoing.localAddress = options.localAddress;
//
// Remark: If the agent is false and we are not upgrading, set `connection: close`. This is the right thing to do
// as node core doesn't handle this COMPLETELY properly yet.
//
if (!outgoing.agent) {
outgoing.headers = outgoing.headers || {};
if (typeof outgoing.headers.connection !== 'string'
|| !upgradeHeader.test(outgoing.headers.connection)
) { outgoing.headers.connection = 'close'; }
}
// the final path is target path + relative path requested by user:
var target = options[forward || 'target'];
var targetPath = target && options.prependPath !== false
? (target.path || '')
: '';
//
// Remark: Can we somehow not use url.parse as a perf optimization?
//
var outgoingPath = !options.toProxy
? (url.parse(req.url).path || '')
: req.url;
//
// Remark: ignorePath will just straight up ignore whatever the request's
// path is. This can be labeled as FOOT-GUN material if you do not know what
// you are doing and are using conflicting options.
//
outgoingPath = !options.ignorePath ? outgoingPath : '';
outgoing.path = common.urlJoin(targetPath, outgoingPath);
if (options.changeOrigin) {
outgoing.headers.host =
required(outgoing.port, options[forward || 'target'].protocol) && !hasPort(outgoing.host)
? outgoing.host + ':' + outgoing.port
: outgoing.host;
}
return outgoing;
};
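//
// For illustration only, assuming `options.target` was produced by
// `url.parse('http://localhost:3000')`: a proxied request for '/api/users'
// results in an outgoing request to localhost:3000 with path '/api/users',
// and with `changeOrigin: true` the outgoing `host` header becomes
// 'localhost:3000'.
//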
/**
* Set the proper configuration for sockets,
* set no delay and set keep alive, also set
* the timeout to 0.
*
* Examples:
*
* common.setupSocket(socket)
* // => Socket
*
* @param {Socket} Socket instance to setup
*
* @return {Socket} Return the configured socket.
*
* @api private
*/
common.setupSocket = function(socket) {
socket.setTimeout(0);
socket.setNoDelay(true);
socket.setKeepAlive(true, 0);
return socket;
};
/**
* Get the port number from the host. Or guess it based on the connection type.
*
* @param {Request} req Incoming HTTP request.
*
* @return {String} The port number.
*
* @api private
*/
common.getPort = function(req) {
var res = req.headers.host ? req.headers.host.match(/:(\d+)/) : '';
return res ?
res[1] :
common.hasEncryptedConnection(req) ? '443' : '80';
};
/**
* Check if the request has an encrypted connection.
*
* @param {Request} req Incoming HTTP request.
*
* @return {Boolean} Whether the connection is encrypted or not.
*
* @api private
*/
common.hasEncryptedConnection = function(req) {
return Boolean(req.connection.encrypted || req.connection.pair);
};
/**
* OS-agnostic join (doesn't break on URLs like path.join does on Windows)
*
* @return {String} The generated path.
*
* @api private
*/
common.urlJoin = function() {
//
// We do not want to mess with the query string. All we want to touch is the path.
//
var args = Array.prototype.slice.call(arguments),
lastIndex = args.length - 1,
last = args[lastIndex],
lastSegs = last.split('?'),
retSegs;
args[lastIndex] = lastSegs.shift();
//
// Join all strings, but remove empty strings so we don't get extra slashes from
// joining e.g. ['', 'am']
//
retSegs = [
args.filter(Boolean).join('/')
.replace(/\/+/g, '/')
.replace('http:/', 'http://')
.replace('https:/', 'https://')
];
// Only join the query string if it exists so we don't end up with a trailing '?'
// on every request.
// Handle the case where there could be multiple '?' in the URL.
retSegs.push.apply(retSegs, lastSegs);
return retSegs.join('?')
};
/**
* Rewrites or removes a property (e.g. domain or path) of a cookie header
*
* @param {(String|Array)} header The `Set-Cookie` header value(s)
* @param {Object} config Mapping of value to rewritten value. A '*' key
* matches any value, and a null value removes the property.
* @param {String} property The cookie attribute to rewrite (e.g. 'domain' or 'path')
*
* @api private
*/
common.rewriteCookieProperty = function rewriteCookieProperty(header, config, property) {
if (Array.isArray(header)) {
return header.map(function (headerElement) {
return rewriteCookieProperty(headerElement, config, property);
});
}
return header.replace(new RegExp("(;\\s*" + property + "=)([^;]+)", 'i'), function(match, prefix, previousValue) {
var newValue;
if (previousValue in config) {
newValue = config[previousValue];
} else if ('*' in config) {
newValue = config['*'];
} else {
//no match, return previous value
return match;
}
if (newValue) {
//replace value
return prefix + newValue;
} else {
//remove value
return '';
}
});
};
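//
// For illustration only:
// `rewriteCookieProperty('sid=1; Domain=internal.example', { '*': 'example.com' }, 'domain')`
// yields 'sid=1; Domain=example.com', and a null value in the config removes
// the matched attribute entirely.
//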
/**
* Check the host and see if it potentially has a port in it (keep it simple)
*
* @returns {Boolean} Whether we have one or not
*
* @api private
*/
function hasPort(host) {
return !!~host.indexOf(':');
}} (common$3));
var url$1 = require$$0$9,
common$2 = common$3;
var redirectRegex = /^201|30(1|2|7|8)$/;
/*!
* Array of passes.
*
* A `pass` is just a function that is executed on `req, res, options`
* so that you can easily add new checks while still keeping the base
* flexible.
*/
var webOutgoing = { // <--
/**
* If it is an HTTP 1.0 request, remove the chunked transfer-encoding header
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {proxyResponse} ProxyRes Response object from the proxy request
*
* @api private
*/
removeChunked: function removeChunked(req, res, proxyRes) {
if (req.httpVersion === '1.0') {
delete proxyRes.headers['transfer-encoding'];
}
},
/**
* If it is an HTTP 1.0 request, set the correct connection header,
* or if the connection header is not present, then use `keep-alive`
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {proxyResponse} ProxyRes Response object from the proxy request
*
* @api private
*/
setConnection: function setConnection(req, res, proxyRes) {
if (req.httpVersion === '1.0') {
proxyRes.headers.connection = req.headers.connection || 'close';
} else if (req.httpVersion !== '2.0' && !proxyRes.headers.connection) {
proxyRes.headers.connection = req.headers.connection || 'keep-alive';
}
},
setRedirectHostRewrite: function setRedirectHostRewrite(req, res, proxyRes, options) {
if ((options.hostRewrite || options.autoRewrite || options.protocolRewrite)
&& proxyRes.headers['location']
&& redirectRegex.test(proxyRes.statusCode)) {
var target = url$1.parse(options.target);
var u = url$1.parse(proxyRes.headers['location']);
// make sure the redirected host matches the target host before rewriting
if (target.host != u.host) {
return;
}
if (options.hostRewrite) {
u.host = options.hostRewrite;
} else if (options.autoRewrite) {
u.host = req.headers['host'];
}
if (options.protocolRewrite) {
u.protocol = options.protocolRewrite;
}
proxyRes.headers['location'] = u.format();
}
},
/**
* Copy headers from proxyResponse to response
* set each header in response object.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {proxyResponse} ProxyRes Response object from the proxy request
* @param {Object} Options options.cookieDomainRewrite: Config to rewrite cookie domain
*
* @api private
*/
writeHeaders: function writeHeaders(req, res, proxyRes, options) {
var rewriteCookieDomainConfig = options.cookieDomainRewrite,
rewriteCookiePathConfig = options.cookiePathRewrite,
preserveHeaderKeyCase = options.preserveHeaderKeyCase,
rawHeaderKeyMap,
setHeader = function(key, header) {
if (header == undefined) return;
if (rewriteCookieDomainConfig && key.toLowerCase() === 'set-cookie') {
header = common$2.rewriteCookieProperty(header, rewriteCookieDomainConfig, 'domain');
}
if (rewriteCookiePathConfig && key.toLowerCase() === 'set-cookie') {
header = common$2.rewriteCookieProperty(header, rewriteCookiePathConfig, 'path');
}
res.setHeader(String(key).trim(), header);
};
if (typeof rewriteCookieDomainConfig === 'string') { //also test for ''
rewriteCookieDomainConfig = { '*': rewriteCookieDomainConfig };
}
if (typeof rewriteCookiePathConfig === 'string') { //also test for ''
rewriteCookiePathConfig = { '*': rewriteCookiePathConfig };
}
// message.rawHeaders is added in: v0.11.6
// https://nodejs.org/api/http.html#http_message_rawheaders
if (preserveHeaderKeyCase && proxyRes.rawHeaders != undefined) {
rawHeaderKeyMap = {};
for (var i = 0; i < proxyRes.rawHeaders.length; i += 2) {
var key = proxyRes.rawHeaders[i];
rawHeaderKeyMap[key.toLowerCase()] = key;
}
}
Object.keys(proxyRes.headers).forEach(function(key) {
var header = proxyRes.headers[key];
if (preserveHeaderKeyCase && rawHeaderKeyMap) {
key = rawHeaderKeyMap[key] || key;
}
setHeader(key, header);
});
},
/**
* Set the statusCode from the proxyResponse
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {proxyResponse} ProxyRes Response object from the proxy request
*
* @api private
*/
writeStatusCode: function writeStatusCode(req, res, proxyRes) {
// From Node.js docs: response.writeHead(statusCode[, statusMessage][, headers])
if(proxyRes.statusMessage) {
res.statusCode = proxyRes.statusCode;
res.statusMessage = proxyRes.statusMessage;
} else {
res.statusCode = proxyRes.statusCode;
}
}
};
var followRedirects$1 = {exports: {}};
var debug$3;
var debug_1 = function () {
if (!debug$3) {
try {
/* eslint global-require: off */
debug$3 = srcExports$1("follow-redirects");
}
catch (error) { /* */ }
if (typeof debug$3 !== "function") {
debug$3 = function () { /* */ };
}
}
debug$3.apply(null, arguments);
};
var url = require$$0$9;
var URL$1 = url.URL;
var http$1 = require$$1$1;
var https$1 = require$$1$2;
var Writable = require$$0$7.Writable;
var assert = require$$5;
var debug$2 = debug_1;
// Create handlers that pass events from native requests
var events = ["abort", "aborted", "connect", "error", "socket", "timeout"];
var eventHandlers = Object.create(null);
events.forEach(function (event) {
eventHandlers[event] = function (arg1, arg2, arg3) {
this._redirectable.emit(event, arg1, arg2, arg3);
};
});
// Error types with codes
var RedirectionError = createErrorType(
"ERR_FR_REDIRECTION_FAILURE",
"Redirected request failed"
);
var TooManyRedirectsError = createErrorType(
"ERR_FR_TOO_MANY_REDIRECTS",
"Maximum number of redirects exceeded"
);
var MaxBodyLengthExceededError = createErrorType(
"ERR_FR_MAX_BODY_LENGTH_EXCEEDED",
"Request body larger than maxBodyLength limit"
);
var WriteAfterEndError = createErrorType(
"ERR_STREAM_WRITE_AFTER_END",
"write after end"
);
// An HTTP(S) request that can be redirected
function RedirectableRequest(options, responseCallback) {
// Initialize the request
Writable.call(this);
this._sanitizeOptions(options);
this._options = options;
this._ended = false;
this._ending = false;
this._redirectCount = 0;
this._redirects = [];
this._requestBodyLength = 0;
this._requestBodyBuffers = [];
// Attach a callback if passed
if (responseCallback) {
this.on("response", responseCallback);
}
// React to responses of native requests
var self = this;
this._onNativeResponse = function (response) {
self._processResponse(response);
};
// Perform the first request
this._performRequest();
}
RedirectableRequest.prototype = Object.create(Writable.prototype);
RedirectableRequest.prototype.abort = function () {
abortRequest(this._currentRequest);
this.emit("abort");
};
// Writes buffered data to the current native request
RedirectableRequest.prototype.write = function (data, encoding, callback) {
// Writing is not allowed if end has been called
if (this._ending) {
throw new WriteAfterEndError();
}
// Validate input and shift parameters if necessary
if (!(typeof data === "string" || typeof data === "object" && ("length" in data))) {
throw new TypeError("data should be a string, Buffer or Uint8Array");
}
if (typeof encoding === "function") {
callback = encoding;
encoding = null;
}
// Ignore empty buffers, since writing them doesn't invoke the callback
// https://github.com/nodejs/node/issues/22066
if (data.length === 0) {
if (callback) {
callback();
}
return;
}
// Only write when we don't exceed the maximum body length
if (this._requestBodyLength + data.length <= this._options.maxBodyLength) {
this._requestBodyLength += data.length;
this._requestBodyBuffers.push({ data: data, encoding: encoding });
this._currentRequest.write(data, encoding, callback);
}
// Error when we exceed the maximum body length
else {
this.emit("error", new MaxBodyLengthExceededError());
this.abort();
}
};
// Ends the current native request
RedirectableRequest.prototype.end = function (data, encoding, callback) {
// Shift parameters if necessary
if (typeof data === "function") {
callback = data;
data = encoding = null;
}
else if (typeof encoding === "function") {
callback = encoding;
encoding = null;
}
// Write data if needed and end
if (!data) {
this._ended = this._ending = true;
this._currentRequest.end(null, null, callback);
}
else {
var self = this;
var currentRequest = this._currentRequest;
this.write(data, encoding, function () {
self._ended = true;
currentRequest.end(null, null, callback);
});
this._ending = true;
}
};
// Sets a header value on the current native request
RedirectableRequest.prototype.setHeader = function (name, value) {
this._options.headers[name] = value;
this._currentRequest.setHeader(name, value);
};
// Clears a header value on the current native request
RedirectableRequest.prototype.removeHeader = function (name) {
delete this._options.headers[name];
this._currentRequest.removeHeader(name);
};
// Global timeout for all underlying requests
RedirectableRequest.prototype.setTimeout = function (msecs, callback) {
var self = this;
// Destroys the socket on timeout
function destroyOnTimeout(socket) {
socket.setTimeout(msecs);
socket.removeListener("timeout", socket.destroy);
socket.addListener("timeout", socket.destroy);
}
// Sets up a timer to trigger a timeout event
function startTimer(socket) {
if (self._timeout) {
clearTimeout(self._timeout);
}
self._timeout = setTimeout(function () {
self.emit("timeout");
clearTimer();
}, msecs);
destroyOnTimeout(socket);
}
// Stops a timeout from triggering
function clearTimer() {
// Clear the timeout
if (self._timeout) {
clearTimeout(self._timeout);
self._timeout = null;
}
// Clean up all attached listeners
self.removeListener("abort", clearTimer);
self.removeListener("error", clearTimer);
self.removeListener("response", clearTimer);
if (callback) {
self.removeListener("timeout", callback);
}
if (!self.socket) {
self._currentRequest.removeListener("socket", startTimer);
}
}
// Attach callback if passed
if (callback) {
this.on("timeout", callback);
}
// Start the timer if or when the socket is opened
if (this.socket) {
startTimer(this.socket);
}
else {
this._currentRequest.once("socket", startTimer);
}
// Clean up on events
this.on("socket", destroyOnTimeout);
this.on("abort", clearTimer);
this.on("error", clearTimer);
this.on("response", clearTimer);
return this;
};
// Proxy all other public ClientRequest methods
[
"flushHeaders", "getHeader",
"setNoDelay", "setSocketKeepAlive",
].forEach(function (method) {
RedirectableRequest.prototype[method] = function (a, b) {
return this._currentRequest[method](a, b);
};
});
// Proxy all public ClientRequest properties
["aborted", "connection", "socket"].forEach(function (property) {
Object.defineProperty(RedirectableRequest.prototype, property, {
get: function () { return this._currentRequest[property]; },
});
});
RedirectableRequest.prototype._sanitizeOptions = function (options) {
// Ensure headers are always present
if (!options.headers) {
options.headers = {};
}
// Since http.request treats host as an alias of hostname,
// but the url module interprets host as hostname plus port,
// eliminate the host property to avoid confusion.
if (options.host) {
// Use hostname if set, because it has precedence
if (!options.hostname) {
options.hostname = options.host;
}
delete options.host;
}
// Complete the URL object when necessary
if (!options.pathname && options.path) {
var searchPos = options.path.indexOf("?");
if (searchPos < 0) {
options.pathname = options.path;
}
else {
options.pathname = options.path.substring(0, searchPos);
options.search = options.path.substring(searchPos);
}
}
};
// Executes the next native request (initial or redirect)
RedirectableRequest.prototype._performRequest = function () {
// Load the native protocol
var protocol = this._options.protocol;
var nativeProtocol = this._options.nativeProtocols[protocol];
if (!nativeProtocol) {
this.emit("error", new TypeError("Unsupported protocol " + protocol));
return;
}
// If specified, use the agent corresponding to the protocol
// (HTTP and HTTPS use different types of agents)
if (this._options.agents) {
var scheme = protocol.slice(0, -1);
this._options.agent = this._options.agents[scheme];
}
// Create the native request
var request = this._currentRequest =
nativeProtocol.request(this._options, this._onNativeResponse);
this._currentUrl = url.format(this._options);
// Set up event handlers
request._redirectable = this;
for (var e = 0; e < events.length; e++) {
request.on(events[e], eventHandlers[events[e]]);
}
// End a redirected request
// (The first request must be ended explicitly with RedirectableRequest#end)
if (this._isRedirect) {
// Write the request entity and end.
var i = 0;
var self = this;
var buffers = this._requestBodyBuffers;
(function writeNext(error) {
// Only write if this request has not been redirected yet
/* istanbul ignore else */
if (request === self._currentRequest) {
// Report any write errors
/* istanbul ignore if */
if (error) {
self.emit("error", error);
}
// Write the next buffer if there are any left
else if (i < buffers.length) {
var buffer = buffers[i++];
/* istanbul ignore else */
if (!request.finished) {
request.write(buffer.data, buffer.encoding, writeNext);
}
}
// End the request if `end` has been called on us
else if (self._ended) {
request.end();
}
}
}());
}
};
// Processes a response from the current native request
RedirectableRequest.prototype._processResponse = function (response) {
// Store the redirected response
var statusCode = response.statusCode;
if (this._options.trackRedirects) {
this._redirects.push({
url: this._currentUrl,
headers: response.headers,
statusCode: statusCode,
});
}
// RFC7231§6.4: The 3xx (Redirection) class of status code indicates
// that further action needs to be taken by the user agent in order to
// fulfill the request. If a Location header field is provided,
// the user agent MAY automatically redirect its request to the URI
// referenced by the Location field value,
// even if the specific status code is not understood.
// If the response is not a redirect, return it as-is
var location = response.headers.location;
if (!location || this._options.followRedirects === false ||
statusCode < 300 || statusCode >= 400) {
response.responseUrl = this._currentUrl;
response.redirects = this._redirects;
this.emit("response", response);
// Clean up
this._requestBodyBuffers = [];
return;
}
// The response is a redirect, so abort the current request
abortRequest(this._currentRequest);
// Discard the remainder of the response to avoid waiting for data
response.destroy();
// RFC7231§6.4: A client SHOULD detect and intervene
// in cyclical redirections (i.e., "infinite" redirection loops).
if (++this._redirectCount > this._options.maxRedirects) {
this.emit("error", new TooManyRedirectsError());
return;
}
// Store the request headers if applicable
var requestHeaders;
var beforeRedirect = this._options.beforeRedirect;
if (beforeRedirect) {
requestHeaders = Object.assign({
// The Host header was set by nativeProtocol.request
Host: response.req.getHeader("host"),
}, this._options.headers);
}
// RFC7231§6.4: Automatic redirection needs to be done with
// care for methods not known to be safe, […]
// RFC7231§6.4.2–3: For historical reasons, a user agent MAY change
// the request method from POST to GET for the subsequent request.
var method = this._options.method;
if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" ||
// RFC7231§6.4.4: The 303 (See Other) status code indicates that
// the server is redirecting the user agent to a different resource […]
// A user agent can perform a retrieval request targeting that URI
// (a GET or HEAD request if using HTTP) […]
(statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) {
this._options.method = "GET";
// Drop a possible entity and headers related to it
this._requestBodyBuffers = [];
removeMatchingHeaders(/^content-/i, this._options.headers);
}
// Drop the Host header, as the redirect might lead to a different host
var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers);
// If the redirect is relative, carry over the host of the last request
var currentUrlParts = url.parse(this._currentUrl);
var currentHost = currentHostHeader || currentUrlParts.host;
var currentUrl = /^\w+:/.test(location) ? this._currentUrl :
url.format(Object.assign(currentUrlParts, { host: currentHost }));
// Determine the URL of the redirection
var redirectUrl;
try {
redirectUrl = url.resolve(currentUrl, location);
}
catch (cause) {
this.emit("error", new RedirectionError(cause));
return;
}
// Create the redirected request
debug$2("redirecting to", redirectUrl);
this._isRedirect = true;
var redirectUrlParts = url.parse(redirectUrl);
Object.assign(this._options, redirectUrlParts);
// Drop confidential headers when redirecting to a less secure protocol
// or to a different domain that is not a superdomain
if (redirectUrlParts.protocol !== currentUrlParts.protocol &&
redirectUrlParts.protocol !== "https:" ||
redirectUrlParts.host !== currentHost &&
!isSubdomain(redirectUrlParts.host, currentHost)) {
removeMatchingHeaders(/^(?:authorization|cookie)$/i, this._options.headers);
}
// Evaluate the beforeRedirect callback
if (typeof beforeRedirect === "function") {
var responseDetails = {
headers: response.headers,
statusCode: statusCode,
};
var requestDetails = {
url: currentUrl,
method: method,
headers: requestHeaders,
};
try {
beforeRedirect(this._options, responseDetails, requestDetails);
}
catch (err) {
this.emit("error", err);
return;
}
this._sanitizeOptions(this._options);
}
// Perform the redirected request
try {
this._performRequest();
}
catch (cause) {
this.emit("error", new RedirectionError(cause));
}
};
// Wraps the key/value object of protocols with redirect functionality
function wrap(protocols) {
// Default settings
var exports = {
maxRedirects: 21,
maxBodyLength: 10 * 1024 * 1024,
};
// Wrap each protocol
var nativeProtocols = {};
Object.keys(protocols).forEach(function (scheme) {
var protocol = scheme + ":";
var nativeProtocol = nativeProtocols[protocol] = protocols[scheme];
var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol);
// Executes a request, following redirects
function request(input, options, callback) {
// Parse parameters
if (typeof input === "string") {
var urlStr = input;
try {
input = urlToOptions(new URL$1(urlStr));
}
catch (err) {
/* istanbul ignore next */
input = url.parse(urlStr);
}
}
else if (URL$1 && (input instanceof URL$1)) {
input = urlToOptions(input);
}
else {
callback = options;
options = input;
input = { protocol: protocol };
}
if (typeof options === "function") {
callback = options;
options = null;
}
// Set defaults
options = Object.assign({
maxRedirects: exports.maxRedirects,
maxBodyLength: exports.maxBodyLength,
}, input, options);
options.nativeProtocols = nativeProtocols;
assert.equal(options.protocol, protocol, "protocol mismatch");
debug$2("options", options);
return new RedirectableRequest(options, callback);
}
// Executes a GET request, following redirects
function get(input, options, callback) {
var wrappedRequest = wrappedProtocol.request(input, options, callback);
wrappedRequest.end();
return wrappedRequest;
}
// Expose the properties on the wrapped protocol
Object.defineProperties(wrappedProtocol, {
request: { value: request, configurable: true, enumerable: true, writable: true },
get: { value: get, configurable: true, enumerable: true, writable: true },
});
});
return exports;
}
/* istanbul ignore next */
function noop$1() { /* empty */ }
// from https://github.com/nodejs/node/blob/master/lib/internal/url.js
function urlToOptions(urlObject) {
var options = {
protocol: urlObject.protocol,
hostname: urlObject.hostname.startsWith("[") ?
/* istanbul ignore next */
urlObject.hostname.slice(1, -1) :
urlObject.hostname,
hash: urlObject.hash,
search: urlObject.search,
pathname: urlObject.pathname,
path: urlObject.pathname + urlObject.search,
href: urlObject.href,
};
if (urlObject.port !== "") {
options.port = Number(urlObject.port);
}
return options;
}
function removeMatchingHeaders(regex, headers) {
var lastValue;
for (var header in headers) {
if (regex.test(header)) {
lastValue = headers[header];
delete headers[header];
}
}
return (lastValue === null || typeof lastValue === "undefined") ?
undefined : String(lastValue).trim();
}
function createErrorType(code, defaultMessage) {
function CustomError(cause) {
Error.captureStackTrace(this, this.constructor);
if (!cause) {
this.message = defaultMessage;
}
else {
this.message = defaultMessage + ": " + cause.message;
this.cause = cause;
}
}
CustomError.prototype = new Error();
CustomError.prototype.constructor = CustomError;
CustomError.prototype.name = "Error [" + code + "]";
CustomError.prototype.code = code;
return CustomError;
}
function abortRequest(request) {
for (var e = 0; e < events.length; e++) {
request.removeListener(events[e], eventHandlers[events[e]]);
}
request.on("error", noop$1);
request.abort();
}
function isSubdomain(subdomain, domain) {
const dot = subdomain.length - domain.length - 1;
return dot > 0 && subdomain[dot] === "." && subdomain.endsWith(domain);
}
// Exports
followRedirects$1.exports = wrap({ http: http$1, https: https$1 });
followRedirects$1.exports.wrap = wrap;
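// Usage sketch (illustrative, written against the public follow-redirects API rather than
// this bundled copy):
//
//   const { https } = require('follow-redirects');
//   https.get('https://example.com', (res) => {
//     console.log(res.responseUrl); // final URL after any redirects
//   });
//
// `wrap` can also be applied to custom protocol implementations that expose a
// node-style `request(options, callback)` function.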
var followRedirectsExports = followRedirects$1.exports;
var httpNative = require$$1$1,
httpsNative = require$$1$2,
web_o = webOutgoing,
common$1 = common$3,
followRedirects = followRedirectsExports;
web_o = Object.keys(web_o).map(function(pass) {
return web_o[pass];
});
var nativeAgents = { http: httpNative, https: httpsNative };
/*!
* Array of passes.
*
* A `pass` is just a function that is executed on `req, res, options`
* so that you can easily add new checks while still keeping the base
* flexible.
*/
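// Sketch of a hypothetical extra pass (not part of this module): a pass receives
// (req, res, options, head, server, callback) and may return a truthy value to stop the
// remaining passes from running, as createRightProxy does further below.
//
//   function blockTrace(req, res /*, options */) {
//     if (req.method === 'TRACE') { res.statusCode = 405; res.end(); return true; }
//   }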
var webIncoming = {
/**
* Sets `content-length` to '0' for DELETE and OPTIONS requests that have no
* content-length header, and removes `transfer-encoding`.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
deleteLength: function deleteLength(req, res, options) {
if((req.method === 'DELETE' || req.method === 'OPTIONS')
&& !req.headers['content-length']) {
req.headers['content-length'] = '0';
delete req.headers['transfer-encoding'];
}
},
/**
* Sets timeout in request socket if it was specified in options.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
timeout: function timeout(req, res, options) {
if(options.timeout) {
req.socket.setTimeout(options.timeout);
}
},
/**
* Sets `x-forwarded-*` headers if specified in config.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
XHeaders: function XHeaders(req, res, options) {
if(!options.xfwd) return;
var encrypted = req.isSpdy || common$1.hasEncryptedConnection(req);
var values = {
for : req.connection.remoteAddress || req.socket.remoteAddress,
port : common$1.getPort(req),
proto: encrypted ? 'https' : 'http'
};
['for', 'port', 'proto'].forEach(function(header) {
req.headers['x-forwarded-' + header] =
(req.headers['x-forwarded-' + header] || '') +
(req.headers['x-forwarded-' + header] ? ',' : '') +
values[header];
});
req.headers['x-forwarded-host'] = req.headers['x-forwarded-host'] || req.headers['host'] || '';
},
/**
* Does the actual proxying. If `forward` is enabled, fires up
* a ForwardStream; the same happens for the ProxyStream. Otherwise
* the request just dies.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
stream: function stream(req, res, options, _, server, clb) {
// And we begin!
server.emit('start', req, res, options.target || options.forward);
var agents = options.followRedirects ? followRedirects : nativeAgents;
var http = agents.http;
var https = agents.https;
if(options.forward) {
// If forward is enabled, just pipe the request
var forwardReq = (options.forward.protocol === 'https:' ? https : http).request(
common$1.setupOutgoing(options.ssl || {}, options, req, 'forward')
);
// error handler (e.g. ECONNRESET, ECONNREFUSED)
// Handle errors on the incoming request as well, since it makes sense to
var forwardError = createErrorHandler(forwardReq, options.forward);
req.on('error', forwardError);
forwardReq.on('error', forwardError);
(options.buffer || req).pipe(forwardReq);
if(!options.target) { return res.end(); }
}
// Request initialization
var proxyReq = (options.target.protocol === 'https:' ? https : http).request(
common$1.setupOutgoing(options.ssl || {}, options, req)
);
// Enable developers to modify the proxyReq before headers are sent
proxyReq.on('socket', function(socket) {
if(server && !proxyReq.getHeader('expect')) {
server.emit('proxyReq', proxyReq, req, res, options);
}
});
// allow the outgoing socket to time out so that we can
// show an error page for the initial request
if(options.proxyTimeout) {
proxyReq.setTimeout(options.proxyTimeout, function() {
proxyReq.abort();
});
}
// Ensure we abort proxy if request is aborted
req.on('aborted', function () {
proxyReq.abort();
});
// handle errors in proxy and incoming request, just like for forward proxy
var proxyError = createErrorHandler(proxyReq, options.target);
req.on('error', proxyError);
proxyReq.on('error', proxyError);
function createErrorHandler(proxyReq, url) {
return function proxyError(err) {
if (req.socket.destroyed && err.code === 'ECONNRESET') {
server.emit('econnreset', err, req, res, url);
return proxyReq.abort();
}
if (clb) {
clb(err, req, res, url);
} else {
server.emit('error', err, req, res, url);
}
}
}
(options.buffer || req).pipe(proxyReq);
proxyReq.on('response', function(proxyRes) {
if(server) { server.emit('proxyRes', proxyRes, req, res); }
if(!res.headersSent && !options.selfHandleResponse) {
for(var i=0; i < web_o.length; i++) {
if(web_o[i](req, res, proxyRes, options)) { break; }
}
}
if (!res.finished) {
// Allow us to listen when the proxy has completed
proxyRes.on('end', function () {
if (server) server.emit('end', req, res, proxyRes);
});
// We pipe to the response unless it's expected to be handled by the user
if (!options.selfHandleResponse) proxyRes.pipe(res);
} else {
if (server) server.emit('end', req, res, proxyRes);
}
});
}
};
var http = require$$1$1,
https = require$$1$2,
common = common$3;
/*!
* Array of passes.
*
* A `pass` is just a function that is executed on `req, socket, options`
* so that you can easily add new checks while still keeping the base
* flexible.
*/
/*
* Websockets Passes
*
*/
var wsIncoming = {
/**
* WebSocket requests must have the `GET` method and
* the `upgrade:websocket` header
*
* @param {ClientRequest} Req Request object
* @param {Socket} Websocket
*
* @api private
*/
checkMethodAndHeader : function checkMethodAndHeader(req, socket) {
if (req.method !== 'GET' || !req.headers.upgrade) {
socket.destroy();
return true;
}
if (req.headers.upgrade.toLowerCase() !== 'websocket') {
socket.destroy();
return true;
}
},
/**
* Sets `x-forwarded-*` headers if specified in config.
*
* @param {ClientRequest} Req Request object
* @param {Socket} Websocket
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
XHeaders : function XHeaders(req, socket, options) {
if(!options.xfwd) return;
var values = {
for : req.connection.remoteAddress || req.socket.remoteAddress,
port : common.getPort(req),
proto: common.hasEncryptedConnection(req) ? 'wss' : 'ws'
};
['for', 'port', 'proto'].forEach(function(header) {
req.headers['x-forwarded-' + header] =
(req.headers['x-forwarded-' + header] || '') +
(req.headers['x-forwarded-' + header] ? ',' : '') +
values[header];
});
},
/**
* Does the actual proxying. Makes the request, upgrades it,
* sends the Switching Protocols response and pipes the sockets.
*
* @param {ClientRequest} Req Request object
* @param {Socket} Websocket
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
stream : function stream(req, socket, options, head, server, clb) {
var createHttpHeader = function(line, headers) {
return Object.keys(headers).reduce(function (head, key) {
var value = headers[key];
if (!Array.isArray(value)) {
head.push(key + ': ' + value);
return head;
}
for (var i = 0; i < value.length; i++) {
head.push(key + ': ' + value[i]);
}
return head;
}, [line])
.join('\r\n') + '\r\n\r\n';
};
common.setupSocket(socket);
if (head && head.length) socket.unshift(head);
var proxyReq = (common.isSSL.test(options.target.protocol) ? https : http).request(
common.setupOutgoing(options.ssl || {}, options, req)
);
// Enable developers to modify the proxyReq before headers are sent
if (server) { server.emit('proxyReqWs', proxyReq, req, socket, options, head); }
// Error Handler
proxyReq.on('error', onOutgoingError);
proxyReq.on('response', function (res) {
// if upgrade event isn't going to happen, close the socket
if (!res.upgrade) {
socket.write(createHttpHeader('HTTP/' + res.httpVersion + ' ' + res.statusCode + ' ' + res.statusMessage, res.headers));
res.pipe(socket);
}
});
proxyReq.on('upgrade', function(proxyRes, proxySocket, proxyHead) {
proxySocket.on('error', onOutgoingError);
// Allow us to listen when the websocket has completed
proxySocket.on('end', function () {
server.emit('close', proxyRes, proxySocket, proxyHead);
});
// The pipe below will end proxySocket if socket closes cleanly, but not
// if it errors (eg, vanishes from the net and starts returning
// EHOSTUNREACH). We need to do that explicitly.
socket.on('error', function () {
proxySocket.end();
});
common.setupSocket(proxySocket);
if (proxyHead && proxyHead.length) proxySocket.unshift(proxyHead);
//
// Remark: Handle writing the headers to the socket when switching protocols
// Also handles when a header is an array
//
socket.write(createHttpHeader('HTTP/1.1 101 Switching Protocols', proxyRes.headers));
proxySocket.pipe(socket).pipe(proxySocket);
server.emit('open', proxySocket);
server.emit('proxySocket', proxySocket); //DEPRECATED.
});
return proxyReq.end(); // XXX: CHECK IF THIS IS CORRECT
function onOutgoingError(err) {
if (clb) {
clb(err, req, socket);
} else {
server.emit('error', err, req, socket);
}
socket.end();
}
}
};
(function (module) {
var httpProxy = module.exports,
extend = require$$0$6._extend,
parse_url = require$$0$9.parse,
EE3 = eventemitter3Exports,
http = require$$1$1,
https = require$$1$2,
web = webIncoming,
ws = wsIncoming;
httpProxy.Server = ProxyServer;
/**
* Returns a function that creates the loader for
* either `ws` or `web`'s passes.
*
* Examples:
*
* httpProxy.createRightProxy('ws')
* // => [Function]
*
* @param {String} Type Either 'ws' or 'web'
*
* @return {Function} Loader Function that when called returns an iterator for the right passes
*
* @api private
*/
function createRightProxy(type) {
return function(options) {
return function(req, res /*, [head], [opts] */) {
var passes = (type === 'ws') ? this.wsPasses : this.webPasses,
args = [].slice.call(arguments),
cntr = args.length - 1,
head, cbl;
/* optional args parse begin */
if(typeof args[cntr] === 'function') {
cbl = args[cntr];
cntr--;
}
var requestOptions = options;
if(
!(args[cntr] instanceof Buffer) &&
args[cntr] !== res
) {
//Copy global options
requestOptions = extend({}, options);
//Overwrite with request options
extend(requestOptions, args[cntr]);
cntr--;
}
if(args[cntr] instanceof Buffer) {
head = args[cntr];
}
/* optional args parse end */
['target', 'forward'].forEach(function(e) {
if (typeof requestOptions[e] === 'string')
requestOptions[e] = parse_url(requestOptions[e]);
});
if (!requestOptions.target && !requestOptions.forward) {
return this.emit('error', new Error('Must provide a proper URL as target'));
}
for(var i=0; i < passes.length; i++) {
/**
* Call of passes functions
* pass(req, res, options, head)
*
* In the WebSocket case the `res` variable
* refers to the connection socket
* pass(req, socket, options, head)
*/
if(passes[i](req, res, requestOptions, head, this, cbl)) { // passes can return a truthy value to halt the loop
break;
}
}
};
};
}
httpProxy.createRightProxy = createRightProxy;
function ProxyServer(options) {
EE3.call(this);
options = options || {};
options.prependPath = options.prependPath === false ? false : true;
this.web = this.proxyRequest = createRightProxy('web')(options);
this.ws = this.proxyWebsocketRequest = createRightProxy('ws')(options);
this.options = options;
this.webPasses = Object.keys(web).map(function(pass) {
return web[pass];
});
this.wsPasses = Object.keys(ws).map(function(pass) {
return ws[pass];
});
this.on('error', this.onError, this);
}
require$$0$6.inherits(ProxyServer, EE3);
ProxyServer.prototype.onError = function (err) {
//
// Remark: Replicate node core behavior using EE3
// so we force people to handle their own errors
//
if(this.listeners('error').length === 1) {
throw err;
}
};
ProxyServer.prototype.listen = function(port, hostname) {
var self = this,
closure = function(req, res) { self.web(req, res); };
this._server = this.options.ssl ?
https.createServer(this.options.ssl, closure) :
http.createServer(closure);
if(this.options.ws) {
this._server.on('upgrade', function(req, socket, head) { self.ws(req, socket, head); });
}
this._server.listen(port, hostname);
return this;
};
ProxyServer.prototype.close = function(callback) {
var self = this;
if (this._server) {
this._server.close(done);
}
// Wrap callback to nullify server after all open connections are closed.
function done() {
self._server = null;
if (callback) {
callback.apply(null, arguments);
}
} };
ProxyServer.prototype.before = function(type, passName, callback) {
if (type !== 'ws' && type !== 'web') {
throw new Error('type must be `web` or `ws`');
}
var passes = (type === 'ws') ? this.wsPasses : this.webPasses,
i = false;
passes.forEach(function(v, idx) {
if(v.name === passName) i = idx;
});
if(i === false) throw new Error('No such pass');
passes.splice(i, 0, callback);
};
ProxyServer.prototype.after = function(type, passName, callback) {
if (type !== 'ws' && type !== 'web') {
throw new Error('type must be `web` or `ws`');
}
var passes = (type === 'ws') ? this.wsPasses : this.webPasses,
i = false;
passes.forEach(function(v, idx) {
if(v.name === passName) i = idx;
});
if(i === false) throw new Error('No such pass');
passes.splice(i++, 0, callback);
};
} (httpProxy$3));
var httpProxyExports = httpProxy$3.exports;
// Use explicit /index.js to help browserify negotiation in require '/lib/http-proxy' (!)
var ProxyServer = httpProxyExports.Server;
/**
* Creates the proxy server.
*
* Examples:
*
* httpProxy.createProxyServer({ .. }, 8000)
* // => '{ web: [Function], ws: [Function] ... }'
*
* @param {Object} Options Config object passed to the proxy
*
* @return {Object} Proxy Proxy object with handlers for `ws` and `web` requests
*
* @api public
*/
function createProxyServer(options) {
/*
* `options` is needed and it must have the following layout:
*
* {
* target : <url string to be parsed with the url module>
* forward: <url string to be parsed with the url module>
* agent : <object to be passed to http(s).request>
* ssl : <object to be passed to https.createServer()>
* ws : <true/false, if you want to proxy websockets>
* xfwd : <true/false, adds x-forward headers>
* secure : <true/false, verify SSL certificate>
* toProxy: <true/false, explicitly specify if we are proxying to another proxy>
* prependPath: <true/false, Default: true - specify whether you want to prepend the target's path to the proxy path>
* ignorePath: <true/false, Default: false - specify whether you want to ignore the proxy path of the incoming request>
* localAddress : <Local interface string to bind for outgoing connections>
* changeOrigin: <true/false, Default: false - changes the origin of the host header to the target URL>
* preserveHeaderKeyCase: <true/false, Default: false - specify whether you want to keep letter case of response header key >
* auth : Basic authentication i.e. 'user:password' to compute an Authorization header.
* hostRewrite: rewrites the location hostname on (201/301/302/307/308) redirects, Default: null.
* autoRewrite: rewrites the location host/port on (201/301/302/307/308) redirects based on requested host/port. Default: false.
* protocolRewrite: rewrites the location protocol on (201/301/302/307/308) redirects to 'http' or 'https'. Default: null.
* }
*
* NOTE: `options.ws` and `options.ssl` are optional.
* `options.target` and `options.forward` cannot
* both be missing.
*/
return new ProxyServer(options);
}
ProxyServer.createProxyServer = createProxyServer;
ProxyServer.createServer = createProxyServer;
ProxyServer.createProxy = createProxyServer;
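// Usage sketch (illustrative, mirroring the options documented above; in the standalone
// http-proxy package this is `require('http-proxy').createProxyServer`):
//
//   var proxy = createProxyServer({ target: 'http://localhost:9000', ws: true, xfwd: true });
//   require('http').createServer(function (req, res) {
//     proxy.web(req, res);
//   }).listen(8000);
//
// `proxy.web(req, res)` and `proxy.ws(req, socket, head)` are the handlers wired up by
// createRightProxy in the ProxyServer constructor.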
/**
* Export the proxy "Server" as the main export.
*/
var httpProxy$2 = ProxyServer;
/*!
* Caron dimonio, con occhi di bragia
* loro accennando, tutte le raccoglie;
* batte col remo qualunque s'adagia
*
* Charon the demon, with the eyes of glede,
* Beckoning to them, collects them all together,
* Beats with his oar whoever lags behind
*
* Dante - The Divine Comedy (Canto III)
*/
var httpProxy = httpProxy$2;
var httpProxy$1 = /*@__PURE__*/getDefaultExportFromCjs(httpProxy);
const debug$1 = createDebugger('vite:proxy');
function proxyMiddleware(httpServer, options, config) {
// lazy require only when proxy is used
const proxies = {};
Object.keys(options).forEach((context) => {
let opts = options[context];
if (!opts) {
return;
}
if (typeof opts === 'string') {
opts = { target: opts, changeOrigin: true };
}
const proxy = httpProxy$1.createProxyServer(opts);
if (opts.configure) {
opts.configure(proxy, opts);
}
proxy.on('error', (err, req, originalRes) => {
// When it is a ws proxy, res is a net.Socket
// originalRes can be falsy if the proxy itself errored
const res = originalRes;
if (!res) {
config.logger.error(`${colors$1.red(`http proxy error: ${err.message}`)}\n${err.stack}`, {
timestamp: true,
error: err,
});
}
else if ('req' in res) {
config.logger.error(`${colors$1.red(`http proxy error at ${originalRes.req.url}:`)}\n${err.stack}`, {
timestamp: true,
error: err,
});
if (!res.headersSent && !res.writableEnded) {
res
.writeHead(500, {
'Content-Type': 'text/plain',
})
.end();
}
}
else {
config.logger.error(`${colors$1.red(`ws proxy error:`)}\n${err.stack}`, {
timestamp: true,
error: err,
});
res.end();
}
});
proxy.on('proxyReqWs', (proxyReq, req, socket, options, head) => {
socket.on('error', (err) => {
config.logger.error(`${colors$1.red(`ws proxy socket error:`)}\n${err.stack}`, {
timestamp: true,
error: err,
});
});
});
// https://github.com/http-party/node-http-proxy/issues/1520#issue-877626125
// https://github.com/chimurai/http-proxy-middleware/blob/cd58f962aec22c925b7df5140502978da8f87d5f/src/plugins/default/debug-proxy-errors-plugin.ts#L25-L37
proxy.on('proxyRes', (proxyRes, req, res) => {
res.on('close', () => {
if (!res.writableEnded) {
debug$1?.('destroying proxyRes in proxyRes close event');
proxyRes.destroy();
}
});
});
// clone before saving because http-proxy mutates the options
proxies[context] = [proxy, { ...opts }];
});
if (httpServer) {
httpServer.on('upgrade', (req, socket, head) => {
const url = req.url;
for (const context in proxies) {
if (doesProxyContextMatchUrl(context, url)) {
const [proxy, opts] = proxies[context];
if (opts.ws ||
opts.target?.toString().startsWith('ws:') ||
opts.target?.toString().startsWith('wss:')) {
if (opts.rewrite) {
req.url = opts.rewrite(url);
}
debug$1?.(`${req.url} -> ws ${opts.target}`);
proxy.ws(req, socket, head);
return;
}
}
}
});
}
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return function viteProxyMiddleware(req, res, next) {
const url = req.url;
for (const context in proxies) {
if (doesProxyContextMatchUrl(context, url)) {
const [proxy, opts] = proxies[context];
const options = {};
if (opts.bypass) {
const bypassResult = opts.bypass(req, res, opts);
if (typeof bypassResult === 'string') {
req.url = bypassResult;
debug$1?.(`bypass: ${req.url} -> ${bypassResult}`);
return next();
}
else if (bypassResult === false) {
debug$1?.(`bypass: ${req.url} -> 404`);
return res.end(404);
}
}
debug$1?.(`${req.url} -> ${opts.target || opts.forward}`);
if (opts.rewrite) {
req.url = opts.rewrite(req.url);
}
proxy.web(req, res, options);
return;
}
}
next();
};
}
function doesProxyContextMatchUrl(context, url) {
return ((context[0] === '^' && new RegExp(context).test(url)) ||
url.startsWith(context));
}
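// e.g. (illustrative): a context of '/api' matches '/api/users' via startsWith, while a
// context beginning with '^' such as '^/fallback/.*' is compiled to a RegExp and tested
// against the full request url.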
var lib = {exports: {}};
(function (module, exports) {
var url = require$$0$9;
module.exports = function historyApiFallback(options) {
options = options || {};
var logger = getLogger(options);
return function(req, res, next) {
var headers = req.headers;
if (req.method !== 'GET' && req.method !== 'HEAD') {
logger(
'Not rewriting',
req.method,
req.url,
'because the method is not GET or HEAD.'
);
return next();
} else if (!headers || typeof headers.accept !== 'string') {
logger(
'Not rewriting',
req.method,
req.url,
'because the client did not send an HTTP accept header.'
);
return next();
} else if (headers.accept.indexOf('application/json') === 0) {
logger(
'Not rewriting',
req.method,
req.url,
'because the client prefers JSON.'
);
return next();
} else if (!acceptsHtml(headers.accept, options)) {
logger(
'Not rewriting',
req.method,
req.url,
'because the client does not accept HTML.'
);
return next();
}
var parsedUrl = url.parse(req.url);
var rewriteTarget;
options.rewrites = options.rewrites || [];
for (var i = 0; i < options.rewrites.length; i++) {
var rewrite = options.rewrites[i];
var match = parsedUrl.pathname.match(rewrite.from);
if (match !== null) {
rewriteTarget = evaluateRewriteRule(parsedUrl, match, rewrite.to, req);
if(rewriteTarget.charAt(0) !== '/') {
logger(
'We recommend using an absolute path for the rewrite target.',
'Received a non-absolute rewrite target',
rewriteTarget,
'for URL',
req.url
);
}
logger('Rewriting', req.method, req.url, 'to', rewriteTarget);
req.url = rewriteTarget;
return next();
}
}
var pathname = parsedUrl.pathname;
if (pathname.lastIndexOf('.') > pathname.lastIndexOf('/') &&
options.disableDotRule !== true) {
logger(
'Not rewriting',
req.method,
req.url,
'because the path includes a dot (.) character.'
);
return next();
}
rewriteTarget = options.index || '/index.html';
logger('Rewriting', req.method, req.url, 'to', rewriteTarget);
req.url = rewriteTarget;
next();
};
};
function evaluateRewriteRule(parsedUrl, match, rule, req) {
if (typeof rule === 'string') {
return rule;
} else if (typeof rule !== 'function') {
throw new Error('Rewrite rule can only be of type string or function.');
}
return rule({
parsedUrl: parsedUrl,
match: match,
request: req
});
}
function acceptsHtml(header, options) {
options.htmlAcceptHeaders = options.htmlAcceptHeaders || ['text/html', '*/*'];
for (var i = 0; i < options.htmlAcceptHeaders.length; i++) {
if (header.indexOf(options.htmlAcceptHeaders[i]) !== -1) {
return true;
}
}
return false;
}
function getLogger(options) {
if (options && options.logger) {
return options.logger;
} else if (options && options.verbose) {
// eslint-disable-next-line no-console
return console.log.bind(console);
}
return function(){};
}
} (lib));
var libExports = lib.exports;
var history = /*@__PURE__*/getDefaultExportFromCjs(libExports);
function htmlFallbackMiddleware(root, spaFallback) {
const historyHtmlFallbackMiddleware = history({
logger: createDebugger('vite:html-fallback'),
// support /dir/ without explicit index.html
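// e.g. (illustrative): '/docs/' is rewritten to '/docs/index.html' when that file exists
// under root, otherwise to '/index.html' when spaFallback is enabled, or left as the
// original url when it is not.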
rewrites: [
{
from: /\/$/,
to({ parsedUrl, request }) {
const rewritten = decodeURIComponent(parsedUrl.pathname) + 'index.html';
if (fs$l.existsSync(path$o.join(root, rewritten))) {
return rewritten;
}
return spaFallback ? `/index.html` : request.url;
},
},
],
});
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return function viteHtmlFallbackMiddleware(req, res, next) {
return historyHtmlFallbackMiddleware(req, res, next);
};
}
const debugCache = createDebugger('vite:cache');
const knownIgnoreList = new Set(['/', '/favicon.ico']);
function transformMiddleware(server) {
const { config: { root, logger }, moduleGraph, } = server;
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return async function viteTransformMiddleware(req, res, next) {
if (req.method !== 'GET' || knownIgnoreList.has(req.url)) {
return next();
}
let url;
try {
url = decodeURI(removeTimestampQuery(req.url)).replace(NULL_BYTE_PLACEHOLDER, '\0');
}
catch (e) {
return next(e);
}
const withoutQuery = cleanUrl(url);
try {
const isSourceMap = withoutQuery.endsWith('.map');
// since we generate source map references, handle those requests here
if (isSourceMap) {
const depsOptimizer = getDepsOptimizer(server.config, false); // non-ssr
if (depsOptimizer?.isOptimizedDepUrl(url)) {
// If the browser is requesting a source map for an optimized dep, it
// means that the dependency has already been pre-bundled and loaded
const sourcemapPath = url.startsWith(FS_PREFIX)
? fsPathFromId(url)
: normalizePath$3(path$o.resolve(root, url.slice(1)));
try {
const map = JSON.parse(await fsp.readFile(sourcemapPath, 'utf-8'));
applySourcemapIgnoreList(map, sourcemapPath, server.config.server.sourcemapIgnoreList, logger);
return send$2(req, res, JSON.stringify(map), 'json', {
headers: server.config.server.headers,
});
}
catch (e) {
// Outdated source map request for optimized deps, this isn't an error
// but part of the normal flow when re-optimizing after missing deps
// Send back an empty source map so the browser doesn't issue warnings
const dummySourceMap = {
version: 3,
file: sourcemapPath.replace(/\.map$/, ''),
sources: [],
sourcesContent: [],
names: [],
mappings: ';;;;;;;;;',
};
return send$2(req, res, JSON.stringify(dummySourceMap), 'json', {
cacheControl: 'no-cache',
headers: server.config.server.headers,
});
}
}
else {
const originalUrl = url.replace(/\.map($|\?)/, '$1');
const map = (await moduleGraph.getModuleByUrl(originalUrl, false))
?.transformResult?.map;
if (map) {
return send$2(req, res, JSON.stringify(map), 'json', {
headers: server.config.server.headers,
});
}
else {
return next();
}
}
}
// check if public dir is inside root dir
const publicDir = normalizePath$3(server.config.publicDir);
const rootDir = normalizePath$3(server.config.root);
if (publicDir.startsWith(withTrailingSlash(rootDir))) {
const publicPath = `${publicDir.slice(rootDir.length)}/`;
// warn explicit public paths
if (url.startsWith(withTrailingSlash(publicPath))) {
let warning;
if (isImportRequest(url)) {
const rawUrl = removeImportQuery(url);
if (urlRE.test(url)) {
warning =
`Assets in the public directory are served at the root path.\n` +
`Instead of ${colors$1.cyan(rawUrl)}, use ${colors$1.cyan(rawUrl.replace(publicPath, '/'))}.`;
}
else {
warning =
'Assets in public directory cannot be imported from JavaScript.\n' +
`If you intend to import that asset, put the file in the src directory, and use ${colors$1.cyan(rawUrl.replace(publicPath, '/src/'))} instead of ${colors$1.cyan(rawUrl)}.\n` +
`If you intend to use the URL of that asset, use ${colors$1.cyan(injectQuery(rawUrl.replace(publicPath, '/'), 'url'))}.`;
}
}
else {
warning =
`files in the public directory are served at the root path.\n` +
`Instead of ${colors$1.cyan(url)}, use ${colors$1.cyan(url.replace(publicPath, '/'))}.`;
}
logger.warn(colors$1.yellow(warning));
}
}
if (isJSRequest(url) ||
isImportRequest(url) ||
isCSSRequest(url) ||
isHTMLProxy(url)) {
// strip ?import
url = removeImportQuery(url);
// Strip valid id prefix. This is prepended to resolved Ids that are
// not valid browser import specifiers by the importAnalysis plugin.
url = unwrapId(url);
// for CSS, we need to differentiate between normal CSS requests and
// imports
if (isCSSRequest(url) &&
!isDirectRequest(url) &&
req.headers.accept?.includes('text/css')) {
url = injectQuery(url, 'direct');
}
// check if we can return 304 early
const ifNoneMatch = req.headers['if-none-match'];
if (ifNoneMatch &&
(await moduleGraph.getModuleByUrl(url, false))?.transformResult
?.etag === ifNoneMatch) {
debugCache?.(`[304] ${prettifyUrl(url, root)}`);
res.statusCode = 304;
return res.end();
}
// resolve, load and transform using the plugin container
const result = await transformRequest(url, server, {
html: req.headers.accept?.includes('text/html'),
});
if (result) {
const depsOptimizer = getDepsOptimizer(server.config, false); // non-ssr
const type = isDirectCSSRequest(url) ? 'css' : 'js';
const isDep = DEP_VERSION_RE.test(url) || depsOptimizer?.isOptimizedDepUrl(url);
return send$2(req, res, result.code, type, {
etag: result.etag,
// allow browser to cache npm deps!
cacheControl: isDep ? 'max-age=31536000,immutable' : 'no-cache',
headers: server.config.server.headers,
map: result.map,
});
}
}
}
catch (e) {
if (e?.code === ERR_OPTIMIZE_DEPS_PROCESSING_ERROR) {
// Skip if response has already been sent
if (!res.writableEnded) {
res.statusCode = 504; // status code request timeout
res.statusMessage = 'Optimize Deps Processing Error';
res.end();
}
// This timeout is unexpected
logger.error(e.message);
return;
}
if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP) {
// Skip if response has already been sent
if (!res.writableEnded) {
res.statusCode = 504; // status code request timeout
res.statusMessage = 'Outdated Optimize Dep';
res.end();
}
// We don't need to log an error in this case, the request
// is outdated because new dependencies were discovered and
// the new pre-bundle dependencies have changed.
// A full-page reload has been issued, and these old requests
// can't be properly fulfilled. This isn't an unexpected
// error but a normal part of the missing deps discovery flow
return;
}
if (e?.code === ERR_CLOSED_SERVER) {
// Skip if response has already been sent
if (!res.writableEnded) {
res.statusCode = 504; // status code request timeout
res.statusMessage = 'Outdated Request';
res.end();
}
// We don't need to log an error in this case, the request
// is outdated because new dependencies were discovered and
// the new pre-bundle dependencies have changed.
// A full-page reload has been issued, and these old requests
// can't be properly fulfilled. This isn't an unexpected
// error but a normal part of the missing deps discovery flow
return;
}
if (e?.code === ERR_LOAD_URL) {
// Let other middleware handle if we can't load the url via transformRequest
return next();
}
return next(e);
}
next();
};
}
function createDevHtmlTransformFn(server) {
const [preHooks, normalHooks, postHooks] = resolveHtmlTransforms(server.config.plugins);
return (url, html, originalUrl) => {
return applyHtmlTransforms(html, [
preImportMapHook(server.config),
...preHooks,
htmlEnvHook(server.config),
devHtmlHook,
...normalHooks,
...postHooks,
postImportMapHook(),
], {
path: url,
filename: getHtmlFilename(url, server),
server,
originalUrl,
});
};
}
function getHtmlFilename(url, server) {
if (url.startsWith(FS_PREFIX)) {
return decodeURIComponent(fsPathFromId(url));
}
else {
return decodeURIComponent(normalizePath$3(path$o.join(server.config.root, url.slice(1))));
}
}
function shouldPreTransform(url, config) {
return (!checkPublicFile(url, config) && (isJSRequest(url) || isCSSRequest(url)));
}
const processNodeUrl = (attr, sourceCodeLocation, s, config, htmlPath, originalUrl, server) => {
let url = attr.value || '';
if (server?.moduleGraph) {
const mod = server.moduleGraph.urlToModuleMap.get(url);
if (mod && mod.lastHMRTimestamp > 0) {
url = injectQuery(url, `t=${mod.lastHMRTimestamp}`);
}
}
const devBase = config.base;
if (url[0] === '/' && url[1] !== '/') {
// prefix with base (dev only, base is never relative)
const fullUrl = path$o.posix.join(devBase, url);
overwriteAttrValue(s, sourceCodeLocation, fullUrl);
if (server && shouldPreTransform(url, config)) {
preTransformRequest(server, fullUrl, devBase);
}
}
else if (url[0] === '.' &&
originalUrl &&
originalUrl !== '/' &&
htmlPath === '/index.html') {
// prefix with base (dev only, base is never relative)
const replacer = (url) => {
const fullUrl = path$o.posix.join(devBase, url);
if (server && shouldPreTransform(url, config)) {
preTransformRequest(server, fullUrl, devBase);
}
return fullUrl;
};
// #3230: if a request url (localhost:3000/a/b) falls back to this html, the relative asset
// paths would get an `/a/` prefix and cause a 404.
// before rewrite: `./index.js` -> `localhost:5173/a/index.js`.
// after rewrite: `../index.js` -> `localhost:5173/index.js`.
const processedUrl = attr.name === 'srcset' && attr.prefix === undefined
? processSrcSetSync(url, ({ url }) => replacer(url))
: replacer(url);
overwriteAttrValue(s, sourceCodeLocation, processedUrl);
}
};
const devHtmlHook = async (html, { path: htmlPath, filename, server, originalUrl }) => {
const { config, moduleGraph, watcher } = server;
const base = config.base || '/';
htmlPath = decodeURI(htmlPath);
let proxyModulePath;
let proxyModuleUrl;
const trailingSlash = htmlPath.endsWith('/');
if (!trailingSlash && fs$l.existsSync(filename)) {
proxyModulePath = htmlPath;
proxyModuleUrl = joinUrlSegments(base, htmlPath);
}
else {
// There are users of vite.transformIndexHtml calling it with url '/'
// for SSR integrations #7993, filename is root for this case
// A user may also use a valid name for a virtual html file
// Mark the path as virtual in both cases so sourcemaps aren't processed
// and ids are properly handled
const validPath = `${htmlPath}${trailingSlash ? 'index.html' : ''}`;
proxyModulePath = `\0${validPath}`;
proxyModuleUrl = wrapId(proxyModulePath);
}
const s = new MagicString(html);
let inlineModuleIndex = -1;
const proxyCacheUrl = cleanUrl(proxyModulePath).replace(normalizePath$3(config.root), '');
const styleUrl = [];
const addInlineModule = (node, ext) => {
inlineModuleIndex++;
const contentNode = node.childNodes[0];
const code = contentNode.value;
let map;
if (proxyModulePath[0] !== '\0') {
map = new MagicString(html)
.snip(contentNode.sourceCodeLocation.startOffset, contentNode.sourceCodeLocation.endOffset)
.generateMap({ hires: 'boundary' });
map.sources = [filename];
map.file = filename;
}
// add HTML Proxy to Map
addToHTMLProxyCache(config, proxyCacheUrl, inlineModuleIndex, { code, map });
// inline js module. convert to src="proxy" (dev only, base is never relative)
const modulePath = `${proxyModuleUrl}?html-proxy&index=${inlineModuleIndex}.${ext}`;
// invalidate the module so the newly cached contents will be served
const module = server?.moduleGraph.getModuleById(modulePath);
if (module) {
server?.moduleGraph.invalidateModule(module);
}
s.update(node.sourceCodeLocation.startOffset, node.sourceCodeLocation.endOffset, `<script type="module" src="${modulePath}"></script>`);
preTransformRequest(server, modulePath, base);
};
await traverseHtml(html, filename, (node) => {
if (!nodeIsElement(node)) {
return;
}
// script tags
if (node.nodeName === 'script') {
const { src, sourceCodeLocation, isModule } = getScriptInfo(node);
if (src) {
processNodeUrl(src, sourceCodeLocation, s, config, htmlPath, originalUrl, server);
}
else if (isModule && node.childNodes.length) {
addInlineModule(node, 'js');
}
}
if (node.nodeName === 'style' && node.childNodes.length) {
const children = node.childNodes[0];
styleUrl.push({
start: children.sourceCodeLocation.startOffset,
end: children.sourceCodeLocation.endOffset,
code: children.value,
});
}
// elements with [href/src] attrs
const assetAttrs = assetAttrsConfig[node.nodeName];
if (assetAttrs) {
for (const p of node.attrs) {
const attrKey = getAttrKey(p);
if (p.value && assetAttrs.includes(attrKey)) {
processNodeUrl(p, node.sourceCodeLocation.attrs[attrKey], s, config, htmlPath, originalUrl);
}
}
}
});
await Promise.all(styleUrl.map(async ({ start, end, code }, index) => {
const url = `${proxyModulePath}?html-proxy&direct&index=${index}.css`;
// ensure module in graph after successful load
const mod = await moduleGraph.ensureEntryFromUrl(url, false);
ensureWatchedFile(watcher, mod.file, config.root);
const result = await server.pluginContainer.transform(code, mod.id);
let content = '';
if (result) {
if (result.map) {
if (result.map.mappings) {
await injectSourcesContent(result.map, proxyModulePath, config.logger);
}
content = getCodeWithSourcemap('css', result.code, result.map);
}
else {
content = result.code;
}
}
s.overwrite(start, end, content);
}));
html = s.toString();
return {
html,
tags: [
{
tag: 'script',
attrs: {
type: 'module',
src: path$o.posix.join(base, CLIENT_PUBLIC_PATH),
},
injectTo: 'head-prepend',
},
],
};
};
function indexHtmlMiddleware(server) {
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return async function viteIndexHtmlMiddleware(req, res, next) {
if (res.writableEnded) {
return next();
}
const url = req.url && cleanUrl(req.url);
// htmlFallbackMiddleware appends '.html' to URLs
if (url?.endsWith('.html') && req.headers['sec-fetch-dest'] !== 'script') {
const filename = getHtmlFilename(url, server);
if (fs$l.existsSync(filename)) {
try {
let html = await fsp.readFile(filename, 'utf-8');
html = await server.transformIndexHtml(url, html, req.originalUrl);
return send$2(req, res, html, 'html', {
headers: server.config.server.headers,
});
}
catch (e) {
return next(e);
}
}
}
next();
};
}
function preTransformRequest(server, url, base) {
if (!server.config.server.preTransformRequests)
return;
url = unwrapId(stripBase(url, base));
// transform all urls as non-ssr since html includes client-side assets only
server.transformRequest(url).catch((e) => {
if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP ||
e?.code === ERR_CLOSED_SERVER) {
// these are expected errors
return;
}
// Unexpected error, log the issue but avoid an unhandled exception
server.config.logger.error(e.message);
});
}
const logTime = createDebugger('vite:time');
function timeMiddleware(root) {
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
return function viteTimeMiddleware(req, res, next) {
const start = performance.now();
const end = res.end;
res.end = (...args) => {
logTime?.(`${timeFrom(start)} ${prettifyUrl(req.url, root)}`);
return end.call(res, ...args);
};
next();
};
}
class ModuleNode {
/**
* @param setIsSelfAccepting - set `false` to set `isSelfAccepting` later. e.g. #7870
*/
constructor(url, setIsSelfAccepting = true) {
/**
* Resolved file system path + query
*/
this.id = null;
this.file = null;
this.importers = new Set();
this.clientImportedModules = new Set();
this.ssrImportedModules = new Set();
this.acceptedHmrDeps = new Set();
this.acceptedHmrExports = null;
this.importedBindings = null;
this.transformResult = null;
this.ssrTransformResult = null;
this.ssrModule = null;
this.ssrError = null;
this.lastHMRTimestamp = 0;
this.lastInvalidationTimestamp = 0;
this.url = url;
this.type = isDirectCSSRequest(url) ? 'css' : 'js';
if (setIsSelfAccepting) {
this.isSelfAccepting = false;
}
}
get importedModules() {
const importedModules = new Set(this.clientImportedModules);
for (const module of this.ssrImportedModules) {
importedModules.add(module);
}
return importedModules;
}
}
class ModuleGraph {
constructor(resolveId) {
this.resolveId = resolveId;
this.urlToModuleMap = new Map();
this.idToModuleMap = new Map();
// a single file may correspond to multiple modules with different queries
this.fileToModulesMap = new Map();
this.safeModulesPath = new Set();
/**
* @internal
*/
this._unresolvedUrlToModuleMap = new Map();
/**
* @internal
*/
this._ssrUnresolvedUrlToModuleMap = new Map();
}
async getModuleByUrl(rawUrl, ssr) {
// Quick path, if we already have a module for this rawUrl (even without extension)
rawUrl = removeImportQuery(removeTimestampQuery(rawUrl));
const mod = this._getUnresolvedUrlToModule(rawUrl, ssr);
if (mod) {
return mod;
}
const [url] = await this._resolveUrl(rawUrl, ssr);
return this.urlToModuleMap.get(url);
}
getModuleById(id) {
return this.idToModuleMap.get(removeTimestampQuery(id));
}
getModulesByFile(file) {
return this.fileToModulesMap.get(file);
}
onFileChange(file) {
const mods = this.getModulesByFile(file);
if (mods) {
const seen = new Set();
mods.forEach((mod) => {
this.invalidateModule(mod, seen);
});
}
}
invalidateModule(mod, seen = new Set(), timestamp = Date.now(), isHmr = false, hmrBoundaries = []) {
if (seen.has(mod)) {
return;
}
seen.add(mod);
if (isHmr) {
mod.lastHMRTimestamp = timestamp;
}
else {
// Save the timestamp for this invalidation, so we can avoid caching the result of
// processing that may already have started for this module
mod.lastInvalidationTimestamp = timestamp;
}
// Don't invalidate mod.info and mod.meta, as they are part of the processing pipeline
// Invalidating the transform result is enough to ensure this module is re-processed next time it is requested
mod.transformResult = null;
mod.ssrTransformResult = null;
mod.ssrModule = null;
mod.ssrError = null;
// Fix #3033
if (hmrBoundaries.includes(mod)) {
return;
}
mod.importers.forEach((importer) => {
if (!importer.acceptedHmrDeps.has(mod)) {
this.invalidateModule(importer, seen, timestamp, isHmr);
}
});
}
invalidateAll() {
const timestamp = Date.now();
const seen = new Set();
this.idToModuleMap.forEach((mod) => {
this.invalidateModule(mod, seen, timestamp);
});
}
/**
* Update the module graph based on a module's updated imports information
* If there are dependencies that no longer have any importers, they are
* returned as a Set.
*/
async updateModuleInfo(mod, importedModules, importedBindings, acceptedModules, acceptedExports, isSelfAccepting, ssr) {
mod.isSelfAccepting = isSelfAccepting;
const prevImports = ssr ? mod.ssrImportedModules : mod.clientImportedModules;
let noLongerImported;
let resolvePromises = [];
let resolveResults = new Array(importedModules.size);
let index = 0;
// update import graph
for (const imported of importedModules) {
const nextIndex = index++;
if (typeof imported === 'string') {
resolvePromises.push(this.ensureEntryFromUrl(imported, ssr).then((dep) => {
dep.importers.add(mod);
resolveResults[nextIndex] = dep;
}));
}
else {
imported.importers.add(mod);
resolveResults[nextIndex] = imported;
}
}
if (resolvePromises.length) {
await Promise.all(resolvePromises);
}
const nextImports = new Set(resolveResults);
if (ssr) {
mod.ssrImportedModules = nextImports;
}
else {
mod.clientImportedModules = nextImports;
}
// remove the importer from deps that were imported but no longer are.
prevImports.forEach((dep) => {
if (!mod.clientImportedModules.has(dep) &&
!mod.ssrImportedModules.has(dep)) {
dep.importers.delete(mod);
if (!dep.importers.size) {
(noLongerImported || (noLongerImported = new Set())).add(dep);
}
}
});
// update accepted hmr deps
resolvePromises = [];
resolveResults = new Array(acceptedModules.size);
index = 0;
for (const accepted of acceptedModules) {
const nextIndex = index++;
if (typeof accepted === 'string') {
resolvePromises.push(this.ensureEntryFromUrl(accepted, ssr).then((dep) => {
resolveResults[nextIndex] = dep;
}));
}
else {
resolveResults[nextIndex] = accepted;
}
}
if (resolvePromises.length) {
await Promise.all(resolvePromises);
}
mod.acceptedHmrDeps = new Set(resolveResults);
// update accepted hmr exports
mod.acceptedHmrExports = acceptedExports;
mod.importedBindings = importedBindings;
return noLongerImported;
}
async ensureEntryFromUrl(rawUrl, ssr, setIsSelfAccepting = true) {
return this._ensureEntryFromUrl(rawUrl, ssr, setIsSelfAccepting);
}
/**
* @internal
*/
async _ensureEntryFromUrl(rawUrl, ssr, setIsSelfAccepting = true,
// Optimization, avoid resolving the same url twice if the caller already did it
resolved) {
// Quick path, if we already have a module for this rawUrl (even without extension)
rawUrl = removeImportQuery(removeTimestampQuery(rawUrl));
let mod = this._getUnresolvedUrlToModule(rawUrl, ssr);
if (mod) {
return mod;
}
const modPromise = (async () => {
const [url, resolvedId, meta] = await this._resolveUrl(rawUrl, ssr, resolved);
mod = this.idToModuleMap.get(resolvedId);
if (!mod) {
mod = new ModuleNode(url, setIsSelfAccepting);
if (meta)
mod.meta = meta;
this.urlToModuleMap.set(url, mod);
mod.id = resolvedId;
this.idToModuleMap.set(resolvedId, mod);
const file = (mod.file = cleanUrl(resolvedId));
let fileMappedModules = this.fileToModulesMap.get(file);
if (!fileMappedModules) {
fileMappedModules = new Set();
this.fileToModulesMap.set(file, fileMappedModules);
}
fileMappedModules.add(mod);
}
// multiple urls can map to the same module and id, make sure we register
// the url to the existing module in that case
else if (!this.urlToModuleMap.has(url)) {
this.urlToModuleMap.set(url, mod);
}
this._setUnresolvedUrlToModule(rawUrl, mod, ssr);
return mod;
})();
// Also register the clean url to the module, so that we can short-circuit
// resolving the same url twice
this._setUnresolvedUrlToModule(rawUrl, modPromise, ssr);
return modPromise;
}
// some deps, like css files referenced via @import, don't have their own
// url because they are inlined into the main css import. But they still
// need to be represented in the module graph so that they can trigger
// hmr in the importing css file.
createFileOnlyEntry(file) {
file = normalizePath$3(file);
let fileMappedModules = this.fileToModulesMap.get(file);
if (!fileMappedModules) {
fileMappedModules = new Set();
this.fileToModulesMap.set(file, fileMappedModules);
}
const url = `${FS_PREFIX}${file}`;
for (const m of fileMappedModules) {
if (m.url === url || m.id === file) {
return m;
}
}
const mod = new ModuleNode(url);
mod.file = file;
fileMappedModules.add(mod);
return mod;
}
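// Illustrative sketch (paths hypothetical): for a sass partial pulled in via `@import './_vars.scss'`,
// createFileOnlyEntry('/project/src/_vars.scss') registers a ModuleNode that is only keyed in
// fileToModulesMap, with url `${FS_PREFIX}/project/src/_vars.scss` and no resolved id, so editing
// the partial can still trigger HMR for the importing css file.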
// for incoming urls, it is important to:
// 1. remove the HMR timestamp query (?t=xxxx) and the ?import query
// 2. resolve its extension so that urls with or without extension all map to
// the same module
async resolveUrl(url, ssr) {
url = removeImportQuery(removeTimestampQuery(url));
const mod = await this._getUnresolvedUrlToModule(url, ssr);
if (mod?.id) {
return [mod.url, mod.id, mod.meta];
}
return this._resolveUrl(url, ssr);
}
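// Illustrative sketch (url and timestamp hypothetical): resolveUrl('/src/main?import&t=1679900000000', false)
// first strips the ?import and ?t= queries; if '/src/main' then resolves to an id ending in '.ts',
// the missing '.ts' extension is appended to the returned url so that '/src/main' and
// '/src/main.ts' map to the same module.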
/**
* @internal
*/
_getUnresolvedUrlToModule(url, ssr) {
return (ssr ? this._ssrUnresolvedUrlToModuleMap : this._unresolvedUrlToModuleMap).get(url);
}
/**
* @internal
*/
_setUnresolvedUrlToModule(url, mod, ssr) {
(ssr
? this._ssrUnresolvedUrlToModuleMap
: this._unresolvedUrlToModuleMap).set(url, mod);
}
/**
* @internal
*/
async _resolveUrl(url, ssr, alreadyResolved) {
const resolved = alreadyResolved ?? (await this.resolveId(url, !!ssr));
const resolvedId = resolved?.id || url;
if (url !== resolvedId &&
!url.includes('\0') &&
!url.startsWith(`virtual:`)) {
const ext = extname$1(cleanUrl(resolvedId));
if (ext) {
const pathname = cleanUrl(url);
if (!pathname.endsWith(ext)) {
url = pathname + ext + url.slice(pathname.length);
}
}
}
return [url, resolvedId, resolved?.meta];
}
}
function createServer(inlineConfig = {}) {
return _createServer(inlineConfig, { ws: true });
}
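// A minimal programmatic-usage sketch of createServer (option values are illustrative, not defaults):
//
//   import { createServer } from 'vite'
//
//   const server = await createServer({
//     configFile: false,
//     root: process.cwd(),
//     server: { port: 1337 },
//   })
//   await server.listen()
//   server.printUrls()
//   // ...later
//   await server.close()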
async function _createServer(inlineConfig = {}, options) {
const config = await resolveConfig(inlineConfig, 'serve');
const { root, server: serverConfig } = config;
const httpsOptions = await resolveHttpsConfig(config.server.https);
const { middlewareMode } = serverConfig;
const resolvedWatchOptions = resolveChokidarOptions(config, {
disableGlobbing: true,
...serverConfig.watch,
});
const middlewares = connect$1();
const httpServer = middlewareMode
? null
: await resolveHttpServer(serverConfig, middlewares, httpsOptions);
const ws = createWebSocketServer(httpServer, config, httpsOptions);
if (httpServer) {
setClientErrorHandler(httpServer, config.logger);
}
const watcher = chokidar.watch(
// config file dependencies and env file might be outside of root
[root, ...config.configFileDependencies, config.envDir], resolvedWatchOptions);
const moduleGraph = new ModuleGraph((url, ssr) => container.resolveId(url, undefined, { ssr }));
const container = await createPluginContainer(config, moduleGraph, watcher);
const closeHttpServer = createServerCloseFn(httpServer);
let exitProcess;
const server = {
config,
middlewares,
httpServer,
watcher,
pluginContainer: container,
ws,
moduleGraph,
resolvedUrls: null,
ssrTransform(code, inMap, url, originalCode = code) {
return ssrTransform(code, inMap, url, originalCode, server.config);
},
transformRequest(url, options) {
return transformRequest(url, server, options);
},
transformIndexHtml: null,
async ssrLoadModule(url, opts) {
if (isDepsOptimizerEnabled(config, true)) {
await initDevSsrDepsOptimizer(config, server);
}
if (config.legacy?.buildSsrCjsExternalHeuristics) {
await updateCjsSsrExternals(server);
}
return ssrLoadModule(url, server, undefined, undefined, opts?.fixStacktrace);
},
ssrFixStacktrace(e) {
ssrFixStacktrace(e, moduleGraph);
},
ssrRewriteStacktrace(stack) {
return ssrRewriteStacktrace(stack, moduleGraph);
},
async reloadModule(module) {
if (serverConfig.hmr !== false && module.file) {
updateModules(module.file, [module], Date.now(), server);
}
},
async listen(port, isRestart) {
await startServer(server, port);
if (httpServer) {
server.resolvedUrls = await resolveServerUrls(httpServer, config.server, config);
if (!isRestart && config.server.open)
server.openBrowser();
}
return server;
},
openBrowser() {
const options = server.config.server;
const url = server.resolvedUrls?.local[0] ?? server.resolvedUrls?.network[0];
if (url) {
const path = typeof options.open === 'string'
? new URL(options.open, url).href
: url;
openBrowser(path, true, server.config.logger);
}
else {
server.config.logger.warn('No URL available to open in browser');
}
},
async close() {
if (!middlewareMode) {
process.off('SIGTERM', exitProcess);
if (process.env.CI !== 'true') {
process.stdin.off('end', exitProcess);
}
}
await Promise.allSettled([
watcher.close(),
ws.close(),
container.close(),
getDepsOptimizer(server.config)?.close(),
getDepsOptimizer(server.config, true)?.close(),
closeHttpServer(),
]);
// Await pending requests. We throw early in transformRequest
// and in hooks if the server is closing for non-ssr requests,
// so the import analysis plugin stops pre-transforming static
// imports and this block is resolved sooner.
// During SSR, we let pending requests finish to avoid exposing
// the server closed error to the users.
while (server._pendingRequests.size > 0) {
await Promise.allSettled([...server._pendingRequests.values()].map((pending) => pending.request));
}
server.resolvedUrls = null;
},
printUrls() {
if (server.resolvedUrls) {
printServerUrls(server.resolvedUrls, serverConfig.host, config.logger.info);
}
else if (middlewareMode) {
throw new Error('cannot print server URLs in middleware mode.');
}
else {
throw new Error('cannot print server URLs before server.listen is called.');
}
},
async restart(forceOptimize) {
if (!server._restartPromise) {
server._forceOptimizeOnRestart = !!forceOptimize;
server._restartPromise = restartServer(server).finally(() => {
server._restartPromise = null;
server._forceOptimizeOnRestart = false;
});
}
return server._restartPromise;
},
_ssrExternals: null,
_restartPromise: null,
_importGlobMap: new Map(),
_forceOptimizeOnRestart: false,
_pendingRequests: new Map(),
_fsDenyGlob: picomatch$4(config.server.fs.deny, { matchBase: true }),
_shortcutsOptions: undefined,
};
server.transformIndexHtml = createDevHtmlTransformFn(server);
if (!middlewareMode) {
exitProcess = async () => {
try {
await server.close();
}
finally {
process.exit();
}
};
process.once('SIGTERM', exitProcess);
if (process.env.CI !== 'true') {
process.stdin.on('end', exitProcess);
}
}
const onHMRUpdate = async (file, configOnly) => {
if (serverConfig.hmr !== false) {
try {
await handleHMRUpdate(file, server, configOnly);
}
catch (err) {
ws.send({
type: 'error',
err: prepareError(err),
});
}
}
};
const onFileAddUnlink = async (file) => {
file = normalizePath$3(file);
await handleFileAddUnlink(file, server);
await onHMRUpdate(file, true);
};
watcher.on('change', async (file) => {
file = normalizePath$3(file);
// invalidate module graph cache on file change
moduleGraph.onFileChange(file);
await onHMRUpdate(file, false);
});
watcher.on('add', onFileAddUnlink);
watcher.on('unlink', onFileAddUnlink);
ws.on('vite:invalidate', async ({ path, message }) => {
const mod = moduleGraph.urlToModuleMap.get(path);
if (mod && mod.isSelfAccepting && mod.lastHMRTimestamp > 0) {
config.logger.info(colors$1.yellow(`hmr invalidate `) +
colors$1.dim(path) +
(message ? ` ${message}` : ''), { timestamp: true });
const file = getShortName(mod.file, config.root);
updateModules(file, [...mod.importers], mod.lastHMRTimestamp, server, true);
}
});
if (!middlewareMode && httpServer) {
httpServer.once('listening', () => {
// update actual port since this may be different from initial value
serverConfig.port = httpServer.address().port;
});
}
// apply server configuration hooks from plugins
const postHooks = [];
for (const hook of config.getSortedPluginHooks('configureServer')) {
postHooks.push(await hook(server));
}
// Internal middlewares ------------------------------------------------------
// request timer
if (process.env.DEBUG) {
middlewares.use(timeMiddleware(root));
}
// cors (enabled by default)
const { cors } = serverConfig;
if (cors !== false) {
middlewares.use(corsMiddleware(typeof cors === 'boolean' ? {} : cors));
}
// proxy
const { proxy } = serverConfig;
if (proxy) {
middlewares.use(proxyMiddleware(httpServer, proxy, config));
}
// base
if (config.base !== '/') {
middlewares.use(baseMiddleware(server));
}
// open in editor support
middlewares.use('/__open-in-editor', launchEditorMiddleware$1());
// ping request handler
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
middlewares.use(function viteHMRPingMiddleware(req, res, next) {
if (req.headers['accept'] === 'text/x-vite-ping') {
res.writeHead(204).end();
}
else {
next();
}
});
// serve static files under /public
// this applies before the transform middleware so that these files are served
// as-is without transforms.
if (config.publicDir) {
middlewares.use(servePublicMiddleware(config.publicDir, config.server.headers));
}
// main transform middleware
middlewares.use(transformMiddleware(server));
// serve static files
middlewares.use(serveRawFsMiddleware(server));
middlewares.use(serveStaticMiddleware(root, server));
// html fallback
if (config.appType === 'spa' || config.appType === 'mpa') {
middlewares.use(htmlFallbackMiddleware(root, config.appType === 'spa'));
}
// run post config hooks
// This is applied before the html middleware so that user middleware can
// serve custom content instead of index.html.
postHooks.forEach((fn) => fn && fn());
if (config.appType === 'spa' || config.appType === 'mpa') {
// transform index.html
middlewares.use(indexHtmlMiddleware(server));
// handle 404s
// Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...`
middlewares.use(function vite404Middleware(_, res) {
res.statusCode = 404;
res.end();
});
}
// error handler
middlewares.use(errorMiddleware(server, middlewareMode));
// httpServer.listen can be called multiple times
// when the port is taken and the next port number is tried
// this code is to avoid calling buildStart multiple times
let initingServer;
let serverInited = false;
const initServer = async () => {
if (serverInited)
return;
if (initingServer)
return initingServer;
initingServer = (async function () {
await container.buildStart({});
// start deps optimizer after all container plugins are ready
if (isDepsOptimizerEnabled(config, false)) {
await initDepsOptimizer(config, server);
}
initingServer = undefined;
serverInited = true;
})();
return initingServer;
};
if (!middlewareMode && httpServer) {
// overwrite listen to init optimizer before server start
const listen = httpServer.listen.bind(httpServer);
httpServer.listen = (async (port, ...args) => {
try {
// ensure ws server started
ws.listen();
await initServer();
}
catch (e) {
httpServer.emit('error', e);
return;
}
return listen(port, ...args);
});
}
else {
if (options.ws) {
ws.listen();
}
await initServer();
}
return server;
}
async function startServer(server, inlinePort) {
const httpServer = server.httpServer;
if (!httpServer) {
throw new Error('Cannot call server.listen in middleware mode.');
}
const options = server.config.server;
const port = inlinePort ?? options.port ?? DEFAULT_DEV_PORT;
const hostname = await resolveHostname(options.host);
await httpServerStart(httpServer, {
port,
strictPort: options.strictPort,
host: hostname.host,
logger: server.config.logger,
});
}
function createServerCloseFn(server) {
if (!server) {
return () => { };
}
let hasListened = false;
const openSockets = new Set();
server.on('connection', (socket) => {
openSockets.add(socket);
socket.on('close', () => {
openSockets.delete(socket);
});
});
server.once('listening', () => {
hasListened = true;
});
return () => new Promise((resolve, reject) => {
openSockets.forEach((s) => s.destroy());
if (hasListened) {
server.close((err) => {
if (err) {
reject(err);
}
else {
resolve();
}
});
}
else {
resolve();
}
});
}
function resolvedAllowDir(root, dir) {
return normalizePath$3(path$o.resolve(root, dir));
}
function resolveServerOptions(root, raw, logger) {
const server = {
preTransformRequests: true,
...raw,
sourcemapIgnoreList: raw?.sourcemapIgnoreList === false
? () => false
: raw?.sourcemapIgnoreList || isInNodeModules,
middlewareMode: !!raw?.middlewareMode,
};
let allowDirs = server.fs?.allow;
const deny = server.fs?.deny || ['.env', '.env.*', '*.{crt,pem}'];
if (!allowDirs) {
allowDirs = [searchForWorkspaceRoot(root)];
}
allowDirs = allowDirs.map((i) => resolvedAllowDir(root, i));
// only push the client dir when it is not already covered by an allowed dir
// (i.e. when vite itself is installed outside of the root)
const resolvedClientDir = resolvedAllowDir(root, CLIENT_DIR);
if (!allowDirs.some((dir) => isParentDirectory(dir, resolvedClientDir))) {
allowDirs.push(resolvedClientDir);
}
server.fs = {
strict: server.fs?.strict ?? true,
allow: allowDirs,
deny,
};
if (server.origin?.endsWith('/')) {
server.origin = server.origin.slice(0, -1);
logger.warn(colors$1.yellow(`${colors$1.bold('(!)')} server.origin should not end with "/". Using "${server.origin}" instead.`));
}
return server;
}
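// With no user `server.fs` settings, the options above resolve to roughly:
//
//   fs: {
//     strict: true,
//     allow: [searchForWorkspaceRoot(root)], // plus the vite client dir when not already covered
//     deny: ['.env', '.env.*', '*.{crt,pem}'],
//   }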
async function restartServer(server) {
global.__vite_start_time = performance.now();
const { port: prevPort, host: prevHost } = server.config.server;
const shortcutsOptions = server._shortcutsOptions;
const oldUrls = server.resolvedUrls;
let inlineConfig = server.config.inlineConfig;
if (server._forceOptimizeOnRestart) {
inlineConfig = mergeConfig(inlineConfig, {
optimizeDeps: {
force: true,
},
});
}
let newServer = null;
try {
// delay ws server listen
newServer = await _createServer(inlineConfig, { ws: false });
}
catch (err) {
server.config.logger.error(err.message, {
timestamp: true,
});
server.config.logger.error('server restart failed', { timestamp: true });
return;
}
await server.close();
// Assign new server props to existing server instance
Object.assign(server, newServer);
const { logger, server: { port, host, middlewareMode }, } = server.config;
if (!middlewareMode) {
await server.listen(port, true);
logger.info('server restarted.', { timestamp: true });
if ((port ?? DEFAULT_DEV_PORT) !== (prevPort ?? DEFAULT_DEV_PORT) ||
host !== prevHost ||
diffDnsOrderChange(oldUrls, newServer.resolvedUrls)) {
logger.info('');
server.printUrls();
}
}
else {
server.ws.listen();
logger.info('server restarted.', { timestamp: true });
}
if (shortcutsOptions) {
shortcutsOptions.print = false;
bindShortcuts(newServer, shortcutsOptions);
}
}
async function updateCjsSsrExternals(server) {
if (!server._ssrExternals) {
let knownImports = [];
// Important! We use the non-ssr optimized deps to find known imports
// Only the explicitly defined deps are optimized during dev SSR, so
// we use the generated list from the scanned deps in regular dev.
// This is part of the v2 externalization heuristics and it is kept
// for backwards compatibility in case the user needs to fall back to the
// legacy scheme. It may be removed in a future v3 minor.
const depsOptimizer = getDepsOptimizer(server.config, false); // non-ssr
if (depsOptimizer) {
await depsOptimizer.scanProcessing;
knownImports = [
...Object.keys(depsOptimizer.metadata.optimized),
...Object.keys(depsOptimizer.metadata.discovered),
];
}
server._ssrExternals = cjsSsrResolveExternals(server.config, knownImports);
}
}
var index = {
__proto__: null,
_createServer: _createServer,
createServer: createServer,
resolveServerOptions: resolveServerOptions
};
/* eslint-disable */
//@ts-nocheck
//TODO: replace this code with https://github.com/lukeed/polka/pull/148 once it's released
// This is based on https://github.com/preactjs/wmr/blob/main/packages/wmr/src/lib/polkompress.js
// MIT Licensed https://github.com/preactjs/wmr/blob/main/LICENSE
/* global Buffer */
const noop = () => { };
const mimes = /text|javascript|\/json|xml/i;
const threshold = 1024;
const level = -1;
let brotli = false;
const getChunkSize = (chunk, enc) => (chunk ? Buffer.byteLength(chunk, enc) : 0);
function compression() {
const brotliOpts = (typeof brotli === 'object' && brotli) || {};
const gzipOpts = {};
// disable Brotli on Node<12.7 where it is unsupported:
if (!zlib$1.createBrotliCompress)
brotli = false;
return function viteCompressionMiddleware(req, res, next = noop) {
const accept = req.headers['accept-encoding'] + '';
const encoding = ((brotli && accept.match(/\bbr\b/)) ||
(accept.match(/\bgzip\b/)) ||
[])[0];
// skip if no response body or no supported encoding:
if (req.method === 'HEAD' || !encoding)
return next();
/** @type {zlib.Gzip | zlib.BrotliCompress} */
let compress;
let pendingStatus;
/** @type {[string, function][]?} */
let pendingListeners = [];
let started = false;
let size = 0;
function start() {
started = true;
size = res.getHeader('Content-Length') | 0 || size;
const compressible = mimes.test(String(res.getHeader('Content-Type') || 'text/plain'));
const cleartext = !res.getHeader('Content-Encoding');
const listeners = pendingListeners || [];
if (compressible && cleartext && size >= threshold) {
res.setHeader('Content-Encoding', encoding);
res.removeHeader('Content-Length');
if (encoding === 'br') {
const params = {
[zlib$1.constants.BROTLI_PARAM_QUALITY]: level,
[zlib$1.constants.BROTLI_PARAM_SIZE_HINT]: size,
};
compress = zlib$1.createBrotliCompress({
params: Object.assign(params, brotliOpts),
});
}
else {
compress = zlib$1.createGzip(Object.assign({ level }, gzipOpts));
}
// backpressure
compress.on('data', (chunk) => write.call(res, chunk) === false && compress.pause());
on.call(res, 'drain', () => compress.resume());
compress.on('end', () => end.call(res));
listeners.forEach((p) => compress.on.apply(compress, p));
}
else {
pendingListeners = null;
listeners.forEach((p) => on.apply(res, p));
}
writeHead.call(res, pendingStatus || res.statusCode);
}
const { end, write, on, writeHead } = res;
res.writeHead = function (status, reason, headers) {
if (typeof reason !== 'string')
[headers, reason] = [reason, headers];
if (headers)
for (let i in headers)
res.setHeader(i, headers[i]);
pendingStatus = status;
return this;
};
res.write = function (chunk, enc, cb) {
size += getChunkSize(chunk, enc);
if (!started)
start();
if (!compress)
return write.apply(this, arguments);
return compress.write.apply(compress, arguments);
};
res.end = function (chunk, enc, cb) {
if (arguments.length > 0 && typeof chunk !== 'function') {
size += getChunkSize(chunk, enc);
}
if (!started)
start();
if (!compress)
return end.apply(this, arguments);
return compress.end.apply(compress, arguments);
};
res.on = function (type, listener) {
if (!pendingListeners || type !== 'drain')
on.call(this, type, listener);
else if (compress)
compress.on(type, listener);
else
pendingListeners.push([type, listener]);
return this;
};
next();
};
}
function resolvePreviewOptions(preview, server) {
// The preview server inherits every CommonServerOption from the `server` config,
// except for the port, so that the dev and preview servers can run
// at the same time without extra configuration
return {
port: preview?.port,
strictPort: preview?.strictPort ?? server.strictPort,
host: preview?.host ?? server.host,
https: preview?.https ?? server.https,
open: preview?.open ?? server.open,
proxy: preview?.proxy ?? server.proxy,
cors: preview?.cors ?? server.cors,
headers: preview?.headers ?? server.headers,
};
}
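// Illustrative example: with `server: { host: true, open: true }` and no `preview` config,
// every option above is inherited from `server` except `port`, which stays undefined so the
// preview server later falls back to DEFAULT_PREVIEW_PORT.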
/**
* Starts the Vite server in preview mode, to simulate a production deployment
*/
async function preview(inlineConfig = {}) {
const config = await resolveConfig(inlineConfig, 'serve', 'production', 'production');
const distDir = path$o.resolve(config.root, config.build.outDir);
if (!fs$l.existsSync(distDir) &&
// error if no plugins implement `configurePreviewServer`
config.plugins.every((plugin) => !plugin.configurePreviewServer) &&
// error if called in CLI only. programmatic usage could access `httpServer`
// and affect file serving
process.argv[1]?.endsWith(path$o.normalize('bin/vite.js')) &&
process.argv[2] === 'preview') {
throw new Error(`The directory "${config.build.outDir}" does not exist. Did you build your project?`);
}
const app = connect$1();
const httpServer = await resolveHttpServer(config.preview, app, await resolveHttpsConfig(config.preview?.https));
setClientErrorHandler(httpServer, config.logger);
const options = config.preview;
const logger = config.logger;
const server = {
config,
middlewares: app,
httpServer,
resolvedUrls: null,
printUrls() {
if (server.resolvedUrls) {
printServerUrls(server.resolvedUrls, options.host, logger.info);
}
else {
throw new Error('cannot print server URLs before server is listening.');
}
},
};
// apply server hooks from plugins
const postHooks = [];
for (const hook of config.getSortedPluginHooks('configurePreviewServer')) {
postHooks.push(await hook(server));
}
// cors
const { cors } = config.preview;
if (cors !== false) {
app.use(corsMiddleware(typeof cors === 'boolean' ? {} : cors));
}
// proxy
const { proxy } = config.preview;
if (proxy) {
app.use(proxyMiddleware(httpServer, proxy, config));
}
app.use(compression());
const previewBase = config.base === './' || config.base === '' ? '/' : config.base;
// static assets
const headers = config.preview.headers;
const viteAssetMiddleware = (...args) => sirv(distDir, {
etag: true,
dev: true,
single: config.appType === 'spa',
setHeaders(res) {
if (headers) {
for (const name in headers) {
res.setHeader(name, headers[name]);
}
}
},
shouldServe(filePath) {
return shouldServeFile(filePath, distDir);
},
})(...args);
app.use(previewBase, viteAssetMiddleware);
// apply post server hooks from plugins
postHooks.forEach((fn) => fn && fn());
const hostname = await resolveHostname(options.host);
const port = options.port ?? DEFAULT_PREVIEW_PORT;
const protocol = options.https ? 'https' : 'http';
const serverPort = await httpServerStart(httpServer, {
port,
strictPort: options.strictPort,
host: hostname.host,
logger,
});
server.resolvedUrls = await resolveServerUrls(httpServer, config.preview, config);
if (options.open) {
const path = typeof options.open === 'string' ? options.open : previewBase;
openBrowser(path.startsWith('http')
? path
: new URL(path, `${protocol}://${hostname.name}:${serverPort}`).href, true, logger);
}
return server;
}
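// A minimal programmatic-usage sketch of preview() (option values are illustrative):
//
//   import { preview } from 'vite'
//
//   const previewServer = await preview({
//     preview: { port: 8080, open: true },
//   })
//   previewServer.printUrls()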
var preview$1 = {
__proto__: null,
preview: preview,
resolvePreviewOptions: resolvePreviewOptions
};
function resolveSSROptions(ssr, preserveSymlinks, buildSsrCjsExternalHeuristics) {
ssr ?? (ssr = {});
const optimizeDeps = ssr.optimizeDeps ?? {};
const format = buildSsrCjsExternalHeuristics ? 'cjs' : 'esm';
const target = 'node';
return {
format,
target,
...ssr,
optimizeDeps: {
disabled: true,
...optimizeDeps,
esbuildOptions: {
preserveSymlinks,
...optimizeDeps.esbuildOptions,
},
},
};
}
const debug = createDebugger('vite:config');
const promisifiedRealpath = promisify$4(fs$l.realpath);
function defineConfig(config) {
return config;
}
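// defineConfig is an identity helper that only provides typing for the config object.
// A minimal vite.config.js sketch (option values are illustrative):
//
//   import { defineConfig } from 'vite'
//
//   export default defineConfig({
//     base: '/app/',
//     server: { port: 3000 },
//   })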
async function resolveConfig(inlineConfig, command, defaultMode = 'development', defaultNodeEnv = 'development') {
let config = inlineConfig;
let configFileDependencies = [];
let mode = inlineConfig.mode || defaultMode;
const isNodeEnvSet = !!process.env.NODE_ENV;
const packageCache = new Map();
// some dependencies, e.g. @vue/compiler-*, rely on NODE_ENV for getting
// production-specific behavior, so set it early on
if (!isNodeEnvSet) {
process.env.NODE_ENV = defaultNodeEnv;
}
const configEnv = {
mode,
command,
ssrBuild: !!config.build?.ssr,
};
let { configFile } = config;
if (configFile !== false) {
const loadResult = await loadConfigFromFile(configEnv, configFile, config.root, config.logLevel);
if (loadResult) {
config = mergeConfig(loadResult.config, config);
configFile = loadResult.path;
configFileDependencies = loadResult.dependencies;
}
}
// user config may provide an alternative mode. But --mode has a higher priority
mode = inlineConfig.mode || config.mode || mode;
configEnv.mode = mode;
const filterPlugin = (p) => {
if (!p) {
return false;
}
else if (!p.apply) {
return true;
}
else if (typeof p.apply === 'function') {
return p.apply({ ...config, mode }, configEnv);
}
else {
return p.apply === command;
}
};
// Some plugins aren't intended to work in the bundling of workers (they do post-processing at build time, for example).
// Plugins may also have cached state that could be corrupted by being used in these extra rollup calls.
// So we need to separate the worker plugins from the plugins that Vite needs to run.
const rawWorkerUserPlugins = (await asyncFlatten(config.worker?.plugins || [])).filter(filterPlugin);
// resolve plugins
const rawUserPlugins = (await asyncFlatten(config.plugins || [])).filter(filterPlugin);
const [prePlugins, normalPlugins, postPlugins] = sortUserPlugins(rawUserPlugins);
// run config hooks
const userPlugins = [...prePlugins, ...normalPlugins, ...postPlugins];
config = await runConfigHook(config, userPlugins, configEnv);
// If there are custom commonjsOptions, don't force optimized deps for this test
// even if the env var is set as it would interfere with the playground specs.
if (!config.build?.commonjsOptions &&
process.env.VITE_TEST_WITHOUT_PLUGIN_COMMONJS) {
config = mergeConfig(config, {
optimizeDeps: { disabled: false },
ssr: { optimizeDeps: { disabled: false } },
});
config.build ?? (config.build = {});
config.build.commonjsOptions = { include: [] };
}
// Define logger
const logger = createLogger(config.logLevel, {
allowClearScreen: config.clearScreen,
customLogger: config.customLogger,
});
// resolve root
const resolvedRoot = normalizePath$3(config.root ? path$o.resolve(config.root) : process.cwd());
const clientAlias = [
{
find: /^\/?@vite\/env/,
replacement: path$o.posix.join(FS_PREFIX, normalizePath$3(ENV_ENTRY)),
},
{
find: /^\/?@vite\/client/,
replacement: path$o.posix.join(FS_PREFIX, normalizePath$3(CLIENT_ENTRY)),
},
];
// resolve alias with internal client alias
const resolvedAlias = normalizeAlias(mergeAlias(clientAlias, config.resolve?.alias || []));
const resolveOptions = {
mainFields: config.resolve?.mainFields ?? DEFAULT_MAIN_FIELDS,
browserField: config.resolve?.browserField ?? true,
conditions: config.resolve?.conditions ?? [],
extensions: config.resolve?.extensions ?? DEFAULT_EXTENSIONS$1,
dedupe: config.resolve?.dedupe ?? [],
preserveSymlinks: config.resolve?.preserveSymlinks ?? false,
alias: resolvedAlias,
};
// load .env files
const envDir = config.envDir
? normalizePath$3(path$o.resolve(resolvedRoot, config.envDir))
: resolvedRoot;
const userEnv = inlineConfig.envFile !== false &&
loadEnv(mode, envDir, resolveEnvPrefix(config));
// Note it is possible for the user to have a custom mode, e.g. `staging`, where
// development-like behavior is expected. This is indicated by NODE_ENV=development
// loaded from `.env.staging` and set by us as VITE_USER_NODE_ENV
const userNodeEnv = process.env.VITE_USER_NODE_ENV;
if (!isNodeEnvSet && userNodeEnv) {
if (userNodeEnv === 'development') {
process.env.NODE_ENV = 'development';
}
else {
// NODE_ENV=production is not supported as it could break HMR in dev for frameworks like Vue
logger.warn(`NODE_ENV=${userNodeEnv} is not supported in the .env file. ` +
`Only NODE_ENV=development is supported to create a development build of your project. ` +
`If you need to set process.env.NODE_ENV, you can set it in the Vite config instead.`);
}
}
const isProduction = process.env.NODE_ENV === 'production';
// resolve public base url
const isBuild = command === 'build';
const relativeBaseShortcut = config.base === '' || config.base === './';
// During dev, we ignore the relative base and fall back to '/'
// For the SSR build, a relative base isn't possible by means
// of import.meta.url.
const resolvedBase = relativeBaseShortcut
? !isBuild || config.build?.ssr
? '/'
: './'
: resolveBaseUrl(config.base, isBuild, logger) ?? '/';
const resolvedBuildOptions = resolveBuildOptions(config.build, logger, resolvedRoot);
// resolve cache directory
const pkgDir = findNearestPackageData(resolvedRoot, packageCache)?.dir;
const cacheDir = normalizePath$3(config.cacheDir
? path$o.resolve(resolvedRoot, config.cacheDir)
: pkgDir
? path$o.join(pkgDir, `node_modules/.vite`)
: path$o.join(resolvedRoot, `.vite`));
const assetsFilter = config.assetsInclude &&
(!Array.isArray(config.assetsInclude) || config.assetsInclude.length)
? createFilter(config.assetsInclude)
: () => false;
// create an internal resolver to be used in special scenarios, e.g.
// optimizer & handling css @imports
const createResolver = (options) => {
let aliasContainer;
let resolverContainer;
return async (id, importer, aliasOnly, ssr) => {
let container;
if (aliasOnly) {
container =
aliasContainer ||
(aliasContainer = await createPluginContainer({
...resolved,
plugins: [alias$1({ entries: resolved.resolve.alias })],
}));
}
else {
container =
resolverContainer ||
(resolverContainer = await createPluginContainer({
...resolved,
plugins: [
alias$1({ entries: resolved.resolve.alias }),
resolvePlugin({
...resolved.resolve,
root: resolvedRoot,
isProduction,
isBuild: command === 'build',
ssrConfig: resolved.ssr,
asSrc: true,
preferRelative: false,
tryIndex: true,
...options,
idOnly: true,
}),
],
}));
}
return (await container.resolveId(id, importer, {
ssr,
scan: options?.scan,
}))?.id;
};
};
const { publicDir } = config;
const resolvedPublicDir = publicDir !== false && publicDir !== ''
? path$o.resolve(resolvedRoot, typeof publicDir === 'string' ? publicDir : 'public')
: '';
const server = resolveServerOptions(resolvedRoot, config.server, logger);
const ssr = resolveSSROptions(config.ssr, resolveOptions.preserveSymlinks, config.legacy?.buildSsrCjsExternalHeuristics);
const middlewareMode = config?.server?.middlewareMode;
const optimizeDeps = config.optimizeDeps || {};
const BASE_URL = resolvedBase;
// resolve worker
let workerConfig = mergeConfig({}, config);
const [workerPrePlugins, workerNormalPlugins, workerPostPlugins] = sortUserPlugins(rawWorkerUserPlugins);
// run config hooks
const workerUserPlugins = [
...workerPrePlugins,
...workerNormalPlugins,
...workerPostPlugins,
];
workerConfig = await runConfigHook(workerConfig, workerUserPlugins, configEnv);
const resolvedWorkerOptions = {
format: workerConfig.worker?.format || 'iife',
plugins: [],
rollupOptions: workerConfig.worker?.rollupOptions || {},
getSortedPlugins: undefined,
getSortedPluginHooks: undefined,
};
const resolvedConfig = {
configFile: configFile ? normalizePath$3(configFile) : undefined,
configFileDependencies: configFileDependencies.map((name) => normalizePath$3(path$o.resolve(name))),
inlineConfig,
root: resolvedRoot,
base: withTrailingSlash(resolvedBase),
rawBase: resolvedBase,
resolve: resolveOptions,
publicDir: resolvedPublicDir,
cacheDir,
command,
mode,
ssr,
isWorker: false,
mainConfig: null,
isProduction,
plugins: userPlugins,
css: resolveCSSOptions(config.css),
esbuild: config.esbuild === false
? false
: {
jsxDev: !isProduction,
...config.esbuild,
},
server,
build: resolvedBuildOptions,
preview: resolvePreviewOptions(config.preview, server),
envDir,
env: {
...userEnv,
BASE_URL,
MODE: mode,
DEV: !isProduction,
PROD: isProduction,
},
assetsInclude(file) {
return DEFAULT_ASSETS_RE.test(file) || assetsFilter(file);
},
logger,
packageCache,
createResolver,
optimizeDeps: {
disabled: 'build',
...optimizeDeps,
esbuildOptions: {
preserveSymlinks: resolveOptions.preserveSymlinks,
...optimizeDeps.esbuildOptions,
},
},
worker: resolvedWorkerOptions,
appType: config.appType ?? (middlewareMode === 'ssr' ? 'custom' : 'spa'),
experimental: {
importGlobRestoreExtension: false,
hmrPartialAccept: false,
...config.experimental,
},
getSortedPlugins: undefined,
getSortedPluginHooks: undefined,
};
const resolved = {
...config,
...resolvedConfig,
};
resolved.plugins = await resolvePlugins(resolved, prePlugins, normalPlugins, postPlugins);
Object.assign(resolved, createPluginHookUtils(resolved.plugins));
const workerResolved = {
...workerConfig,
...resolvedConfig,
isWorker: true,
mainConfig: resolved,
};
resolvedConfig.worker.plugins = await resolvePlugins(workerResolved, workerPrePlugins, workerNormalPlugins, workerPostPlugins);
Object.assign(resolvedConfig.worker, createPluginHookUtils(resolvedConfig.worker.plugins));
// call configResolved hooks
await Promise.all([
...resolved
.getSortedPluginHooks('configResolved')
.map((hook) => hook(resolved)),
...resolvedConfig.worker
.getSortedPluginHooks('configResolved')
.map((hook) => hook(workerResolved)),
]);
// validate config
if (middlewareMode === 'ssr') {
logger.warn(colors$1.yellow(`Setting server.middlewareMode to 'ssr' is deprecated, set server.middlewareMode to \`true\`${config.appType === 'custom' ? '' : ` and appType to 'custom'`} instead`));
}
if (middlewareMode === 'html') {
logger.warn(colors$1.yellow(`Setting server.middlewareMode to 'html' is deprecated, set server.middlewareMode to \`true\` instead`));
}
if (config.server?.force &&
!isBuild &&
config.optimizeDeps?.force === undefined) {
resolved.optimizeDeps.force = true;
logger.warn(colors$1.yellow(`server.force is deprecated, use optimizeDeps.force instead`));
}
debug?.(`using resolved config: %O`, {
...resolved,
plugins: resolved.plugins.map((p) => p.name),
worker: {
...resolved.worker,
plugins: resolved.worker.plugins.map((p) => p.name),
},
});
if (config.build?.terserOptions && config.build.minify !== 'terser') {
logger.warn(colors$1.yellow(`build.terserOptions is specified but build.minify is not set to use Terser. ` +
`Note Vite now defaults to use esbuild for minification. If you still ` +
`prefer Terser, set build.minify to "terser".`));
}
// Check if all assetFileNames have the same reference.
// If not, display a warning for the user.
const outputOption = config.build?.rollupOptions?.output ?? [];
// Use isArray to narrow its type to array
if (Array.isArray(outputOption)) {
const assetFileNamesList = outputOption.map((output) => output.assetFileNames);
if (assetFileNamesList.length > 1) {
const firstAssetFileNames = assetFileNamesList[0];
const hasDifferentReference = assetFileNamesList.some((assetFileNames) => assetFileNames !== firstAssetFileNames);
if (hasDifferentReference) {
resolved.logger.warn(colors$1.yellow(`
assetFileNames isn't equal for every build.rollupOptions.output. A single pattern across all outputs is supported by Vite.
`));
}
}
}
// Warn about removal of experimental features
if (config.legacy?.buildSsrCjsExternalHeuristics ||
config.ssr?.format === 'cjs') {
resolved.logger.warn(colors$1.yellow(`
(!) Experimental legacy.buildSsrCjsExternalHeuristics and ssr.format: 'cjs' are going to be removed in Vite 5.
Find more information and give feedback at https://github.com/vitejs/vite/discussions/13816.
`));
}
return resolved;
}
/**
* Resolve the base url. Note that some users use Vite to build for non-web targets like
* Electron, or expect to deploy at a path other than the domain root.
*/
function resolveBaseUrl(base = '/', isBuild, logger) {
if (base[0] === '.') {
logger.warn(colors$1.yellow(colors$1.bold(`(!) invalid "base" option: ${base}. The value can only be an absolute ` +
`URL, ./, or an empty string.`)));
return '/';
}
// external URL flag
const isExternal = isExternalUrl(base);
// no leading slash warn
if (!isExternal && base[0] !== '/') {
logger.warn(colors$1.yellow(colors$1.bold(`(!) "base" option should start with a slash.`)));
}
// parse base when command is serve or base is not External URL
if (!isBuild || !isExternal) {
base = new URL(base, 'http://vitejs.dev').pathname;
// ensure leading slash
if (base[0] !== '/') {
base = '/' + base;
}
}
return base;
}
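// Illustrative outcomes of the function above (paths/URLs are hypothetical):
//   resolveBaseUrl('./sub/', isBuild, logger)                         → warns, returns '/'
//   resolveBaseUrl('foo/', isBuild, logger)                           → warns about the missing leading slash, returns '/foo/'
//   resolveBaseUrl('https://cdn.example.com/assets/', true, logger)   → kept as-is for the build
//   resolveBaseUrl('https://cdn.example.com/assets/', false, logger)  → '/assets/' during dev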
function sortUserPlugins(plugins) {
const prePlugins = [];
const postPlugins = [];
const normalPlugins = [];
if (plugins) {
plugins.flat().forEach((p) => {
if (p.enforce === 'pre')
prePlugins.push(p);
else if (p.enforce === 'post')
postPlugins.push(p);
else
normalPlugins.push(p);
});
}
return [prePlugins, normalPlugins, postPlugins];
}
async function loadConfigFromFile(configEnv, configFile, configRoot = process.cwd(), logLevel) {
const start = performance.now();
const getTime = () => `${(performance.now() - start).toFixed(2)}ms`;
let resolvedPath;
if (configFile) {
// explicit config path is always resolved from cwd
resolvedPath = path$o.resolve(configFile);
}
else {
// implicit config file loaded from inline root (if present)
// otherwise from cwd
for (const filename of DEFAULT_CONFIG_FILES) {
const filePath = path$o.resolve(configRoot, filename);
if (!fs$l.existsSync(filePath))
continue;
resolvedPath = filePath;
break;
}
}
if (!resolvedPath) {
debug?.('no config file found.');
return null;
}
let isESM = false;
if (/\.m[jt]s$/.test(resolvedPath)) {
isESM = true;
}
else if (/\.c[jt]s$/.test(resolvedPath)) {
isESM = false;
}
else {
// check package.json for type: "module" and set `isESM` to true
try {
const pkg = lookupFile(configRoot, ['package.json']);
isESM =
!!pkg && JSON.parse(fs$l.readFileSync(pkg, 'utf-8')).type === 'module';
}
catch (e) { }
}
try {
const bundled = await bundleConfigFile(resolvedPath, isESM);
const userConfig = await loadConfigFromBundledFile(resolvedPath, bundled.code, isESM);
debug?.(`bundled config file loaded in ${getTime()}`);
const config = await (typeof userConfig === 'function'
? userConfig(configEnv)
: userConfig);
if (!isObject$2(config)) {
throw new Error(`config must export or return an object.`);
}
return {
path: normalizePath$3(resolvedPath),
config,
dependencies: bundled.dependencies,
};
}
catch (e) {
createLogger(logLevel).error(colors$1.red(`failed to load config from ${resolvedPath}`), { error: e });
throw e;
}
}
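// On success this resolves to roughly the following shape (paths are illustrative):
//   {
//     path: '/project/vite.config.ts',
//     config: { /* the user config object, or the result of the exported config function */ },
//     dependencies: ['vite.config.ts', ...],
//   }
// and to `null` when no config file could be found.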
async function bundleConfigFile(fileName, isESM) {
const dirnameVarName = '__vite_injected_original_dirname';
const filenameVarName = '__vite_injected_original_filename';
const importMetaUrlVarName = '__vite_injected_original_import_meta_url';
const result = await build$3({
absWorkingDir: process.cwd(),
entryPoints: [fileName],
outfile: 'out.js',
write: false,
target: ['node14.18', 'node16'],
platform: 'node',
bundle: true,
format: isESM ? 'esm' : 'cjs',
mainFields: ['main'],
sourcemap: 'inline',
metafile: true,
define: {
__dirname: dirnameVarName,
__filename: filenameVarName,
'import.meta.url': importMetaUrlVarName,
},
plugins: [
{
name: 'externalize-deps',
setup(build) {
const packageCache = new Map();
const resolveByViteResolver = (id, importer, isRequire) => {
return tryNodeResolve(id, importer, {
root: path$o.dirname(fileName),
isBuild: true,
isProduction: true,
preferRelative: false,
tryIndex: true,
mainFields: [],
browserField: false,
conditions: [],
overrideConditions: ['node'],
dedupe: [],
extensions: DEFAULT_EXTENSIONS$1,
preserveSymlinks: false,
packageCache,
isRequire,
}, false)?.id;
};
const isESMFile = (id) => {
if (id.endsWith('.mjs'))
return true;
if (id.endsWith('.cjs'))
return false;
const nearestPackageJson = findNearestPackageData(path$o.dirname(id), packageCache);
return (!!nearestPackageJson && nearestPackageJson.data.type === 'module');
};
// externalize bare imports
build.onResolve({ filter: /^[^.].*/ }, async ({ path: id, importer, kind }) => {
if (kind === 'entry-point' ||
path$o.isAbsolute(id) ||
isNodeBuiltin(id)) {
return;
}
// With the `isNodeBuiltin` check above, this check captures if the builtin is a
// non-node built-in, which esbuild doesn't know how to handle. In that case, we
// externalize it so the non-node runtime handles it instead.
if (isBuiltin(id)) {
return { external: true };
}
const isImport = isESM || kind === 'dynamic-import';
let idFsPath;
try {
idFsPath = resolveByViteResolver(id, importer, !isImport);
}
catch (e) {
if (!isImport) {
let canResolveWithImport = false;
try {
canResolveWithImport = !!resolveByViteResolver(id, importer, false);
}
catch { }
if (canResolveWithImport) {
throw new Error(`Failed to resolve ${JSON.stringify(id)}. This package is ESM only but it was tried to load by \`require\`. See http://vitejs.dev/guide/troubleshooting.html#this-package-is-esm-only for more details.`);
}
}
throw e;
}
if (idFsPath && isImport) {
idFsPath = pathToFileURL(idFsPath).href;
}
if (idFsPath && !isImport && isESMFile(idFsPath)) {
throw new Error(`${JSON.stringify(id)} resolved to an ESM file. ESM file cannot be loaded by \`require\`. See http://vitejs.dev/guide/troubleshooting.html#this-package-is-esm-only for more details.`);
}
return {
path: idFsPath,
external: true,
};
});
},
},
{
name: 'inject-file-scope-variables',
setup(build) {
build.onLoad({ filter: /\.[cm]?[jt]s$/ }, async (args) => {
const contents = await fsp.readFile(args.path, 'utf8');
const injectValues = `const ${dirnameVarName} = ${JSON.stringify(path$o.dirname(args.path))};` +
`const ${filenameVarName} = ${JSON.stringify(args.path)};` +
`const ${importMetaUrlVarName} = ${JSON.stringify(pathToFileURL(args.path).href)};`;
return {
loader: args.path.endsWith('ts') ? 'ts' : 'js',
contents: injectValues + contents,
};
});
},
},
],
});
const { text } = result.outputFiles[0];
return {
code: text,
dependencies: result.metafile ? Object.keys(result.metafile.inputs) : [],
};
}
const _require = createRequire$1(import.meta.url);
async function loadConfigFromBundledFile(fileName, bundledCode, isESM) {
// for esm, before we can register loaders without requiring users to run node
// with --experimental-loader themselves, we have to do a hack here:
// write it to disk, load it with native Node ESM, then delete the file.
if (isESM) {
const fileBase = `${fileName}.timestamp-${Date.now()}-${Math.random()
.toString(16)
.slice(2)}`;
const fileNameTmp = `${fileBase}.mjs`;
const fileUrl = `${pathToFileURL(fileBase)}.mjs`;
await fsp.writeFile(fileNameTmp, bundledCode);
try {
return (await dynamicImport(fileUrl)).default;
}
finally {
fs$l.unlink(fileNameTmp, () => { }); // Ignore errors
}
}
// for cjs, we can register a custom loader via `_require.extensions`
else {
const extension = path$o.extname(fileName);
// We don't use fsp.realpath() here because it has the same behaviour as
// fs.realpath.native. On some Windows systems, it returns uppercase volume
// letters (e.g. "C:\") while the Node.js loader uses lowercase volume letters.
// See https://github.com/vitejs/vite/issues/12923
const realFileName = await promisifiedRealpath(fileName);
const loaderExt = extension in _require.extensions ? extension : '.js';
const defaultLoader = _require.extensions[loaderExt];
_require.extensions[loaderExt] = (module, filename) => {
if (filename === realFileName) {
module._compile(bundledCode, filename);
}
else {
defaultLoader(module, filename);
}
};
// clear cache in case of server restart
delete _require.cache[_require.resolve(fileName)];
const raw = _require(fileName);
_require.extensions[loaderExt] = defaultLoader;
return raw.__esModule ? raw.default : raw;
}
}
async function runConfigHook(config, plugins, configEnv) {
let conf = config;
for (const p of getSortedPluginsByHook('config', plugins)) {
const hook = p.config;
const handler = hook && 'handler' in hook ? hook.handler : hook;
if (handler) {
const res = await handler(conf, configEnv);
if (res) {
conf = mergeConfig(conf, res);
}
}
}
return conf;
}
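// A sketch of a plugin `config` hook that runConfigHook would invoke (plugin name and define
// key are hypothetical); the returned partial config is merged into the user config via mergeConfig:
//
//   const examplePlugin = () => ({
//     name: 'example-define',
//     config(userConfig, { command }) {
//       if (command === 'serve') {
//         return { define: { __DEV_ONLY__: 'true' } }
//       }
//     },
//   })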
function getDepOptimizationConfig(config, ssr) {
return ssr ? config.ssr.optimizeDeps : config.optimizeDeps;
}
function isDepsOptimizerEnabled(config, ssr) {
const { command } = config;
const { disabled } = getDepOptimizationConfig(config, ssr);
return !(disabled === true ||
(command === 'build' && disabled === 'build') ||
(command === 'serve' && disabled === 'dev'));
}
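// How the resolved `optimizeDeps.disabled` value maps to the result above:
//   disabled: true            → optimizer off for both dev and build
//   disabled: 'build'         → on during dev, off during build (the default resolved in resolveConfig)
//   disabled: 'dev'           → off during dev, on during build
//   disabled: false/undefined → on for both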
export { loadEnv as A, resolveEnvPrefix as B, colors$1 as C, bindShortcuts as D, getDefaultExportFromCjs as E, commonjsGlobal as F, index$1 as G, build$1 as H, index as I, preview$1 as J, preprocessCSS as a, build as b, createServer as c, resolvePackageData as d, buildErrorMessage as e, formatPostcssSourceMap as f, defineConfig as g, resolveConfig as h, isInNodeModules as i, resolveBaseUrl as j, getDepOptimizationConfig as k, loadConfigFromFile as l, isDepsOptimizerEnabled as m, normalizePath$3 as n, optimizeDeps as o, preview as p, mergeConfig as q, resolvePackageEntry as r, sortUserPlugins as s, transformWithEsbuild as t, mergeAlias as u, createFilter as v, send$2 as w, createLogger as x, searchForWorkspaceRoot as y, isFileServingAllowed as z };