feat: docker compose maybe

main
Ashley Flanagan 10 months ago
parent 180b261e40
commit b625ccd8d6

@ -0,0 +1,17 @@
FROM node:19-buster-slim
# Install node dependencies
WORKDIR /svelte
COPY ./package.json .
COPY ./svelte.config.js .
RUN npm install
# Copy the rest of the source into the container
COPY . .
# Build for production
RUN npm run build
# Expose the app port and launch the production build
EXPOSE 3000
CMD ["node", "build"]
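
Outside Compose, the image can also be built and run on its own. A minimal sketch, assuming the image is tagged `print-notifications` (an arbitrary name) and that the app reads its MongoDB connection string from `DB_URI` as the Compose file below suggests; `host.docker.internal` only resolves on Docker Desktop, so adjust the host for your setup:

```sh
# Build the image from the Dockerfile above
docker build -t print-notifications .

# Run it, publishing the app's port 3000 on host port 4445 (matching the Compose file)
docker run --rm -p 4445:3000 \
  -e DB_URI='mongodb://user:pass@host.docker.internal:27017' \
  print-notifications
```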

@ -0,0 +1,25 @@
version: '3.9'
services:
  print-notifications:
    build:
      context: .
    container_name: print-notifications
    ports:
      - '4445:3000'
    depends_on:
      - mongo
    environment:
      DB_URI: mongodb://user:pass@mongo:27017
    restart: unless-stopped
  mongo:
    image: mongo:4.4.6
    restart: always
    environment:
      MONGO_INITDB_ROOT_USERNAME: user
      MONGO_INITDB_ROOT_PASSWORD: pass
      #MONGO_INITDB_DATABASE: win11updates
    volumes:
      - mongo-data:/data/db
volumes:
  mongo-data:
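
With both files in place, the whole stack can be brought up with Docker Compose. A rough sketch of the intended workflow, assuming Docker Compose v2 (`docker compose`) and the service and volume names defined above:

```sh
# Build the image and start the app together with MongoDB
docker compose up --build -d

# Tail the app's logs; it is published on host port 4445
docker compose logs -f print-notifications

# Stop everything (add -v to also remove the mongo-data volume)
docker compose down
```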

1
node_modules/.bin/acorn generated vendored

@ -0,0 +1 @@
../acorn/bin/acorn

1
node_modules/.bin/autoprefixer generated vendored

@ -0,0 +1 @@
../autoprefixer/bin/autoprefixer

1
node_modules/.bin/browserslist generated vendored

@ -0,0 +1 @@
../browserslist/cli.js

1
node_modules/.bin/cssesc generated vendored

@ -0,0 +1 @@
../cssesc/bin/cssesc

1
node_modules/.bin/esbuild generated vendored

@ -0,0 +1 @@
../esbuild/bin/esbuild

1
node_modules/.bin/eslint generated vendored

@ -0,0 +1 @@
../eslint/bin/eslint.js

1
node_modules/.bin/eslint-config-prettier generated vendored

@ -0,0 +1 @@
../eslint-config-prettier/bin/cli.js

1
node_modules/.bin/jiti generated vendored

@ -0,0 +1 @@
../jiti/bin/jiti.js

1
node_modules/.bin/js-yaml generated vendored

@ -0,0 +1 @@
../js-yaml/bin/js-yaml.js

1
node_modules/.bin/mkdirp generated vendored

@ -0,0 +1 @@
../mkdirp/bin/cmd.js

1
node_modules/.bin/nanoid generated vendored

@ -0,0 +1 @@
../nanoid/bin/nanoid.cjs

1
node_modules/.bin/node-which generated vendored

@ -0,0 +1 @@
../which/bin/node-which

1
node_modules/.bin/prettier generated vendored

@ -0,0 +1 @@
../prettier/bin-prettier.js

1
node_modules/.bin/purgecss generated vendored

@ -0,0 +1 @@
../purgecss/bin/purgecss.js

1
node_modules/.bin/resolve generated vendored

@ -0,0 +1 @@
../resolve/bin/resolve

1
node_modules/.bin/rimraf generated vendored

@ -0,0 +1 @@
../rimraf/bin.js

1
node_modules/.bin/rollup generated vendored

@ -0,0 +1 @@
../rollup/dist/bin/rollup

1
node_modules/.bin/semver generated vendored

@ -0,0 +1 @@
../semver/bin/semver.js

1
node_modules/.bin/sorcery generated vendored

@ -0,0 +1 @@
../sorcery/bin/sorcery

1
node_modules/.bin/sucrase generated vendored

@ -0,0 +1 @@
../sucrase/bin/sucrase

1
node_modules/.bin/sucrase-node generated vendored

@ -0,0 +1 @@
../sucrase/bin/sucrase-node

1
node_modules/.bin/svelte-check generated vendored

@ -0,0 +1 @@
../svelte-check/bin/svelte-check

1
node_modules/.bin/svelte-kit generated vendored

@ -0,0 +1 @@
../@sveltejs/kit/svelte-kit.js

1
node_modules/.bin/tailwind generated vendored

@ -0,0 +1 @@
../tailwindcss/lib/cli.js

1
node_modules/.bin/tailwindcss generated vendored

@ -0,0 +1 @@
../tailwindcss/lib/cli.js

1
node_modules/.bin/tsc generated vendored

@ -0,0 +1 @@
../typescript/bin/tsc

1
node_modules/.bin/tsserver generated vendored

@ -0,0 +1 @@
../typescript/bin/tsserver

1
node_modules/.bin/update-browserslist-db generated vendored

@ -0,0 +1 @@
../update-browserslist-db/cli.js

1
node_modules/.bin/vite generated vendored

@ -0,0 +1 @@
../vite/bin/vite.js

3429
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large

@ -0,0 +1,3 @@
{
"type": "module"
}

@ -0,0 +1,3 @@
{
"type": "module"
}

@ -0,0 +1,3 @@
{
"type": "module"
}

@ -0,0 +1,3 @@
{
"type": "module"
}

@ -0,0 +1,3 @@
{
"type": "module"
}

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014-2016, Jon Schlinkert
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

@ -0,0 +1,182 @@
# word-wrap [![NPM version](https://img.shields.io/npm/v/word-wrap.svg?style=flat)](https://www.npmjs.com/package/word-wrap) [![NPM monthly downloads](https://img.shields.io/npm/dm/word-wrap.svg?style=flat)](https://npmjs.org/package/word-wrap) [![NPM total downloads](https://img.shields.io/npm/dt/word-wrap.svg?style=flat)](https://npmjs.org/package/word-wrap) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/word-wrap.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/word-wrap)
> Wrap words to a specified length.
## Install
Install with [npm](https://www.npmjs.com/):
```sh
$ npm install --save word-wrap
```
## Usage
```js
var wrap = require('word-wrap');
wrap('Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.');
```
Results in:
```
Lorem ipsum dolor sit amet, consectetur adipiscing
elit, sed do eiusmod tempor incididunt ut labore
et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut
aliquip ex ea commodo consequat.
```
## Options
![image](https://cloud.githubusercontent.com/assets/383994/6543728/7a381c08-c4f6-11e4-8b7d-b6ba197569c9.png)
### options.width
Type: `Number`
Default: `50`
The width of the text before wrapping to a new line.
**Example:**
```js
wrap(str, {width: 60});
```
### options.indent
Type: `String`
Default: `` (none)
The string to use at the beginning of each line.
**Example:**
```js
wrap(str, {indent: ' '});
```
### options.newline
Type: `String`
Default: `\n`
The string to use at the end of each line.
**Example:**
```js
wrap(str, {newline: '\n\n'});
```
### options.escape
Type: `function`
Default: `function(str){return str;}`
An escape function to run on each line after splitting them.
**Example:**
```js
var xmlescape = require('xml-escape');
wrap(str, {
escape: function(string){
return xmlescape(string);
}
});
```
### options.trim
Type: `Boolean`
Default: `false`
Trim trailing whitespace from the returned string. This option is included since `.trim()` would also strip the leading indentation from the first line.
**Example:**
```js
wrap(str, {trim: true});
```
### options.cut
Type: `Boolean`
Default: `false`
Break a word between any two letters when the word is longer than the specified width.
**Example:**
```js
wrap(str, {cut: true});
```
## About
### Related projects
* [common-words](https://www.npmjs.com/package/common-words): Updated list (JSON) of the 100 most common words in the English language. Useful for… [more](https://github.com/jonschlinkert/common-words) | [homepage](https://github.com/jonschlinkert/common-words "Updated list (JSON) of the 100 most common words in the English language. Useful for excluding these words from arrays.")
* [shuffle-words](https://www.npmjs.com/package/shuffle-words): Shuffle the words in a string and optionally the letters in each word using the… [more](https://github.com/jonschlinkert/shuffle-words) | [homepage](https://github.com/jonschlinkert/shuffle-words "Shuffle the words in a string and optionally the letters in each word using the Fisher-Yates algorithm. Useful for creating test fixtures, benchmarking samples, etc.")
* [unique-words](https://www.npmjs.com/package/unique-words): Return the unique words in a string or array. | [homepage](https://github.com/jonschlinkert/unique-words "Return the unique words in a string or array.")
* [wordcount](https://www.npmjs.com/package/wordcount): Count the words in a string. Support for english, CJK and Cyrillic. | [homepage](https://github.com/jonschlinkert/wordcount "Count the words in a string. Support for english, CJK and Cyrillic.")
### Contributing
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
### Contributors
| **Commits** | **Contributor** |
| --- | --- |
| 43 | [jonschlinkert](https://github.com/jonschlinkert) |
| 2 | [lordvlad](https://github.com/lordvlad) |
| 2 | [hildjj](https://github.com/hildjj) |
| 1 | [danilosampaio](https://github.com/danilosampaio) |
| 1 | [2fd](https://github.com/2fd) |
| 1 | [toddself](https://github.com/toddself) |
| 1 | [wolfgang42](https://github.com/wolfgang42) |
| 1 | [zachhale](https://github.com/zachhale) |
### Building docs
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
To generate the readme, run the following command:
```sh
$ npm install -g verbose/verb#dev verb-generate-readme && verb
```
### Running tests
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
```sh
$ npm install && npm test
```
### Author
**Jon Schlinkert**
* [github/jonschlinkert](https://github.com/jonschlinkert)
* [twitter/jonschlinkert](https://twitter.com/jonschlinkert)
### License
Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT License](LICENSE).
***
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 02, 2017._

@ -0,0 +1,50 @@
/**
 * Wrap words to a specified length.
 */
export = wrap;

declare function wrap(str: string, options?: wrap.IOptions): string;

declare namespace wrap {
  export interface IOptions {
    /**
     * The width of the text before wrapping to a new line.
     * @default ´50´
     */
    width?: number;

    /**
     * The string to use at the beginning of each line.
     * @default ´´ (none)
     */
    indent?: string;

    /**
     * The string to use at the end of each line.
     * @default ´\n´
     */
    newline?: string;

    /**
     * An escape function to run on each line after splitting them.
     * @default (str: string) => string;
     */
    escape?: (str: string) => string;

    /**
     * Trim trailing whitespace from the returned string.
     * This option is included since .trim() would also strip
     * the leading indentation from the first line.
     * @default true
     */
    trim?: boolean;

    /**
     * Break a word between any two letters when the word is longer
     * than the specified width.
     * @default false
     */
    cut?: boolean;
  }
}

@ -0,0 +1,52 @@
/*!
 * word-wrap <https://github.com/jonschlinkert/word-wrap>
 *
 * Copyright (c) 2014-2023, Jon Schlinkert.
 * Released under the MIT License.
 */

function trimTabAndSpaces(str) {
  const lines = str.split('\n');
  const trimmedLines = lines.map((line) => line.trimEnd());
  return trimmedLines.join('\n');
}

module.exports = function(str, options) {
  options = options || {};

  if (str == null) {
    return str;
  }

  var width = options.width || 50;
  var indent = (typeof options.indent === 'string')
    ? options.indent
    : '';

  var newline = options.newline || '\n' + indent;
  var escape = typeof options.escape === 'function'
    ? options.escape
    : identity;

  var regexString = '.{1,' + width + '}';
  if (options.cut !== true) {
    regexString += '([\\s\u200B]+|$)|[^\\s\u200B]+?([\\s\u200B]+|$)';
  }

  var re = new RegExp(regexString, 'g');
  var lines = str.match(re) || [];
  var result = indent + lines.map(function(line) {
    if (line.slice(-1) === '\n') {
      line = line.slice(0, line.length - 1);
    }
    return escape(line);
  }).join(newline);

  if (options.trim === true) {
    result = trimTabAndSpaces(result);
  }
  return result;
};

function identity(str) {
  return str;
}

@ -0,0 +1,81 @@
{
"name": "@aashutoshrathi/word-wrap",
"description": "Wrap words to a specified length.",
"version": "1.2.6",
"homepage": "https://github.com/aashutoshrathi/word-wrap",
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
"contributors": [
"Aashutosh Rathi <aashutoshrathi@gmail.com>",
"Danilo Sampaio <danilo.sampaio@gmail.com> (localhost:8080)",
"Fede Ramirez <i@2fd.me> (https://2fd.github.io)",
"Joe Hildebrand <joe-github@cursive.net> (https://twitter.com/hildjj)",
"Jon Schlinkert <jon.schlinkert@sellside.com> (http://twitter.com/jonschlinkert)",
"Todd Kennedy (https://tck.io)",
"Waldemar Reusch (https://github.com/lordvlad)",
"Wolfgang Faust (http://www.linestarve.com)",
"Zach Hale <zachhale@gmail.com> (http://zachhale.com)"
],
"repository": {
"type": "git",
"url": "git+https://github.com/aashutoshrathi/word-wrap.git"
},
"bugs": {
"url": "https://github.com/aashutoshrathi/word-wrap/issues"
},
"license": "MIT",
"files": [
"index.js",
"index.d.ts"
],
"main": "index.js",
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "mocha"
},
"devDependencies": {
"gulp-format-md": "^0.1.11",
"mocha": "^10.2.0"
},
"keywords": [
"break",
"carriage",
"line",
"new-line",
"newline",
"return",
"soft",
"text",
"word",
"word-wrap",
"words",
"wrap"
],
"typings": "index.d.ts",
"verb": {
"toc": false,
"layout": "default",
"tasks": [
"readme"
],
"plugins": [
"gulp-format-md"
],
"lint": {
"reflinks": true
},
"related": {
"list": [
"common-words",
"shuffle-words",
"unique-words",
"wordcount"
]
},
"reflinks": [
"verb",
"verb-generate-readme"
]
}
}

@ -0,0 +1,128 @@
declare namespace QuickLRU {
interface Options<KeyType, ValueType> {
/**
The maximum number of milliseconds an item should remain in the cache.
@default Infinity
By default, `maxAge` will be `Infinity`, which means that items will never expire.
Lazy expiration upon the next write or read call.
Individual expiration of an item can be specified by the `set(key, value, maxAge)` method.
*/
readonly maxAge?: number;
/**
The maximum number of items before evicting the least recently used items.
*/
readonly maxSize: number;
/**
Called right before an item is evicted from the cache.
Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`).
*/
onEviction?: (key: KeyType, value: ValueType) => void;
}
}
declare class QuickLRU<KeyType, ValueType>
implements Iterable<[KeyType, ValueType]> {
/**
The stored item count.
*/
readonly size: number;
/**
Simple ["Least Recently Used" (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29).
The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop.
@example
```
import QuickLRU = require('quick-lru');
const lru = new QuickLRU({maxSize: 1000});
lru.set('🦄', '🌈');
lru.has('🦄');
//=> true
lru.get('🦄');
//=> '🌈'
```
*/
constructor(options: QuickLRU.Options<KeyType, ValueType>);
[Symbol.iterator](): IterableIterator<[KeyType, ValueType]>;
/**
Set an item. Returns the instance.
Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value will be used in case it is specified in the constructor, otherwise the item will never expire.
@returns The list instance.
*/
set(key: KeyType, value: ValueType, options?: {maxAge?: number}): this;
/**
Get an item.
@returns The stored item or `undefined`.
*/
get(key: KeyType): ValueType | undefined;
/**
Check if an item exists.
*/
has(key: KeyType): boolean;
/**
Get an item without marking it as recently used.
@returns The stored item or `undefined`.
*/
peek(key: KeyType): ValueType | undefined;
/**
Delete an item.
@returns `true` if the item is removed or `false` if the item doesn't exist.
*/
delete(key: KeyType): boolean;
/**
Delete all items.
*/
clear(): void;
/**
Update the `maxSize` in-place, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee.
Useful for on-the-fly tuning of cache sizes in live systems.
*/
resize(maxSize: number): void;
/**
Iterable for all the keys.
*/
keys(): IterableIterator<KeyType>;
/**
Iterable for all the values.
*/
values(): IterableIterator<ValueType>;
/**
Iterable for all entries, starting with the oldest (ascending in recency).
*/
entriesAscending(): IterableIterator<[KeyType, ValueType]>;
/**
Iterable for all entries, starting with the newest (descending in recency).
*/
entriesDescending(): IterableIterator<[KeyType, ValueType]>;
}
export = QuickLRU;

@ -0,0 +1,263 @@
'use strict';
class QuickLRU {
constructor(options = {}) {
if (!(options.maxSize && options.maxSize > 0)) {
throw new TypeError('`maxSize` must be a number greater than 0');
}
if (typeof options.maxAge === 'number' && options.maxAge === 0) {
throw new TypeError('`maxAge` must be a number greater than 0');
}
this.maxSize = options.maxSize;
this.maxAge = options.maxAge || Infinity;
this.onEviction = options.onEviction;
this.cache = new Map();
this.oldCache = new Map();
this._size = 0;
}
_emitEvictions(cache) {
if (typeof this.onEviction !== 'function') {
return;
}
for (const [key, item] of cache) {
this.onEviction(key, item.value);
}
}
_deleteIfExpired(key, item) {
if (typeof item.expiry === 'number' && item.expiry <= Date.now()) {
if (typeof this.onEviction === 'function') {
this.onEviction(key, item.value);
}
return this.delete(key);
}
return false;
}
_getOrDeleteIfExpired(key, item) {
const deleted = this._deleteIfExpired(key, item);
if (deleted === false) {
return item.value;
}
}
_getItemValue(key, item) {
return item.expiry ? this._getOrDeleteIfExpired(key, item) : item.value;
}
_peek(key, cache) {
const item = cache.get(key);
return this._getItemValue(key, item);
}
_set(key, value) {
this.cache.set(key, value);
this._size++;
if (this._size >= this.maxSize) {
this._size = 0;
this._emitEvictions(this.oldCache);
this.oldCache = this.cache;
this.cache = new Map();
}
}
_moveToRecent(key, item) {
this.oldCache.delete(key);
this._set(key, item);
}
* _entriesAscending() {
for (const item of this.oldCache) {
const [key, value] = item;
if (!this.cache.has(key)) {
const deleted = this._deleteIfExpired(key, value);
if (deleted === false) {
yield item;
}
}
}
for (const item of this.cache) {
const [key, value] = item;
const deleted = this._deleteIfExpired(key, value);
if (deleted === false) {
yield item;
}
}
}
get(key) {
if (this.cache.has(key)) {
const item = this.cache.get(key);
return this._getItemValue(key, item);
}
if (this.oldCache.has(key)) {
const item = this.oldCache.get(key);
if (this._deleteIfExpired(key, item) === false) {
this._moveToRecent(key, item);
return item.value;
}
}
}
set(key, value, {maxAge = this.maxAge === Infinity ? undefined : Date.now() + this.maxAge} = {}) {
if (this.cache.has(key)) {
this.cache.set(key, {
value,
maxAge
});
} else {
this._set(key, {value, expiry: maxAge});
}
}
has(key) {
if (this.cache.has(key)) {
return !this._deleteIfExpired(key, this.cache.get(key));
}
if (this.oldCache.has(key)) {
return !this._deleteIfExpired(key, this.oldCache.get(key));
}
return false;
}
peek(key) {
if (this.cache.has(key)) {
return this._peek(key, this.cache);
}
if (this.oldCache.has(key)) {
return this._peek(key, this.oldCache);
}
}
delete(key) {
const deleted = this.cache.delete(key);
if (deleted) {
this._size--;
}
return this.oldCache.delete(key) || deleted;
}
clear() {
this.cache.clear();
this.oldCache.clear();
this._size = 0;
}
resize(newSize) {
if (!(newSize && newSize > 0)) {
throw new TypeError('`maxSize` must be a number greater than 0');
}
const items = [...this._entriesAscending()];
const removeCount = items.length - newSize;
if (removeCount < 0) {
this.cache = new Map(items);
this.oldCache = new Map();
this._size = items.length;
} else {
if (removeCount > 0) {
this._emitEvictions(items.slice(0, removeCount));
}
this.oldCache = new Map(items.slice(removeCount));
this.cache = new Map();
this._size = 0;
}
this.maxSize = newSize;
}
* keys() {
for (const [key] of this) {
yield key;
}
}
* values() {
for (const [, value] of this) {
yield value;
}
}
* [Symbol.iterator]() {
for (const item of this.cache) {
const [key, value] = item;
const deleted = this._deleteIfExpired(key, value);
if (deleted === false) {
yield [key, value.value];
}
}
for (const item of this.oldCache) {
const [key, value] = item;
if (!this.cache.has(key)) {
const deleted = this._deleteIfExpired(key, value);
if (deleted === false) {
yield [key, value.value];
}
}
}
}
* entriesDescending() {
let items = [...this.cache];
for (let i = items.length - 1; i >= 0; --i) {
const item = items[i];
const [key, value] = item;
const deleted = this._deleteIfExpired(key, value);
if (deleted === false) {
yield [key, value.value];
}
}
items = [...this.oldCache];
for (let i = items.length - 1; i >= 0; --i) {
const item = items[i];
const [key, value] = item;
if (!this.cache.has(key)) {
const deleted = this._deleteIfExpired(key, value);
if (deleted === false) {
yield [key, value.value];
}
}
}
}
* entriesAscending() {
for (const [key, value] of this._entriesAscending()) {
yield [key, value.value];
}
}
get size() {
if (!this._size) {
return this.oldCache.size;
}
let oldCacheSize = 0;
for (const key of this.oldCache.keys()) {
if (!this.cache.has(key)) {
oldCacheSize++;
}
}
return Math.min(this._size + oldCacheSize, this.maxSize);
}
}
module.exports = QuickLRU;

@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,43 @@
{
"name": "@alloc/quick-lru",
"version": "5.2.0",
"description": "Simple “Least Recently Used” (LRU) cache",
"license": "MIT",
"repository": "sindresorhus/quick-lru",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"engines": {
"node": ">=10"
},
"scripts": {
"test": "xo && nyc ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"lru",
"quick",
"cache",
"caching",
"least",
"recently",
"used",
"fast",
"map",
"hash",
"buffer"
],
"devDependencies": {
"ava": "^2.0.0",
"coveralls": "^3.0.3",
"nyc": "^15.0.0",
"tsd": "^0.11.0",
"xo": "^0.26.0"
}
}

@ -0,0 +1,139 @@
# quick-lru [![Build Status](https://travis-ci.org/sindresorhus/quick-lru.svg?branch=master)](https://travis-ci.org/sindresorhus/quick-lru) [![Coverage Status](https://coveralls.io/repos/github/sindresorhus/quick-lru/badge.svg?branch=master)](https://coveralls.io/github/sindresorhus/quick-lru?branch=master)
> Simple [“Least Recently Used” (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29)
Useful when you need to cache something and limit memory usage.
Inspired by the [`hashlru` algorithm](https://github.com/dominictarr/hashlru#algorithm), but instead uses [`Map`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Map) to support keys of any type, not just strings, and values can be `undefined`.
## Install
```
$ npm install quick-lru
```
## Usage
```js
const QuickLRU = require('quick-lru');
const lru = new QuickLRU({maxSize: 1000});
lru.set('🦄', '🌈');
lru.has('🦄');
//=> true
lru.get('🦄');
//=> '🌈'
```
## API
### new QuickLRU(options?)
Returns a new instance.
### options
Type: `object`
#### maxSize
*Required*\
Type: `number`
The maximum number of items before evicting the least recently used items.
#### maxAge
Type: `number`\
Default: `Infinity`
The maximum number of milliseconds an item should remain in cache.
By default, `maxAge` will be `Infinity`, which means that items will never expire.
Lazy expiration happens upon the next `write` or `read` call.
Individual expiration of an item can be specified by the `set(key, value, options)` method.
#### onEviction
*Optional*\
Type: `(key, value) => void`
Called right before an item is evicted from the cache.
Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`).
### Instance
The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop.
Both `key` and `value` can be of any type.
#### .set(key, value, options?)
Set an item. Returns the instance.
Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value will be used in case it is specified on the constructor, otherwise the item will never expire.
#### .get(key)
Get an item.
#### .has(key)
Check if an item exists.
#### .peek(key)
Get an item without marking it as recently used.
#### .delete(key)
Delete an item.
Returns `true` if the item is removed or `false` if the item doesn't exist.
#### .clear()
Delete all items.
#### .resize(maxSize)
Update the `maxSize`, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee.
Useful for on-the-fly tuning of cache sizes in live systems.
#### .keys()
Iterable for all the keys.
#### .values()
Iterable for all the values.
#### .entriesAscending()
Iterable for all entries, starting with the oldest (ascending in recency).
#### .entriesDescending()
Iterable for all entries, starting with the newest (descending in recency).
#### .size
The stored item count.
---
<div align="center">
<b>
<a href="https://tidelift.com/subscription/pkg/npm-quick-lru?utm_source=npm-quick-lru&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
</b>
<br>
<sub>
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
</sub>
</div>

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@ -0,0 +1,218 @@
# @ampproject/remapping
> Remap sequential sourcemaps through transformations to point at the original source code
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
them to the original source locations. Think "my minified code, transformed with babel and bundled
with webpack", all pointing to the correct location in your original source code.
With remapping, none of your source code transformations need to be aware of the input's sourcemap,
they only need to generate an output sourcemap. This greatly simplifies building custom
transformations (think a find-and-replace).
## Installation
```sh
npm install @ampproject/remapping
```
## Usage
```typescript
function remapping(
map: SourceMap | SourceMap[],
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
options?: { excludeContent: boolean, decodedMappings: boolean }
): SourceMap;
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
// "source" location (where child sources are resolved relative to, or the location of original
// source), and the ability to override the "content" of an original source for inclusion in the
// output sourcemap.
type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
}
```
`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
sourcemap. If not, the path will be treated as original, untransformed source code.
```js
// Babel transformed "helloworld.js" into "transformed.js"
const transformedMap = JSON.stringify({
file: 'transformed.js',
// 1st column of 2nd line of output file translates into the 1st source
// file, line 3, column 2
mappings: ';CAEE',
sources: ['helloworld.js'],
version: 3,
});
// Uglify minified "transformed.js" into "transformed.min.js"
const minifiedTransformedMap = JSON.stringify({
file: 'transformed.min.js',
// 0th column of 1st line of output file translates into the 1st source
// file, line 2, column 1.
mappings: 'AACC',
names: [],
sources: ['transformed.js'],
version: 3,
});
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
// The "transformed.js" file is an transformed file.
if (file === 'transformed.js') {
// The root importer is empty.
console.assert(ctx.importer === '');
// The depth in the sourcemap tree we're currently loading.
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
console.assert(ctx.depth === 1);
return transformedMap;
}
// Loader will be called to load transformedMap's source file pointers as well.
console.assert(file === 'helloworld.js');
// `transformed.js`'s sourcemap points into `helloworld.js`.
console.assert(ctx.importer === 'transformed.js');
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
console.assert(ctx.depth === 2);
return null;
}
);
console.log(remapped);
// {
// file: 'transpiled.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
In this example, `loader` will be called twice:
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
be traced through it into the source files it represents.
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
we return `null`.
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
you were to read the `mappings`, it says "0th column of the 1st line of the output file points to the 1st
column of the 2nd line of the file `helloworld.js`".
### Multiple transformations of a file
As a convenience, if you have multiple single-source transformations of a file, you may pass an
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
changes the `importer` and `depth` of each call to our loader. So our above example could have been
written as:
```js
const remapped = remapping(
[minifiedTransformedMap, transformedMap],
() => null
);
console.log(remapped);
// {
// file: 'transpiled.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
### Advanced control of the loading graph
#### `source`
The `source` property can be overridden to any value to change the location of the current load. Eg,
for an original source file, it allows us to change the location to the original source regardless
of what the sourcemap source entry says. And for transformed files, it allows us to change the
relative resolving location for child sources of the loaded sourcemap.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
// source files are loaded, they will now be relative to `src/`.
ctx.source = 'src/transformed.js';
return transformedMap;
}
console.assert(file === 'src/helloworld.js');
// We could further change the source of this original file, eg, to be inside a nested directory
// itself. This will be reflected in the remapped sourcemap.
ctx.source = 'src/nested/helloworld.js';
return null;
}
);
console.log(remapped);
// {
// …,
// sources: ['src/nested/helloworld.js'],
// };
```
#### `content`
The `content` property can be overridden when we encounter an original source file. Eg, this allows
you to manually provide the source content of the original file regardless of whether the
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
the source content.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
// would not include any `sourcesContent` values.
return transformedMap;
}
console.assert(file === 'helloworld.js');
// We can read the file to provide the source content.
ctx.content = fs.readFileSync(file, 'utf8');
return null;
}
);
console.log(remapped);
// {
// …,
// sourcesContent: [
// 'console.log("Hello world!")',
// ],
// };
```
### Options
#### excludeContent
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
the size of the sourcemap.
#### decodedMappings
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
encoding into a VLQ string.

@ -0,0 +1,191 @@
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddSegment, setSourceContent, toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content) {
return { source, line, column, name, content };
}
function Source(map, sources, source, content) {
return {
map,
sources,
source,
content,
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map, sources) {
return Source(map, sources, '', null);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content) {
return Source(null, EMPTY_SOURCES, source, content);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
const gen = new GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null)
continue;
}
const { column, line, name, content, source } = traced;
maybeAddSegment(gen, i, genCol, source, line, column, name);
if (source && content != null)
setSourceContent(gen, source, content);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content);
}
const segment = traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null)
return null;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length === 1)
return SOURCELESS_MAPPING;
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value) {
if (Array.isArray(value))
return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new TraceMap(m, ''));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?');
}
}
let tree = build(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx = {
importer,
depth,
source: sourceFile || '',
content: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap)
return build(new TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
return OriginalSource(source, sourceContent);
});
return MapSource(map, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
class SourceMap {
constructor(map, options) {
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
}
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader, options) {
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
export { remapping as default };
//# sourceMappingURL=remapping.mjs.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,196 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
})(this, (function (traceMapping, genMapping) { 'use strict';
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content) {
return { source, line, column, name, content };
}
function Source(map, sources, source, content) {
return {
map,
sources,
source,
content,
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map, sources) {
return Source(map, sources, '', null);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content) {
return Source(null, EMPTY_SOURCES, source, content);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
const gen = new genMapping.GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = traceMapping.decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null)
continue;
}
const { column, line, name, content, source } = traced;
genMapping.maybeAddSegment(gen, i, genCol, source, line, column, name);
if (source && content != null)
genMapping.setSourceContent(gen, source, content);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content);
}
const segment = traceMapping.traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null)
return null;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length === 1)
return SOURCELESS_MAPPING;
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value) {
if (Array.isArray(value))
return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?');
}
}
let tree = build(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx = {
importer,
depth,
source: sourceFile || '',
content: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap)
return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
return OriginalSource(source, sourceContent);
});
return MapSource(map, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
class SourceMap {
constructor(map, options) {
const out = options.decodedMappings ? genMapping.toDecodedMap(map) : genMapping.toEncodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
}
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader, options) {
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
return remapping;
}));
//# sourceMappingURL=remapping.umd.js.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,14 @@
import type { MapSource as MapSourceType } from './source-map-tree';
import type { SourceMapInput, SourceMapLoader } from './types';
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;

@ -0,0 +1,19 @@
import SourceMap from './source-map';
import type { SourceMapInput, SourceMapLoader, Options } from './types';
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
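As a rough orientation for how these pieces fit together, here is a minimal sketch of calling `remapping` with a hypothetical two-step chain (the maps and file names below are made-up placeholders, not anything shipped with the package):

```js
import remapping from '@ampproject/remapping';

// Hypothetical chain: min.js's map points into transpiled.js, whose own map
// points back into the original input.ts.
const minifiedMap = {
  version: 3, file: 'min.js', sources: ['transpiled.js'], names: [], mappings: 'AAAA'
};
const transpiledMap = {
  version: 3, file: 'transpiled.js', sources: ['input.ts'], names: [],
  mappings: 'AAAA', sourcesContent: ['export const x = 1;']
};

const remapped = remapping(minifiedMap, (file) => {
  // Returning a sourcemap recurses into it; a falsey value marks `file` as an
  // original, unmodified source and ends the trace there.
  return file === 'transpiled.js' ? transpiledMap : null;
});

console.log(remapped.toString()); // combined map whose `sources` point at input.ts
```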

@ -0,0 +1,42 @@
import { GenMapping } from '@jridgewell/gen-mapping';
import type { TraceMap } from '@jridgewell/trace-mapping';
export declare type SourceMapSegmentObject = {
column: number;
line: number;
name: string;
source: string;
content: string | null;
};
export declare type OriginalSource = {
map: null;
sources: Sources[];
source: string;
content: string | null;
};
export declare type MapSource = {
map: TraceMap;
sources: Sources[];
source: string;
content: null;
};
export declare type Sources = OriginalSource | MapSource;
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
export declare function OriginalSource(source: string, content: string | null): OriginalSource;
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
export declare function traceMappings(tree: MapSource): GenMapping;
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;

@ -0,0 +1,17 @@
import type { GenMapping } from '@jridgewell/gen-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
export default class SourceMap {
file?: string | null;
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
sourceRoot?: string;
names: string[];
sources: (string | null)[];
sourcesContent?: (string | null)[];
version: 3;
constructor(map: GenMapping, options: Options);
toString(): string;
}

@ -0,0 +1,14 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
export type { SourceMapInput };
export declare type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
};
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
export declare type Options = {
excludeContent?: boolean;
decodedMappings?: boolean;
};

@ -0,0 +1,75 @@
{
"name": "@ampproject/remapping",
"version": "2.2.1",
"description": "Remap sequential sourcemaps through transformations to point at the original source code",
"keywords": [
"source",
"map",
"remap"
],
"main": "dist/remapping.umd.js",
"module": "dist/remapping.mjs",
"types": "dist/types/remapping.d.ts",
"exports": {
".": [
{
"types": "./dist/types/remapping.d.ts",
"browser": "./dist/remapping.umd.js",
"require": "./dist/remapping.umd.js",
"import": "./dist/remapping.mjs"
},
"./dist/remapping.umd.js"
],
"./package.json": "./package.json"
},
"files": [
"dist"
],
"author": "Justin Ridgewell <jridgewell@google.com>",
"repository": {
"type": "git",
"url": "git+https://github.com/ampproject/remapping.git"
},
"license": "Apache-2.0",
"engines": {
"node": ">=6.0.0"
},
"scripts": {
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"prebuild": "rm -rf dist",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build",
"test": "run-s -n test:lint test:only",
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "jest --coverage",
"test:watch": "jest --coverage --watch"
},
"devDependencies": {
"@rollup/plugin-typescript": "8.3.2",
"@types/jest": "27.4.1",
"@typescript-eslint/eslint-plugin": "5.20.0",
"@typescript-eslint/parser": "5.20.0",
"eslint": "8.14.0",
"eslint-config-prettier": "8.5.0",
"jest": "27.5.1",
"jest-config": "27.5.1",
"npm-run-all": "4.1.5",
"prettier": "2.6.2",
"rollup": "2.70.2",
"ts-jest": "27.1.4",
"tslib": "2.4.0",
"typescript": "4.6.3"
},
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.0",
"@jridgewell/trace-mapping": "^0.3.9"
}
}

@ -0,0 +1,3 @@
# esbuild
This is the Linux 64-bit binary for esbuild, a JavaScript bundler and minifier. See https://github.com/evanw/esbuild for details.

Binary file not shown.

@ -0,0 +1,17 @@
{
"name": "@esbuild/linux-x64",
"version": "0.18.20",
"description": "The Linux 64-bit binary for esbuild, a JavaScript bundler.",
"repository": "https://github.com/evanw/esbuild",
"license": "MIT",
"preferUnplugged": true,
"engines": {
"node": ">=12"
},
"os": [
"linux"
],
"cpu": [
"x64"
]
}

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018 Toru Nagashima
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,37 @@
# @eslint-community/eslint-utils
[![npm version](https://img.shields.io/npm/v/@eslint-community/eslint-utils.svg)](https://www.npmjs.com/package/@eslint-community/eslint-utils)
[![Downloads/month](https://img.shields.io/npm/dm/@eslint-community/eslint-utils.svg)](http://www.npmtrends.com/@eslint-community/eslint-utils)
[![Build Status](https://github.com/eslint-community/eslint-utils/workflows/CI/badge.svg)](https://github.com/eslint-community/eslint-utils/actions)
[![Coverage Status](https://codecov.io/gh/eslint-community/eslint-utils/branch/main/graph/badge.svg)](https://codecov.io/gh/eslint-community/eslint-utils)
## 🏁 Goal
This package provides utility functions and classes for making custom ESLint rules.
For example:
- [`getStaticValue`](https://eslint-community.github.io/eslint-utils/api/ast-utils.html#getstaticvalue) evaluates static value on AST.
- [`ReferenceTracker`](https://eslint-community.github.io/eslint-utils/api/scope-utils.html#referencetracker-class) tracks references to the members of modules/globals, handling assignments and destructuring.
## 📖 Usage
See [documentation](https://eslint-community.github.io/eslint-utils).
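As a quick illustration of the first helper, a custom rule might call `getStaticValue` roughly like this (a minimal sketch; the rule logic and the reported message are invented for the example):

```js
const { getStaticValue } = require("@eslint-community/eslint-utils");

module.exports = {
  create(context) {
    return {
      CallExpression(node) {
        const firstArg = node.arguments[0];
        // Statically evaluate the first argument, if possible.
        const evaluated = firstArg && getStaticValue(firstArg, context.getScope());
        if (evaluated && evaluated.value === "secret") {
          context.report({ node, message: "Do not pass 'secret' as a literal." });
        }
      },
    };
  },
};
```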
## 📰 Changelog
See [releases](https://github.com/eslint-community/eslint-utils/releases).
## ❤️ Contributing
Contributions are welcome!
Please use GitHub's Issues/PRs.
### Development Tools
- `npm test` runs tests and measures coverage.
- `npm run clean` removes the coverage result of `npm test` command.
- `npm run coverage` shows the coverage result of the last `npm test` command.
- `npm run lint` runs ESLint.
- `npm run watch` runs tests on each file change.

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

@ -0,0 +1,73 @@
{
"name": "@eslint-community/eslint-utils",
"version": "4.4.0",
"description": "Utilities for ESLint plugins.",
"keywords": [
"eslint"
],
"homepage": "https://github.com/eslint-community/eslint-utils#readme",
"bugs": {
"url": "https://github.com/eslint-community/eslint-utils/issues"
},
"repository": {
"type": "git",
"url": "https://github.com/eslint-community/eslint-utils"
},
"license": "MIT",
"author": "Toru Nagashima",
"sideEffects": false,
"exports": {
".": {
"import": "./index.mjs",
"require": "./index.js"
},
"./package.json": "./package.json"
},
"main": "index",
"module": "index.mjs",
"files": [
"index.*"
],
"scripts": {
"prebuild": "npm run -s clean",
"build": "rollup -c",
"clean": "rimraf .nyc_output coverage index.*",
"coverage": "opener ./coverage/lcov-report/index.html",
"docs:build": "vitepress build docs",
"docs:watch": "vitepress dev docs",
"format": "npm run -s format:prettier -- --write",
"format:prettier": "prettier .",
"format:check": "npm run -s format:prettier -- --check",
"lint": "eslint .",
"test": "c8 mocha --reporter dot \"test/*.mjs\"",
"preversion": "npm test && npm run -s build",
"postversion": "git push && git push --tags",
"prewatch": "npm run -s clean",
"watch": "warun \"{src,test}/**/*.mjs\" -- npm run -s test:mocha"
},
"dependencies": {
"eslint-visitor-keys": "^3.3.0"
},
"devDependencies": {
"@eslint-community/eslint-plugin-mysticatea": "^15.2.0",
"c8": "^7.12.0",
"dot-prop": "^6.0.1",
"eslint": "^8.28.0",
"mocha": "^9.2.2",
"npm-run-all": "^4.1.5",
"opener": "^1.5.2",
"prettier": "2.8.4",
"rimraf": "^3.0.2",
"rollup": "^2.79.1",
"rollup-plugin-sourcemaps": "^0.6.3",
"semver": "^7.3.8",
"vitepress": "^1.0.0-alpha.40",
"warun": "^1.0.0"
},
"peerDependencies": {
"eslint": "^6.0.0 || ^7.0.0 || >=8.0.0"
},
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}
}

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018 Toru Nagashima
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,177 @@
# @eslint-community/regexpp
[![npm version](https://img.shields.io/npm/v/@eslint-community/regexpp.svg)](https://www.npmjs.com/package/@eslint-community/regexpp)
[![Downloads/month](https://img.shields.io/npm/dm/@eslint-community/regexpp.svg)](http://www.npmtrends.com/@eslint-community/regexpp)
[![Build Status](https://github.com/eslint-community/regexpp/workflows/CI/badge.svg)](https://github.com/eslint-community/regexpp/actions)
[![codecov](https://codecov.io/gh/eslint-community/regexpp/branch/main/graph/badge.svg)](https://codecov.io/gh/eslint-community/regexpp)
A regular expression parser for ECMAScript.
## 💿 Installation
```bash
$ npm install @eslint-community/regexpp
```
- Requires Node@^12.0.0 || ^14.0.0 || >=16.0.0.
## 📖 Usage
```ts
import {
AST,
RegExpParser,
RegExpValidator,
RegExpVisitor,
parseRegExpLiteral,
validateRegExpLiteral,
visitRegExpAST
} from "@eslint-community/regexpp"
```
### parseRegExpLiteral(source, options?)
Parse a given regular expression literal and return its AST.
This is equivalent to `new RegExpParser(options).parseLiteral(source)`.
- **Parameters:**
- `source` (`string | RegExp`) The source code to parse.
- `options?` ([`RegExpParser.Options`]) The options to parse.
- **Return:**
- The AST of the regular expression.
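For instance (a minimal sketch; the literal and the inspected fields are arbitrary):

```js
import { parseRegExpLiteral } from "@eslint-community/regexpp";

const ast = parseRegExpLiteral("/ab+c/gi");
console.log(ast.type);             // "RegExpLiteral"
console.log(ast.flags.global);     // true
console.log(ast.flags.ignoreCase); // true
console.log(ast.pattern.raw);      // "ab+c"
```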
### validateRegExpLiteral(source, options?)
Validate a given regular expression literal.
This is equivalent to `new RegExpValidator(options).validateLiteral(source)`.
- **Parameters:**
- `source` (`string`) The source code to validate.
- `options?` ([`RegExpValidator.Options`]) The options to validate.
### visitRegExpAST(ast, handlers)
Visit each node of a given AST.
This is equivalent to `new RegExpVisitor(handlers).visit(ast)`.
- **Parameters:**
- `ast` ([`AST.Node`]) The AST to visit.
- `handlers` ([`RegExpVisitor.Handlers`]) The callbacks.
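A small sketch (the pattern is arbitrary; handler names follow the `on<NodeType>Enter`/`on<NodeType>Leave` convention):

```js
import { parseRegExpLiteral, visitRegExpAST } from "@eslint-community/regexpp";

const ast = parseRegExpLiteral("/(foo|bar)+/");
visitRegExpAST(ast, {
  onCapturingGroupEnter(node) {
    console.log("group:", node.raw);      // "(foo|bar)"
  },
  onQuantifierEnter(node) {
    console.log("quantifier:", node.raw); // "(foo|bar)+"
  },
});
```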
### RegExpParser
#### new RegExpParser(options?)
- **Parameters:**
- `options?` ([`RegExpParser.Options`]) The options to parse.
#### parser.parseLiteral(source, start?, end?)
Parse a regular expression literal.
- **Parameters:**
- `source` (`string`) The source code to parse. E.g. `"/abc/g"`.
- `start?` (`number`) The start index in the source code. Default is `0`.
- `end?` (`number`) The end index in the source code. Default is `source.length`.
- **Return:**
- The AST of the regular expression.
#### parser.parsePattern(source, start?, end?, flags?)
Parse a regular expression pattern.
- **Parameters:**
- `source` (`string`) The source code to parse. E.g. `"abc"`.
- `start?` (`number`) The start index in the source code. Default is `0`.
- `end?` (`number`) The end index in the source code. Default is `source.length`.
- `flags?` (`{ unicode?: boolean, unicodeSets?: boolean }`) The flags to enable Unicode mode, and Unicode Set mode.
- **Return:**
- The AST of the regular expression pattern.
#### parser.parseFlags(source, start?, end?)
Parse regular expression flags.
- **Parameters:**
- `source` (`string`) The source code to parse. E.g. `"gim"`.
- `start?` (`number`) The start index in the source code. Default is `0`.
- `end?` (`number`) The end index in the source code. Default is `source.length`.
- **Return:**
- The AST of the regular expression flags.
### RegExpValidator
#### new RegExpValidator(options)
- **Parameters:**
- `options` ([`RegExpValidator.Options`]) The options to validate.
#### validator.validateLiteral(source, start, end)
Validate a regular expression literal.
- **Parameters:**
- `source` (`string`) The source code to validate.
- `start?` (`number`) The start index in the source code. Default is `0`.
- `end?` (`number`) The end index in the source code. Default is `source.length`.
#### validator.validatePattern(source, start, end, flags)
Validate a regular expression pattern.
- **Parameters:**
- `source` (`string`) The source code to validate.
- `start?` (`number`) The start index in the source code. Default is `0`.
- `end?` (`number`) The end index in the source code. Default is `source.length`.
- `flags?` (`{ unicode?: boolean, unicodeSets?: boolean }`) The flags to enable Unicode mode, and Unicode Set mode.
#### validator.validateFlags(source, start, end)
Validate regular expression flags.
- **Parameters:**
- `source` (`string`) The source code to validate.
- `start?` (`number`) The start index in the source code. Default is `0`.
- `end?` (`number`) The end index in the source code. Default is `source.length`.
### RegExpVisitor
#### new RegExpVisitor(handlers)
- **Parameters:**
- `handlers` ([`RegExpVisitor.Handlers`]) The callbacks.
#### visitor.visit(ast)
Visit each node of a given AST.
- **Parameters:**
- `ast` ([`AST.Node`]) The AST to visit.
## 📰 Changelog
- [GitHub Releases](https://github.com/eslint-community/regexpp/releases)
## 🍻 Contributing
Contributions are welcome!
Please use GitHub's Issues/PRs.
### Development Tools
- `npm test` runs tests and measures coverage.
- `npm run build` compiles TypeScript source code to `index.js`, `index.js.map`, and `index.d.ts`.
- `npm run clean` removes the temporary files which are created by `npm test` and `npm run build`.
- `npm run lint` runs ESLint.
- `npm run update:test` updates test fixtures.
- `npm run update:ids` updates `src/unicode/ids.ts`.
- `npm run watch` runs tests with `--watch` option.
[`AST.Node`]: src/ast.ts#L4
[`RegExpParser.Options`]: src/parser.ts#L743
[`RegExpValidator.Options`]: src/validator.ts#L220
[`RegExpVisitor.Handlers`]: src/visitor.ts#L291

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

@ -0,0 +1,93 @@
{
"name": "@eslint-community/regexpp",
"version": "4.10.0",
"description": "Regular expression parser for ECMAScript.",
"keywords": [
"regexp",
"regular",
"expression",
"parser",
"validator",
"ast",
"abstract",
"syntax",
"tree",
"ecmascript",
"es2015",
"es2016",
"es2017",
"es2018",
"es2019",
"es2020",
"es2021",
"annexB"
],
"homepage": "https://github.com/eslint-community/regexpp#readme",
"bugs": {
"url": "https://github.com/eslint-community/regexpp/issues"
},
"repository": {
"type": "git",
"url": "https://github.com/eslint-community/regexpp"
},
"license": "MIT",
"author": "Toru Nagashima",
"exports": {
".": {
"types": "./index.d.ts",
"import": "./index.mjs",
"default": "./index.js"
},
"./package.json": "./package.json"
},
"main": "index",
"files": [
"index.*"
],
"scripts": {
"prebuild": "npm run -s clean",
"build": "run-s build:*",
"build:tsc": "tsc --module es2015",
"build:rollup": "rollup -c",
"build:dts": "npm run -s build:tsc -- --removeComments false && dts-bundle --name @eslint-community/regexpp --main .temp/index.d.ts --out ../index.d.ts && prettier --write index.d.ts",
"clean": "rimraf .temp index.*",
"lint": "eslint . --ext .ts",
"test": "nyc _mocha \"test/*.ts\" --reporter dot --timeout 10000",
"debug": "mocha --require ts-node/register/transpile-only \"test/*.ts\" --reporter dot --timeout 10000",
"update:test": "ts-node scripts/update-fixtures.ts",
"update:unicode": "run-s update:unicode:*",
"update:unicode:ids": "ts-node scripts/update-unicode-ids.ts",
"update:unicode:props": "ts-node scripts/update-unicode-properties.ts",
"update:test262:extract": "ts-node -T scripts/extract-test262.ts",
"preversion": "npm test && npm run -s build",
"postversion": "git push && git push --tags",
"prewatch": "npm run -s clean",
"watch": "_mocha \"test/*.ts\" --require ts-node/register --reporter dot --timeout 10000 --watch-extensions ts --watch --growl"
},
"dependencies": {},
"devDependencies": {
"@eslint-community/eslint-plugin-mysticatea": "^15.5.1",
"@rollup/plugin-node-resolve": "^14.1.0",
"@types/eslint": "^8.44.3",
"@types/jsdom": "^16.2.15",
"@types/mocha": "^9.1.1",
"@types/node": "^12.20.55",
"dts-bundle": "^0.7.3",
"eslint": "^8.50.0",
"js-tokens": "^8.0.2",
"jsdom": "^19.0.0",
"mocha": "^9.2.2",
"npm-run-all": "^4.1.5",
"nyc": "^14.1.1",
"rimraf": "^3.0.2",
"rollup": "^2.79.1",
"rollup-plugin-sourcemaps": "^0.6.3",
"test262": "git+https://github.com/tc39/test262.git",
"test262-stream": "^1.4.0",
"ts-node": "^10.9.1",
"typescript": "~5.0.2"
},
"engines": {
"node": "^12.0.0 || ^14.0.0 || >=16.0.0"
}
}

@ -0,0 +1,19 @@
Copyright OpenJS Foundation and other contributors, <www.openjsf.org>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

@ -0,0 +1,115 @@
# ESLintRC Library
This repository contains the legacy ESLintRC configuration file format for ESLint. This package is not intended for use outside of the ESLint ecosystem. It is ESLint-specific and not intended for use in other programs.
**Note:** This package is frozen except for critical bug fixes as ESLint moves to a new config system.
## Installation
You can install the package as follows:
```
npm install @eslint/eslintrc --save-dev
# or
yarn add @eslint/eslintrc -D
```
## Usage (ESM)
The primary class in this package is `FlatCompat`, which is a utility to translate ESLintRC-style configs into flat configs. Here's how you use it inside of your `eslint.config.js` file:
```js
import { FlatCompat } from "@eslint/eslintrc";
import js from "@eslint/js";
import path from "path";
import { fileURLToPath } from "url";
// mimic CommonJS variables -- not needed if using CommonJS
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
baseDirectory: __dirname, // optional; default: process.cwd()
resolvePluginsRelativeTo: __dirname, // optional
recommendedConfig: js.configs.recommended, // optional
allConfig: js.configs.all, // optional
});
export default [
// mimic ESLintRC-style extends
...compat.extends("standard", "example"),
// mimic environments
...compat.env({
es2020: true,
node: true
}),
// mimic plugins
...compat.plugins("airbnb", "react"),
// translate an entire config
...compat.config({
plugins: ["airbnb", "react"],
extends: "standard",
env: {
es2020: true,
node: true
},
rules: {
semi: "error"
}
})
];
```
## Usage (CommonJS)
Using `FlatCompat` in CommonJS files is similar to ESM, but you'll use `require()` and `module.exports` instead of `import` and `export`. Here's how you use it inside of your `eslint.config.js` CommonJS file:
```js
const { FlatCompat } = require("@eslint/eslintrc");
const js = require("@eslint/js");
const compat = new FlatCompat({
baseDirectory: __dirname, // optional; default: process.cwd()
resolvePluginsRelativeTo: __dirname, // optional
recommendedConfig: js.configs.recommended, // optional
allConfig: js.configs.all, // optional
});
module.exports = [
// mimic ESLintRC-style extends
...compat.extends("standard", "example"),
// mimic environments
...compat.env({
es2020: true,
node: true
}),
// mimic plugins
...compat.plugins("airbnb", "react"),
// translate an entire config
...compat.config({
plugins: ["airbnb", "react"],
extends: "standard",
env: {
es2020: true,
node: true
},
rules: {
semi: "error"
}
})
];
```
## License
MIT License

@ -0,0 +1,79 @@
/**
* @fileoverview Defines a schema for configs.
* @author Sylvan Mably
*/
const baseConfigProperties = {
$schema: { type: "string" },
env: { type: "object" },
extends: { $ref: "#/definitions/stringOrStrings" },
globals: { type: "object" },
overrides: {
type: "array",
items: { $ref: "#/definitions/overrideConfig" },
additionalItems: false
},
parser: { type: ["string", "null"] },
parserOptions: { type: "object" },
plugins: { type: "array" },
processor: { type: "string" },
rules: { type: "object" },
settings: { type: "object" },
noInlineConfig: { type: "boolean" },
reportUnusedDisableDirectives: { type: "boolean" },
ecmaFeatures: { type: "object" } // deprecated; logs a warning when used
};
const configSchema = {
definitions: {
stringOrStrings: {
oneOf: [
{ type: "string" },
{
type: "array",
items: { type: "string" },
additionalItems: false
}
]
},
stringOrStringsRequired: {
oneOf: [
{ type: "string" },
{
type: "array",
items: { type: "string" },
additionalItems: false,
minItems: 1
}
]
},
// Config at top-level.
objectConfig: {
type: "object",
properties: {
root: { type: "boolean" },
ignorePatterns: { $ref: "#/definitions/stringOrStrings" },
...baseConfigProperties
},
additionalProperties: false
},
// Config in `overrides`.
overrideConfig: {
type: "object",
properties: {
excludedFiles: { $ref: "#/definitions/stringOrStrings" },
files: { $ref: "#/definitions/stringOrStringsRequired" },
...baseConfigProperties
},
required: ["files"],
additionalProperties: false
}
},
$ref: "#/definitions/objectConfig"
};
export default configSchema;
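For reference, a config object shaped like the following (values chosen arbitrarily) satisfies the `objectConfig` definition above, including one entry in `overrides`:

```js
const exampleConfig = {
    root: true,
    extends: ["eslint:recommended"],
    env: { es2021: true, node: true },
    parserOptions: { ecmaVersion: 12 },
    overrides: [
        { files: ["*.test.js"], env: { mocha: true } }
    ],
    rules: { semi: "error" }
};
```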

@ -0,0 +1,215 @@
/**
* @fileoverview Defines environment settings and globals.
* @author Elan Shanker
*/
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
import globals from "globals";
//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------
/**
* Get the entries of `current` that are not present in `prev`.
* @param {Record<string,boolean>} current The newer object.
* @param {Record<string,boolean>} prev The older object.
* @returns {Record<string,boolean>} The difference object.
*/
function getDiff(current, prev) {
const retv = {};
for (const [key, value] of Object.entries(current)) {
if (!Object.hasOwnProperty.call(prev, key)) {
retv[key] = value;
}
}
return retv;
}
const newGlobals2015 = getDiff(globals.es2015, globals.es5); // 19 variables such as Promise, Map, ...
const newGlobals2017 = {
Atomics: false,
SharedArrayBuffer: false
};
const newGlobals2020 = {
BigInt: false,
BigInt64Array: false,
BigUint64Array: false,
globalThis: false
};
const newGlobals2021 = {
AggregateError: false,
FinalizationRegistry: false,
WeakRef: false
};
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------
/** @type {Map<string, import("../lib/shared/types").Environment>} */
export default new Map(Object.entries({
// Language
builtin: {
globals: globals.es5
},
es6: {
globals: newGlobals2015,
parserOptions: {
ecmaVersion: 6
}
},
es2015: {
globals: newGlobals2015,
parserOptions: {
ecmaVersion: 6
}
},
es2016: {
globals: newGlobals2015,
parserOptions: {
ecmaVersion: 7
}
},
es2017: {
globals: { ...newGlobals2015, ...newGlobals2017 },
parserOptions: {
ecmaVersion: 8
}
},
es2018: {
globals: { ...newGlobals2015, ...newGlobals2017 },
parserOptions: {
ecmaVersion: 9
}
},
es2019: {
globals: { ...newGlobals2015, ...newGlobals2017 },
parserOptions: {
ecmaVersion: 10
}
},
es2020: {
globals: { ...newGlobals2015, ...newGlobals2017, ...newGlobals2020 },
parserOptions: {
ecmaVersion: 11
}
},
es2021: {
globals: { ...newGlobals2015, ...newGlobals2017, ...newGlobals2020, ...newGlobals2021 },
parserOptions: {
ecmaVersion: 12
}
},
es2022: {
globals: { ...newGlobals2015, ...newGlobals2017, ...newGlobals2020, ...newGlobals2021 },
parserOptions: {
ecmaVersion: 13
}
},
es2023: {
globals: { ...newGlobals2015, ...newGlobals2017, ...newGlobals2020, ...newGlobals2021 },
parserOptions: {
ecmaVersion: 14
}
},
es2024: {
globals: { ...newGlobals2015, ...newGlobals2017, ...newGlobals2020, ...newGlobals2021 },
parserOptions: {
ecmaVersion: 15
}
},
// Platforms
browser: {
globals: globals.browser
},
node: {
globals: globals.node,
parserOptions: {
ecmaFeatures: {
globalReturn: true
}
}
},
"shared-node-browser": {
globals: globals["shared-node-browser"]
},
worker: {
globals: globals.worker
},
serviceworker: {
globals: globals.serviceworker
},
// Frameworks
commonjs: {
globals: globals.commonjs,
parserOptions: {
ecmaFeatures: {
globalReturn: true
}
}
},
amd: {
globals: globals.amd
},
mocha: {
globals: globals.mocha
},
jasmine: {
globals: globals.jasmine
},
jest: {
globals: globals.jest
},
phantomjs: {
globals: globals.phantomjs
},
jquery: {
globals: globals.jquery
},
qunit: {
globals: globals.qunit
},
prototypejs: {
globals: globals.prototypejs
},
shelljs: {
globals: globals.shelljs
},
meteor: {
globals: globals.meteor
},
mongo: {
globals: globals.mongo
},
protractor: {
globals: globals.protractor
},
applescript: {
globals: globals.applescript
},
nashorn: {
globals: globals.nashorn
},
atomtest: {
globals: globals.atomtest
},
embertest: {
globals: globals.embertest
},
webextensions: {
globals: globals.webextensions
},
greasemonkey: {
globals: globals.greasemonkey
}
}));

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

@ -0,0 +1,532 @@
/**
* @fileoverview `CascadingConfigArrayFactory` class.
*
* `CascadingConfigArrayFactory` class has a responsibility:
*
* 1. Handles cascading of config files.
*
* It provides two methods:
*
* - `getConfigArrayForFile(filePath)`
*    Get the corresponding configuration for a given file. This method doesn't
*    throw even if the given file doesn't exist.
* - `clearCache()`
* Clear the internal cache. You have to call this method when
* `additionalPluginPool` was updated if `baseConfig` or `cliConfig` depends
* on the additional plugins. (`CLIEngine#addPlugin()` method calls this.)
*
* @author Toru Nagashima <https://github.com/mysticatea>
*/
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
import debugOrig from "debug";
import os from "os";
import path from "path";
import { ConfigArrayFactory } from "./config-array-factory.js";
import {
ConfigArray,
ConfigDependency,
IgnorePattern
} from "./config-array/index.js";
import ConfigValidator from "./shared/config-validator.js";
import { emitDeprecationWarning } from "./shared/deprecation-warnings.js";
const debug = debugOrig("eslintrc:cascading-config-array-factory");
//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------
// Define types for VSCode IntelliSense.
/** @typedef {import("./shared/types").ConfigData} ConfigData */
/** @typedef {import("./shared/types").Parser} Parser */
/** @typedef {import("./shared/types").Plugin} Plugin */
/** @typedef {import("./shared/types").Rule} Rule */
/** @typedef {ReturnType<ConfigArrayFactory["create"]>} ConfigArray */
/**
* @typedef {Object} CascadingConfigArrayFactoryOptions
* @property {Map<string,Plugin>} [additionalPluginPool] The map for additional plugins.
* @property {ConfigData} [baseConfig] The config by `baseConfig` option.
* @property {ConfigData} [cliConfig] The config by CLI options (`--env`, `--global`, `--ignore-pattern`, `--parser`, `--parser-options`, `--plugin`, and `--rule`). CLI options overwrite the setting in config files.
* @property {string} [cwd] The base directory to start lookup.
* @property {string} [ignorePath] The path to the alternative file of `.eslintignore`.
* @property {string[]} [rulePaths] The value of `--rulesdir` option.
* @property {string} [specificConfigPath] The value of `--config` option.
* @property {boolean} [useEslintrc] if `false` then it doesn't load config files.
* @property {Function} loadRules The function to use to load rules.
* @property {Map<string,Rule>} builtInRules The rules that are built in to ESLint.
* @property {Object} [resolver=ModuleResolver] The module resolver object.
* @property {string} eslintAllPath The path to the definitions for eslint:all.
* @property {Function} getEslintAllConfig Returns the config data for eslint:all.
* @property {string} eslintRecommendedPath The path to the definitions for eslint:recommended.
* @property {Function} getEslintRecommendedConfig Returns the config data for eslint:recommended.
*/
/**
* @typedef {Object} CascadingConfigArrayFactoryInternalSlots
* @property {ConfigArray} baseConfigArray The config array of `baseConfig` option.
* @property {ConfigData} baseConfigData The config data of `baseConfig` option. This is used to reset `baseConfigArray`.
* @property {ConfigArray} cliConfigArray The config array of CLI options.
* @property {ConfigData} cliConfigData The config data of CLI options. This is used to reset `cliConfigArray`.
* @property {ConfigArrayFactory} configArrayFactory The factory for config arrays.
* @property {Map<string, ConfigArray>} configCache The cache from directory paths to config arrays.
* @property {string} cwd The base directory to start lookup.
* @property {WeakMap<ConfigArray, ConfigArray>} finalizeCache The cache from config arrays to finalized config arrays.
* @property {string} [ignorePath] The path to the alternative file of `.eslintignore`.
* @property {string[]|null} rulePaths The value of `--rulesdir` option. This is used to reset `baseConfigArray`.
* @property {string|null} specificConfigPath The value of `--config` option. This is used to reset `cliConfigArray`.
* @property {boolean} useEslintrc if `false` then it doesn't load config files.
* @property {Function} loadRules The function to use to load rules.
* @property {Map<string,Rule>} builtInRules The rules that are built in to ESLint.
* @property {Object} [resolver=ModuleResolver] The module resolver object.
* @property {string} eslintAllPath The path to the definitions for eslint:all.
* @property {Function} getEslintAllConfig Returns the config data for eslint:all.
* @property {string} eslintRecommendedPath The path to the definitions for eslint:recommended.
* @property {Function} getEslintRecommendedConfig Returns the config data for eslint:recommended.
*/
/** @type {WeakMap<CascadingConfigArrayFactory, CascadingConfigArrayFactoryInternalSlots>} */
const internalSlotsMap = new WeakMap();
/**
* Create the config array from `baseConfig` and `rulePaths`.
* @param {CascadingConfigArrayFactoryInternalSlots} slots The slots.
* @returns {ConfigArray} The config array of the base configs.
*/
function createBaseConfigArray({
configArrayFactory,
baseConfigData,
rulePaths,
cwd,
loadRules
}) {
const baseConfigArray = configArrayFactory.create(
baseConfigData,
{ name: "BaseConfig" }
);
/*
* Create the config array element for the default ignore patterns.
* This element has `ignorePattern` property that ignores the default
* patterns in the current working directory.
*/
baseConfigArray.unshift(configArrayFactory.create(
{ ignorePatterns: IgnorePattern.DefaultPatterns },
{ name: "DefaultIgnorePattern" }
)[0]);
/*
* Load rules `--rulesdir` option as a pseudo plugin.
* Use a pseudo plugin to define rules of `--rulesdir`, so we can validate
* the rule's options with only information in the config array.
*/
if (rulePaths && rulePaths.length > 0) {
baseConfigArray.push({
type: "config",
name: "--rulesdir",
filePath: "",
plugins: {
"": new ConfigDependency({
definition: {
rules: rulePaths.reduce(
(map, rulesPath) => Object.assign(
map,
loadRules(rulesPath, cwd)
),
{}
)
},
filePath: "",
id: "",
importerName: "--rulesdir",
importerPath: ""
})
}
});
}
return baseConfigArray;
}
/**
* Create the config array from CLI options.
* @param {CascadingConfigArrayFactoryInternalSlots} slots The slots.
* @returns {ConfigArray} The config array of the base configs.
*/
function createCLIConfigArray({
cliConfigData,
configArrayFactory,
cwd,
ignorePath,
specificConfigPath
}) {
const cliConfigArray = configArrayFactory.create(
cliConfigData,
{ name: "CLIOptions" }
);
cliConfigArray.unshift(
...(ignorePath
? configArrayFactory.loadESLintIgnore(ignorePath)
: configArrayFactory.loadDefaultESLintIgnore())
);
if (specificConfigPath) {
cliConfigArray.unshift(
...configArrayFactory.loadFile(
specificConfigPath,
{ name: "--config", basePath: cwd }
)
);
}
return cliConfigArray;
}
/**
* The error type when there are files matched by a glob, but all of them have been ignored.
*/
class ConfigurationNotFoundError extends Error {
// eslint-disable-next-line jsdoc/require-description
/**
* @param {string} directoryPath The directory path.
*/
constructor(directoryPath) {
super(`No ESLint configuration found in ${directoryPath}.`);
this.messageTemplate = "no-config-found";
this.messageData = { directoryPath };
}
}
/**
* This class enumerates every file that is matched by the given glob patterns,
* together with its configuration.
*/
class CascadingConfigArrayFactory {
/**
* Initialize this enumerator.
* @param {CascadingConfigArrayFactoryOptions} options The options.
*/
constructor({
additionalPluginPool = new Map(),
baseConfig: baseConfigData = null,
cliConfig: cliConfigData = null,
cwd = process.cwd(),
ignorePath,
resolvePluginsRelativeTo,
rulePaths = [],
specificConfigPath = null,
useEslintrc = true,
builtInRules = new Map(),
loadRules,
resolver,
eslintRecommendedPath,
getEslintRecommendedConfig,
eslintAllPath,
getEslintAllConfig
} = {}) {
const configArrayFactory = new ConfigArrayFactory({
additionalPluginPool,
cwd,
resolvePluginsRelativeTo,
builtInRules,
resolver,
eslintRecommendedPath,
getEslintRecommendedConfig,
eslintAllPath,
getEslintAllConfig
});
internalSlotsMap.set(this, {
baseConfigArray: createBaseConfigArray({
baseConfigData,
configArrayFactory,
cwd,
rulePaths,
loadRules
}),
baseConfigData,
cliConfigArray: createCLIConfigArray({
cliConfigData,
configArrayFactory,
cwd,
ignorePath,
specificConfigPath
}),
cliConfigData,
configArrayFactory,
configCache: new Map(),
cwd,
finalizeCache: new WeakMap(),
ignorePath,
rulePaths,
specificConfigPath,
useEslintrc,
builtInRules,
loadRules
});
}
/**
* The path to the current working directory.
* This is used by tests.
* @type {string}
*/
get cwd() {
const { cwd } = internalSlotsMap.get(this);
return cwd;
}
/**
* Get the config array of a given file.
* If `filePath` was not given, it returns the config which contains only
* `baseConfigData` and `cliConfigData`.
* @param {string} [filePath] The file path to a file.
* @param {Object} [options] The options.
* @param {boolean} [options.ignoreNotFoundError] If `true` then it doesn't throw `ConfigurationNotFoundError`.
* @returns {ConfigArray} The config array of the file.
*/
getConfigArrayForFile(filePath, { ignoreNotFoundError = false } = {}) {
const {
baseConfigArray,
cliConfigArray,
cwd
} = internalSlotsMap.get(this);
if (!filePath) {
return new ConfigArray(...baseConfigArray, ...cliConfigArray);
}
const directoryPath = path.dirname(path.resolve(cwd, filePath));
debug(`Load config files for ${directoryPath}.`);
return this._finalizeConfigArray(
this._loadConfigInAncestors(directoryPath),
directoryPath,
ignoreNotFoundError
);
}
/**
* Set the config data to override all configs.
* The `clearCache()` method must be called after calling this method.
* @param {ConfigData} configData The config data to override all configs.
* @returns {void}
*/
setOverrideConfig(configData) {
const slots = internalSlotsMap.get(this);
slots.cliConfigData = configData;
}
/**
* Clear config cache.
* @returns {void}
*/
clearCache() {
const slots = internalSlotsMap.get(this);
slots.baseConfigArray = createBaseConfigArray(slots);
slots.cliConfigArray = createCLIConfigArray(slots);
slots.configCache.clear();
}
/**
* Load and normalize config files from the ancestor directories.
* @param {string} directoryPath The path to a leaf directory.
* @param {boolean} configsExistInSubdirs `true` if configurations exist in subdirectories.
* @returns {ConfigArray} The loaded config.
* @private
*/
_loadConfigInAncestors(directoryPath, configsExistInSubdirs = false) {
const {
baseConfigArray,
configArrayFactory,
configCache,
cwd,
useEslintrc
} = internalSlotsMap.get(this);
if (!useEslintrc) {
return baseConfigArray;
}
let configArray = configCache.get(directoryPath);
// Hit cache.
if (configArray) {
debug(`Cache hit: ${directoryPath}.`);
return configArray;
}
debug(`No cache found: ${directoryPath}.`);
const homePath = os.homedir();
// Consider this directory to be the root.
if (directoryPath === homePath && cwd !== homePath) {
debug("Stop traversing because of considered root.");
if (configsExistInSubdirs) {
const filePath = ConfigArrayFactory.getPathToConfigFileInDirectory(directoryPath);
if (filePath) {
emitDeprecationWarning(
filePath,
"ESLINT_PERSONAL_CONFIG_SUPPRESS"
);
}
}
return this._cacheConfig(directoryPath, baseConfigArray);
}
// Load the config on this directory.
try {
configArray = configArrayFactory.loadInDirectory(directoryPath);
} catch (error) {
/* istanbul ignore next */
if (error.code === "EACCES") {
debug("Stop traversing because of 'EACCES' error.");
return this._cacheConfig(directoryPath, baseConfigArray);
}
throw error;
}
if (configArray.length > 0 && configArray.isRoot()) {
debug("Stop traversing because of 'root:true'.");
configArray.unshift(...baseConfigArray);
return this._cacheConfig(directoryPath, configArray);
}
// Load from the ancestors and merge it.
const parentPath = path.dirname(directoryPath);
const parentConfigArray = parentPath && parentPath !== directoryPath
? this._loadConfigInAncestors(
parentPath,
configsExistInSubdirs || configArray.length > 0
)
: baseConfigArray;
if (configArray.length > 0) {
configArray.unshift(...parentConfigArray);
} else {
configArray = parentConfigArray;
}
// Cache and return.
return this._cacheConfig(directoryPath, configArray);
}
/**
* Freeze and cache a given config.
* @param {string} directoryPath The path to a directory as a cache key.
* @param {ConfigArray} configArray The config array as a cache value.
* @returns {ConfigArray} The `configArray` (frozen).
*/
_cacheConfig(directoryPath, configArray) {
const { configCache } = internalSlotsMap.get(this);
Object.freeze(configArray);
configCache.set(directoryPath, configArray);
return configArray;
}
/**
* Finalize a given config array.
* Concatenate `--config` and other CLI options.
* @param {ConfigArray} configArray The parent config array.
* @param {string} directoryPath The path to the leaf directory to find config files.
* @param {boolean} ignoreNotFoundError If `true` then it doesn't throw `ConfigurationNotFoundError`.
* @returns {ConfigArray} The loaded config.
* @private
*/
_finalizeConfigArray(configArray, directoryPath, ignoreNotFoundError) {
const {
cliConfigArray,
configArrayFactory,
finalizeCache,
useEslintrc,
builtInRules
} = internalSlotsMap.get(this);
let finalConfigArray = finalizeCache.get(configArray);
if (!finalConfigArray) {
finalConfigArray = configArray;
// Load the personal config if there are no regular config files.
if (
useEslintrc &&
configArray.every(c => !c.filePath) &&
cliConfigArray.every(c => !c.filePath) // `--config` option can be a file.
) {
const homePath = os.homedir();
debug("Loading the config file of the home directory:", homePath);
const personalConfigArray = configArrayFactory.loadInDirectory(
homePath,
{ name: "PersonalConfig" }
);
if (
personalConfigArray.length > 0 &&
!directoryPath.startsWith(homePath)
) {
const lastElement =
personalConfigArray[personalConfigArray.length - 1];
emitDeprecationWarning(
lastElement.filePath,
"ESLINT_PERSONAL_CONFIG_LOAD"
);
}
finalConfigArray = finalConfigArray.concat(personalConfigArray);
}
// Apply CLI options.
if (cliConfigArray.length > 0) {
finalConfigArray = finalConfigArray.concat(cliConfigArray);
}
// Validate rule settings and environments.
const validator = new ConfigValidator({
builtInRules
});
validator.validateConfigArray(finalConfigArray);
// Cache it.
Object.freeze(finalConfigArray);
finalizeCache.set(configArray, finalConfigArray);
debug(
"Configuration was determined: %o on %s",
finalConfigArray,
directoryPath
);
}
// At least one element (the default ignore patterns) exists.
if (!ignoreNotFoundError && useEslintrc && finalConfigArray.length <= 1) {
throw new ConfigurationNotFoundError(directoryPath);
}
return finalConfigArray;
}
}
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------
export { CascadingConfigArrayFactory };
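To make the lookup flow above concrete, here is a rough usage sketch. It assumes the class is reachable through the package's `Legacy` export and passes only the options a minimal lookup needs; the option values and file paths are illustrative.

```js
import path from "path";
import { Legacy } from "@eslint/eslintrc";

const { CascadingConfigArrayFactory } = Legacy;

const factory = new CascadingConfigArrayFactory({
    cwd: process.cwd(),      // where the upward .eslintrc lookup starts
    useEslintrc: true,       // set to false to skip config files entirely
    builtInRules: new Map()  // normally ESLint's own rule map
});

// Walks from the file's directory up to `root: true` (or the home directory),
// merging every config file it finds with the base/CLI configs.
const configArray = factory.getConfigArrayForFile("src/index.js");

// Reduce the cascaded array to the final config for this exact file.
const config = configArray.extractConfig(path.resolve("src/index.js"));
console.log(Object.keys(config.rules));
```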

File diff suppressed because it is too large

@ -0,0 +1,523 @@
/**
* @fileoverview `ConfigArray` class.
*
* The `ConfigArray` class represents a complete configuration. It has the entry
* config file, base config files that were extended, loaded parsers, and loaded
* plugins.
*
* `ConfigArray` class provides three properties and two methods.
*
* - `pluginEnvironments`
* - `pluginProcessors`
* - `pluginRules`
* The `Map` objects that contain the members of all plugins that this
* config array contains. Those map objects don't have mutation methods.
* Those keys are the member ID such as `pluginId/memberName`.
* - `isRoot()`
* If `true` then this configuration has `root:true` property.
* - `extractConfig(filePath)`
* Extract the final configuration for a given file. This means merging
* every config array element whose `criteria` property matched the file. The
* `filePath` argument must be an absolute path.
*
* `ConfigArrayFactory` provides the loading logic of config files.
*
* @author Toru Nagashima <https://github.com/mysticatea>
*/
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
import { ExtractedConfig } from "./extracted-config.js";
import { IgnorePattern } from "./ignore-pattern.js";
//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------
// Define types for VSCode IntelliSense.
/** @typedef {import("../../shared/types").Environment} Environment */
/** @typedef {import("../../shared/types").GlobalConf} GlobalConf */
/** @typedef {import("../../shared/types").RuleConf} RuleConf */
/** @typedef {import("../../shared/types").Rule} Rule */
/** @typedef {import("../../shared/types").Plugin} Plugin */
/** @typedef {import("../../shared/types").Processor} Processor */
/** @typedef {import("./config-dependency").DependentParser} DependentParser */
/** @typedef {import("./config-dependency").DependentPlugin} DependentPlugin */
/** @typedef {import("./override-tester")["OverrideTester"]} OverrideTester */
/**
* @typedef {Object} ConfigArrayElement
* @property {string} name The name of this config element.
* @property {string} filePath The path to the source file of this config element.
* @property {InstanceType<OverrideTester>|null} criteria The tester for the `files` and `excludedFiles` of this config element.
* @property {Record<string, boolean>|undefined} env The environment settings.
* @property {Record<string, GlobalConf>|undefined} globals The global variable settings.
* @property {IgnorePattern|undefined} ignorePattern The ignore patterns.
* @property {boolean|undefined} noInlineConfig The flag that disables directive comments.
* @property {DependentParser|undefined} parser The parser loader.
* @property {Object|undefined} parserOptions The parser options.
* @property {Record<string, DependentPlugin>|undefined} plugins The plugin loaders.
* @property {string|undefined} processor The processor name that refers to a plugin's processor.
* @property {boolean|undefined} reportUnusedDisableDirectives The flag to report unused `eslint-disable` comments.
* @property {boolean|undefined} root The flag to express root.
* @property {Record<string, RuleConf>|undefined} rules The rule settings
* @property {Object|undefined} settings The shared settings.
* @property {"config" | "ignore" | "implicit-processor"} type The element type.
*/
/**
* @typedef {Object} ConfigArrayInternalSlots
* @property {Map<string, ExtractedConfig>} cache The cache to extract configs.
* @property {ReadonlyMap<string, Environment>|null} envMap The map from environment ID to environment definition.
* @property {ReadonlyMap<string, Processor>|null} processorMap The map from processor ID to environment definition.
* @property {ReadonlyMap<string, Rule>|null} ruleMap The map from rule ID to rule definition.
*/
/** @type {WeakMap<ConfigArray, ConfigArrayInternalSlots>} */
const internalSlotsMap = new class extends WeakMap {
get(key) {
let value = super.get(key);
if (!value) {
value = {
cache: new Map(),
envMap: null,
processorMap: null,
ruleMap: null
};
super.set(key, value);
}
return value;
}
}();
/**
* Get the indices which are matched to a given file.
* @param {ConfigArrayElement[]} elements The elements.
* @param {string} filePath The path to a target file.
* @returns {number[]} The indices.
*/
function getMatchedIndices(elements, filePath) {
const indices = [];
for (let i = elements.length - 1; i >= 0; --i) {
const element = elements[i];
if (!element.criteria || (filePath && element.criteria.test(filePath))) {
indices.push(i);
}
}
return indices;
}
/**
* Check if a value is a non-null object.
* @param {any} x The value to check.
* @returns {boolean} `true` if the value is a non-null object.
*/
function isNonNullObject(x) {
return typeof x === "object" && x !== null;
}
/**
* Merge two objects.
*
* Assign every property value of `source` to `target` if `target` doesn't have the property.
* If the property value is an object, it merges recursively.
* @param {Object} target The destination to merge
* @param {Object|undefined} source The source to merge.
* @returns {void}
*/
function mergeWithoutOverwrite(target, source) {
if (!isNonNullObject(source)) {
return;
}
for (const key of Object.keys(source)) {
if (key === "__proto__") {
continue;
}
if (isNonNullObject(target[key])) {
mergeWithoutOverwrite(target[key], source[key]);
} else if (target[key] === void 0) {
if (isNonNullObject(source[key])) {
target[key] = Array.isArray(source[key]) ? [] : {};
mergeWithoutOverwrite(target[key], source[key]);
} else if (source[key] !== void 0) {
target[key] = source[key];
}
}
}
}
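/*
 * Worked example (illustrative values):
 *
 *   target = { env: { node: true } }
 *   source = { env: { node: false, browser: true }, rules: { semi: "error" } }
 *
 * After mergeWithoutOverwrite(target, source), target is
 *   { env: { node: true, browser: true }, rules: { semi: "error" } }
 * because values already present in `target` are never overwritten, while
 * missing keys are copied (recursively for nested objects).
 */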
/**
* The error for plugin conflicts.
*/
class PluginConflictError extends Error {
/**
* Initialize this error object.
* @param {string} pluginId The plugin ID.
* @param {{filePath:string, importerName:string}[]} plugins The resolved plugins.
*/
constructor(pluginId, plugins) {
super(`Plugin "${pluginId}" was conflicted between ${plugins.map(p => `"${p.importerName}"`).join(" and ")}.`);
this.messageTemplate = "plugin-conflict";
this.messageData = { pluginId, plugins };
}
}
/**
* Merge plugins.
* `target`'s definition takes precedence over `source`'s.
* @param {Record<string, DependentPlugin>} target The destination to merge
* @param {Record<string, DependentPlugin>|undefined} source The source to merge.
* @returns {void}
*/
function mergePlugins(target, source) {
if (!isNonNullObject(source)) {
return;
}
for (const key of Object.keys(source)) {
if (key === "__proto__") {
continue;
}
const targetValue = target[key];
const sourceValue = source[key];
// Adopt the plugin which was found at first.
if (targetValue === void 0) {
if (sourceValue.error) {
throw sourceValue.error;
}
target[key] = sourceValue;
} else if (sourceValue.filePath !== targetValue.filePath) {
throw new PluginConflictError(key, [
{
filePath: targetValue.filePath,
importerName: targetValue.importerName
},
{
filePath: sourceValue.filePath,
importerName: sourceValue.importerName
}
]);
}
}
}
/**
* Merge rule configs.
* `target`'s definition takes precedence over `source`'s.
* @param {Record<string, Array>} target The destination to merge
* @param {Record<string, RuleConf>|undefined} source The source to merge.
* @returns {void}
*/
function mergeRuleConfigs(target, source) {
if (!isNonNullObject(source)) {
return;
}
for (const key of Object.keys(source)) {
if (key === "__proto__") {
continue;
}
const targetDef = target[key];
const sourceDef = source[key];
// Adopt the rule config which was found at first.
if (targetDef === void 0) {
if (Array.isArray(sourceDef)) {
target[key] = [...sourceDef];
} else {
target[key] = [sourceDef];
}
/*
* If the first found rule config is severity only and the current rule
* config has options, merge the severity and the options.
*/
} else if (
targetDef.length === 1 &&
Array.isArray(sourceDef) &&
sourceDef.length >= 2
) {
targetDef.push(...sourceDef.slice(1));
}
}
}
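/*
 * Worked example (illustrative values):
 *
 *   mergeRuleConfigs({ semi: ["error"] }, { semi: ["warn", "always"] })
 *
 * leaves the target as { semi: ["error", "always"] }: the severity found first
 * wins, and the options of the later (lower-priority) config are appended only
 * because the first entry had no options of its own.
 */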
/**
* Create the extracted config.
* @param {ConfigArray} instance The config elements.
* @param {number[]} indices The indices to use.
* @returns {ExtractedConfig} The extracted config.
*/
function createConfig(instance, indices) {
const config = new ExtractedConfig();
const ignorePatterns = [];
// Merge elements.
for (const index of indices) {
const element = instance[index];
// Adopt the parser which was found at first.
if (!config.parser && element.parser) {
if (element.parser.error) {
throw element.parser.error;
}
config.parser = element.parser;
}
// Adopt the processor which was found at first.
if (!config.processor && element.processor) {
config.processor = element.processor;
}
// Adopt the noInlineConfig which was found at first.
if (config.noInlineConfig === void 0 && element.noInlineConfig !== void 0) {
config.noInlineConfig = element.noInlineConfig;
config.configNameOfNoInlineConfig = element.name;
}
// Adopt the reportUnusedDisableDirectives which was found at first.
if (config.reportUnusedDisableDirectives === void 0 && element.reportUnusedDisableDirectives !== void 0) {
config.reportUnusedDisableDirectives = element.reportUnusedDisableDirectives;
}
// Collect ignorePatterns
if (element.ignorePattern) {
ignorePatterns.push(element.ignorePattern);
}
// Merge others.
mergeWithoutOverwrite(config.env, element.env);
mergeWithoutOverwrite(config.globals, element.globals);
mergeWithoutOverwrite(config.parserOptions, element.parserOptions);
mergeWithoutOverwrite(config.settings, element.settings);
mergePlugins(config.plugins, element.plugins);
mergeRuleConfigs(config.rules, element.rules);
}
// Create the predicate function for ignore patterns.
if (ignorePatterns.length > 0) {
config.ignores = IgnorePattern.createIgnore(ignorePatterns.reverse());
}
return config;
}
/**
* Collect definitions.
* @template T, U
* @param {string} pluginId The plugin ID for prefix.
* @param {Record<string,T>} defs The definitions to collect.
* @param {Map<string, U>} map The map to output.
* @param {function(T): U} [normalize] The normalize function for each value.
* @returns {void}
*/
function collect(pluginId, defs, map, normalize) {
if (defs) {
const prefix = pluginId && `${pluginId}/`;
for (const [key, value] of Object.entries(defs)) {
map.set(
`${prefix}${key}`,
normalize ? normalize(value) : value
);
}
}
}
/**
* Normalize a rule definition.
* @param {Function|Rule} rule The rule definition to normalize.
* @returns {Rule} The normalized rule definition.
*/
function normalizePluginRule(rule) {
return typeof rule === "function" ? { create: rule } : rule;
}
/**
* Delete the mutation methods from a given map.
* @param {Map<any, any>} map The map object to delete.
* @returns {void}
*/
function deleteMutationMethods(map) {
Object.defineProperties(map, {
clear: { configurable: true, value: void 0 },
delete: { configurable: true, value: void 0 },
set: { configurable: true, value: void 0 }
});
}
/**
* Create `envMap`, `processorMap`, `ruleMap` with the plugins in the config array.
* @param {ConfigArrayElement[]} elements The config elements.
* @param {ConfigArrayInternalSlots} slots The internal slots.
* @returns {void}
*/
function initPluginMemberMaps(elements, slots) {
const processed = new Set();
slots.envMap = new Map();
slots.processorMap = new Map();
slots.ruleMap = new Map();
for (const element of elements) {
if (!element.plugins) {
continue;
}
for (const [pluginId, value] of Object.entries(element.plugins)) {
const plugin = value.definition;
if (!plugin || processed.has(pluginId)) {
continue;
}
processed.add(pluginId);
collect(pluginId, plugin.environments, slots.envMap);
collect(pluginId, plugin.processors, slots.processorMap);
collect(pluginId, plugin.rules, slots.ruleMap, normalizePluginRule);
}
}
deleteMutationMethods(slots.envMap);
deleteMutationMethods(slots.processorMap);
deleteMutationMethods(slots.ruleMap);
}
/**
* Create `envMap`, `processorMap`, `ruleMap` with the plugins in the config array.
* @param {ConfigArray} instance The config elements.
* @returns {ConfigArrayInternalSlots} The extracted config.
*/
function ensurePluginMemberMaps(instance) {
const slots = internalSlotsMap.get(instance);
if (!slots.ruleMap) {
initPluginMemberMaps(instance, slots);
}
return slots;
}
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------
/**
* The Config Array.
*
* A `ConfigArray` instance contains all settings, parsers, and plugins.
* Call the `ConfigArray#extractConfig(filePath)` method to extract, merge, and
* get only the config data that is related to an arbitrary file.
* @extends {Array<ConfigArrayElement>}
*/
class ConfigArray extends Array {
/**
* Get the plugin environments.
* The returned map cannot be mutated.
* @type {ReadonlyMap<string, Environment>} The plugin environments.
*/
get pluginEnvironments() {
return ensurePluginMemberMaps(this).envMap;
}
/**
* Get the plugin processors.
* The returned map cannot be mutated.
* @type {ReadonlyMap<string, Processor>} The plugin processors.
*/
get pluginProcessors() {
return ensurePluginMemberMaps(this).processorMap;
}
/**
* Get the plugin rules.
* The returned map cannot be mutated.
* @returns {ReadonlyMap<string, Rule>} The plugin rules.
*/
get pluginRules() {
return ensurePluginMemberMaps(this).ruleMap;
}
/**
* Check if this config has `root` flag.
* @returns {boolean} `true` if this config array is root.
*/
isRoot() {
for (let i = this.length - 1; i >= 0; --i) {
const root = this[i].root;
if (typeof root === "boolean") {
return root;
}
}
return false;
}
/**
* Extract the config data which is related to a given file.
* @param {string} filePath The absolute path to the target file.
* @returns {ExtractedConfig} The extracted config data.
*/
extractConfig(filePath) {
const { cache } = internalSlotsMap.get(this);
const indices = getMatchedIndices(this, filePath);
const cacheKey = indices.join(",");
if (!cache.has(cacheKey)) {
cache.set(cacheKey, createConfig(this, indices));
}
return cache.get(cacheKey);
}
/**
* Check if a given path is an additional lint target.
* @param {string} filePath The absolute path to the target file.
* @returns {boolean} `true` if the file is an additional lint target.
*/
isAdditionalTargetPath(filePath) {
for (const { criteria, type } of this) {
if (
type === "config" &&
criteria &&
!criteria.endsWithWildcard &&
criteria.test(filePath)
) {
return true;
}
}
return false;
}
}
/**
* Get the used extracted configs.
* CLIEngine will use this method to collect used deprecated rules.
* @param {ConfigArray} instance The config array object to get.
* @returns {ExtractedConfig[]} The used extracted configs.
* @private
*/
function getUsedExtractedConfigs(instance) {
const { cache } = internalSlotsMap.get(instance);
return Array.from(cache.values());
}
export {
ConfigArray,
getUsedExtractedConfigs
};
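/*
 * Illustrative sketch (not part of the package): extracting the merged config
 * for one file from a ConfigArray. The element objects below are hypothetical,
 * minimal stand-ins for what ConfigArrayFactory would normally produce.
 */
const exampleArray = new ConfigArray(
    { type: "config", name: "base", criteria: null, rules: { semi: "error" } },
    { type: "config", name: "local", criteria: null, rules: { semi: ["error", "never"] } }
);
const exampleConfig = exampleArray.extractConfig("/project/src/index.js");
// The severity found first wins; extra options from later elements are merged in:
// exampleConfig.rules.semi -> ["error", "never"]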

@ -0,0 +1,115 @@
/**
* @fileoverview `ConfigDependency` class.
*
* `ConfigDependency` class expresses a loaded parser or plugin.
*
* If the parser or plugin was loaded successfully, it has `definition` property
* and `filePath` property. Otherwise, it has `error` property.
*
* When `JSON.stringify()` converts a `ConfigDependency` object to JSON, it
* omits the `definition` property.
*
* `ConfigArrayFactory` creates `ConfigDependency` objects when it loads parsers
* or plugins.
*
* @author Toru Nagashima <https://github.com/mysticatea>
*/
import util from "util";
/**
* This class stores parsers or plugins.
* This class hides the loaded object from `JSON.stringify()` and `console.log`.
* @template T
*/
class ConfigDependency {
/**
* Initialize this instance.
* @param {Object} data The dependency data.
* @param {T} [data.definition] The dependency if the loading succeeded.
* @param {Error} [data.error] The error object if the loading failed.
* @param {string} [data.filePath] The actual path to the dependency if the loading succeeded.
* @param {string} data.id The ID of this dependency.
* @param {string} data.importerName The name of the config file which loads this dependency.
* @param {string} data.importerPath The path to the config file which loads this dependency.
*/
constructor({
definition = null,
error = null,
filePath = null,
id,
importerName,
importerPath
}) {
/**
* The loaded dependency if the loading succeeded.
* @type {T|null}
*/
this.definition = definition;
/**
* The error object if the loading failed.
* @type {Error|null}
*/
this.error = error;
/**
* The path to the loaded dependency if the loading succeeded.
* @type {string|null}
*/
this.filePath = filePath;
/**
* The ID of this dependency.
* @type {string}
*/
this.id = id;
/**
* The name of the config file which loads this dependency.
* @type {string}
*/
this.importerName = importerName;
/**
* The path to the config file which loads this dependency.
* @type {string}
*/
this.importerPath = importerPath;
}
// eslint-disable-next-line jsdoc/require-description
/**
* @returns {Object} a JSON compatible object.
*/
toJSON() {
const obj = this[util.inspect.custom]();
// Include `error.message` explicitly (`Error#message` is not enumerable).
if (obj.error instanceof Error) {
obj.error = { ...obj.error, message: obj.error.message };
}
return obj;
}
// eslint-disable-next-line jsdoc/require-description
/**
* @returns {Object} an object to display by `console.log()`.
*/
[util.inspect.custom]() {
const {
definition: _ignore, // eslint-disable-line no-unused-vars
...obj
} = this;
return obj;
}
}
/** @typedef {ConfigDependency<import("../../shared/types").Parser>} DependentParser */
/** @typedef {ConfigDependency<import("../../shared/types").Plugin>} DependentPlugin */
export { ConfigDependency };
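/*
 * Illustrative sketch (not part of the package): wrapping a loaded parser.
 * The paths and names below are hypothetical.
 */
const exampleDependency = new ConfigDependency({
    definition: { parseForESLint() { /* parser implementation */ } },
    filePath: "/project/node_modules/some-parser/index.js",
    id: "some-parser",
    importerName: ".eslintrc.json",
    importerPath: "/project/.eslintrc.json"
});
// `definition` is hidden from serialization and console output:
JSON.stringify(exampleDependency); // no "definition" key in the result
console.log(exampleDependency);    // inspected without `definition`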

@ -0,0 +1,145 @@
/**
* @fileoverview `ExtractedConfig` class.
*
* `ExtractedConfig` class expresses a final configuration for a specific file.
*
* It provides one method.
*
* - `toCompatibleObjectAsConfigFileContent()`
*    Convert this configuration to an object that is compatible with the
*    content of config files. It converts the loaded parser and plugins to strings.
* `CLIEngine#getConfigForFile(filePath)` method uses this method.
*
* `ConfigArray#extractConfig(filePath)` creates an `ExtractedConfig` instance.
*
* @author Toru Nagashima <https://github.com/mysticatea>
*/
import { IgnorePattern } from "./ignore-pattern.js";
// For VSCode intellisense
/** @typedef {import("../../shared/types").ConfigData} ConfigData */
/** @typedef {import("../../shared/types").GlobalConf} GlobalConf */
/** @typedef {import("../../shared/types").SeverityConf} SeverityConf */
/** @typedef {import("./config-dependency").DependentParser} DependentParser */
/** @typedef {import("./config-dependency").DependentPlugin} DependentPlugin */
/**
* Check if `xs` starts with `ys`.
* @template T
* @param {T[]} xs The array to check.
* @param {T[]} ys The array that may be the first part of `xs`.
* @returns {boolean} `true` if `xs` starts with `ys`.
*/
function startsWith(xs, ys) {
return xs.length >= ys.length && ys.every((y, i) => y === xs[i]);
}
/**
* The class for extracted config data.
*/
class ExtractedConfig {
constructor() {
/**
* The name of the config that the `noInlineConfig` setting came from.
* @type {string}
*/
this.configNameOfNoInlineConfig = "";
/**
* Environments.
* @type {Record<string, boolean>}
*/
this.env = {};
/**
* Global variables.
* @type {Record<string, GlobalConf>}
*/
this.globals = {};
/**
* The glob patterns for files to ignore when linting.
* @type {(((filePath:string, dot?:boolean) => boolean) & { basePath:string; patterns:string[] }) | undefined}
*/
this.ignores = void 0;
/**
* The flag that disables directive comments.
* @type {boolean|undefined}
*/
this.noInlineConfig = void 0;
/**
* Parser definition.
* @type {DependentParser|null}
*/
this.parser = null;
/**
* Options for the parser.
* @type {Object}
*/
this.parserOptions = {};
/**
* Plugin definitions.
* @type {Record<string, DependentPlugin>}
*/
this.plugins = {};
/**
* Processor ID.
* @type {string|null}
*/
this.processor = null;
/**
* The flag that reports unused `eslint-disable` directive comments.
* @type {boolean|undefined}
*/
this.reportUnusedDisableDirectives = void 0;
/**
* Rule settings.
* @type {Record<string, [SeverityConf, ...any[]]>}
*/
this.rules = {};
/**
* Shared settings.
* @type {Object}
*/
this.settings = {};
}
/**
* Convert this config to the compatible object as a config file content.
* @returns {ConfigData} The converted object.
*/
toCompatibleObjectAsConfigFileContent() {
const {
/* eslint-disable no-unused-vars */
configNameOfNoInlineConfig: _ignore1,
processor: _ignore2,
/* eslint-enable no-unused-vars */
ignores,
...config
} = this;
config.parser = config.parser && config.parser.filePath;
config.plugins = Object.keys(config.plugins).filter(Boolean).reverse();
config.ignorePatterns = ignores ? ignores.patterns : [];
// Strip the default patterns from `ignorePatterns`.
if (startsWith(config.ignorePatterns, IgnorePattern.DefaultPatterns)) {
config.ignorePatterns =
config.ignorePatterns.slice(IgnorePattern.DefaultPatterns.length);
}
return config;
}
}
export { ExtractedConfig };
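/*
 * Illustrative sketch (not part of the package): converting an extracted
 * config back into eslintrc-style file content. The values are hypothetical.
 */
const demoExtracted = new ExtractedConfig();
demoExtracted.rules.semi = ["error", "always"];
demoExtracted.env.node = true;
const demoFileContent = demoExtracted.toCompatibleObjectAsConfigFileContent();
// demoFileContent.rules          -> { semi: ["error", "always"] }
// demoFileContent.parser         -> null (would be the parser's filePath if one were set)
// demoFileContent.ignorePatterns -> [] (default patterns are stripped when present)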

@ -0,0 +1,238 @@
/**
* @fileoverview `IgnorePattern` class.
*
* `IgnorePattern` class has the set of glob patterns and the base path.
*
* It provides two static methods.
*
* - `IgnorePattern.createDefaultIgnore(cwd)`
* Create the default predicate function.
* - `IgnorePattern.createIgnore(ignorePatterns)`
* Create the predicate function from multiple `IgnorePattern` objects.
*
* It provides two properties and a method.
*
* - `patterns`
*      The glob patterns for files to ignore when linting.
* - `basePath`
*      The base path of the glob patterns. If the glob patterns contain
*      absolute paths, those are handled as paths relative to the base path.
* - `getPatternsRelativeTo(basePath)`
*      Get `patterns` as modified for a given base path. It adjusts the
*      absolute paths in the patterns by prepending the difference between the
*      two base paths.
*
* `ConfigArrayFactory` creates `IgnorePattern` objects when it processes
* `ignorePatterns` properties.
*
* @author Toru Nagashima <https://github.com/mysticatea>
*/
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
import assert from "assert";
import path from "path";
import ignore from "ignore";
import debugOrig from "debug";
const debug = debugOrig("eslintrc:ignore-pattern");
/** @typedef {ReturnType<import("ignore").default>} Ignore */
//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------
/**
* Get the path to the common ancestor directory of given paths.
* @param {string[]} sourcePaths The paths to calculate the common ancestor.
* @returns {string} The path to the common ancestor directory.
*/
function getCommonAncestorPath(sourcePaths) {
let result = sourcePaths[0];
for (let i = 1; i < sourcePaths.length; ++i) {
const a = result;
const b = sourcePaths[i];
// Set the shorter one (it's the common ancestor if one includes the other).
result = a.length < b.length ? a : b;
// Set the common ancestor.
for (let j = 0, lastSepPos = 0; j < a.length && j < b.length; ++j) {
if (a[j] !== b[j]) {
result = a.slice(0, lastSepPos);
break;
}
if (a[j] === path.sep) {
lastSepPos = j;
}
}
}
let resolvedResult = result || path.sep;
// On Windows, if the common ancestor is a drive root, it must have a trailing slash to be absolute.
if (resolvedResult && resolvedResult.endsWith(":") && process.platform === "win32") {
resolvedResult += path.sep;
}
return resolvedResult;
}
/**
* Make relative path.
* @param {string} from The source path to get relative path.
* @param {string} to The destination path to get relative path.
* @returns {string} The relative path.
*/
function relative(from, to) {
const relPath = path.relative(from, to);
if (path.sep === "/") {
return relPath;
}
return relPath.split(path.sep).join("/");
}
/**
* Get the trailing slash if it exists.
* @param {string} filePath The path to check.
* @returns {string} A trailing slash ("/") if the path is a directory path, otherwise an empty string.
*/
function dirSuffix(filePath) {
const isDir = (
filePath.endsWith(path.sep) ||
(process.platform === "win32" && filePath.endsWith("/"))
);
return isDir ? "/" : "";
}
const DefaultPatterns = Object.freeze(["/**/node_modules/*"]);
const DotPatterns = Object.freeze([".*", "!.eslintrc.*", "!../"]);
//------------------------------------------------------------------------------
// Public
//------------------------------------------------------------------------------
class IgnorePattern {
/**
* The default patterns.
* @type {string[]}
*/
static get DefaultPatterns() {
return DefaultPatterns;
}
/**
* Create the default predicate function.
* @param {string} cwd The current working directory.
* @returns {((filePath:string, dot:boolean) => boolean) & {basePath:string; patterns:string[]}}
* The predicate function.
* The first argument is an absolute path that is checked.
* The second argument is the flag to not ignore dotfiles.
* If the predicate function returned `true`, it means the path should be ignored.
*/
static createDefaultIgnore(cwd) {
return this.createIgnore([new IgnorePattern(DefaultPatterns, cwd)]);
}
/**
* Create the predicate function from multiple `IgnorePattern` objects.
* @param {IgnorePattern[]} ignorePatterns The list of ignore patterns.
* @returns {((filePath:string, dot?:boolean) => boolean) & {basePath:string; patterns:string[]}}
* The predicate function.
* The first argument is an absolute path that is checked.
* The second argument is the flag to not ignore dotfiles.
* If the predicate function returned `true`, it means the path should be ignored.
*/
static createIgnore(ignorePatterns) {
debug("Create with: %o", ignorePatterns);
const basePath = getCommonAncestorPath(ignorePatterns.map(p => p.basePath));
const patterns = [].concat(
...ignorePatterns.map(p => p.getPatternsRelativeTo(basePath))
);
const ig = ignore({ allowRelativePaths: true }).add([...DotPatterns, ...patterns]);
const dotIg = ignore({ allowRelativePaths: true }).add(patterns);
debug(" processed: %o", { basePath, patterns });
return Object.assign(
(filePath, dot = false) => {
assert(path.isAbsolute(filePath), "'filePath' should be an absolute path.");
const relPathRaw = relative(basePath, filePath);
const relPath = relPathRaw && (relPathRaw + dirSuffix(filePath));
const adoptedIg = dot ? dotIg : ig;
const result = relPath !== "" && adoptedIg.ignores(relPath);
debug("Check", { filePath, dot, relativePath: relPath, result });
return result;
},
{ basePath, patterns }
);
}
/**
* Initialize a new `IgnorePattern` instance.
* @param {string[]} patterns The glob patterns for files to ignore when linting.
* @param {string} basePath The base path of `patterns`.
*/
constructor(patterns, basePath) {
assert(path.isAbsolute(basePath), "'basePath' should be an absolute path.");
/**
* The glob patterns for files to ignore when linting.
* @type {string[]}
*/
this.patterns = patterns;
/**
* The base path of `patterns`.
* @type {string}
*/
this.basePath = basePath;
/**
* If `true`, patterns that don't start with `/` also match paths outside of `basePath`. Defaults to `false`.
*
* It's set to `true` for `.eslintignore`, `package.json`, and `--ignore-path` for backward compatibility.
* It stays `false` for the `ignorePatterns` property in config files.
* @type {boolean}
*/
this.loose = false;
}
/**
* Get `patterns` as modified for a given base path. It adjusts the absolute
* paths in the patterns by prepending the difference between the two base
* paths.
* @param {string} newBasePath The base path.
* @returns {string[]} The modified patterns.
*/
getPatternsRelativeTo(newBasePath) {
assert(path.isAbsolute(newBasePath), "'newBasePath' should be an absolute path.");
const { basePath, loose, patterns } = this;
if (newBasePath === basePath) {
return patterns;
}
const prefix = `/${relative(newBasePath, basePath)}`;
return patterns.map(pattern => {
const negative = pattern.startsWith("!");
const head = negative ? "!" : "";
const body = negative ? pattern.slice(1) : pattern;
if (body.startsWith("/") || body.startsWith("../")) {
return `${head}${prefix}${body}`;
}
return loose ? pattern : `${head}${prefix}/**/${body}`;
});
}
}
export { IgnorePattern };
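/*
 * Illustrative sketch (not part of the package): building and using the ignore
 * predicate. The project paths below are hypothetical (POSIX-style).
 */
const demoIgnores = IgnorePattern.createIgnore([
    new IgnorePattern(["dist/**", "!dist/keep.js"], "/project")
]);
demoIgnores("/project/dist/bundle.js");         // true  - matched by "dist/**"
demoIgnores("/project/dist/keep.js");           // false - re-included by "!dist/keep.js"
demoIgnores("/project/.cache/data.json");       // true  - dotfiles are ignored by default
demoIgnores("/project/.cache/data.json", true); // false - the `dot` flag skips the dot patterns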

@ -0,0 +1,19 @@
/**
* @fileoverview `ConfigArray` class.
* @author Toru Nagashima <https://github.com/mysticatea>
*/
import { ConfigArray, getUsedExtractedConfigs } from "./config-array.js";
import { ConfigDependency } from "./config-dependency.js";
import { ExtractedConfig } from "./extracted-config.js";
import { IgnorePattern } from "./ignore-pattern.js";
import { OverrideTester } from "./override-tester.js";
export {
ConfigArray,
ConfigDependency,
ExtractedConfig,
IgnorePattern,
OverrideTester,
getUsedExtractedConfigs
};

@ -0,0 +1,225 @@
/**
* @fileoverview `OverrideTester` class.
*
* `OverrideTester` class handles `files` property and `excludedFiles` property
* of `overrides` config.
*
* It provides one method.
*
* - `test(filePath)`
* Test if a file path matches the pair of `files` property and
* `excludedFiles` property. The `filePath` argument must be an absolute
* path.
*
* `ConfigArrayFactory` creates `OverrideTester` objects when it processes
* `overrides` properties.
*
* @author Toru Nagashima <https://github.com/mysticatea>
*/
import assert from "assert";
import path from "path";
import util from "util";
import minimatch from "minimatch";
const { Minimatch } = minimatch;
const minimatchOpts = { dot: true, matchBase: true };
/**
* @typedef {Object} Pattern
* @property {InstanceType<Minimatch>[] | null} includes The positive matchers.
* @property {InstanceType<Minimatch>[] | null} excludes The negative matchers.
*/
/**
* Normalize a given pattern to an array.
* @param {string|string[]|undefined} patterns A glob pattern or an array of glob patterns.
* @returns {string[]|null} Normalized patterns.
* @private
*/
function normalizePatterns(patterns) {
if (Array.isArray(patterns)) {
return patterns.filter(Boolean);
}
if (typeof patterns === "string" && patterns) {
return [patterns];
}
return [];
}
/**
* Create the matchers of given patterns.
* @param {string[]} patterns The patterns.
* @returns {InstanceType<Minimatch>[] | null} The matchers.
*/
function toMatcher(patterns) {
if (patterns.length === 0) {
return null;
}
return patterns.map(pattern => {
if (/^\.[/\\]/u.test(pattern)) {
return new Minimatch(
pattern.slice(2),
// `./*.js` should not match with `subdir/foo.js`
{ ...minimatchOpts, matchBase: false }
);
}
return new Minimatch(pattern, minimatchOpts);
});
}
/**
* Convert a given pattern to a JSON-compatible object.
* @param {Pattern} matchers The pattern (pair of matcher lists) to convert.
* @returns {Object} The JSON-compatible expression of the pattern.
*/
function patternToJson({ includes, excludes }) {
return {
includes: includes && includes.map(m => m.pattern),
excludes: excludes && excludes.map(m => m.pattern)
};
}
/**
* The class to test whether given paths are matched by the patterns.
*/
class OverrideTester {
/**
* Create a tester with given criteria.
* If there are no criteria, returns `null`.
* @param {string|string[]} files The glob patterns for included files.
* @param {string|string[]} excludedFiles The glob patterns for excluded files.
* @param {string} basePath The path to the base directory to test paths.
* @returns {OverrideTester|null} The created instance or `null`.
*/
static create(files, excludedFiles, basePath) {
const includePatterns = normalizePatterns(files);
const excludePatterns = normalizePatterns(excludedFiles);
let endsWithWildcard = false;
if (includePatterns.length === 0) {
return null;
}
// Rejects absolute paths or relative paths to parents.
for (const pattern of includePatterns) {
if (path.isAbsolute(pattern) || pattern.includes("..")) {
throw new Error(`Invalid override pattern (expected relative path not containing '..'): ${pattern}`);
}
if (pattern.endsWith("*")) {
endsWithWildcard = true;
}
}
for (const pattern of excludePatterns) {
if (path.isAbsolute(pattern) || pattern.includes("..")) {
throw new Error(`Invalid override pattern (expected relative path not containing '..'): ${pattern}`);
}
}
const includes = toMatcher(includePatterns);
const excludes = toMatcher(excludePatterns);
return new OverrideTester(
[{ includes, excludes }],
basePath,
endsWithWildcard
);
}
/**
* Combine two testers with logical AND.
* If either of the testers was `null`, returns the other tester.
* The `basePath` property of the two must be the same value.
* @param {OverrideTester|null} a A tester.
* @param {OverrideTester|null} b Another tester.
* @returns {OverrideTester|null} Combined tester.
*/
static and(a, b) {
if (!b) {
return a && new OverrideTester(
a.patterns,
a.basePath,
a.endsWithWildcard
);
}
if (!a) {
return new OverrideTester(
b.patterns,
b.basePath,
b.endsWithWildcard
);
}
assert.strictEqual(a.basePath, b.basePath);
return new OverrideTester(
a.patterns.concat(b.patterns),
a.basePath,
a.endsWithWildcard || b.endsWithWildcard
);
}
/**
* Initialize this instance.
* @param {Pattern[]} patterns The matchers.
* @param {string} basePath The base path.
* @param {boolean} endsWithWildcard If `true` then a pattern ends with `*`.
*/
constructor(patterns, basePath, endsWithWildcard = false) {
/** @type {Pattern[]} */
this.patterns = patterns;
/** @type {string} */
this.basePath = basePath;
/** @type {boolean} */
this.endsWithWildcard = endsWithWildcard;
}
/**
* Test if a given path is matched or not.
* @param {string} filePath The absolute path to the target file.
* @returns {boolean} `true` if the path was matched.
*/
test(filePath) {
if (typeof filePath !== "string" || !path.isAbsolute(filePath)) {
throw new Error(`'filePath' should be an absolute path, but got ${filePath}.`);
}
const relativePath = path.relative(this.basePath, filePath);
return this.patterns.every(({ includes, excludes }) => (
(!includes || includes.some(m => m.match(relativePath))) &&
(!excludes || !excludes.some(m => m.match(relativePath)))
));
}
// eslint-disable-next-line jsdoc/require-description
/**
* @returns {Object} a JSON compatible object.
*/
toJSON() {
if (this.patterns.length === 1) {
return {
...patternToJson(this.patterns[0]),
basePath: this.basePath
};
}
return {
AND: this.patterns.map(patternToJson),
basePath: this.basePath
};
}
// eslint-disable-next-line jsdoc/require-description
/**
* @returns {Object} an object to display by `console.log()`.
*/
[util.inspect.custom]() {
return this.toJSON();
}
}
export { OverrideTester };
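/*
 * Illustrative sketch (not part of the package): testing `files` /
 * `excludedFiles` patterns against absolute paths. The paths are hypothetical.
 */
const demoTester = OverrideTester.create(
    ["*.test.js"],   // files
    ["legacy/**"],   // excludedFiles
    "/project"       // basePath
);
demoTester.test("/project/src/foo.test.js");    // true  - basename matches "*.test.js"
demoTester.test("/project/legacy/old.test.js"); // false - excluded by "legacy/**"
demoTester.test("/project/src/foo.js");         // false - not included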

@ -0,0 +1,318 @@
/**
* @fileoverview Compatibility class for flat config.
* @author Nicholas C. Zakas
*/
//-----------------------------------------------------------------------------
// Requirements
//-----------------------------------------------------------------------------
import createDebug from "debug";
import path from "path";
import environments from "../conf/environments.js";
import { ConfigArrayFactory } from "./config-array-factory.js";
//-----------------------------------------------------------------------------
// Helpers
//-----------------------------------------------------------------------------
/** @typedef {import("../../shared/types").Environment} Environment */
/** @typedef {import("../../shared/types").Processor} Processor */
const debug = createDebug("eslintrc:flat-compat");
const cafactory = Symbol("cafactory");
/**
* Translates an ESLintRC-style config object into a flag-config-style config
* object.
* @param {Object} eslintrcConfig An ESLintRC-style config object.
* @param {Object} options Options to help translate the config.
* @param {string} options.resolveConfigRelativeTo The directory to resolve
* configs from.
* @param {string} options.resolvePluginsRelativeTo The directory to resolve
* plugins from.
* @param {ReadOnlyMap<string,Environment>} options.pluginEnvironments A map of plugin environment
* names to objects.
* @param {ReadOnlyMap<string,Processor>} options.pluginProcessors A map of plugin processor
* names to objects.
* @returns {Object} A flag-config-style config object.
*/
function translateESLintRC(eslintrcConfig, {
resolveConfigRelativeTo,
resolvePluginsRelativeTo,
pluginEnvironments,
pluginProcessors
}) {
const flatConfig = {};
const configs = [];
const languageOptions = {};
const linterOptions = {};
const keysToCopy = ["settings", "rules", "processor"];
const languageOptionsKeysToCopy = ["globals", "parser", "parserOptions"];
const linterOptionsKeysToCopy = ["noInlineConfig", "reportUnusedDisableDirectives"];
// copy over simple translations
for (const key of keysToCopy) {
if (key in eslintrcConfig && typeof eslintrcConfig[key] !== "undefined") {
flatConfig[key] = eslintrcConfig[key];
}
}
// copy over languageOptions
for (const key of languageOptionsKeysToCopy) {
if (key in eslintrcConfig && typeof eslintrcConfig[key] !== "undefined") {
// create the languageOptions key in the flat config
flatConfig.languageOptions = languageOptions;
if (key === "parser") {
debug(`Resolving parser '${languageOptions[key]}' relative to ${resolveConfigRelativeTo}`);
if (eslintrcConfig[key].error) {
throw eslintrcConfig[key].error;
}
languageOptions[key] = eslintrcConfig[key].definition;
continue;
}
// clone any object values that are in the eslintrc config
if (eslintrcConfig[key] && typeof eslintrcConfig[key] === "object") {
languageOptions[key] = {
...eslintrcConfig[key]
};
} else {
languageOptions[key] = eslintrcConfig[key];
}
}
}
// copy over linterOptions
for (const key of linterOptionsKeysToCopy) {
if (key in eslintrcConfig && typeof eslintrcConfig[key] !== "undefined") {
flatConfig.linterOptions = linterOptions;
linterOptions[key] = eslintrcConfig[key];
}
}
// move ecmaVersion a level up
if (languageOptions.parserOptions) {
if ("ecmaVersion" in languageOptions.parserOptions) {
languageOptions.ecmaVersion = languageOptions.parserOptions.ecmaVersion;
delete languageOptions.parserOptions.ecmaVersion;
}
if ("sourceType" in languageOptions.parserOptions) {
languageOptions.sourceType = languageOptions.parserOptions.sourceType;
delete languageOptions.parserOptions.sourceType;
}
// check to see if we even need parserOptions anymore and remove it if not
if (Object.keys(languageOptions.parserOptions).length === 0) {
delete languageOptions.parserOptions;
}
}
// overrides
if (eslintrcConfig.criteria) {
flatConfig.files = [absoluteFilePath => eslintrcConfig.criteria.test(absoluteFilePath)];
}
// translate plugins
if (eslintrcConfig.plugins && typeof eslintrcConfig.plugins === "object") {
debug(`Translating plugins: ${eslintrcConfig.plugins}`);
flatConfig.plugins = {};
for (const pluginName of Object.keys(eslintrcConfig.plugins)) {
debug(`Translating plugin: ${pluginName}`);
debug(`Resolving plugin '${pluginName}' relative to ${resolvePluginsRelativeTo}`);
const { definition: plugin, error } = eslintrcConfig.plugins[pluginName];
if (error) {
throw error;
}
flatConfig.plugins[pluginName] = plugin;
// create a config for any processors
if (plugin.processors) {
for (const processorName of Object.keys(plugin.processors)) {
if (processorName.startsWith(".")) {
debug(`Assigning processor: ${pluginName}/${processorName}`);
configs.unshift({
files: [`**/*${processorName}`],
processor: pluginProcessors.get(`${pluginName}/${processorName}`)
});
}
}
}
}
}
// translate env - must come after plugins
if (eslintrcConfig.env && typeof eslintrcConfig.env === "object") {
for (const envName of Object.keys(eslintrcConfig.env)) {
// only add environments that are true
if (eslintrcConfig.env[envName]) {
debug(`Translating environment: ${envName}`);
if (environments.has(envName)) {
// built-in environments should be defined first
configs.unshift(...translateESLintRC({
criteria: eslintrcConfig.criteria,
...environments.get(envName)
}, {
resolveConfigRelativeTo,
resolvePluginsRelativeTo
}));
} else if (pluginEnvironments.has(envName)) {
// if the environment comes from a plugin, it should come after the plugin config
configs.push(...translateESLintRC({
criteria: eslintrcConfig.criteria,
...pluginEnvironments.get(envName)
}, {
resolveConfigRelativeTo,
resolvePluginsRelativeTo
}));
}
}
}
}
// only add if there are actually keys in the config
if (Object.keys(flatConfig).length > 0) {
configs.push(flatConfig);
}
return configs;
}
//-----------------------------------------------------------------------------
// Exports
//-----------------------------------------------------------------------------
/**
* A compatibility class for working with configs.
*/
class FlatCompat {
constructor({
baseDirectory = process.cwd(),
resolvePluginsRelativeTo = baseDirectory,
recommendedConfig,
allConfig
} = {}) {
this.baseDirectory = baseDirectory;
this.resolvePluginsRelativeTo = resolvePluginsRelativeTo;
this[cafactory] = new ConfigArrayFactory({
cwd: baseDirectory,
resolvePluginsRelativeTo,
getEslintAllConfig: () => {
if (!allConfig) {
throw new TypeError("Missing parameter 'allConfig' in FlatCompat constructor.");
}
return allConfig;
},
getEslintRecommendedConfig: () => {
if (!recommendedConfig) {
throw new TypeError("Missing parameter 'recommendedConfig' in FlatCompat constructor.");
}
return recommendedConfig;
}
});
}
/**
* Translates an ESLintRC-style config into a flag-config-style config.
* @param {Object} eslintrcConfig The ESLintRC-style config object.
* @returns {Object} A flag-config-style config object.
*/
config(eslintrcConfig) {
const eslintrcArray = this[cafactory].create(eslintrcConfig, {
basePath: this.baseDirectory
});
const flatArray = [];
let hasIgnorePatterns = false;
eslintrcArray.forEach(configData => {
if (configData.type === "config") {
hasIgnorePatterns = hasIgnorePatterns || configData.ignorePattern;
flatArray.push(...translateESLintRC(configData, {
resolveConfigRelativeTo: path.join(this.baseDirectory, "__placeholder.js"),
resolvePluginsRelativeTo: path.join(this.resolvePluginsRelativeTo, "__placeholder.js"),
pluginEnvironments: eslintrcArray.pluginEnvironments,
pluginProcessors: eslintrcArray.pluginProcessors
}));
}
});
// combine ignorePatterns to emulate ESLintRC behavior better
if (hasIgnorePatterns) {
flatArray.unshift({
ignores: [filePath => {
// Compute the final config for this file.
// This filters config array elements by `files`/`excludedFiles` then merges the elements.
const finalConfig = eslintrcArray.extractConfig(filePath);
// Test the `ignorePattern` properties of the final config.
return Boolean(finalConfig.ignores) && finalConfig.ignores(filePath);
}]
});
}
return flatArray;
}
/**
* Translates the `env` section of an ESLintRC-style config.
* @param {Object} envConfig The `env` section of an ESLintRC config.
* @returns {Object[]} An array of flag-config objects representing the environments.
*/
env(envConfig) {
return this.config({
env: envConfig
});
}
/**
* Translates the `extends` section of an ESLintRC-style config.
* @param {...string} configsToExtend The names of the configs to load.
* @returns {Object[]} An array of flag-config objects representing the config.
*/
extends(...configsToExtend) {
return this.config({
extends: configsToExtend
});
}
/**
* Translates the `plugins` section of an ESLintRC-style config.
* @param {...string} plugins The names of the plugins to load.
* @returns {Object[]} An array of flag-config objects representing the plugins.
*/
plugins(...plugins) {
return this.config({
plugins
});
}
}
export { FlatCompat };
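/*
 * Consumer-side sketch (assumption: this lives in a separate eslint.config.js,
 * not in this module). It shows typical FlatCompat usage for reusing
 * eslintrc-style configuration; the env and rule chosen below are only examples.
 */
// eslint.config.js
import path from "path";
import { fileURLToPath } from "url";
import js from "@eslint/js";
import { FlatCompat } from "@eslint/eslintrc";

const compat = new FlatCompat({
    baseDirectory: path.dirname(fileURLToPath(import.meta.url)),
    recommendedConfig: js.configs.recommended,
    allConfig: js.configs.all
});

export default [
    ...compat.extends("eslint:recommended"),
    ...compat.env({ browser: true, es2021: true }),
    ...compat.config({ rules: { "no-console": "warn" } })
];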

@ -0,0 +1,29 @@
/**
* @fileoverview Package exports for @eslint/eslintrc
* @author Nicholas C. Zakas
*/
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
import * as ConfigOps from "./shared/config-ops.js";
import ConfigValidator from "./shared/config-validator.js";
import * as naming from "./shared/naming.js";
import environments from "../conf/environments.js";
//-----------------------------------------------------------------------------
// Exports
//-----------------------------------------------------------------------------
const Legacy = {
environments,
// shared
ConfigOps,
ConfigValidator,
naming
};
export {
Legacy
};

@ -0,0 +1,56 @@
/**
* @fileoverview Package exports for @eslint/eslintrc
* @author Nicholas C. Zakas
*/
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
import {
ConfigArrayFactory,
createContext as createConfigArrayFactoryContext
} from "./config-array-factory.js";
import { CascadingConfigArrayFactory } from "./cascading-config-array-factory.js";
import * as ModuleResolver from "./shared/relative-module-resolver.js";
import { ConfigArray, getUsedExtractedConfigs } from "./config-array/index.js";
import { ConfigDependency } from "./config-array/config-dependency.js";
import { ExtractedConfig } from "./config-array/extracted-config.js";
import { IgnorePattern } from "./config-array/ignore-pattern.js";
import { OverrideTester } from "./config-array/override-tester.js";
import * as ConfigOps from "./shared/config-ops.js";
import ConfigValidator from "./shared/config-validator.js";
import * as naming from "./shared/naming.js";
import { FlatCompat } from "./flat-compat.js";
import environments from "../conf/environments.js";
//-----------------------------------------------------------------------------
// Exports
//-----------------------------------------------------------------------------
const Legacy = {
ConfigArray,
createConfigArrayFactoryContext,
CascadingConfigArrayFactory,
ConfigArrayFactory,
ConfigDependency,
ExtractedConfig,
IgnorePattern,
OverrideTester,
getUsedExtractedConfigs,
environments,
// shared
ConfigOps,
ConfigValidator,
ModuleResolver,
naming
};
export {
Legacy,
FlatCompat
};

@ -0,0 +1,191 @@
/**
* @fileoverview The instance of Ajv validator.
* @author Evgeny Poberezkin
*/
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
import Ajv from "ajv";
//-----------------------------------------------------------------------------
// Helpers
//-----------------------------------------------------------------------------
/*
* Copied from ajv/lib/refs/json-schema-draft-04.json
* The MIT License (MIT)
* Copyright (c) 2015-2017 Evgeny Poberezkin
*/
const metaSchema = {
id: "http://json-schema.org/draft-04/schema#",
$schema: "http://json-schema.org/draft-04/schema#",
description: "Core schema meta-schema",
definitions: {
schemaArray: {
type: "array",
minItems: 1,
items: { $ref: "#" }
},
positiveInteger: {
type: "integer",
minimum: 0
},
positiveIntegerDefault0: {
allOf: [{ $ref: "#/definitions/positiveInteger" }, { default: 0 }]
},
simpleTypes: {
enum: ["array", "boolean", "integer", "null", "number", "object", "string"]
},
stringArray: {
type: "array",
items: { type: "string" },
minItems: 1,
uniqueItems: true
}
},
type: "object",
properties: {
id: {
type: "string"
},
$schema: {
type: "string"
},
title: {
type: "string"
},
description: {
type: "string"
},
default: { },
multipleOf: {
type: "number",
minimum: 0,
exclusiveMinimum: true
},
maximum: {
type: "number"
},
exclusiveMaximum: {
type: "boolean",
default: false
},
minimum: {
type: "number"
},
exclusiveMinimum: {
type: "boolean",
default: false
},
maxLength: { $ref: "#/definitions/positiveInteger" },
minLength: { $ref: "#/definitions/positiveIntegerDefault0" },
pattern: {
type: "string",
format: "regex"
},
additionalItems: {
anyOf: [
{ type: "boolean" },
{ $ref: "#" }
],
default: { }
},
items: {
anyOf: [
{ $ref: "#" },
{ $ref: "#/definitions/schemaArray" }
],
default: { }
},
maxItems: { $ref: "#/definitions/positiveInteger" },
minItems: { $ref: "#/definitions/positiveIntegerDefault0" },
uniqueItems: {
type: "boolean",
default: false
},
maxProperties: { $ref: "#/definitions/positiveInteger" },
minProperties: { $ref: "#/definitions/positiveIntegerDefault0" },
required: { $ref: "#/definitions/stringArray" },
additionalProperties: {
anyOf: [
{ type: "boolean" },
{ $ref: "#" }
],
default: { }
},
definitions: {
type: "object",
additionalProperties: { $ref: "#" },
default: { }
},
properties: {
type: "object",
additionalProperties: { $ref: "#" },
default: { }
},
patternProperties: {
type: "object",
additionalProperties: { $ref: "#" },
default: { }
},
dependencies: {
type: "object",
additionalProperties: {
anyOf: [
{ $ref: "#" },
{ $ref: "#/definitions/stringArray" }
]
}
},
enum: {
type: "array",
minItems: 1,
uniqueItems: true
},
type: {
anyOf: [
{ $ref: "#/definitions/simpleTypes" },
{
type: "array",
items: { $ref: "#/definitions/simpleTypes" },
minItems: 1,
uniqueItems: true
}
]
},
format: { type: "string" },
allOf: { $ref: "#/definitions/schemaArray" },
anyOf: { $ref: "#/definitions/schemaArray" },
oneOf: { $ref: "#/definitions/schemaArray" },
not: { $ref: "#" }
},
dependencies: {
exclusiveMaximum: ["maximum"],
exclusiveMinimum: ["minimum"]
},
default: { }
};
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------
export default (additionalOptions = {}) => {
const ajv = new Ajv({
meta: false,
useDefaults: true,
validateSchema: false,
missingRefs: "ignore",
verbose: true,
schemaId: "auto",
...additionalOptions
});
ajv.addMetaSchema(metaSchema);
// eslint-disable-next-line no-underscore-dangle
ajv._opts.defaultMeta = metaSchema.id;
return ajv;
};
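/*
 * Consumer-side sketch (assumption: this lives in a separate module that
 * imports the factory above). The rule-options schema is hypothetical.
 */
// some-validator.js
import ajvFactory from "./ajv.js";

const demoAjv = ajvFactory();
const validateOptions = demoAjv.compile({
    type: "array",
    items: [{ enum: ["always", "never"] }],
    minItems: 0,
    maxItems: 1
});

validateOptions(["always"]);    // true
validateOptions(["sometimes"]); // false; details are in validateOptions.errors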

@ -0,0 +1,135 @@
/**
* @fileoverview Config file operations. This file must be usable in the browser,
* so no Node-specific code can be here.
* @author Nicholas C. Zakas
*/
//------------------------------------------------------------------------------
// Private
//------------------------------------------------------------------------------
const RULE_SEVERITY_STRINGS = ["off", "warn", "error"],
RULE_SEVERITY = RULE_SEVERITY_STRINGS.reduce((map, value, index) => {
map[value] = index;
return map;
}, {}),
VALID_SEVERITIES = [0, 1, 2, "off", "warn", "error"];
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------
/**
* Normalizes the severity value of a rule's configuration to a number
* @param {(number|string|[number, ...*]|[string, ...*])} ruleConfig A rule's configuration value, generally
* received from the user. A valid config value is either 0, 1, 2, the string "off" (treated the same as 0),
* the string "warn" (treated the same as 1), the string "error" (treated the same as 2), or an array
* whose first element is one of the above values. Strings are matched case-insensitively.
* @returns {(0|1|2)} The numeric severity value if the config value was valid, otherwise 0.
*/
function getRuleSeverity(ruleConfig) {
const severityValue = Array.isArray(ruleConfig) ? ruleConfig[0] : ruleConfig;
if (severityValue === 0 || severityValue === 1 || severityValue === 2) {
return severityValue;
}
if (typeof severityValue === "string") {
return RULE_SEVERITY[severityValue.toLowerCase()] || 0;
}
return 0;
}
/**
* Converts old-style severity settings (0, 1, 2) into new-style
* severity settings (off, warn, error) for all rules. Assumption is that severity
* values have already been validated as correct.
* @param {Object} config The config object to normalize.
* @returns {void}
*/
function normalizeToStrings(config) {
if (config.rules) {
Object.keys(config.rules).forEach(ruleId => {
const ruleConfig = config.rules[ruleId];
if (typeof ruleConfig === "number") {
config.rules[ruleId] = RULE_SEVERITY_STRINGS[ruleConfig] || RULE_SEVERITY_STRINGS[0];
} else if (Array.isArray(ruleConfig) && typeof ruleConfig[0] === "number") {
ruleConfig[0] = RULE_SEVERITY_STRINGS[ruleConfig[0]] || RULE_SEVERITY_STRINGS[0];
}
});
}
}
/**
* Determines if the severity for the given rule configuration represents an error.
* @param {number|string|Array} ruleConfig The configuration for an individual rule.
* @returns {boolean} True if the rule represents an error, false if not.
*/
function isErrorSeverity(ruleConfig) {
return getRuleSeverity(ruleConfig) === 2;
}
/**
* Checks whether a given config has valid severity or not.
* @param {number|string|Array} ruleConfig The configuration for an individual rule.
* @returns {boolean} `true` if the configuration has valid severity.
*/
function isValidSeverity(ruleConfig) {
let severity = Array.isArray(ruleConfig) ? ruleConfig[0] : ruleConfig;
if (typeof severity === "string") {
severity = severity.toLowerCase();
}
return VALID_SEVERITIES.indexOf(severity) !== -1;
}
/**
* Checks whether every rule of a given config has valid severity or not.
* @param {Object} config The configuration for rules.
* @returns {boolean} `true` if the configuration has valid severity.
*/
function isEverySeverityValid(config) {
return Object.keys(config).every(ruleId => isValidSeverity(config[ruleId]));
}
/**
* Normalizes a value for a global in a config
* @param {(boolean|string|null)} configuredValue The value given for a global in configuration or in
* a global directive comment
* @returns {("readable"|"writeable"|"off")} The value normalized as a string
* @throws Error if global value is invalid
*/
function normalizeConfigGlobal(configuredValue) {
switch (configuredValue) {
case "off":
return "off";
case true:
case "true":
case "writeable":
case "writable":
return "writable";
case null:
case false:
case "false":
case "readable":
case "readonly":
return "readonly";
default:
throw new Error(`'${configuredValue}' is not a valid configuration for a global (use 'readonly', 'writable', or 'off')`);
}
}
export {
getRuleSeverity,
normalizeToStrings,
isErrorSeverity,
isValidSeverity,
isEverySeverityValid,
normalizeConfigGlobal
};
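/*
 * Quick illustration of the severity helpers above (the values are examples).
 */
getRuleSeverity("warn");              // 1
getRuleSeverity(["error", "never"]);  // 2
getRuleSeverity("bogus");             // 0 - unknown severities normalize to 0
isErrorSeverity([2, { max: 10 }]);    // true
isValidSeverity("Warn");              // true - severities are matched case-insensitively
normalizeConfigGlobal(true);          // "writable"
normalizeConfigGlobal("readonly");    // "readonly"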

@ -0,0 +1,325 @@
/**
* @fileoverview Validates configs.
* @author Brandon Mills
*/
/* eslint class-methods-use-this: "off" */
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
import util from "util";
import * as ConfigOps from "./config-ops.js";
import { emitDeprecationWarning } from "./deprecation-warnings.js";
import ajvOrig from "./ajv.js";
import configSchema from "../../conf/config-schema.js";
import BuiltInEnvironments from "../../conf/environments.js";
const ajv = ajvOrig();
const ruleValidators = new WeakMap();
const noop = Function.prototype;
//------------------------------------------------------------------------------
// Private
//------------------------------------------------------------------------------
let validateSchema;
const severityMap = {
error: 2,
warn: 1,
off: 0
};
const validated = new WeakSet();
//-----------------------------------------------------------------------------
// Exports
//-----------------------------------------------------------------------------
export default class ConfigValidator {
constructor({ builtInRules = new Map() } = {}) {
this.builtInRules = builtInRules;
}
/**
* Gets a complete options schema for a rule.
* @param {{create: Function, schema: (Array|null)}} rule A new-style rule object
* @returns {Object} JSON Schema for the rule's options.
*/
getRuleOptionsSchema(rule) {
if (!rule) {
return null;
}
const schema = rule.schema || rule.meta && rule.meta.schema;
// Given a tuple of schemas, insert warning level at the beginning
if (Array.isArray(schema)) {
if (schema.length) {
return {
type: "array",
items: schema,
minItems: 0,
maxItems: schema.length
};
}
return {
type: "array",
minItems: 0,
maxItems: 0
};
}
// Given a full schema, leave it alone
return schema || null;
}
/**
* Validates a rule's severity and returns the severity value. Throws an error if the severity is invalid.
* @param {Array|number|string} options The given options for the rule.
* @returns {number} The rule's severity value.
*/
validateRuleSeverity(options) {
const severity = Array.isArray(options) ? options[0] : options;
const normSeverity = typeof severity === "string" ? severityMap[severity.toLowerCase()] : severity;
if (normSeverity === 0 || normSeverity === 1 || normSeverity === 2) {
return normSeverity;
}
throw new Error(`\tSeverity should be one of the following: 0 = off, 1 = warn, 2 = error (you passed '${util.inspect(severity).replace(/'/gu, "\"").replace(/\n/gu, "")}').\n`);
}
/**
* Validates the non-severity options passed to a rule, based on its schema.
* @param {{create: Function}} rule The rule to validate
* @param {Array} localOptions The options for the rule, excluding severity
* @returns {void}
*/
validateRuleSchema(rule, localOptions) {
if (!ruleValidators.has(rule)) {
const schema = this.getRuleOptionsSchema(rule);
if (schema) {
ruleValidators.set(rule, ajv.compile(schema));
}
}
const validateRule = ruleValidators.get(rule);
if (validateRule) {
validateRule(localOptions);
if (validateRule.errors) {
throw new Error(validateRule.errors.map(
error => `\tValue ${JSON.stringify(error.data)} ${error.message}.\n`
).join(""));
}
}
}
/**
* Validates a rule's options against its schema.
* @param {{create: Function}|null} rule The rule that the config is being validated for
* @param {string} ruleId The rule's unique name.
* @param {Array|number} options The given options for the rule.
* @param {string|null} source The name of the configuration source to report in any errors. If null or undefined,
* no source is prepended to the message.
* @returns {void}
*/
validateRuleOptions(rule, ruleId, options, source = null) {
try {
const severity = this.validateRuleSeverity(options);
if (severity !== 0) {
this.validateRuleSchema(rule, Array.isArray(options) ? options.slice(1) : []);
}
} catch (err) {
const enhancedMessage = `Configuration for rule "${ruleId}" is invalid:\n${err.message}`;
if (typeof source === "string") {
throw new Error(`${source}:\n\t${enhancedMessage}`);
} else {
throw new Error(enhancedMessage);
}
}
}
/**
* Validates an environment object
* @param {Object} environment The environment config object to validate.
* @param {string} source The name of the configuration source to report in any errors.
* @param {function(envId:string): Object} [getAdditionalEnv] A function that returns a loaded environment for a given environment ID.
* @returns {void}
*/
validateEnvironment(
environment,
source,
getAdditionalEnv = noop
) {
// not having an environment is ok
if (!environment) {
return;
}
Object.keys(environment).forEach(id => {
const env = getAdditionalEnv(id) || BuiltInEnvironments.get(id) || null;
if (!env) {
const message = `${source}:\n\tEnvironment key "${id}" is unknown\n`;
throw new Error(message);
}
});
}
/**
* Validates a rules config object
* @param {Object} rulesConfig The rules config object to validate.
* @param {string} source The name of the configuration source to report in any errors.
* @param {function(ruleId:string): Object} getAdditionalRule A function that returns a loaded rule for a given rule ID.
* @returns {void}
*/
validateRules(
rulesConfig,
source,
getAdditionalRule = noop
) {
if (!rulesConfig) {
return;
}
Object.keys(rulesConfig).forEach(id => {
const rule = getAdditionalRule(id) || this.builtInRules.get(id) || null;
this.validateRuleOptions(rule, id, rulesConfig[id], source);
});
}
/**
* Validates a `globals` section of a config file
* @param {Object} globalsConfig The `globals` section
* @param {string|null} source The name of the configuration source to report in the event of an error.
* @returns {void}
*/
validateGlobals(globalsConfig, source = null) {
if (!globalsConfig) {
return;
}
Object.entries(globalsConfig)
.forEach(([configuredGlobal, configuredValue]) => {
try {
ConfigOps.normalizeConfigGlobal(configuredValue);
} catch (err) {
throw new Error(`ESLint configuration of global '${configuredGlobal}' in ${source} is invalid:\n${err.message}`);
}
});
}
/**
* Validate `processor` configuration.
* @param {string|undefined} processorName The processor name.
* @param {string} source The name of config file.
* @param {function(id:string): Processor} getProcessor The getter of defined processors.
* @returns {void}
*/
validateProcessor(processorName, source, getProcessor) {
if (processorName && !getProcessor(processorName)) {
throw new Error(`ESLint configuration of processor in '${source}' is invalid: '${processorName}' was not found.`);
}
}
/**
* Formats an array of schema validation errors.
* @param {Array} errors An array of error messages to format.
* @returns {string} Formatted error message
*/
formatErrors(errors) {
return errors.map(error => {
if (error.keyword === "additionalProperties") {
const formattedPropertyPath = error.dataPath.length ? `${error.dataPath.slice(1)}.${error.params.additionalProperty}` : error.params.additionalProperty;
return `Unexpected top-level property "${formattedPropertyPath}"`;
}
if (error.keyword === "type") {
const formattedField = error.dataPath.slice(1);
const formattedExpectedType = Array.isArray(error.schema) ? error.schema.join("/") : error.schema;
const formattedValue = JSON.stringify(error.data);
return `Property "${formattedField}" is the wrong type (expected ${formattedExpectedType} but got \`${formattedValue}\`)`;
}
const field = error.dataPath[0] === "." ? error.dataPath.slice(1) : error.dataPath;
return `"${field}" ${error.message}. Value: ${JSON.stringify(error.data)}`;
}).map(message => `\t- ${message}.\n`).join("");
}
/**
* Validates the top level properties of the config object.
* @param {Object} config The config object to validate.
* @param {string} source The name of the configuration source to report in any errors.
* @returns {void}
*/
validateConfigSchema(config, source = null) {
validateSchema = validateSchema || ajv.compile(configSchema);
if (!validateSchema(config)) {
throw new Error(`ESLint configuration in ${source} is invalid:\n${this.formatErrors(validateSchema.errors)}`);
}
if (Object.hasOwnProperty.call(config, "ecmaFeatures")) {
emitDeprecationWarning(source, "ESLINT_LEGACY_ECMAFEATURES");
}
}
/**
* Validates an entire config object.
* @param {Object} config The config object to validate.
* @param {string} source The name of the configuration source to report in any errors.
* @param {function(ruleId:string): Object} [getAdditionalRule] A function that returns a loaded rule for a given rule ID.
* @param {function(envId:string): Object} [getAdditionalEnv] A function that returns a loaded environment for a given environment ID.
* @returns {void}
*/
validate(config, source, getAdditionalRule, getAdditionalEnv) {
this.validateConfigSchema(config, source);
this.validateRules(config.rules, source, getAdditionalRule);
this.validateEnvironment(config.env, source, getAdditionalEnv);
this.validateGlobals(config.globals, source);
for (const override of config.overrides || []) {
this.validateRules(override.rules, source, getAdditionalRule);
this.validateEnvironment(override.env, source, getAdditionalEnv);
this.validateGlobals(config.globals, source);
}
}
/**
* Validate config array object.
* @param {ConfigArray} configArray The config array to validate.
* @returns {void}
*/
validateConfigArray(configArray) {
const getPluginEnv = Map.prototype.get.bind(configArray.pluginEnvironments);
const getPluginProcessor = Map.prototype.get.bind(configArray.pluginProcessors);
const getPluginRule = Map.prototype.get.bind(configArray.pluginRules);
// Validate.
for (const element of configArray) {
if (validated.has(element)) {
continue;
}
validated.add(element);
this.validateEnvironment(element.env, element.name, getPluginEnv);
this.validateGlobals(element.globals, element.name);
this.validateProcessor(element.processor, element.name, getPluginProcessor);
this.validateRules(element.rules, element.name, getPluginRule);
}
}
}
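/*
 * Illustrative sketch (not part of the package): validating one rule's options.
 * The rule, its schema, and the rule ID are hypothetical.
 */
const demoRule = {
    meta: { schema: [{ enum: ["always", "never"] }] },
    create: () => ({})
};
const demoValidator = new ConfigValidator({
    builtInRules: new Map([["demo/semi-style", demoRule]])
});
// Passes: a valid severity plus an option allowed by the schema.
demoValidator.validateRuleOptions(demoRule, "demo/semi-style", ["error", "always"], ".eslintrc.json");
// Would throw: "sometimes" is not allowed by the rule schema.
// demoValidator.validateRuleOptions(demoRule, "demo/semi-style", ["error", "sometimes"], ".eslintrc.json");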

@ -0,0 +1,63 @@
/**
* @fileoverview Provide the function that emits deprecation warnings.
* @author Toru Nagashima <http://github.com/mysticatea>
*/
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
import path from "path";
//------------------------------------------------------------------------------
// Private
//------------------------------------------------------------------------------
// Definitions for deprecation warnings.
const deprecationWarningMessages = {
ESLINT_LEGACY_ECMAFEATURES:
"The 'ecmaFeatures' config file property is deprecated and has no effect.",
ESLINT_PERSONAL_CONFIG_LOAD:
"'~/.eslintrc.*' config files have been deprecated. " +
"Please use a config file per project or the '--config' option.",
ESLINT_PERSONAL_CONFIG_SUPPRESS:
"'~/.eslintrc.*' config files have been deprecated. " +
"Please remove it or add 'root:true' to the config files in your " +
"projects in order to avoid loading '~/.eslintrc.*' accidentally."
};
const sourceFileErrorCache = new Set();
/**
* Emits a deprecation warning containing a given filepath. A new deprecation warning is emitted
* for each unique file path, but repeated invocations with the same file path have no effect.
* No warnings are emitted if the `--no-deprecation` or `--no-warnings` Node runtime flags are active.
* @param {string} source The name of the configuration source to report the warning for.
* @param {string} errorCode The deprecation warning code to emit.
* @returns {void}
*/
function emitDeprecationWarning(source, errorCode) {
const cacheKey = JSON.stringify({ source, errorCode });
if (sourceFileErrorCache.has(cacheKey)) {
return;
}
sourceFileErrorCache.add(cacheKey);
const rel = path.relative(process.cwd(), source);
const message = deprecationWarningMessages[errorCode];
process.emitWarning(
`${message} (found in "${rel}")`,
"DeprecationWarning",
errorCode
);
}
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------
export {
emitDeprecationWarning
};
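/*
 * Illustrative usage: emit the legacy-ecmaFeatures warning once for a config
 * file (the path is hypothetical). Repeat calls with the same source and code
 * are de-duplicated by the cache above.
 */
emitDeprecationWarning("/project/.eslintrc.json", "ESLINT_LEGACY_ECMAFEATURES");
emitDeprecationWarning("/project/.eslintrc.json", "ESLINT_LEGACY_ECMAFEATURES"); // no-op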
