feat: docker compose maybe

2023-11-13 16:10:04 -05:00
parent 180b261e40
commit b625ccd8d6
8031 changed files with 2182966 additions and 0 deletions

19
node_modules/css-tree/LICENSE generated vendored Normal file

@@ -0,0 +1,19 @@
Copyright (C) 2016-2022 by Roman Dvornov
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

193
node_modules/css-tree/README.md generated vendored Normal file

@@ -0,0 +1,193 @@
<img align="right" width="111" height="111"
alt="CSSTree logo"
src="https://cloud.githubusercontent.com/assets/270491/19243723/6f9136c6-8f21-11e6-82ac-eeeee4c6c452.png"/>
# CSSTree
[![NPM version](https://img.shields.io/npm/v/css-tree.svg)](https://www.npmjs.com/package/css-tree)
[![Build Status](https://github.com/csstree/csstree/actions/workflows/build.yml/badge.svg)](https://github.com/csstree/csstree/actions/workflows/build.yml)
[![Coverage Status](https://coveralls.io/repos/github/csstree/csstree/badge.svg?branch=master)](https://coveralls.io/github/csstree/csstree?branch=master)
[![NPM Downloads](https://img.shields.io/npm/dm/css-tree.svg)](https://www.npmjs.com/package/css-tree)
[![Twitter](https://img.shields.io/badge/Twitter-@csstree-blue.svg)](https://twitter.com/csstree)
CSSTree is a tool set for CSS: a [fast](https://github.com/postcss/benchmark) detailed parser (CSS → AST), a walker (AST traversal), a generator (AST → CSS) and a lexer (validation and matching) based on specs and browser implementations. The main goal is to be efficient and W3C spec compliant, with a focus on CSS analysis and source-to-source transformation tasks.
## Features
- **Detailed parsing with an adjustable level of detail**
By default CSSTree parses CSS in as much detail as possible, i.e. every single logical part is represented by its own AST node (see [AST format](docs/ast.md) for all possible node types). The parsing detail level can be changed through [parser options](docs/parsing.md#parsesource-options); for example, you can disable parsing of selectors or declaration values into component parts (see the sketch after this list).
- **Tolerant to errors by design**
The parser behaves as the [spec says](https://www.w3.org/TR/css-syntax-3/#error-handling): "When errors occur in CSS, the parser attempts to recover gracefully, throwing away only the minimum amount of content before returning to parsing as normal". The only way the parser departs from the specification is that it doesn't throw away bad content, but wraps it in a special node type (`Raw`) so it can be processed later.
- **Fast and efficient**
CSSTree was created with a focus on performance and efficient memory consumption. As a result, it's [one of the fastest CSS parsers](https://github.com/postcss/benchmark) at the moment.
- **Syntax validation**
The built-in lexer can test CSS against syntaxes defined by the W3C. CSSTree uses [mdn/data](https://github.com/mdn/data/) as the basis for the lexer's dictionaries and extends it with vendor-specific and legacy syntaxes. Currently the lexer can only check declaration values, but this feature will be extended to other parts of CSS in the future.
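For illustration, a minimal sketch of the `Raw`-node behaviour described above, assuming the documented `parseValue` parser option (see [parser options](docs/parsing.md#parsesource-options)); output shown in comments is approximate:
```js
import * as csstree from 'css-tree';

// with `parseValue: false` declaration values are not parsed into
// component parts; they are kept as Raw nodes instead
const ast = csstree.parse('.a { color: rgb(255, 0, 0) }', { parseValue: false });

csstree.walk(ast, (node) => {
    if (node.type === 'Raw') {
        console.log(node.value); // raw value string, e.g. 'rgb(255, 0, 0)'
    }
});
```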
## Projects using CSSTree
- [Svelte](https://github.com/sveltejs/svelte) – Cybernetically enhanced web apps
- [SVGO](https://github.com/svg/svgo) – Node.js tool for optimizing SVG files
- [CSSO](https://github.com/css/csso) – CSS minifier with structural optimizations
- [NativeScript](https://github.com/NativeScript/NativeScript) – NativeScript empowers you to access native APIs from JavaScript directly
- [react-native-svg](https://github.com/react-native-svg/react-native-svg) – SVG library for React Native, React Native Web, and plain React web projects
- [penthouse](https://github.com/pocketjoso/penthouse) – Critical Path CSS Generator
- [Bit](https://github.com/teambit/bit) – Bit is the platform for collaborating on components
- and more...
## Documentation
- [AST format](docs/ast.md)
- [Parsing CSS → AST](docs/parsing.md)
- [parse(source[, options])](docs/parsing.md#parsesource-options)
- [Serialization AST → CSS](docs/generate.md)
- [generate(ast[, options])](docs/generate.md#generateast-options)
- [AST traversal](docs/traversal.md)
- [walk(ast, options)](docs/traversal.md#walkast-options)
- [find(ast, fn)](docs/traversal.md#findast-fn)
- [findLast(ast, fn)](docs/traversal.md#findlastast-fn)
- [findAll(ast, fn)](docs/traversal.md#findallast-fn)
- [Util functions](docs/utils.md)
- Value encoding & decoding
- [property(name)](docs/utils.md#propertyname)
- [keyword(name)](docs/utils.md#keywordname)
- [ident](docs/utils.md#ident)
- [string](docs/utils.md#string)
- [url](docs/utils.md#url)
- AST transforming
- [clone(ast)](docs/utils.md#cloneast)
- [fromPlainObject(object)](docs/utils.md#fromplainobjectobject)
- [toPlainObject(ast)](docs/utils.md#toplainobjectast)
- [Value Definition Syntax](docs/definition-syntax.md)
- [parse(source)](docs/definition-syntax.md#parsesource)
- [walk(node, options, context)](docs/definition-syntax.md#walknode-options-context)
- [generate(node, options)](docs/definition-syntax.md#generatenode-options)
- [AST format](docs/definition-syntax.md#ast-format)
## Tools
* [AST Explorer](https://astexplorer.net/#/gist/244e2fb4da940df52bf0f4b94277db44/e79aff44611020b22cfd9708f3a99ce09b7d67a8) – explore CSSTree AST format with zero setup
* [CSS syntax reference](https://csstree.github.io/docs/syntax.html)
* [CSS syntax validator](https://csstree.github.io/docs/validator.html)
## Related projects
* [csstree-validator](https://github.com/csstree/validator) – NPM package to validate CSS
* [stylelint-csstree-validator](https://github.com/csstree/stylelint-validator) – plugin for stylelint to validate CSS
* [Grunt plugin](https://github.com/sergejmueller/grunt-csstree-validator)
* [Gulp plugin](https://github.com/csstree/gulp-csstree)
* [Sublime plugin](https://github.com/csstree/SublimeLinter-contrib-csstree)
* [VS Code plugin](https://github.com/csstree/vscode-plugin)
* [Atom plugin](https://github.com/csstree/atom-plugin)
## Usage
Install with npm:
```
npm install css-tree
```
Basic usage:
```js
import * as csstree from 'css-tree';
// parse CSS to AST
const ast = csstree.parse('.example { world: "!" }');
// traverse AST and modify it
csstree.walk(ast, (node) => {
if (node.type === 'ClassSelector' && node.name === 'example') {
node.name = 'hello';
}
});
// generate CSS from AST
console.log(csstree.generate(ast));
// .hello{world:"!"}
```
Syntax matching:
```js
// parse CSS to AST as a declaration value
const ast = csstree.parse('red 1px solid', { context: 'value' });
// match to syntax of `border` property
const matchResult = csstree.lexer.matchProperty('border', ast);
// check first value node is a <color>
console.log(matchResult.isType(ast.children.first, 'color'));
// true
// get a type list matched to a node
console.log(matchResult.getTrace(ast.children.first));
// [ { type: 'Property', name: 'border' },
// { type: 'Type', name: 'color' },
// { type: 'Type', name: 'named-color' },
// { type: 'Keyword', name: 'red' } ]
```
### Exports
It is possible to import just the needed part of the library, such as the parser or the walker. That might be useful for loading-time or bundle-size optimisations.
```js
import * as tokenizer from 'css-tree/tokenizer';
import * as parser from 'css-tree/parser';
import * as walker from 'css-tree/walker';
import * as lexer from 'css-tree/lexer';
import * as definitionSyntax from 'css-tree/definition-syntax';
import * as data from 'css-tree/definition-syntax-data';
import * as dataPatch from 'css-tree/definition-syntax-data-patch';
import * as utils from 'css-tree/utils';
```
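For example, a small sketch using only the tokenizer entry point (assuming it exposes the same `tokenize(source, callback)` function the generator uses internally):
```js
import { tokenize } from 'css-tree/tokenizer';

// the callback receives a token type and the start/end offsets in the source
tokenize('.a { color: red }', (type, start, end) => {
    console.log(type, start, end);
});
```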
### Using in a browser
Bundles are available for use in a browser:
- `dist/csstree.js` – minified IIFE with `csstree` as a global
```html
<script src="node_modules/css-tree/dist/csstree.js"></script>
<script>
csstree.parse('.example { color: green }');
</script>
```
- `dist/csstree.esm.js` – minified ES module
```html
<script type="module">
import { parse } from 'node_modules/css-tree/dist/csstree.esm.js'
parse('.example { color: green }');
</script>
```
A CDN service such as `unpkg` or `jsDelivr` can be used. By default (for the short path) the ESM version is exposed. For the IIFE version, a full path to the bundle should be specified:
```html
<!-- ESM -->
<script type="module">
import * as csstree from 'https://cdn.jsdelivr.net/npm/css-tree';
import * as csstree from 'https://unpkg.com/css-tree';
</script>
<!-- IIFE with an export to global -->
<script src="https://cdn.jsdelivr.net/npm/css-tree/dist/csstree.js"></script>
<script src="https://unpkg.com/css-tree/dist/csstree.js"></script>
```
## Top level API
![API map](https://cdn.rawgit.com/csstree/csstree/aaf327e/docs/api-map.svg)
## License
MIT

32
node_modules/css-tree/cjs/convertor/create.cjs generated vendored Normal file

@@ -0,0 +1,32 @@
'use strict';
const List = require('../utils/List.cjs');
function createConvertor(walk) {
return {
fromPlainObject(ast) {
walk(ast, {
enter(node) {
if (node.children && node.children instanceof List.List === false) {
node.children = new List.List().fromArray(node.children);
}
}
});
return ast;
},
toPlainObject(ast) {
walk(ast, {
leave(node) {
if (node.children && node.children instanceof List.List) {
node.children = node.children.toArray();
}
}
});
return ast;
}
};
}
exports.createConvertor = createConvertor;

8
node_modules/css-tree/cjs/convertor/index.cjs generated vendored Normal file

@@ -0,0 +1,8 @@
'use strict';
const create = require('./create.cjs');
const index$1 = require('../walker/index.cjs');
const index = create.createConvertor(index$1);
module.exports = index;
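This convertor is what the documented `fromPlainObject(object)` / `toPlainObject(ast)` helpers (see docs/utils.md above) are built on. A minimal usage sketch, assuming a standard `css-tree` install:
```js
import { parse, toPlainObject, fromPlainObject } from 'css-tree';

const ast = parse('.a { color: red }');
// children stored as List instances become plain arrays
const plain = toPlainObject(ast);
// and plain arrays are converted back into List instances
const roundTripped = fromPlainObject(plain);
```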

7
node_modules/css-tree/cjs/data-patch.cjs generated vendored Normal file

@@ -0,0 +1,7 @@
'use strict';
const patch = require('../data/patch.json');
const patch$1 = patch;
module.exports = patch$1;

99
node_modules/css-tree/cjs/data.cjs generated vendored Normal file

@@ -0,0 +1,99 @@
'use strict';
const dataPatch = require('./data-patch.cjs');
const mdnAtrules = require('mdn-data/css/at-rules.json');
const mdnProperties = require('mdn-data/css/properties.json');
const mdnSyntaxes = require('mdn-data/css/syntaxes.json');
const extendSyntax = /^\s*\|\s*/;
function preprocessAtrules(dict) {
const result = Object.create(null);
for (const atruleName in dict) {
const atrule = dict[atruleName];
let descriptors = null;
if (atrule.descriptors) {
descriptors = Object.create(null);
for (const descriptor in atrule.descriptors) {
descriptors[descriptor] = atrule.descriptors[descriptor].syntax;
}
}
result[atruleName.substr(1)] = {
prelude: atrule.syntax.trim().replace(/\{(.|\s)+\}/, '').match(/^@\S+\s+([^;\{]*)/)[1].trim() || null,
descriptors
};
}
return result;
}
function patchDictionary(dict, patchDict) {
const result = {};
// copy all syntaxes for an original dict
for (const key in dict) {
result[key] = dict[key].syntax || dict[key];
}
// apply a patch
for (const key in patchDict) {
if (key in dict) {
if (patchDict[key].syntax) {
result[key] = extendSyntax.test(patchDict[key].syntax)
? result[key] + ' ' + patchDict[key].syntax.trim()
: patchDict[key].syntax;
} else {
delete result[key];
}
} else {
if (patchDict[key].syntax) {
result[key] = patchDict[key].syntax.replace(extendSyntax, '');
}
}
}
return result;
}
function patchAtrules(dict, patchDict) {
const result = {};
// copy all syntaxes for an original dict
for (const key in dict) {
const atrulePatch = patchDict[key] || {};
result[key] = {
prelude: key in patchDict && 'prelude' in atrulePatch
? atrulePatch.prelude
: dict[key].prelude || null,
descriptors: patchDictionary(dict[key].descriptors || {}, atrulePatch.descriptors || {})
};
}
// apply a patch
for (const key in patchDict) {
if (!hasOwnProperty.call(dict, key)) {
const atrulePatch = patchDict[key] || {};
result[key] = {
prelude: atrulePatch.prelude || null,
descriptors: atrulePatch.descriptors && patchDictionary({}, atrulePatch.descriptors)
};
}
}
return result;
}
const definitions = {
types: patchDictionary(mdnSyntaxes, dataPatch.types),
atrules: patchAtrules(preprocessAtrules(mdnAtrules), dataPatch.atrules),
properties: patchDictionary(mdnProperties, dataPatch.properties)
};
module.exports = definitions;

16
node_modules/css-tree/cjs/definition-syntax/SyntaxError.cjs generated vendored Normal file

@@ -0,0 +1,16 @@
'use strict';
const createCustomError = require('../utils/create-custom-error.cjs');
function SyntaxError(message, input, offset) {
return Object.assign(createCustomError.createCustomError('SyntaxError', message), {
input,
offset,
rawMessage: message,
message: message + '\n' +
' ' + input + '\n' +
'--' + new Array((offset || input.length) + 1).join('-') + '^'
});
}
exports.SyntaxError = SyntaxError;

135
node_modules/css-tree/cjs/definition-syntax/generate.cjs generated vendored Normal file

@@ -0,0 +1,135 @@
'use strict';
function noop(value) {
return value;
}
function generateMultiplier(multiplier) {
const { min, max, comma } = multiplier;
if (min === 0 && max === 0) {
return comma ? '#?' : '*';
}
if (min === 0 && max === 1) {
return '?';
}
if (min === 1 && max === 0) {
return comma ? '#' : '+';
}
if (min === 1 && max === 1) {
return '';
}
return (
(comma ? '#' : '') +
(min === max
? '{' + min + '}'
: '{' + min + ',' + (max !== 0 ? max : '') + '}'
)
);
}
function generateTypeOpts(node) {
switch (node.type) {
case 'Range':
return (
' [' +
(node.min === null ? '-∞' : node.min) +
',' +
(node.max === null ? '∞' : node.max) +
']'
);
default:
throw new Error('Unknown node type `' + node.type + '`');
}
}
function generateSequence(node, decorate, forceBraces, compact) {
const combinator = node.combinator === ' ' || compact ? node.combinator : ' ' + node.combinator + ' ';
const result = node.terms
.map(term => internalGenerate(term, decorate, forceBraces, compact))
.join(combinator);
if (node.explicit || forceBraces) {
return (compact || result[0] === ',' ? '[' : '[ ') + result + (compact ? ']' : ' ]');
}
return result;
}
function internalGenerate(node, decorate, forceBraces, compact) {
let result;
switch (node.type) {
case 'Group':
result =
generateSequence(node, decorate, forceBraces, compact) +
(node.disallowEmpty ? '!' : '');
break;
case 'Multiplier':
// return since node is a composition
return (
internalGenerate(node.term, decorate, forceBraces, compact) +
decorate(generateMultiplier(node), node)
);
case 'Type':
result = '<' + node.name + (node.opts ? decorate(generateTypeOpts(node.opts), node.opts) : '') + '>';
break;
case 'Property':
result = '<\'' + node.name + '\'>';
break;
case 'Keyword':
result = node.name;
break;
case 'AtKeyword':
result = '@' + node.name;
break;
case 'Function':
result = node.name + '(';
break;
case 'String':
case 'Token':
result = node.value;
break;
case 'Comma':
result = ',';
break;
default:
throw new Error('Unknown node type `' + node.type + '`');
}
return decorate(result, node);
}
function generate(node, options) {
let decorate = noop;
let forceBraces = false;
let compact = false;
if (typeof options === 'function') {
decorate = options;
} else if (options) {
forceBraces = Boolean(options.forceBraces);
compact = Boolean(options.compact);
if (typeof options.decorate === 'function') {
decorate = options.decorate;
}
}
return internalGenerate(node, decorate, forceBraces, compact);
}
exports.generate = generate;
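A minimal sketch of this generator paired with the definition-syntax parser, via the public `definitionSyntax` export (the entry points listed in the README docs above); the exact output string is an assumption:
```js
import { definitionSyntax } from 'css-tree';

// parse a value definition into an AST, then serialize it back
const ast = definitionSyntax.parse('none | <length>{1,4}');
console.log(definitionSyntax.generate(ast)); // expected: 'none | <length>{1,4}'
```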

13
node_modules/css-tree/cjs/definition-syntax/index.cjs generated vendored Normal file

@@ -0,0 +1,13 @@
'use strict';
const SyntaxError = require('./SyntaxError.cjs');
const generate = require('./generate.cjs');
const parse = require('./parse.cjs');
const walk = require('./walk.cjs');
exports.SyntaxError = SyntaxError.SyntaxError;
exports.generate = generate.generate;
exports.parse = parse.parse;
exports.walk = walk.walk;

588
node_modules/css-tree/cjs/definition-syntax/parse.cjs generated vendored Normal file

@@ -0,0 +1,588 @@
'use strict';
const tokenizer = require('./tokenizer.cjs');
const TAB = 9;
const N = 10;
const F = 12;
const R = 13;
const SPACE = 32;
const EXCLAMATIONMARK = 33; // !
const NUMBERSIGN = 35; // #
const AMPERSAND = 38; // &
const APOSTROPHE = 39; // '
const LEFTPARENTHESIS = 40; // (
const RIGHTPARENTHESIS = 41; // )
const ASTERISK = 42; // *
const PLUSSIGN = 43; // +
const COMMA = 44; // ,
const HYPERMINUS = 45; // -
const LESSTHANSIGN = 60; // <
const GREATERTHANSIGN = 62; // >
const QUESTIONMARK = 63; // ?
const COMMERCIALAT = 64; // @
const LEFTSQUAREBRACKET = 91; // [
const RIGHTSQUAREBRACKET = 93; // ]
const LEFTCURLYBRACKET = 123; // {
const VERTICALLINE = 124; // |
const RIGHTCURLYBRACKET = 125; // }
const INFINITY = 8734; // ∞
const NAME_CHAR = new Uint8Array(128).map((_, idx) =>
/[a-zA-Z0-9\-]/.test(String.fromCharCode(idx)) ? 1 : 0
);
const COMBINATOR_PRECEDENCE = {
' ': 1,
'&&': 2,
'||': 3,
'|': 4
};
function scanSpaces(tokenizer) {
return tokenizer.substringToPos(
tokenizer.findWsEnd(tokenizer.pos)
);
}
function scanWord(tokenizer) {
let end = tokenizer.pos;
for (; end < tokenizer.str.length; end++) {
const code = tokenizer.str.charCodeAt(end);
if (code >= 128 || NAME_CHAR[code] === 0) {
break;
}
}
if (tokenizer.pos === end) {
tokenizer.error('Expect a keyword');
}
return tokenizer.substringToPos(end);
}
function scanNumber(tokenizer) {
let end = tokenizer.pos;
for (; end < tokenizer.str.length; end++) {
const code = tokenizer.str.charCodeAt(end);
if (code < 48 || code > 57) {
break;
}
}
if (tokenizer.pos === end) {
tokenizer.error('Expect a number');
}
return tokenizer.substringToPos(end);
}
function scanString(tokenizer) {
const end = tokenizer.str.indexOf('\'', tokenizer.pos + 1);
if (end === -1) {
tokenizer.pos = tokenizer.str.length;
tokenizer.error('Expect an apostrophe');
}
return tokenizer.substringToPos(end + 1);
}
function readMultiplierRange(tokenizer) {
let min = null;
let max = null;
tokenizer.eat(LEFTCURLYBRACKET);
min = scanNumber(tokenizer);
if (tokenizer.charCode() === COMMA) {
tokenizer.pos++;
if (tokenizer.charCode() !== RIGHTCURLYBRACKET) {
max = scanNumber(tokenizer);
}
} else {
max = min;
}
tokenizer.eat(RIGHTCURLYBRACKET);
return {
min: Number(min),
max: max ? Number(max) : 0
};
}
function readMultiplier(tokenizer) {
let range = null;
let comma = false;
switch (tokenizer.charCode()) {
case ASTERISK:
tokenizer.pos++;
range = {
min: 0,
max: 0
};
break;
case PLUSSIGN:
tokenizer.pos++;
range = {
min: 1,
max: 0
};
break;
case QUESTIONMARK:
tokenizer.pos++;
range = {
min: 0,
max: 1
};
break;
case NUMBERSIGN:
tokenizer.pos++;
comma = true;
if (tokenizer.charCode() === LEFTCURLYBRACKET) {
range = readMultiplierRange(tokenizer);
} else if (tokenizer.charCode() === QUESTIONMARK) {
// https://www.w3.org/TR/css-values-4/#component-multipliers
// > the # and ? multipliers may be stacked as #?
// In this case just treat "#?" as a single multiplier
// { min: 0, max: 0, comma: true }
tokenizer.pos++;
range = {
min: 0,
max: 0
};
} else {
range = {
min: 1,
max: 0
};
}
break;
case LEFTCURLYBRACKET:
range = readMultiplierRange(tokenizer);
break;
default:
return null;
}
return {
type: 'Multiplier',
comma,
min: range.min,
max: range.max,
term: null
};
}
function maybeMultiplied(tokenizer, node) {
const multiplier = readMultiplier(tokenizer);
if (multiplier !== null) {
multiplier.term = node;
// https://www.w3.org/TR/css-values-4/#component-multipliers
// > The + and # multipliers may be stacked as +#;
// Represent "+#" as nested multipliers:
// { ...<multiplier #>,
// term: {
// ...<multipler +>,
// term: node
// }
// }
if (tokenizer.charCode() === NUMBERSIGN &&
tokenizer.charCodeAt(tokenizer.pos - 1) === PLUSSIGN) {
return maybeMultiplied(tokenizer, multiplier);
}
return multiplier;
}
return node;
}
function maybeToken(tokenizer) {
const ch = tokenizer.peek();
if (ch === '') {
return null;
}
return {
type: 'Token',
value: ch
};
}
function readProperty(tokenizer) {
let name;
tokenizer.eat(LESSTHANSIGN);
tokenizer.eat(APOSTROPHE);
name = scanWord(tokenizer);
tokenizer.eat(APOSTROPHE);
tokenizer.eat(GREATERTHANSIGN);
return maybeMultiplied(tokenizer, {
type: 'Property',
name
});
}
// https://drafts.csswg.org/css-values-3/#numeric-ranges
// 4.1. Range Restrictions and Range Definition Notation
//
// Range restrictions can be annotated in the numeric type notation using CSS bracketed
// range notation—[min,max]—within the angle brackets, after the identifying keyword,
// indicating a closed range between (and including) min and max.
// For example, <integer [0, 10]> indicates an integer between 0 and 10, inclusive.
function readTypeRange(tokenizer) {
// use null for Infinity to make AST format JSON serializable/deserializable
let min = null; // -Infinity
let max = null; // Infinity
let sign = 1;
tokenizer.eat(LEFTSQUAREBRACKET);
if (tokenizer.charCode() === HYPERMINUS) {
tokenizer.peek();
sign = -1;
}
if (sign == -1 && tokenizer.charCode() === INFINITY) {
tokenizer.peek();
} else {
min = sign * Number(scanNumber(tokenizer));
if (NAME_CHAR[tokenizer.charCode()] !== 0) {
min += scanWord(tokenizer);
}
}
scanSpaces(tokenizer);
tokenizer.eat(COMMA);
scanSpaces(tokenizer);
if (tokenizer.charCode() === INFINITY) {
tokenizer.peek();
} else {
sign = 1;
if (tokenizer.charCode() === HYPERMINUS) {
tokenizer.peek();
sign = -1;
}
max = sign * Number(scanNumber(tokenizer));
if (NAME_CHAR[tokenizer.charCode()] !== 0) {
max += scanWord(tokenizer);
}
}
tokenizer.eat(RIGHTSQUAREBRACKET);
return {
type: 'Range',
min,
max
};
}
function readType(tokenizer) {
let name;
let opts = null;
tokenizer.eat(LESSTHANSIGN);
name = scanWord(tokenizer);
if (tokenizer.charCode() === LEFTPARENTHESIS &&
tokenizer.nextCharCode() === RIGHTPARENTHESIS) {
tokenizer.pos += 2;
name += '()';
}
if (tokenizer.charCodeAt(tokenizer.findWsEnd(tokenizer.pos)) === LEFTSQUAREBRACKET) {
scanSpaces(tokenizer);
opts = readTypeRange(tokenizer);
}
tokenizer.eat(GREATERTHANSIGN);
return maybeMultiplied(tokenizer, {
type: 'Type',
name,
opts
});
}
function readKeywordOrFunction(tokenizer) {
const name = scanWord(tokenizer);
if (tokenizer.charCode() === LEFTPARENTHESIS) {
tokenizer.pos++;
return {
type: 'Function',
name
};
}
return maybeMultiplied(tokenizer, {
type: 'Keyword',
name
});
}
function regroupTerms(terms, combinators) {
function createGroup(terms, combinator) {
return {
type: 'Group',
terms,
combinator,
disallowEmpty: false,
explicit: false
};
}
let combinator;
combinators = Object.keys(combinators)
.sort((a, b) => COMBINATOR_PRECEDENCE[a] - COMBINATOR_PRECEDENCE[b]);
while (combinators.length > 0) {
combinator = combinators.shift();
let i = 0;
let subgroupStart = 0;
for (; i < terms.length; i++) {
const term = terms[i];
if (term.type === 'Combinator') {
if (term.value === combinator) {
if (subgroupStart === -1) {
subgroupStart = i - 1;
}
terms.splice(i, 1);
i--;
} else {
if (subgroupStart !== -1 && i - subgroupStart > 1) {
terms.splice(
subgroupStart,
i - subgroupStart,
createGroup(terms.slice(subgroupStart, i), combinator)
);
i = subgroupStart + 1;
}
subgroupStart = -1;
}
}
}
if (subgroupStart !== -1 && combinators.length) {
terms.splice(
subgroupStart,
i - subgroupStart,
createGroup(terms.slice(subgroupStart, i), combinator)
);
}
}
return combinator;
}
function readImplicitGroup(tokenizer) {
const terms = [];
const combinators = {};
let token;
let prevToken = null;
let prevTokenPos = tokenizer.pos;
while (token = peek(tokenizer)) {
if (token.type !== 'Spaces') {
if (token.type === 'Combinator') {
// check for combinator in group beginning and double combinator sequence
if (prevToken === null || prevToken.type === 'Combinator') {
tokenizer.pos = prevTokenPos;
tokenizer.error('Unexpected combinator');
}
combinators[token.value] = true;
} else if (prevToken !== null && prevToken.type !== 'Combinator') {
combinators[' '] = true; // a b
terms.push({
type: 'Combinator',
value: ' '
});
}
terms.push(token);
prevToken = token;
prevTokenPos = tokenizer.pos;
}
}
// check for combinator in group ending
if (prevToken !== null && prevToken.type === 'Combinator') {
tokenizer.pos -= prevTokenPos;
tokenizer.error('Unexpected combinator');
}
return {
type: 'Group',
terms,
combinator: regroupTerms(terms, combinators) || ' ',
disallowEmpty: false,
explicit: false
};
}
function readGroup(tokenizer) {
let result;
tokenizer.eat(LEFTSQUAREBRACKET);
result = readImplicitGroup(tokenizer);
tokenizer.eat(RIGHTSQUAREBRACKET);
result.explicit = true;
if (tokenizer.charCode() === EXCLAMATIONMARK) {
tokenizer.pos++;
result.disallowEmpty = true;
}
return result;
}
function peek(tokenizer) {
let code = tokenizer.charCode();
if (code < 128 && NAME_CHAR[code] === 1) {
return readKeywordOrFunction(tokenizer);
}
switch (code) {
case RIGHTSQUAREBRACKET:
// don't eat, stop scan a group
break;
case LEFTSQUAREBRACKET:
return maybeMultiplied(tokenizer, readGroup(tokenizer));
case LESSTHANSIGN:
return tokenizer.nextCharCode() === APOSTROPHE
? readProperty(tokenizer)
: readType(tokenizer);
case VERTICALLINE:
return {
type: 'Combinator',
value: tokenizer.substringToPos(
tokenizer.pos + (tokenizer.nextCharCode() === VERTICALLINE ? 2 : 1)
)
};
case AMPERSAND:
tokenizer.pos++;
tokenizer.eat(AMPERSAND);
return {
type: 'Combinator',
value: '&&'
};
case COMMA:
tokenizer.pos++;
return {
type: 'Comma'
};
case APOSTROPHE:
return maybeMultiplied(tokenizer, {
type: 'String',
value: scanString(tokenizer)
});
case SPACE:
case TAB:
case N:
case R:
case F:
return {
type: 'Spaces',
value: scanSpaces(tokenizer)
};
case COMMERCIALAT:
code = tokenizer.nextCharCode();
if (code < 128 && NAME_CHAR[code] === 1) {
tokenizer.pos++;
return {
type: 'AtKeyword',
name: scanWord(tokenizer)
};
}
return maybeToken(tokenizer);
case ASTERISK:
case PLUSSIGN:
case QUESTIONMARK:
case NUMBERSIGN:
case EXCLAMATIONMARK:
// prohibited tokens (used as a multiplier start)
break;
case LEFTCURLYBRACKET:
// LEFTCURLYBRACKET is allowed since mdn/data uses it w/o quoting
// check next char isn't a number, because it's likely a disjoined multiplier
code = tokenizer.nextCharCode();
if (code < 48 || code > 57) {
return maybeToken(tokenizer);
}
break;
default:
return maybeToken(tokenizer);
}
}
function parse(source) {
const tokenizer$1 = new tokenizer.Tokenizer(source);
const result = readImplicitGroup(tokenizer$1);
if (tokenizer$1.pos !== source.length) {
tokenizer$1.error('Unexpected input');
}
// reduce redundant groups with single group term
if (result.terms.length === 1 && result.terms[0].type === 'Group') {
return result.terms[0];
}
return result;
}
exports.parse = parse;

56
node_modules/css-tree/cjs/definition-syntax/tokenizer.cjs generated vendored Normal file

@@ -0,0 +1,56 @@
'use strict';
const SyntaxError = require('./SyntaxError.cjs');
const TAB = 9;
const N = 10;
const F = 12;
const R = 13;
const SPACE = 32;
class Tokenizer {
constructor(str) {
this.str = str;
this.pos = 0;
}
charCodeAt(pos) {
return pos < this.str.length ? this.str.charCodeAt(pos) : 0;
}
charCode() {
return this.charCodeAt(this.pos);
}
nextCharCode() {
return this.charCodeAt(this.pos + 1);
}
nextNonWsCode(pos) {
return this.charCodeAt(this.findWsEnd(pos));
}
findWsEnd(pos) {
for (; pos < this.str.length; pos++) {
const code = this.str.charCodeAt(pos);
if (code !== R && code !== N && code !== F && code !== SPACE && code !== TAB) {
break;
}
}
return pos;
}
substringToPos(end) {
return this.str.substring(this.pos, this.pos = end);
}
eat(code) {
if (this.charCode() !== code) {
this.error('Expect `' + String.fromCharCode(code) + '`');
}
this.pos++;
}
peek() {
return this.pos < this.str.length ? this.str.charAt(this.pos++) : '';
}
error(message) {
throw new SyntaxError.SyntaxError(message, this.str, this.pos);
}
}
exports.Tokenizer = Tokenizer;

56
node_modules/css-tree/cjs/definition-syntax/walk.cjs generated vendored Normal file

@@ -0,0 +1,56 @@
'use strict';
const noop = function() {};
function ensureFunction(value) {
return typeof value === 'function' ? value : noop;
}
function walk(node, options, context) {
function walk(node) {
enter.call(context, node);
switch (node.type) {
case 'Group':
node.terms.forEach(walk);
break;
case 'Multiplier':
walk(node.term);
break;
case 'Type':
case 'Property':
case 'Keyword':
case 'AtKeyword':
case 'Function':
case 'String':
case 'Token':
case 'Comma':
break;
default:
throw new Error('Unknown type: ' + node.type);
}
leave.call(context, node);
}
let enter = noop;
let leave = noop;
if (typeof options === 'function') {
enter = options;
} else if (options) {
enter = ensureFunction(options.enter);
leave = ensureFunction(options.leave);
}
if (enter === noop && leave === noop) {
throw new Error('Neither `enter` nor `leave` walker handler is set or both aren\'t a function');
}
walk(node);
}
exports.walk = walk;

103
node_modules/css-tree/cjs/generator/create.cjs generated vendored Normal file

@@ -0,0 +1,103 @@
'use strict';
const index = require('../tokenizer/index.cjs');
const sourceMap = require('./sourceMap.cjs');
const tokenBefore = require('./token-before.cjs');
const types = require('../tokenizer/types.cjs');
const REVERSESOLIDUS = 0x005c; // U+005C REVERSE SOLIDUS (\)
function processChildren(node, delimeter) {
if (typeof delimeter === 'function') {
let prev = null;
node.children.forEach(node => {
if (prev !== null) {
delimeter.call(this, prev);
}
this.node(node);
prev = node;
});
return;
}
node.children.forEach(this.node, this);
}
function processChunk(chunk) {
index.tokenize(chunk, (type, start, end) => {
this.token(type, chunk.slice(start, end));
});
}
function createGenerator(config) {
const types$1 = new Map();
for (let name in config.node) {
const item = config.node[name];
const fn = item.generate || item;
if (typeof fn === 'function') {
types$1.set(name, item.generate || item);
}
}
return function(node, options) {
let buffer = '';
let prevCode = 0;
let handlers = {
node(node) {
if (types$1.has(node.type)) {
types$1.get(node.type).call(publicApi, node);
} else {
throw new Error('Unknown node type: ' + node.type);
}
},
tokenBefore: tokenBefore.safe,
token(type, value) {
prevCode = this.tokenBefore(prevCode, type, value);
this.emit(value, type, false);
if (type === types.Delim && value.charCodeAt(0) === REVERSESOLIDUS) {
this.emit('\n', types.WhiteSpace, true);
}
},
emit(value) {
buffer += value;
},
result() {
return buffer;
}
};
if (options) {
if (typeof options.decorator === 'function') {
handlers = options.decorator(handlers);
}
if (options.sourceMap) {
handlers = sourceMap.generateSourceMap(handlers);
}
if (options.mode in tokenBefore) {
handlers.tokenBefore = tokenBefore[options.mode];
}
}
const publicApi = {
node: (node) => handlers.node(node),
children: processChildren,
token: (type, value) => handlers.token(type, value),
tokenize: processChunk
};
handlers.node(node);
return handlers.result();
};
}
exports.createGenerator = createGenerator;

8
node_modules/css-tree/cjs/generator/index.cjs generated vendored Normal file

@@ -0,0 +1,8 @@
'use strict';
const create = require('./create.cjs');
const generator = require('../syntax/config/generator.cjs');
const index = create.createGenerator(generator);
module.exports = index;

96
node_modules/css-tree/cjs/generator/sourceMap.cjs generated vendored Normal file

@@ -0,0 +1,96 @@
'use strict';
const sourceMapGenerator_js = require('source-map-js/lib/source-map-generator.js');
const trackNodes = new Set(['Atrule', 'Selector', 'Declaration']);
function generateSourceMap(handlers) {
const map = new sourceMapGenerator_js.SourceMapGenerator();
const generated = {
line: 1,
column: 0
};
const original = {
line: 0, // should be zero to add first mapping
column: 0
};
const activatedGenerated = {
line: 1,
column: 0
};
const activatedMapping = {
generated: activatedGenerated
};
let line = 1;
let column = 0;
let sourceMappingActive = false;
const origHandlersNode = handlers.node;
handlers.node = function(node) {
if (node.loc && node.loc.start && trackNodes.has(node.type)) {
const nodeLine = node.loc.start.line;
const nodeColumn = node.loc.start.column - 1;
if (original.line !== nodeLine ||
original.column !== nodeColumn) {
original.line = nodeLine;
original.column = nodeColumn;
generated.line = line;
generated.column = column;
if (sourceMappingActive) {
sourceMappingActive = false;
if (generated.line !== activatedGenerated.line ||
generated.column !== activatedGenerated.column) {
map.addMapping(activatedMapping);
}
}
sourceMappingActive = true;
map.addMapping({
source: node.loc.source,
original,
generated
});
}
}
origHandlersNode.call(this, node);
if (sourceMappingActive && trackNodes.has(node.type)) {
activatedGenerated.line = line;
activatedGenerated.column = column;
}
};
const origHandlersEmit = handlers.emit;
handlers.emit = function(value, type, auto) {
for (let i = 0; i < value.length; i++) {
if (value.charCodeAt(i) === 10) { // \n
line++;
column = 0;
} else {
column++;
}
}
origHandlersEmit(value, type, auto);
};
const origHandlersResult = handlers.result;
handlers.result = function() {
if (sourceMappingActive) {
map.addMapping(activatedMapping);
}
return {
css: origHandlersResult(),
map
};
};
return handlers;
}
exports.generateSourceMap = generateSourceMap;

170
node_modules/css-tree/cjs/generator/token-before.cjs generated vendored Normal file

@@ -0,0 +1,170 @@
'use strict';
const types = require('../tokenizer/types.cjs');
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const code = (type, value) => {
if (type === types.Delim) {
type = value;
}
if (typeof type === 'string') {
const charCode = type.charCodeAt(0);
return charCode > 0x7F ? 0x8000 : charCode << 8;
}
return type;
};
// https://www.w3.org/TR/css-syntax-3/#serialization
// The only requirement for serialization is that it must "round-trip" with parsing,
// that is, parsing the stylesheet must produce the same data structures as parsing,
// serializing, and parsing again, except for consecutive <whitespace-token>s,
// which may be collapsed into a single token.
const specPairs = [
[types.Ident, types.Ident],
[types.Ident, types.Function],
[types.Ident, types.Url],
[types.Ident, types.BadUrl],
[types.Ident, '-'],
[types.Ident, types.Number],
[types.Ident, types.Percentage],
[types.Ident, types.Dimension],
[types.Ident, types.CDC],
[types.Ident, types.LeftParenthesis],
[types.AtKeyword, types.Ident],
[types.AtKeyword, types.Function],
[types.AtKeyword, types.Url],
[types.AtKeyword, types.BadUrl],
[types.AtKeyword, '-'],
[types.AtKeyword, types.Number],
[types.AtKeyword, types.Percentage],
[types.AtKeyword, types.Dimension],
[types.AtKeyword, types.CDC],
[types.Hash, types.Ident],
[types.Hash, types.Function],
[types.Hash, types.Url],
[types.Hash, types.BadUrl],
[types.Hash, '-'],
[types.Hash, types.Number],
[types.Hash, types.Percentage],
[types.Hash, types.Dimension],
[types.Hash, types.CDC],
[types.Dimension, types.Ident],
[types.Dimension, types.Function],
[types.Dimension, types.Url],
[types.Dimension, types.BadUrl],
[types.Dimension, '-'],
[types.Dimension, types.Number],
[types.Dimension, types.Percentage],
[types.Dimension, types.Dimension],
[types.Dimension, types.CDC],
['#', types.Ident],
['#', types.Function],
['#', types.Url],
['#', types.BadUrl],
['#', '-'],
['#', types.Number],
['#', types.Percentage],
['#', types.Dimension],
['#', types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
['-', types.Ident],
['-', types.Function],
['-', types.Url],
['-', types.BadUrl],
['-', '-'],
['-', types.Number],
['-', types.Percentage],
['-', types.Dimension],
['-', types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
[types.Number, types.Ident],
[types.Number, types.Function],
[types.Number, types.Url],
[types.Number, types.BadUrl],
[types.Number, types.Number],
[types.Number, types.Percentage],
[types.Number, types.Dimension],
[types.Number, '%'],
[types.Number, types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
['@', types.Ident],
['@', types.Function],
['@', types.Url],
['@', types.BadUrl],
['@', '-'],
['@', types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
['.', types.Number],
['.', types.Percentage],
['.', types.Dimension],
['+', types.Number],
['+', types.Percentage],
['+', types.Dimension],
['/', '*']
];
// validate with scripts/generate-safe
const safePairs = specPairs.concat([
[types.Ident, types.Hash],
[types.Dimension, types.Hash],
[types.Hash, types.Hash],
[types.AtKeyword, types.LeftParenthesis],
[types.AtKeyword, types.String],
[types.AtKeyword, types.Colon],
[types.Percentage, types.Percentage],
[types.Percentage, types.Dimension],
[types.Percentage, types.Function],
[types.Percentage, '-'],
[types.RightParenthesis, types.Ident],
[types.RightParenthesis, types.Function],
[types.RightParenthesis, types.Percentage],
[types.RightParenthesis, types.Dimension],
[types.RightParenthesis, types.Hash],
[types.RightParenthesis, '-']
]);
function createMap(pairs) {
const isWhiteSpaceRequired = new Set(
pairs.map(([prev, next]) => (code(prev) << 16 | code(next)))
);
return function(prevCode, type, value) {
const nextCode = code(type, value);
const nextCharCode = value.charCodeAt(0);
const emitWs =
(nextCharCode === HYPHENMINUS &&
type !== types.Ident &&
type !== types.Function &&
type !== types.CDC) ||
(nextCharCode === PLUSSIGN)
? isWhiteSpaceRequired.has(prevCode << 16 | nextCharCode << 8)
: isWhiteSpaceRequired.has(prevCode << 16 | nextCode);
if (emitWs) {
this.emit(' ', types.WhiteSpace, true);
}
return nextCode;
};
}
const spec = createMap(specPairs);
const safe = createMap(safePairs);
exports.safe = safe;
exports.spec = spec;

63
node_modules/css-tree/cjs/index.cjs generated vendored Normal file

@@ -0,0 +1,63 @@
'use strict';
const index$1 = require('./syntax/index.cjs');
const version = require('./version.cjs');
const create = require('./syntax/create.cjs');
const List = require('./utils/List.cjs');
const Lexer = require('./lexer/Lexer.cjs');
const index = require('./definition-syntax/index.cjs');
const clone = require('./utils/clone.cjs');
const names$1 = require('./utils/names.cjs');
const ident = require('./utils/ident.cjs');
const string = require('./utils/string.cjs');
const url = require('./utils/url.cjs');
const types = require('./tokenizer/types.cjs');
const names = require('./tokenizer/names.cjs');
const TokenStream = require('./tokenizer/TokenStream.cjs');
const {
tokenize,
parse,
generate,
lexer,
createLexer,
walk,
find,
findLast,
findAll,
toPlainObject,
fromPlainObject,
fork
} = index$1;
exports.version = version.version;
exports.createSyntax = create;
exports.List = List.List;
exports.Lexer = Lexer.Lexer;
exports.definitionSyntax = index;
exports.clone = clone.clone;
exports.isCustomProperty = names$1.isCustomProperty;
exports.keyword = names$1.keyword;
exports.property = names$1.property;
exports.vendorPrefix = names$1.vendorPrefix;
exports.ident = ident;
exports.string = string;
exports.url = url;
exports.tokenTypes = types;
exports.tokenNames = names;
exports.TokenStream = TokenStream.TokenStream;
exports.createLexer = createLexer;
exports.find = find;
exports.findAll = findAll;
exports.findLast = findLast;
exports.fork = fork;
exports.fromPlainObject = fromPlainObject;
exports.generate = generate;
exports.lexer = lexer;
exports.parse = parse;
exports.toPlainObject = toPlainObject;
exports.tokenize = tokenize;
exports.walk = walk;

470
node_modules/css-tree/cjs/lexer/Lexer.cjs generated vendored Normal file

@@ -0,0 +1,470 @@
'use strict';
const error = require('./error.cjs');
const names = require('../utils/names.cjs');
const genericConst = require('./generic-const.cjs');
const generic = require('./generic.cjs');
const units = require('./units.cjs');
const prepareTokens = require('./prepare-tokens.cjs');
const matchGraph = require('./match-graph.cjs');
const match = require('./match.cjs');
const trace = require('./trace.cjs');
const search = require('./search.cjs');
const structure = require('./structure.cjs');
const parse = require('../definition-syntax/parse.cjs');
const generate = require('../definition-syntax/generate.cjs');
const walk = require('../definition-syntax/walk.cjs');
const cssWideKeywordsSyntax = matchGraph.buildMatchGraph(genericConst.cssWideKeywords.join(' | '));
function dumpMapSyntax(map, compact, syntaxAsAst) {
const result = {};
for (const name in map) {
if (map[name].syntax) {
result[name] = syntaxAsAst
? map[name].syntax
: generate.generate(map[name].syntax, { compact });
}
}
return result;
}
function dumpAtruleMapSyntax(map, compact, syntaxAsAst) {
const result = {};
for (const [name, atrule] of Object.entries(map)) {
result[name] = {
prelude: atrule.prelude && (
syntaxAsAst
? atrule.prelude.syntax
: generate.generate(atrule.prelude.syntax, { compact })
),
descriptors: atrule.descriptors && dumpMapSyntax(atrule.descriptors, compact, syntaxAsAst)
};
}
return result;
}
function valueHasVar(tokens) {
for (let i = 0; i < tokens.length; i++) {
if (tokens[i].value.toLowerCase() === 'var(') {
return true;
}
}
return false;
}
function buildMatchResult(matched, error, iterations) {
return {
matched,
iterations,
error,
...trace
};
}
function matchSyntax(lexer, syntax, value, useCssWideKeywords) {
const tokens = prepareTokens(value, lexer.syntax);
let result;
if (valueHasVar(tokens)) {
return buildMatchResult(null, new Error('Matching for a tree with var() is not supported'));
}
if (useCssWideKeywords) {
result = match.matchAsTree(tokens, lexer.cssWideKeywordsSyntax, lexer);
}
if (!useCssWideKeywords || !result.match) {
result = match.matchAsTree(tokens, syntax.match, lexer);
if (!result.match) {
return buildMatchResult(
null,
new error.SyntaxMatchError(result.reason, syntax.syntax, value, result),
result.iterations
);
}
}
return buildMatchResult(result.match, null, result.iterations);
}
class Lexer {
constructor(config, syntax, structure$1) {
this.cssWideKeywordsSyntax = cssWideKeywordsSyntax;
this.syntax = syntax;
this.generic = false;
this.units = { ...units };
this.atrules = Object.create(null);
this.properties = Object.create(null);
this.types = Object.create(null);
this.structure = structure$1 || structure.getStructureFromConfig(config);
if (config) {
if (config.units) {
for (const group of Object.keys(units)) {
if (Array.isArray(config.units[group])) {
this.units[group] = config.units[group];
}
}
}
if (config.types) {
for (const name in config.types) {
this.addType_(name, config.types[name]);
}
}
if (config.generic) {
this.generic = true;
for (const [name, value] of Object.entries(generic.createGenericTypes(this.units))) {
this.addType_(name, value);
}
}
if (config.atrules) {
for (const name in config.atrules) {
this.addAtrule_(name, config.atrules[name]);
}
}
if (config.properties) {
for (const name in config.properties) {
this.addProperty_(name, config.properties[name]);
}
}
}
}
checkStructure(ast) {
function collectWarning(node, message) {
warns.push({ node, message });
}
const structure = this.structure;
const warns = [];
this.syntax.walk(ast, function(node) {
if (structure.hasOwnProperty(node.type)) {
structure[node.type].check(node, collectWarning);
} else {
collectWarning(node, 'Unknown node type `' + node.type + '`');
}
});
return warns.length ? warns : false;
}
createDescriptor(syntax, type, name, parent = null) {
const ref = {
type,
name
};
const descriptor = {
type,
name,
parent,
serializable: typeof syntax === 'string' || (syntax && typeof syntax.type === 'string'),
syntax: null,
match: null
};
if (typeof syntax === 'function') {
descriptor.match = matchGraph.buildMatchGraph(syntax, ref);
} else {
if (typeof syntax === 'string') {
// lazy parsing on first access
Object.defineProperty(descriptor, 'syntax', {
get() {
Object.defineProperty(descriptor, 'syntax', {
value: parse.parse(syntax)
});
return descriptor.syntax;
}
});
} else {
descriptor.syntax = syntax;
}
// lazy graph build on first access
Object.defineProperty(descriptor, 'match', {
get() {
Object.defineProperty(descriptor, 'match', {
value: matchGraph.buildMatchGraph(descriptor.syntax, ref)
});
return descriptor.match;
}
});
}
return descriptor;
}
addAtrule_(name, syntax) {
if (!syntax) {
return;
}
this.atrules[name] = {
type: 'Atrule',
name: name,
prelude: syntax.prelude ? this.createDescriptor(syntax.prelude, 'AtrulePrelude', name) : null,
descriptors: syntax.descriptors
? Object.keys(syntax.descriptors).reduce(
(map, descName) => {
map[descName] = this.createDescriptor(syntax.descriptors[descName], 'AtruleDescriptor', descName, name);
return map;
},
Object.create(null)
)
: null
};
}
addProperty_(name, syntax) {
if (!syntax) {
return;
}
this.properties[name] = this.createDescriptor(syntax, 'Property', name);
}
addType_(name, syntax) {
if (!syntax) {
return;
}
this.types[name] = this.createDescriptor(syntax, 'Type', name);
}
checkAtruleName(atruleName) {
if (!this.getAtrule(atruleName)) {
return new error.SyntaxReferenceError('Unknown at-rule', '@' + atruleName);
}
}
checkAtrulePrelude(atruleName, prelude) {
const error = this.checkAtruleName(atruleName);
if (error) {
return error;
}
const atrule = this.getAtrule(atruleName);
if (!atrule.prelude && prelude) {
return new SyntaxError('At-rule `@' + atruleName + '` should not contain a prelude');
}
if (atrule.prelude && !prelude) {
if (!matchSyntax(this, atrule.prelude, '', false).matched) {
return new SyntaxError('At-rule `@' + atruleName + '` should contain a prelude');
}
}
}
checkAtruleDescriptorName(atruleName, descriptorName) {
const error$1 = this.checkAtruleName(atruleName);
if (error$1) {
return error$1;
}
const atrule = this.getAtrule(atruleName);
const descriptor = names.keyword(descriptorName);
if (!atrule.descriptors) {
return new SyntaxError('At-rule `@' + atruleName + '` has no known descriptors');
}
if (!atrule.descriptors[descriptor.name] &&
!atrule.descriptors[descriptor.basename]) {
return new error.SyntaxReferenceError('Unknown at-rule descriptor', descriptorName);
}
}
checkPropertyName(propertyName) {
if (!this.getProperty(propertyName)) {
return new error.SyntaxReferenceError('Unknown property', propertyName);
}
}
matchAtrulePrelude(atruleName, prelude) {
const error = this.checkAtrulePrelude(atruleName, prelude);
if (error) {
return buildMatchResult(null, error);
}
const atrule = this.getAtrule(atruleName);
if (!atrule.prelude) {
return buildMatchResult(null, null);
}
return matchSyntax(this, atrule.prelude, prelude || '', false);
}
matchAtruleDescriptor(atruleName, descriptorName, value) {
const error = this.checkAtruleDescriptorName(atruleName, descriptorName);
if (error) {
return buildMatchResult(null, error);
}
const atrule = this.getAtrule(atruleName);
const descriptor = names.keyword(descriptorName);
return matchSyntax(this, atrule.descriptors[descriptor.name] || atrule.descriptors[descriptor.basename], value, false);
}
matchDeclaration(node) {
if (node.type !== 'Declaration') {
return buildMatchResult(null, new Error('Not a Declaration node'));
}
return this.matchProperty(node.property, node.value);
}
matchProperty(propertyName, value) {
// don't match syntax for a custom property at the moment
if (names.property(propertyName).custom) {
return buildMatchResult(null, new Error('Lexer matching doesn\'t applicable for custom properties'));
}
const error = this.checkPropertyName(propertyName);
if (error) {
return buildMatchResult(null, error);
}
return matchSyntax(this, this.getProperty(propertyName), value, true);
}
matchType(typeName, value) {
const typeSyntax = this.getType(typeName);
if (!typeSyntax) {
return buildMatchResult(null, new error.SyntaxReferenceError('Unknown type', typeName));
}
return matchSyntax(this, typeSyntax, value, false);
}
match(syntax, value) {
if (typeof syntax !== 'string' && (!syntax || !syntax.type)) {
return buildMatchResult(null, new error.SyntaxReferenceError('Bad syntax'));
}
if (typeof syntax === 'string' || !syntax.match) {
syntax = this.createDescriptor(syntax, 'Type', 'anonymous');
}
return matchSyntax(this, syntax, value, false);
}
findValueFragments(propertyName, value, type, name) {
return search.matchFragments(this, value, this.matchProperty(propertyName, value), type, name);
}
findDeclarationValueFragments(declaration, type, name) {
return search.matchFragments(this, declaration.value, this.matchDeclaration(declaration), type, name);
}
findAllFragments(ast, type, name) {
const result = [];
this.syntax.walk(ast, {
visit: 'Declaration',
enter: (declaration) => {
result.push.apply(result, this.findDeclarationValueFragments(declaration, type, name));
}
});
return result;
}
getAtrule(atruleName, fallbackBasename = true) {
const atrule = names.keyword(atruleName);
const atruleEntry = atrule.vendor && fallbackBasename
? this.atrules[atrule.name] || this.atrules[atrule.basename]
: this.atrules[atrule.name];
return atruleEntry || null;
}
getAtrulePrelude(atruleName, fallbackBasename = true) {
const atrule = this.getAtrule(atruleName, fallbackBasename);
return atrule && atrule.prelude || null;
}
getAtruleDescriptor(atruleName, name) {
return this.atrules.hasOwnProperty(atruleName) && this.atrules.declarators
? this.atrules[atruleName].declarators[name] || null
: null;
}
getProperty(propertyName, fallbackBasename = true) {
const property = names.property(propertyName);
const propertyEntry = property.vendor && fallbackBasename
? this.properties[property.name] || this.properties[property.basename]
: this.properties[property.name];
return propertyEntry || null;
}
getType(name) {
return hasOwnProperty.call(this.types, name) ? this.types[name] : null;
}
validate() {
function validate(syntax, name, broken, descriptor) {
if (broken.has(name)) {
return broken.get(name);
}
broken.set(name, false);
if (descriptor.syntax !== null) {
walk.walk(descriptor.syntax, function(node) {
if (node.type !== 'Type' && node.type !== 'Property') {
return;
}
const map = node.type === 'Type' ? syntax.types : syntax.properties;
const brokenMap = node.type === 'Type' ? brokenTypes : brokenProperties;
if (!hasOwnProperty.call(map, node.name) || validate(syntax, node.name, brokenMap, map[node.name])) {
broken.set(name, true);
}
}, this);
}
}
let brokenTypes = new Map();
let brokenProperties = new Map();
for (const key in this.types) {
validate(this, key, brokenTypes, this.types[key]);
}
for (const key in this.properties) {
validate(this, key, brokenProperties, this.properties[key]);
}
brokenTypes = [...brokenTypes.keys()].filter(name => brokenTypes.get(name));
brokenProperties = [...brokenProperties.keys()].filter(name => brokenProperties.get(name));
if (brokenTypes.length || brokenProperties.length) {
return {
types: brokenTypes,
properties: brokenProperties
};
}
return null;
}
dump(syntaxAsAst, pretty) {
return {
generic: this.generic,
units: this.units,
types: dumpMapSyntax(this.types, !pretty, syntaxAsAst),
properties: dumpMapSyntax(this.properties, !pretty, syntaxAsAst),
atrules: dumpAtruleMapSyntax(this.atrules, !pretty, syntaxAsAst)
};
}
toString() {
return JSON.stringify(this.dump());
}
}
exports.Lexer = Lexer;

128
node_modules/css-tree/cjs/lexer/error.cjs generated vendored Normal file

@@ -0,0 +1,128 @@
'use strict';
const createCustomError = require('../utils/create-custom-error.cjs');
const generate = require('../definition-syntax/generate.cjs');
const defaultLoc = { offset: 0, line: 1, column: 1 };
function locateMismatch(matchResult, node) {
const tokens = matchResult.tokens;
const longestMatch = matchResult.longestMatch;
const mismatchNode = longestMatch < tokens.length ? tokens[longestMatch].node || null : null;
const badNode = mismatchNode !== node ? mismatchNode : null;
let mismatchOffset = 0;
let mismatchLength = 0;
let entries = 0;
let css = '';
let start;
let end;
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i].value;
if (i === longestMatch) {
mismatchLength = token.length;
mismatchOffset = css.length;
}
if (badNode !== null && tokens[i].node === badNode) {
if (i <= longestMatch) {
entries++;
} else {
entries = 0;
}
}
css += token;
}
if (longestMatch === tokens.length || entries > 1) { // last
start = fromLoc(badNode || node, 'end') || buildLoc(defaultLoc, css);
end = buildLoc(start);
} else {
start = fromLoc(badNode, 'start') ||
buildLoc(fromLoc(node, 'start') || defaultLoc, css.slice(0, mismatchOffset));
end = fromLoc(badNode, 'end') ||
buildLoc(start, css.substr(mismatchOffset, mismatchLength));
}
return {
css,
mismatchOffset,
mismatchLength,
start,
end
};
}
function fromLoc(node, point) {
const value = node && node.loc && node.loc[point];
if (value) {
return 'line' in value ? buildLoc(value) : value;
}
return null;
}
function buildLoc({ offset, line, column }, extra) {
const loc = {
offset,
line,
column
};
if (extra) {
const lines = extra.split(/\n|\r\n?|\f/);
loc.offset += extra.length;
loc.line += lines.length - 1;
loc.column = lines.length === 1 ? loc.column + extra.length : lines.pop().length + 1;
}
return loc;
}
const SyntaxReferenceError = function(type, referenceName) {
const error = createCustomError.createCustomError(
'SyntaxReferenceError',
type + (referenceName ? ' `' + referenceName + '`' : '')
);
error.reference = referenceName;
return error;
};
const SyntaxMatchError = function(message, syntax, node, matchResult) {
const error = createCustomError.createCustomError('SyntaxMatchError', message);
const {
css,
mismatchOffset,
mismatchLength,
start,
end
} = locateMismatch(matchResult, node);
error.rawMessage = message;
error.syntax = syntax ? generate.generate(syntax) : '<generic>';
error.css = css;
error.mismatchOffset = mismatchOffset;
error.mismatchLength = mismatchLength;
error.message = message + '\n' +
' syntax: ' + error.syntax + '\n' +
' value: ' + (css || '<empty string>') + '\n' +
' --------' + new Array(error.mismatchOffset + 1).join('-') + '^';
Object.assign(error, start);
error.loc = {
source: (node && node.loc && node.loc.source) || '<unknown>',
start,
end
};
return error;
};
exports.SyntaxMatchError = SyntaxMatchError;
exports.SyntaxReferenceError = SyntaxReferenceError;

235
node_modules/css-tree/cjs/lexer/generic-an-plus-b.cjs generated vendored Normal file

@@ -0,0 +1,235 @@
'use strict';
const charCodeDefinitions = require('../tokenizer/char-code-definitions.cjs');
const types = require('../tokenizer/types.cjs');
const utils = require('../tokenizer/utils.cjs');
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
const DISALLOW_SIGN = true;
const ALLOW_SIGN = false;
function isDelim(token, code) {
return token !== null && token.type === types.Delim && token.value.charCodeAt(0) === code;
}
function skipSC(token, offset, getNextToken) {
while (token !== null && (token.type === types.WhiteSpace || token.type === types.Comment)) {
token = getNextToken(++offset);
}
return offset;
}
function checkInteger(token, valueOffset, disallowSign, offset) {
if (!token) {
return 0;
}
const code = token.value.charCodeAt(valueOffset);
if (code === PLUSSIGN || code === HYPHENMINUS) {
if (disallowSign) {
// Number sign is not allowed
return 0;
}
valueOffset++;
}
for (; valueOffset < token.value.length; valueOffset++) {
if (!charCodeDefinitions.isDigit(token.value.charCodeAt(valueOffset))) {
// Integer is expected
return 0;
}
}
return offset + 1;
}
// ... <signed-integer>
// ... ['+' | '-'] <signless-integer>
function consumeB(token, offset_, getNextToken) {
let sign = false;
let offset = skipSC(token, offset_, getNextToken);
token = getNextToken(offset);
if (token === null) {
return offset_;
}
if (token.type !== types.Number) {
if (isDelim(token, PLUSSIGN) || isDelim(token, HYPHENMINUS)) {
sign = true;
offset = skipSC(getNextToken(++offset), offset, getNextToken);
token = getNextToken(offset);
if (token === null || token.type !== types.Number) {
return 0;
}
} else {
return offset_;
}
}
if (!sign) {
const code = token.value.charCodeAt(0);
if (code !== PLUSSIGN && code !== HYPHENMINUS) {
// Number sign is expected
return 0;
}
}
return checkInteger(token, sign ? 0 : 1, sign, offset);
}
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
function anPlusB(token, getNextToken) {
/* eslint-disable brace-style*/
let offset = 0;
if (!token) {
return 0;
}
// <integer>
if (token.type === types.Number) {
return checkInteger(token, 0, ALLOW_SIGN, offset); // b
}
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
// -n- <signless-integer>
// <dashndashdigit-ident>
else if (token.type === types.Ident && token.value.charCodeAt(0) === HYPHENMINUS) {
// expect 1st char is N
if (!utils.cmpChar(token.value, 1, N)) {
return 0;
}
switch (token.value.length) {
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
case 2:
return consumeB(getNextToken(++offset), offset, getNextToken);
// -n- <signless-integer>
case 3:
if (token.value.charCodeAt(2) !== HYPHENMINUS) {
return 0;
}
offset = skipSC(getNextToken(++offset), offset, getNextToken);
token = getNextToken(offset);
return checkInteger(token, 0, DISALLOW_SIGN, offset);
// <dashndashdigit-ident>
default:
if (token.value.charCodeAt(2) !== HYPHENMINUS) {
return 0;
}
return checkInteger(token, 3, DISALLOW_SIGN, offset);
}
}
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
// '+'? n- <signless-integer>
// '+'? <ndashdigit-ident>
else if (token.type === types.Ident || (isDelim(token, PLUSSIGN) && getNextToken(offset + 1).type === types.Ident)) {
// just ignore a plus
if (token.type !== types.Ident) {
token = getNextToken(++offset);
}
if (token === null || !utils.cmpChar(token.value, 0, N)) {
return 0;
}
switch (token.value.length) {
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
case 1:
return consumeB(getNextToken(++offset), offset, getNextToken);
// '+'? n- <signless-integer>
case 2:
if (token.value.charCodeAt(1) !== HYPHENMINUS) {
return 0;
}
offset = skipSC(getNextToken(++offset), offset, getNextToken);
token = getNextToken(offset);
return checkInteger(token, 0, DISALLOW_SIGN, offset);
// '+'? <ndashdigit-ident>
default:
if (token.value.charCodeAt(1) !== HYPHENMINUS) {
return 0;
}
return checkInteger(token, 2, DISALLOW_SIGN, offset);
}
}
// <ndashdigit-dimension>
// <ndash-dimension> <signless-integer>
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
else if (token.type === types.Dimension) {
let code = token.value.charCodeAt(0);
let sign = code === PLUSSIGN || code === HYPHENMINUS ? 1 : 0;
let i = sign;
for (; i < token.value.length; i++) {
if (!charCodeDefinitions.isDigit(token.value.charCodeAt(i))) {
break;
}
}
if (i === sign) {
// Integer is expected
return 0;
}
if (!utils.cmpChar(token.value, i, N)) {
return 0;
}
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
if (i + 1 === token.value.length) {
return consumeB(getNextToken(++offset), offset, getNextToken);
} else {
if (token.value.charCodeAt(i + 1) !== HYPHENMINUS) {
return 0;
}
// <ndash-dimension> <signless-integer>
if (i + 2 === token.value.length) {
offset = skipSC(getNextToken(++offset), offset, getNextToken);
token = getNextToken(offset);
return checkInteger(token, 0, DISALLOW_SIGN, offset);
}
// <ndashdigit-dimension>
else {
return checkInteger(token, i + 2, DISALLOW_SIGN, offset);
}
}
}
return 0;
}
module.exports = anPlusB;
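// Usage sketch (assumption: token objects mirror the { type, value } shape used above).
// For the CSS expression `2n+1` a tokenizer would typically emit a Dimension token '2n'
// followed by a Number token '+1'; anPlusB() then reports how many tokens the An+B
// expression spans, or 0 on mismatch.
//
//   const demoTokens = [
//       { type: types.Dimension, value: '2n' },
//       { type: types.Number, value: '+1' }
//   ];
//   const demoGetNextToken = offset => offset < demoTokens.length ? demoTokens[offset] : null;
//   anPlusB(demoTokens[0], demoGetNextToken); // expected: 2 (both tokens matched)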

12
node_modules/css-tree/cjs/lexer/generic-const.cjs generated vendored Normal file
View File

@ -0,0 +1,12 @@
'use strict';
// https://drafts.csswg.org/css-cascade-5/
const cssWideKeywords = [
'initial',
'inherit',
'unset',
'revert',
'revert-layer'
];
exports.cssWideKeywords = cssWideKeywords;

149
node_modules/css-tree/cjs/lexer/generic-urange.cjs generated vendored Normal file
View File

@ -0,0 +1,149 @@
'use strict';
const charCodeDefinitions = require('../tokenizer/char-code-definitions.cjs');
const types = require('../tokenizer/types.cjs');
const utils = require('../tokenizer/utils.cjs');
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const QUESTIONMARK = 0x003F; // U+003F QUESTION MARK (?)
const U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)
function isDelim(token, code) {
return token !== null && token.type === types.Delim && token.value.charCodeAt(0) === code;
}
function startsWith(token, code) {
return token.value.charCodeAt(0) === code;
}
function hexSequence(token, offset, allowDash) {
let hexlen = 0;
for (let pos = offset; pos < token.value.length; pos++) {
const code = token.value.charCodeAt(pos);
if (code === HYPHENMINUS && allowDash && hexlen !== 0) {
hexSequence(token, offset + hexlen + 1, false);
return 6; // disallow following question marks
}
if (!charCodeDefinitions.isHexDigit(code)) {
return 0; // not a hex digit
}
if (++hexlen > 6) {
return 0; // too many hex digits
}
}
return hexlen;
}
function withQuestionMarkSequence(consumed, length, getNextToken) {
if (!consumed) {
return 0; // nothing consumed
}
while (isDelim(getNextToken(length), QUESTIONMARK)) {
if (++consumed > 6) {
return 0; // too many question marks
}
length++;
}
return length;
}
// https://drafts.csswg.org/css-syntax/#urange
// Informally, the <urange> production has three forms:
// U+0001
// Defines a range consisting of a single code point, in this case the code point "1".
// U+0001-00ff
// Defines a range of codepoints between the first and the second value, in this case
// the range between "1" and "ff" (255 in decimal) inclusive.
// U+00??
// Defines a range of codepoints where the "?" characters range over all hex digits,
// in this case defining the same as the value U+0000-00ff.
// In each form, a maximum of 6 digits is allowed for each hexadecimal number (if you treat "?" as a hexadecimal digit).
//
// <urange> =
// u '+' <ident-token> '?'* |
// u <dimension-token> '?'* |
// u <number-token> '?'* |
// u <number-token> <dimension-token> |
// u <number-token> <number-token> |
// u '+' '?'+
function urange(token, getNextToken) {
let length = 0;
// should start with `u` or `U`
if (token === null || token.type !== types.Ident || !utils.cmpChar(token.value, 0, U)) {
return 0;
}
token = getNextToken(++length);
if (token === null) {
return 0;
}
// u '+' <ident-token> '?'*
// u '+' '?'+
if (isDelim(token, PLUSSIGN)) {
token = getNextToken(++length);
if (token === null) {
return 0;
}
if (token.type === types.Ident) {
// u '+' <ident-token> '?'*
return withQuestionMarkSequence(hexSequence(token, 0, true), ++length, getNextToken);
}
if (isDelim(token, QUESTIONMARK)) {
// u '+' '?'+
return withQuestionMarkSequence(1, ++length, getNextToken);
}
// Hex digit or question mark is expected
return 0;
}
// u <number-token> '?'*
// u <number-token> <dimension-token>
// u <number-token> <number-token>
if (token.type === types.Number) {
const consumedHexLength = hexSequence(token, 1, true);
if (consumedHexLength === 0) {
return 0;
}
token = getNextToken(++length);
if (token === null) {
// u <number-token> <eof>
return length;
}
if (token.type === types.Dimension || token.type === types.Number) {
// u <number-token> <dimension-token>
// u <number-token> <number-token>
if (!startsWith(token, HYPHENMINUS) || !hexSequence(token, 1, false)) {
return 0;
}
return length + 1;
}
// u <number-token> '?'*
return withQuestionMarkSequence(consumedHexLength, length, getNextToken);
}
// u <dimension-token> '?'*
if (token.type === types.Dimension) {
return withQuestionMarkSequence(hexSequence(token, 1, true), ++length, getNextToken);
}
return 0;
}
module.exports = urange;
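// Usage sketch (assumption: `u+a?` tokenizes as Ident 'u', Delim '+', Ident 'a', Delim '?').
// urange() returns the number of tokens that form the <urange>, or 0 on mismatch.
//
//   const demoTokens = [
//       { type: types.Ident, value: 'u' },
//       { type: types.Delim, value: '+' },
//       { type: types.Ident, value: 'a' },
//       { type: types.Delim, value: '?' }
//   ];
//   const demoGetNextToken = offset => offset < demoTokens.length ? demoTokens[offset] : null;
//   urange(demoTokens[0], demoGetNextToken); // expected: 4 (the whole `u+a?` sequence)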

573
node_modules/css-tree/cjs/lexer/generic.cjs generated vendored Normal file
View File

@ -0,0 +1,573 @@
'use strict';
const genericConst = require('./generic-const.cjs');
const genericAnPlusB = require('./generic-an-plus-b.cjs');
const genericUrange = require('./generic-urange.cjs');
const types = require('../tokenizer/types.cjs');
const charCodeDefinitions = require('../tokenizer/char-code-definitions.cjs');
const utils = require('../tokenizer/utils.cjs');
const calcFunctionNames = ['calc(', '-moz-calc(', '-webkit-calc('];
const balancePair = new Map([
[types.Function, types.RightParenthesis],
[types.LeftParenthesis, types.RightParenthesis],
[types.LeftSquareBracket, types.RightSquareBracket],
[types.LeftCurlyBracket, types.RightCurlyBracket]
]);
// safe char code getter
function charCodeAt(str, index) {
return index < str.length ? str.charCodeAt(index) : 0;
}
function eqStr(actual, expected) {
return utils.cmpStr(actual, 0, actual.length, expected);
}
function eqStrAny(actual, expected) {
for (let i = 0; i < expected.length; i++) {
if (eqStr(actual, expected[i])) {
return true;
}
}
return false;
}
// IE postfix hack, i.e. 123\0 or 123px\9
function isPostfixIeHack(str, offset) {
if (offset !== str.length - 2) {
return false;
}
return (
charCodeAt(str, offset) === 0x005C && // U+005C REVERSE SOLIDUS (\)
charCodeDefinitions.isDigit(charCodeAt(str, offset + 1))
);
}
function outOfRange(opts, value, numEnd) {
if (opts && opts.type === 'Range') {
const num = Number(
numEnd !== undefined && numEnd !== value.length
? value.substr(0, numEnd)
: value
);
if (isNaN(num)) {
return true;
}
// FIXME: when opts.min is a string it's a dimension; skip range validation
// for now since it requires a type conversion which is not implemented yet
if (opts.min !== null && num < opts.min && typeof opts.min !== 'string') {
return true;
}
// FIXME: when opts.max is a string it's a dimension; skip range validation
// for now since it requires a type conversion which is not implemented yet
if (opts.max !== null && num > opts.max && typeof opts.max !== 'string') {
return true;
}
}
return false;
}
function consumeFunction(token, getNextToken) {
let balanceCloseType = 0;
let balanceStash = [];
let length = 0;
// balanced token consuming
scan:
do {
switch (token.type) {
case types.RightCurlyBracket:
case types.RightParenthesis:
case types.RightSquareBracket:
if (token.type !== balanceCloseType) {
break scan;
}
balanceCloseType = balanceStash.pop();
if (balanceStash.length === 0) {
length++;
break scan;
}
break;
case types.Function:
case types.LeftParenthesis:
case types.LeftSquareBracket:
case types.LeftCurlyBracket:
balanceStash.push(balanceCloseType);
balanceCloseType = balancePair.get(token.type);
break;
}
length++;
} while (token = getNextToken(length));
return length;
}
// TODO: implement
// can be used wherever <length>, <frequency>, <angle>, <time>, <percentage>, <number>, or <integer> values are allowed
// https://drafts.csswg.org/css-values/#calc-notation
function calc(next) {
return function(token, getNextToken, opts) {
if (token === null) {
return 0;
}
if (token.type === types.Function && eqStrAny(token.value, calcFunctionNames)) {
return consumeFunction(token, getNextToken);
}
return next(token, getNextToken, opts);
};
}
function tokenType(expectedTokenType) {
return function(token) {
if (token === null || token.type !== expectedTokenType) {
return 0;
}
return 1;
};
}
// =========================
// Complex types
//
// https://drafts.csswg.org/css-values-4/#custom-idents
// 4.2. Author-defined Identifiers: the <custom-ident> type
// Some properties accept arbitrary author-defined identifiers as a component value.
// This generic data type is denoted by <custom-ident>, and represents any valid CSS identifier
// that would not be misinterpreted as a pre-defined keyword in that property's value definition.
//
// See also: https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident
function customIdent(token) {
if (token === null || token.type !== types.Ident) {
return 0;
}
const name = token.value.toLowerCase();
// The CSS-wide keywords are not valid <custom-ident>s
if (eqStrAny(name, genericConst.cssWideKeywords)) {
return 0;
}
// The default keyword is reserved and is also not a valid <custom-ident>
if (eqStr(name, 'default')) {
return 0;
}
// TODO: ignore property specific keywords (as described https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident)
// Specifications using <custom-ident> must specify clearly what other keywords
// are excluded from <custom-ident>, if any—for example by saying that any pre-defined keywords
// in that property's value definition are excluded. Excluded keywords are excluded
// in all ASCII case permutations.
return 1;
}
// https://drafts.csswg.org/css-variables/#typedef-custom-property-name
// A custom property is any property whose name starts with two dashes (U+002D HYPHEN-MINUS), like --foo.
// The <custom-property-name> production corresponds to this: its defined as any valid identifier
// that starts with two dashes, except -- itself, which is reserved for future use by CSS.
// NOTE: The current implementation treats `--` as a valid name since most (all?) major browsers treat it as valid.
function customPropertyName(token) {
// ... defined as any valid identifier
if (token === null || token.type !== types.Ident) {
return 0;
}
// ... that starts with two dashes (U+002D HYPHEN-MINUS)
if (charCodeAt(token.value, 0) !== 0x002D || charCodeAt(token.value, 1) !== 0x002D) {
return 0;
}
return 1;
}
// https://drafts.csswg.org/css-color-4/#hex-notation
// The syntax of a <hex-color> is a <hash-token> token whose value consists of 3, 4, 6, or 8 hexadecimal digits.
// In other words, a hex color is written as a hash character, "#", followed by some number of digits 0-9 or
// letters a-f (the case of the letters doesn't matter - #00ff00 is identical to #00FF00).
function hexColor(token) {
if (token === null || token.type !== types.Hash) {
return 0;
}
const length = token.value.length;
// valid values (length): #rgb (4), #rgba (5), #rrggbb (7), #rrggbbaa (9)
if (length !== 4 && length !== 5 && length !== 7 && length !== 9) {
return 0;
}
for (let i = 1; i < length; i++) {
if (!charCodeDefinitions.isHexDigit(charCodeAt(token.value, i))) {
return 0;
}
}
return 1;
}
function idSelector(token) {
if (token === null || token.type !== types.Hash) {
return 0;
}
if (!charCodeDefinitions.isIdentifierStart(charCodeAt(token.value, 1), charCodeAt(token.value, 2), charCodeAt(token.value, 3))) {
return 0;
}
return 1;
}
// https://drafts.csswg.org/css-syntax/#any-value
// It represents the entirety of what a valid declaration can have as its value.
function declarationValue(token, getNextToken) {
if (!token) {
return 0;
}
let balanceCloseType = 0;
let balanceStash = [];
let length = 0;
// The <declaration-value> production matches any sequence of one or more tokens,
// so long as the sequence does not contain ...
scan:
do {
switch (token.type) {
// ... <bad-string-token>, <bad-url-token>,
case types.BadString:
case types.BadUrl:
break scan;
// ... unmatched <)-token>, <]-token>, or <}-token>,
case types.RightCurlyBracket:
case types.RightParenthesis:
case types.RightSquareBracket:
if (token.type !== balanceCloseType) {
break scan;
}
balanceCloseType = balanceStash.pop();
break;
// ... or top-level <semicolon-token> tokens
case types.Semicolon:
if (balanceCloseType === 0) {
break scan;
}
break;
// ... or <delim-token> tokens with a value of "!"
case types.Delim:
if (balanceCloseType === 0 && token.value === '!') {
break scan;
}
break;
case types.Function:
case types.LeftParenthesis:
case types.LeftSquareBracket:
case types.LeftCurlyBracket:
balanceStash.push(balanceCloseType);
balanceCloseType = balancePair.get(token.type);
break;
}
length++;
} while (token = getNextToken(length));
return length;
}
// https://drafts.csswg.org/css-syntax/#any-value
// The <any-value> production is identical to <declaration-value>, but also
// allows top-level <semicolon-token> tokens and <delim-token> tokens
// with a value of "!". It represents the entirety of what valid CSS can be in any context.
function anyValue(token, getNextToken) {
if (!token) {
return 0;
}
let balanceCloseType = 0;
let balanceStash = [];
let length = 0;
// The <any-value> production matches any sequence of one or more tokens,
// so long as the sequence ...
scan:
do {
switch (token.type) {
// ... does not contain <bad-string-token>, <bad-url-token>,
case types.BadString:
case types.BadUrl:
break scan;
// ... unmatched <)-token>, <]-token>, or <}-token>,
case types.RightCurlyBracket:
case types.RightParenthesis:
case types.RightSquareBracket:
if (token.type !== balanceCloseType) {
break scan;
}
balanceCloseType = balanceStash.pop();
break;
case types.Function:
case types.LeftParenthesis:
case types.LeftSquareBracket:
case types.LeftCurlyBracket:
balanceStash.push(balanceCloseType);
balanceCloseType = balancePair.get(token.type);
break;
}
length++;
} while (token = getNextToken(length));
return length;
}
// =========================
// Dimensions
//
function dimension(type) {
if (type) {
type = new Set(type);
}
return function(token, getNextToken, opts) {
if (token === null || token.type !== types.Dimension) {
return 0;
}
const numberEnd = utils.consumeNumber(token.value, 0);
// check unit
if (type !== null) {
// check for IE postfix hack, i.e. 123px\0 or 123px\9
const reverseSolidusOffset = token.value.indexOf('\\', numberEnd);
const unit = reverseSolidusOffset === -1 || !isPostfixIeHack(token.value, reverseSolidusOffset)
? token.value.substr(numberEnd)
: token.value.substring(numberEnd, reverseSolidusOffset);
if (type.has(unit.toLowerCase()) === false) {
return 0;
}
}
// check range if specified
if (outOfRange(opts, token.value, numberEnd)) {
return 0;
}
return 1;
};
}
// =========================
// Percentage
//
// §5.5. Percentages: the <percentage> type
// https://drafts.csswg.org/css-values-4/#percentages
function percentage(token, getNextToken, opts) {
// ... corresponds to the <percentage-token> production
if (token === null || token.type !== types.Percentage) {
return 0;
}
// check range if specified
if (outOfRange(opts, token.value, token.value.length - 1)) {
return 0;
}
return 1;
}
// =========================
// Numeric
//
// https://drafts.csswg.org/css-values-4/#numbers
// The value <zero> represents a literal number with the value 0. Expressions that merely
// evaluate to a <number> with the value 0 (for example, calc(0)) do not match <zero>;
// only literal <number-token>s do.
function zero(next) {
if (typeof next !== 'function') {
next = function() {
return 0;
};
}
return function(token, getNextToken, opts) {
if (token !== null && token.type === types.Number) {
if (Number(token.value) === 0) {
return 1;
}
}
return next(token, getNextToken, opts);
};
}
// § 5.3. Real Numbers: the <number> type
// https://drafts.csswg.org/css-values-4/#numbers
// Number values are denoted by <number>, and represent real numbers, possibly with a fractional component.
// ... It corresponds to the <number-token> production
function number(token, getNextToken, opts) {
if (token === null) {
return 0;
}
const numberEnd = utils.consumeNumber(token.value, 0);
const isNumber = numberEnd === token.value.length;
if (!isNumber && !isPostfixIeHack(token.value, numberEnd)) {
return 0;
}
// check range if specified
if (outOfRange(opts, token.value, numberEnd)) {
return 0;
}
return 1;
}
// §5.2. Integers: the <integer> type
// https://drafts.csswg.org/css-values-4/#integers
function integer(token, getNextToken, opts) {
// ... corresponds to a subset of the <number-token> production
if (token === null || token.type !== types.Number) {
return 0;
}
// The first digit of an integer may be immediately preceded by `-` or `+` to indicate the integer's sign.
let i = charCodeAt(token.value, 0) === 0x002B || // U+002B PLUS SIGN (+)
charCodeAt(token.value, 0) === 0x002D ? 1 : 0; // U+002D HYPHEN-MINUS (-)
// When written literally, an integer is one or more decimal digits 0 through 9 ...
for (; i < token.value.length; i++) {
if (!charCodeDefinitions.isDigit(charCodeAt(token.value, i))) {
return 0;
}
}
// check range if specified
if (outOfRange(opts, token.value, i)) {
return 0;
}
return 1;
}
// token types
const tokenTypes = {
'ident-token': tokenType(types.Ident),
'function-token': tokenType(types.Function),
'at-keyword-token': tokenType(types.AtKeyword),
'hash-token': tokenType(types.Hash),
'string-token': tokenType(types.String),
'bad-string-token': tokenType(types.BadString),
'url-token': tokenType(types.Url),
'bad-url-token': tokenType(types.BadUrl),
'delim-token': tokenType(types.Delim),
'number-token': tokenType(types.Number),
'percentage-token': tokenType(types.Percentage),
'dimension-token': tokenType(types.Dimension),
'whitespace-token': tokenType(types.WhiteSpace),
'CDO-token': tokenType(types.CDO),
'CDC-token': tokenType(types.CDC),
'colon-token': tokenType(types.Colon),
'semicolon-token': tokenType(types.Semicolon),
'comma-token': tokenType(types.Comma),
'[-token': tokenType(types.LeftSquareBracket),
']-token': tokenType(types.RightSquareBracket),
'(-token': tokenType(types.LeftParenthesis),
')-token': tokenType(types.RightParenthesis),
'{-token': tokenType(types.LeftCurlyBracket),
'}-token': tokenType(types.RightCurlyBracket)
};
// token production types
const productionTypes = {
// token type aliases
'string': tokenType(types.String),
'ident': tokenType(types.Ident),
// percentage
'percentage': calc(percentage),
// numeric
'zero': zero(),
'number': calc(number),
'integer': calc(integer),
// complex types
'custom-ident': customIdent,
'custom-property-name': customPropertyName,
'hex-color': hexColor,
'id-selector': idSelector, // element( <id-selector> )
'an-plus-b': genericAnPlusB,
'urange': genericUrange,
'declaration-value': declarationValue,
'any-value': anyValue
};
// dimension types depend on the units set
function createDemensionTypes(units) {
const {
angle,
decibel,
frequency,
flex,
length,
resolution,
semitones,
time
} = units || {};
return {
'dimension': calc(dimension(null)),
'angle': calc(dimension(angle)),
'decibel': calc(dimension(decibel)),
'frequency': calc(dimension(frequency)),
'flex': calc(dimension(flex)),
'length': calc(zero(dimension(length))),
'resolution': calc(dimension(resolution)),
'semitones': calc(dimension(semitones)),
'time': calc(dimension(time))
};
}
function createGenericTypes(units) {
return {
...tokenTypes,
...productionTypes,
...createDemensionTypes(units)
};
}
exports.createDemensionTypes = createDemensionTypes;
exports.createGenericTypes = createGenericTypes;
exports.productionTypes = productionTypes;
exports.tokenTypes = tokenTypes;
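// Usage sketch (assumptions: the relative require path to units.cjs and the token
// shapes below). createGenericTypes() wires the generic matchers to a unit set;
// each matcher takes (token, getNextToken, opts) and returns how many tokens match
// (0 on mismatch).
//
//   const units = require('./units.cjs');
//   const generic = createGenericTypes(units);
//   generic['length']({ type: types.Dimension, value: '10px' }, () => null, null);  // 1
//   generic['length']({ type: types.Number, value: '0' }, () => null, null);        // 1 (zero is a <length>)
//   generic['length']({ type: types.Dimension, value: '10deg' }, () => null, null); // 0 (wrong unit)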

7
node_modules/css-tree/cjs/lexer/index.cjs generated vendored Normal file
View File

@ -0,0 +1,7 @@
'use strict';
const Lexer = require('./Lexer.cjs');
exports.Lexer = Lexer.Lexer;

459
node_modules/css-tree/cjs/lexer/match-graph.cjs generated vendored Normal file
View File

@ -0,0 +1,459 @@
'use strict';
const parse = require('../definition-syntax/parse.cjs');
const MATCH = { type: 'Match' };
const MISMATCH = { type: 'Mismatch' };
const DISALLOW_EMPTY = { type: 'DisallowEmpty' };
const LEFTPARENTHESIS = 40; // (
const RIGHTPARENTHESIS = 41; // )
function createCondition(match, thenBranch, elseBranch) {
// reduce node count
if (thenBranch === MATCH && elseBranch === MISMATCH) {
return match;
}
if (match === MATCH && thenBranch === MATCH && elseBranch === MATCH) {
return match;
}
if (match.type === 'If' && match.else === MISMATCH && thenBranch === MATCH) {
thenBranch = match.then;
match = match.match;
}
return {
type: 'If',
match,
then: thenBranch,
else: elseBranch
};
}
function isFunctionType(name) {
return (
name.length > 2 &&
name.charCodeAt(name.length - 2) === LEFTPARENTHESIS &&
name.charCodeAt(name.length - 1) === RIGHTPARENTHESIS
);
}
function isEnumCapatible(term) {
return (
term.type === 'Keyword' ||
term.type === 'AtKeyword' ||
term.type === 'Function' ||
term.type === 'Type' && isFunctionType(term.name)
);
}
function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
switch (combinator) {
case ' ': {
// Juxtaposing components means that all of them must occur, in the given order.
//
// a b c
// =
// match a
// then match b
// then match c
// then MATCH
// else MISMATCH
// else MISMATCH
// else MISMATCH
let result = MATCH;
for (let i = terms.length - 1; i >= 0; i--) {
const term = terms[i];
result = createCondition(
term,
result,
MISMATCH
);
}
return result;
}
case '|': {
// A bar (|) separates two or more alternatives: exactly one of them must occur.
//
// a | b | c
// =
// match a
// then MATCH
// else match b
// then MATCH
// else match c
// then MATCH
// else MISMATCH
let result = MISMATCH;
let map = null;
for (let i = terms.length - 1; i >= 0; i--) {
let term = terms[i];
// reduce a sequence of keywords into an Enum
if (isEnumCapatible(term)) {
if (map === null && i > 0 && isEnumCapatible(terms[i - 1])) {
map = Object.create(null);
result = createCondition(
{
type: 'Enum',
map
},
MATCH,
result
);
}
if (map !== null) {
const key = (isFunctionType(term.name) ? term.name.slice(0, -1) : term.name).toLowerCase();
if (key in map === false) {
map[key] = term;
continue;
}
}
}
map = null;
// create a new conditional node
result = createCondition(
term,
MATCH,
result
);
}
return result;
}
case '&&': {
// A double ampersand (&&) separates two or more components,
// all of which must occur, in any order.
// Use MatchOnce for groups with a large number of terms,
// since &&-groups produce at least N!-node trees
if (terms.length > 5) {
return {
type: 'MatchOnce',
terms,
all: true
};
}
// Use a combination tree for groups with small number of terms
//
// a && b && c
// =
// match a
// then [b && c]
// else match b
// then [a && c]
// else match c
// then [a && b]
// else MISMATCH
//
// a && b
// =
// match a
// then match b
// then MATCH
// else MISMATCH
// else match b
// then match a
// then MATCH
// else MISMATCH
// else MISMATCH
let result = MISMATCH;
for (let i = terms.length - 1; i >= 0; i--) {
const term = terms[i];
let thenClause;
if (terms.length > 1) {
thenClause = buildGroupMatchGraph(
combinator,
terms.filter(function(newGroupTerm) {
return newGroupTerm !== term;
}),
false
);
} else {
thenClause = MATCH;
}
result = createCondition(
term,
thenClause,
result
);
}
return result;
}
case '||': {
// A double bar (||) separates two or more options:
// one or more of them must occur, in any order.
// Use MatchOnce for groups with a large number of terms,
// since ||-groups produce at least N!-node trees
if (terms.length > 5) {
return {
type: 'MatchOnce',
terms,
all: false
};
}
// Use a combination tree for groups with small number of terms
//
// a || b || c
// =
// match a
// then [b || c]
// else match b
// then [a || c]
// else match c
// then [a || b]
// else MISMATCH
//
// a || b
// =
// match a
// then match b
// then MATCH
// else MATCH
// else match b
// then match a
// then MATCH
// else MATCH
// else MISMATCH
let result = atLeastOneTermMatched ? MATCH : MISMATCH;
for (let i = terms.length - 1; i >= 0; i--) {
const term = terms[i];
let thenClause;
if (terms.length > 1) {
thenClause = buildGroupMatchGraph(
combinator,
terms.filter(function(newGroupTerm) {
return newGroupTerm !== term;
}),
true
);
} else {
thenClause = MATCH;
}
result = createCondition(
term,
thenClause,
result
);
}
return result;
}
}
}
function buildMultiplierMatchGraph(node) {
let result = MATCH;
let matchTerm = buildMatchGraphInternal(node.term);
if (node.max === 0) {
// disable repeating of empty match to prevent infinite loop
matchTerm = createCondition(
matchTerm,
DISALLOW_EMPTY,
MISMATCH
);
// an occurrence count is not limited, make a cycle;
// to collect more terms on each following matching mismatch
result = createCondition(
matchTerm,
null, // will be a loop
MISMATCH
);
result.then = createCondition(
MATCH,
MATCH,
result // make a loop
);
if (node.comma) {
result.then.else = createCondition(
{ type: 'Comma', syntax: node },
result,
MISMATCH
);
}
} else {
// create a match node chain for [min .. max] interval with optional matches
for (let i = node.min || 1; i <= node.max; i++) {
if (node.comma && result !== MATCH) {
result = createCondition(
{ type: 'Comma', syntax: node },
result,
MISMATCH
);
}
result = createCondition(
matchTerm,
createCondition(
MATCH,
MATCH,
result
),
MISMATCH
);
}
}
if (node.min === 0) {
// allow zero match
result = createCondition(
MATCH,
MATCH,
result
);
} else {
// create a match node chain to collect [0 ... min - 1] required matches
for (let i = 0; i < node.min - 1; i++) {
if (node.comma && result !== MATCH) {
result = createCondition(
{ type: 'Comma', syntax: node },
result,
MISMATCH
);
}
result = createCondition(
matchTerm,
result,
MISMATCH
);
}
}
return result;
}
function buildMatchGraphInternal(node) {
if (typeof node === 'function') {
return {
type: 'Generic',
fn: node
};
}
switch (node.type) {
case 'Group': {
let result = buildGroupMatchGraph(
node.combinator,
node.terms.map(buildMatchGraphInternal),
false
);
if (node.disallowEmpty) {
result = createCondition(
result,
DISALLOW_EMPTY,
MISMATCH
);
}
return result;
}
case 'Multiplier':
return buildMultiplierMatchGraph(node);
case 'Type':
case 'Property':
return {
type: node.type,
name: node.name,
syntax: node
};
case 'Keyword':
return {
type: node.type,
name: node.name.toLowerCase(),
syntax: node
};
case 'AtKeyword':
return {
type: node.type,
name: '@' + node.name.toLowerCase(),
syntax: node
};
case 'Function':
return {
type: node.type,
name: node.name.toLowerCase() + '(',
syntax: node
};
case 'String':
// convert a one char length String to a Token
if (node.value.length === 3) {
return {
type: 'Token',
value: node.value.charAt(1),
syntax: node
};
}
// otherwise use it as is
return {
type: node.type,
value: node.value.substr(1, node.value.length - 2).replace(/\\'/g, '\''),
syntax: node
};
case 'Token':
return {
type: node.type,
value: node.value,
syntax: node
};
case 'Comma':
return {
type: node.type,
syntax: node
};
default:
throw new Error('Unknown node type: ' + node.type);
}
}
function buildMatchGraph(syntaxTree, ref) {
if (typeof syntaxTree === 'string') {
syntaxTree = parse.parse(syntaxTree);
}
return {
type: 'MatchGraph',
match: buildMatchGraphInternal(syntaxTree),
syntax: ref || null,
source: syntaxTree
};
}
exports.DISALLOW_EMPTY = DISALLOW_EMPTY;
exports.MATCH = MATCH;
exports.MISMATCH = MISMATCH;
exports.buildMatchGraph = buildMatchGraph;
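// Usage sketch: buildMatchGraph() accepts either a definition-syntax AST or a string
// (parsed via parse.parse above) and returns a graph of If/Enum/Type/Keyword nodes
// that the matcher in match.cjs walks.
//
//   const graph = buildMatchGraph('<length> | <percentage>', 'width');
//   graph.type    // 'MatchGraph'
//   graph.syntax  // 'width' (the optional reference passed as `ref`)
//   graph.match   // a small If tree over the two Type terms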

632
node_modules/css-tree/cjs/lexer/match.cjs generated vendored Normal file
View File

@ -0,0 +1,632 @@
'use strict';
const matchGraph = require('./match-graph.cjs');
const types = require('../tokenizer/types.cjs');
const { hasOwnProperty } = Object.prototype;
const STUB = 0;
const TOKEN = 1;
const OPEN_SYNTAX = 2;
const CLOSE_SYNTAX = 3;
const EXIT_REASON_MATCH = 'Match';
const EXIT_REASON_MISMATCH = 'Mismatch';
const EXIT_REASON_ITERATION_LIMIT = 'Maximum iteration number exceeded (please fill an issue on https://github.com/csstree/csstree/issues)';
const ITERATION_LIMIT = 15000;
function reverseList(list) {
let prev = null;
let next = null;
let item = list;
while (item !== null) {
next = item.prev;
item.prev = prev;
prev = item;
item = next;
}
return prev;
}
function areStringsEqualCaseInsensitive(testStr, referenceStr) {
if (testStr.length !== referenceStr.length) {
return false;
}
for (let i = 0; i < testStr.length; i++) {
const referenceCode = referenceStr.charCodeAt(i);
let testCode = testStr.charCodeAt(i);
// testCode.toLowerCase() for U+0041 LATIN CAPITAL LETTER A (A) .. U+005A LATIN CAPITAL LETTER Z (Z).
if (testCode >= 0x0041 && testCode <= 0x005A) {
testCode = testCode | 32;
}
if (testCode !== referenceCode) {
return false;
}
}
return true;
}
function isContextEdgeDelim(token) {
if (token.type !== types.Delim) {
return false;
}
// Fix matching for unicode-range: U+30??, U+FF00-FF9F
// Probably we need to check out previous match instead
return token.value !== '?';
}
function isCommaContextStart(token) {
if (token === null) {
return true;
}
return (
token.type === types.Comma ||
token.type === types.Function ||
token.type === types.LeftParenthesis ||
token.type === types.LeftSquareBracket ||
token.type === types.LeftCurlyBracket ||
isContextEdgeDelim(token)
);
}
function isCommaContextEnd(token) {
if (token === null) {
return true;
}
return (
token.type === types.RightParenthesis ||
token.type === types.RightSquareBracket ||
token.type === types.RightCurlyBracket ||
(token.type === types.Delim && token.value === '/')
);
}
function internalMatch(tokens, state, syntaxes) {
function moveToNextToken() {
do {
tokenIndex++;
token = tokenIndex < tokens.length ? tokens[tokenIndex] : null;
} while (token !== null && (token.type === types.WhiteSpace || token.type === types.Comment));
}
function getNextToken(offset) {
const nextIndex = tokenIndex + offset;
return nextIndex < tokens.length ? tokens[nextIndex] : null;
}
function stateSnapshotFromSyntax(nextState, prev) {
return {
nextState,
matchStack,
syntaxStack,
thenStack,
tokenIndex,
prev
};
}
function pushThenStack(nextState) {
thenStack = {
nextState,
matchStack,
syntaxStack,
prev: thenStack
};
}
function pushElseStack(nextState) {
elseStack = stateSnapshotFromSyntax(nextState, elseStack);
}
function addTokenToMatch() {
matchStack = {
type: TOKEN,
syntax: state.syntax,
token,
prev: matchStack
};
moveToNextToken();
syntaxStash = null;
if (tokenIndex > longestMatch) {
longestMatch = tokenIndex;
}
}
function openSyntax() {
syntaxStack = {
syntax: state.syntax,
opts: state.syntax.opts || (syntaxStack !== null && syntaxStack.opts) || null,
prev: syntaxStack
};
matchStack = {
type: OPEN_SYNTAX,
syntax: state.syntax,
token: matchStack.token,
prev: matchStack
};
}
function closeSyntax() {
if (matchStack.type === OPEN_SYNTAX) {
matchStack = matchStack.prev;
} else {
matchStack = {
type: CLOSE_SYNTAX,
syntax: syntaxStack.syntax,
token: matchStack.token,
prev: matchStack
};
}
syntaxStack = syntaxStack.prev;
}
let syntaxStack = null;
let thenStack = null;
let elseStack = null;
// null - stashing allowed, nothing stashed
// false - stashing disabled, nothing stashed
// anything else - a stashable syntax failed, some syntax stashed
let syntaxStash = null;
let iterationCount = 0; // count iterations and prevent infinite loop
let exitReason = null;
let token = null;
let tokenIndex = -1;
let longestMatch = 0;
let matchStack = {
type: STUB,
syntax: null,
token: null,
prev: null
};
moveToNextToken();
while (exitReason === null && ++iterationCount < ITERATION_LIMIT) {
// function mapList(list, fn) {
// const result = [];
// while (list) {
// result.unshift(fn(list));
// list = list.prev;
// }
// return result;
// }
// console.log('--\n',
// '#' + iterationCount,
// require('util').inspect({
// match: mapList(matchStack, x => x.type === TOKEN ? x.token && x.token.value : x.syntax ? ({ [OPEN_SYNTAX]: '<', [CLOSE_SYNTAX]: '</' }[x.type] || x.type) + '!' + x.syntax.name : null),
// token: token && token.value,
// tokenIndex,
// syntax: syntax.type + (syntax.id ? ' #' + syntax.id : '')
// }, { depth: null })
// );
switch (state.type) {
case 'Match':
if (thenStack === null) {
// turn to MISMATCH when some tokens left unmatched
if (token !== null) {
// doesn't mismatch if just one token left and it's an IE hack
if (tokenIndex !== tokens.length - 1 || (token.value !== '\\0' && token.value !== '\\9')) {
state = matchGraph.MISMATCH;
break;
}
}
// break the main loop, return a result - MATCH
exitReason = EXIT_REASON_MATCH;
break;
}
// go to next syntax (`then` branch)
state = thenStack.nextState;
// check match is not empty
if (state === matchGraph.DISALLOW_EMPTY) {
if (thenStack.matchStack === matchStack) {
state = matchGraph.MISMATCH;
break;
} else {
state = matchGraph.MATCH;
}
}
// close syntax if needed
while (thenStack.syntaxStack !== syntaxStack) {
closeSyntax();
}
// pop stack
thenStack = thenStack.prev;
break;
case 'Mismatch':
// when some syntax is stashed
if (syntaxStash !== null && syntaxStash !== false) {
// there is no else branches or a branch reduce match stack
if (elseStack === null || tokenIndex > elseStack.tokenIndex) {
// restore state from the stash
elseStack = syntaxStash;
syntaxStash = false; // disable stashing
}
} else if (elseStack === null) {
// no else branches -> break the main loop
// return a result - MISMATCH
exitReason = EXIT_REASON_MISMATCH;
break;
}
// go to next syntax (`else` branch)
state = elseStack.nextState;
// restore all the rest stack states
thenStack = elseStack.thenStack;
syntaxStack = elseStack.syntaxStack;
matchStack = elseStack.matchStack;
tokenIndex = elseStack.tokenIndex;
token = tokenIndex < tokens.length ? tokens[tokenIndex] : null;
// pop stack
elseStack = elseStack.prev;
break;
case 'MatchGraph':
state = state.match;
break;
case 'If':
// IMPORTANT: else stack push must go first,
// since it stores the state of thenStack before changes
if (state.else !== matchGraph.MISMATCH) {
pushElseStack(state.else);
}
if (state.then !== matchGraph.MATCH) {
pushThenStack(state.then);
}
state = state.match;
break;
case 'MatchOnce':
state = {
type: 'MatchOnceBuffer',
syntax: state,
index: 0,
mask: 0
};
break;
case 'MatchOnceBuffer': {
const terms = state.syntax.terms;
if (state.index === terms.length) {
// no matches at all, or all terms were required to match
if (state.mask === 0 || state.syntax.all) {
state = matchGraph.MISMATCH;
break;
}
// a partial match is ok
state = matchGraph.MATCH;
break;
}
// all terms are matched
if (state.mask === (1 << terms.length) - 1) {
state = matchGraph.MATCH;
break;
}
for (; state.index < terms.length; state.index++) {
const matchFlag = 1 << state.index;
if ((state.mask & matchFlag) === 0) {
// IMPORTANT: else stack push must go first,
// since it stores the state of thenStack before changes
pushElseStack(state);
pushThenStack({
type: 'AddMatchOnce',
syntax: state.syntax,
mask: state.mask | matchFlag
});
// match
state = terms[state.index++];
break;
}
}
break;
}
case 'AddMatchOnce':
state = {
type: 'MatchOnceBuffer',
syntax: state.syntax,
index: 0,
mask: state.mask
};
break;
case 'Enum':
if (token !== null) {
let name = token.value.toLowerCase();
// drop \0 and \9 hack from keyword name
if (name.indexOf('\\') !== -1) {
name = name.replace(/\\[09].*$/, '');
}
if (hasOwnProperty.call(state.map, name)) {
state = state.map[name];
break;
}
}
state = matchGraph.MISMATCH;
break;
case 'Generic': {
const opts = syntaxStack !== null ? syntaxStack.opts : null;
const lastTokenIndex = tokenIndex + Math.floor(state.fn(token, getNextToken, opts));
if (!isNaN(lastTokenIndex) && lastTokenIndex > tokenIndex) {
while (tokenIndex < lastTokenIndex) {
addTokenToMatch();
}
state = matchGraph.MATCH;
} else {
state = matchGraph.MISMATCH;
}
break;
}
case 'Type':
case 'Property': {
const syntaxDict = state.type === 'Type' ? 'types' : 'properties';
const dictSyntax = hasOwnProperty.call(syntaxes, syntaxDict) ? syntaxes[syntaxDict][state.name] : null;
if (!dictSyntax || !dictSyntax.match) {
throw new Error(
'Bad syntax reference: ' +
(state.type === 'Type'
? '<' + state.name + '>'
: '<\'' + state.name + '\'>')
);
}
// stash a syntax for types with low priority
if (syntaxStash !== false && token !== null && state.type === 'Type') {
const lowPriorityMatching =
// https://drafts.csswg.org/css-values-4/#custom-idents
// When parsing positionally-ambiguous keywords in a property value, a <custom-ident> production
// can only claim the keyword if no other unfulfilled production can claim it.
(state.name === 'custom-ident' && token.type === types.Ident) ||
// https://drafts.csswg.org/css-values-4/#lengths
// ... if a `0` could be parsed as either a <number> or a <length> in a property (such as line-height),
// it must parse as a <number>
(state.name === 'length' && token.value === '0');
if (lowPriorityMatching) {
if (syntaxStash === null) {
syntaxStash = stateSnapshotFromSyntax(state, elseStack);
}
state = matchGraph.MISMATCH;
break;
}
}
openSyntax();
state = dictSyntax.match;
break;
}
case 'Keyword': {
const name = state.name;
if (token !== null) {
let keywordName = token.value;
// drop \0 and \9 hack from keyword name
if (keywordName.indexOf('\\') !== -1) {
keywordName = keywordName.replace(/\\[09].*$/, '');
}
if (areStringsEqualCaseInsensitive(keywordName, name)) {
addTokenToMatch();
state = matchGraph.MATCH;
break;
}
}
state = matchGraph.MISMATCH;
break;
}
case 'AtKeyword':
case 'Function':
if (token !== null && areStringsEqualCaseInsensitive(token.value, state.name)) {
addTokenToMatch();
state = matchGraph.MATCH;
break;
}
state = matchGraph.MISMATCH;
break;
case 'Token':
if (token !== null && token.value === state.value) {
addTokenToMatch();
state = matchGraph.MATCH;
break;
}
state = matchGraph.MISMATCH;
break;
case 'Comma':
if (token !== null && token.type === types.Comma) {
if (isCommaContextStart(matchStack.token)) {
state = matchGraph.MISMATCH;
} else {
addTokenToMatch();
state = isCommaContextEnd(token) ? matchGraph.MISMATCH : matchGraph.MATCH;
}
} else {
state = isCommaContextStart(matchStack.token) || isCommaContextEnd(token) ? matchGraph.MATCH : matchGraph.MISMATCH;
}
break;
case 'String':
let string = '';
let lastTokenIndex = tokenIndex;
for (; lastTokenIndex < tokens.length && string.length < state.value.length; lastTokenIndex++) {
string += tokens[lastTokenIndex].value;
}
if (areStringsEqualCaseInsensitive(string, state.value)) {
while (tokenIndex < lastTokenIndex) {
addTokenToMatch();
}
state = matchGraph.MATCH;
} else {
state = matchGraph.MISMATCH;
}
break;
default:
throw new Error('Unknown node type: ' + state.type);
}
}
switch (exitReason) {
case null:
console.warn('[csstree-match] BREAK after ' + ITERATION_LIMIT + ' iterations');
exitReason = EXIT_REASON_ITERATION_LIMIT;
matchStack = null;
break;
case EXIT_REASON_MATCH:
while (syntaxStack !== null) {
closeSyntax();
}
break;
default:
matchStack = null;
}
return {
tokens,
reason: exitReason,
iterations: iterationCount,
match: matchStack,
longestMatch
};
}
function matchAsList(tokens, matchGraph, syntaxes) {
const matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
if (matchResult.match !== null) {
let item = reverseList(matchResult.match).prev;
matchResult.match = [];
while (item !== null) {
switch (item.type) {
case OPEN_SYNTAX:
case CLOSE_SYNTAX:
matchResult.match.push({
type: item.type,
syntax: item.syntax
});
break;
default:
matchResult.match.push({
token: item.token.value,
node: item.token.node
});
break;
}
item = item.prev;
}
}
return matchResult;
}
function matchAsTree(tokens, matchGraph, syntaxes) {
const matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
if (matchResult.match === null) {
return matchResult;
}
let item = matchResult.match;
let host = matchResult.match = {
syntax: matchGraph.syntax || null,
match: []
};
const hostStack = [host];
// reverse the list and start with the 2nd item since the 1st is a stub item
item = reverseList(item).prev;
// build a tree
while (item !== null) {
switch (item.type) {
case OPEN_SYNTAX:
host.match.push(host = {
syntax: item.syntax,
match: []
});
hostStack.push(host);
break;
case CLOSE_SYNTAX:
hostStack.pop();
host = hostStack[hostStack.length - 1];
break;
default:
host.match.push({
syntax: item.syntax || null,
token: item.token.value,
node: item.token.node
});
}
item = item.prev;
}
return matchResult;
}
exports.matchAsList = matchAsList;
exports.matchAsTree = matchAsTree;
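// Usage sketch (assumptions: the require paths below and the token shape produced by
// the tokenizer callback). A keyword-only grammar needs no generic syntaxes, so an
// empty syntaxes dictionary is enough here.
//
//   const { buildMatchGraph } = require('./match-graph.cjs');
//   const { tokenize } = require('../tokenizer/index.cjs');
//   const tokens = [];
//   tokenize('left', (type, start, end) =>
//       tokens.push({ type, value: 'left'.slice(start, end), node: null })
//   );
//   const result = matchAsTree(tokens, buildMatchGraph('left | right'), {});
//   result.reason // 'Match'; result.match holds the matched token tree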

54
node_modules/css-tree/cjs/lexer/prepare-tokens.cjs generated vendored Normal file
View File

@ -0,0 +1,54 @@
'use strict';
const index = require('../tokenizer/index.cjs');
const astToTokens = {
decorator(handlers) {
const tokens = [];
let curNode = null;
return {
...handlers,
node(node) {
const tmp = curNode;
curNode = node;
handlers.node.call(this, node);
curNode = tmp;
},
emit(value, type, auto) {
tokens.push({
type,
value,
node: auto ? null : curNode
});
},
result() {
return tokens;
}
};
}
};
function stringToTokens(str) {
const tokens = [];
index.tokenize(str, (type, start, end) =>
tokens.push({
type,
value: str.slice(start, end),
node: null
})
);
return tokens;
}
function prepareTokens(value, syntax) {
if (typeof value === 'string') {
return stringToTokens(value);
}
return syntax.generate(value, astToTokens);
}
module.exports = prepareTokens;
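// Usage sketch: for string input the syntax argument is unused; the value is simply
// tokenized into { type, value, node: null } records (roughly as shown below). For an
// AST input the tokens come from syntax.generate() and keep a back-reference to the
// originating node instead.
//
//   prepareTokens('1px solid', null);
//   // -> [ { type: Dimension, value: '1px', node: null },
//   //      { type: WhiteSpace, value: ' ', node: null },
//   //      { type: Ident, value: 'solid', node: null } ]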

65
node_modules/css-tree/cjs/lexer/search.cjs generated vendored Normal file
View File

@ -0,0 +1,65 @@
'use strict';
const List = require('../utils/List.cjs');
function getFirstMatchNode(matchNode) {
if ('node' in matchNode) {
return matchNode.node;
}
return getFirstMatchNode(matchNode.match[0]);
}
function getLastMatchNode(matchNode) {
if ('node' in matchNode) {
return matchNode.node;
}
return getLastMatchNode(matchNode.match[matchNode.match.length - 1]);
}
function matchFragments(lexer, ast, match, type, name) {
function findFragments(matchNode) {
if (matchNode.syntax !== null &&
matchNode.syntax.type === type &&
matchNode.syntax.name === name) {
const start = getFirstMatchNode(matchNode);
const end = getLastMatchNode(matchNode);
lexer.syntax.walk(ast, function(node, item, list) {
if (node === start) {
const nodes = new List.List();
do {
nodes.appendData(item.data);
if (item.data === end) {
break;
}
item = item.next;
} while (item !== null);
fragments.push({
parent: list,
nodes
});
}
});
}
if (Array.isArray(matchNode.match)) {
matchNode.match.forEach(findFragments);
}
}
const fragments = [];
if (match.matched !== null) {
findFragments(match.matched);
}
return fragments;
}
exports.matchFragments = matchFragments;
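// Usage sketch: given a lexer (providing syntax.walk), a value AST and a match result,
// matchFragments() collects every fragment matched by a particular grammar term, e.g.
//
//   matchFragments(lexer, valueAst, matchResult, 'Type', 'length');
//   // -> [ { parent, nodes }, ... ] where `nodes` is a List of the AST nodes
//   //    spanned by each <length> match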

168
node_modules/css-tree/cjs/lexer/structure.cjs generated vendored Normal file
View File

@ -0,0 +1,168 @@
'use strict';
const List = require('../utils/List.cjs');
const { hasOwnProperty } = Object.prototype;
function isValidNumber(value) {
// Number.isInteger(value) && value >= 0
return (
typeof value === 'number' &&
isFinite(value) &&
Math.floor(value) === value &&
value >= 0
);
}
function isValidLocation(loc) {
return (
Boolean(loc) &&
isValidNumber(loc.offset) &&
isValidNumber(loc.line) &&
isValidNumber(loc.column)
);
}
function createNodeStructureChecker(type, fields) {
return function checkNode(node, warn) {
if (!node || node.constructor !== Object) {
return warn(node, 'Type of node should be an Object');
}
for (let key in node) {
let valid = true;
if (hasOwnProperty.call(node, key) === false) {
continue;
}
if (key === 'type') {
if (node.type !== type) {
warn(node, 'Wrong node type `' + node.type + '`, expected `' + type + '`');
}
} else if (key === 'loc') {
if (node.loc === null) {
continue;
} else if (node.loc && node.loc.constructor === Object) {
if (typeof node.loc.source !== 'string') {
key += '.source';
} else if (!isValidLocation(node.loc.start)) {
key += '.start';
} else if (!isValidLocation(node.loc.end)) {
key += '.end';
} else {
continue;
}
}
valid = false;
} else if (fields.hasOwnProperty(key)) {
valid = false;
for (let i = 0; !valid && i < fields[key].length; i++) {
const fieldType = fields[key][i];
switch (fieldType) {
case String:
valid = typeof node[key] === 'string';
break;
case Boolean:
valid = typeof node[key] === 'boolean';
break;
case null:
valid = node[key] === null;
break;
default:
if (typeof fieldType === 'string') {
valid = node[key] && node[key].type === fieldType;
} else if (Array.isArray(fieldType)) {
valid = node[key] instanceof List.List;
}
}
}
} else {
warn(node, 'Unknown field `' + key + '` for ' + type + ' node type');
}
if (!valid) {
warn(node, 'Bad value for `' + type + '.' + key + '`');
}
}
for (const key in fields) {
if (hasOwnProperty.call(fields, key) &&
hasOwnProperty.call(node, key) === false) {
warn(node, 'Field `' + type + '.' + key + '` is missed');
}
}
};
}
function processStructure(name, nodeType) {
const structure = nodeType.structure;
const fields = {
type: String,
loc: true
};
const docs = {
type: '"' + name + '"'
};
for (const key in structure) {
if (hasOwnProperty.call(structure, key) === false) {
continue;
}
const docsTypes = [];
const fieldTypes = fields[key] = Array.isArray(structure[key])
? structure[key].slice()
: [structure[key]];
for (let i = 0; i < fieldTypes.length; i++) {
const fieldType = fieldTypes[i];
if (fieldType === String || fieldType === Boolean) {
docsTypes.push(fieldType.name);
} else if (fieldType === null) {
docsTypes.push('null');
} else if (typeof fieldType === 'string') {
docsTypes.push('<' + fieldType + '>');
} else if (Array.isArray(fieldType)) {
docsTypes.push('List'); // TODO: use type enum
} else {
throw new Error('Wrong value `' + fieldType + '` in `' + name + '.' + key + '` structure definition');
}
}
docs[key] = docsTypes.join(' | ');
}
return {
docs,
check: createNodeStructureChecker(name, fields)
};
}
function getStructureFromConfig(config) {
const structure = {};
if (config.node) {
for (const name in config.node) {
if (hasOwnProperty.call(config.node, name)) {
const nodeType = config.node[name];
if (nodeType.structure) {
structure[name] = processStructure(name, nodeType);
} else {
throw new Error('Missed `structure` field in `' + name + '` node type definition');
}
}
}
}
return structure;
}
exports.getStructureFromConfig = getStructureFromConfig;
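// Usage sketch (hypothetical node config; warnFn is any warning callback): each node
// type declares a `structure` describing its fields, and getStructureFromConfig()
// turns that into docs plus a checker.
//
//   const structure = getStructureFromConfig({
//       node: {
//           Identifier: { structure: { name: String } }
//       }
//   });
//   structure.Identifier.docs  // { type: '"Identifier"', name: 'String' }
//   structure.Identifier.check({ type: 'Identifier', loc: null, name: 'foo' }, warnFn);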

73
node_modules/css-tree/cjs/lexer/trace.cjs generated vendored Normal file
View File

@ -0,0 +1,73 @@
'use strict';
function getTrace(node) {
function shouldPutToTrace(syntax) {
if (syntax === null) {
return false;
}
return (
syntax.type === 'Type' ||
syntax.type === 'Property' ||
syntax.type === 'Keyword'
);
}
function hasMatch(matchNode) {
if (Array.isArray(matchNode.match)) {
// use for-loop for better performance
for (let i = 0; i < matchNode.match.length; i++) {
if (hasMatch(matchNode.match[i])) {
if (shouldPutToTrace(matchNode.syntax)) {
result.unshift(matchNode.syntax);
}
return true;
}
}
} else if (matchNode.node === node) {
result = shouldPutToTrace(matchNode.syntax)
? [matchNode.syntax]
: [];
return true;
}
return false;
}
let result = null;
if (this.matched !== null) {
hasMatch(this.matched);
}
return result;
}
function isType(node, type) {
return testNode(this, node, match => match.type === 'Type' && match.name === type);
}
function isProperty(node, property) {
return testNode(this, node, match => match.type === 'Property' && match.name === property);
}
function isKeyword(node) {
return testNode(this, node, match => match.type === 'Keyword');
}
function testNode(match, node, fn) {
const trace = getTrace.call(match, node);
if (trace === null) {
return false;
}
return trace.some(fn);
}
exports.getTrace = getTrace;
exports.isKeyword = isKeyword;
exports.isProperty = isProperty;
exports.isType = isType;
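// Usage sketch (assumed binding): these helpers expect a match result as `this`
// (an object with a `matched` tree), so they are typically attached to it or
// invoked via Function#call:
//
//   getTrace.call(matchResult, astNode);          // -> [Type/Property/Keyword syntaxes] or null
//   isType.call(matchResult, astNode, 'length');  // -> true if the node matched <length>
//   isKeyword.call(matchResult, astNode);         // -> true if the node matched a keyword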

38
node_modules/css-tree/cjs/lexer/units.cjs generated vendored Normal file
View File

@ -0,0 +1,38 @@
'use strict';
const length = [
// absolute length units https://www.w3.org/TR/css-values-3/#lengths
'cm', 'mm', 'q', 'in', 'pt', 'pc', 'px',
// font-relative length units https://drafts.csswg.org/css-values-4/#font-relative-lengths
'em', 'rem',
'ex', 'rex',
'cap', 'rcap',
'ch', 'rch',
'ic', 'ric',
'lh', 'rlh',
// viewport-percentage lengths https://drafts.csswg.org/css-values-4/#viewport-relative-lengths
'vw', 'svw', 'lvw', 'dvw',
'vh', 'svh', 'lvh', 'dvh',
'vi', 'svi', 'lvi', 'dvi',
'vb', 'svb', 'lvb', 'dvb',
'vmin', 'svmin', 'lvmin', 'dvmin',
'vmax', 'svmax', 'lvmax', 'dvmax',
// container relative lengths https://drafts.csswg.org/css-contain-3/#container-lengths
'cqw', 'cqh', 'cqi', 'cqb', 'cqmin', 'cqmax'
];
const angle = ['deg', 'grad', 'rad', 'turn']; // https://www.w3.org/TR/css-values-3/#angles
const time = ['s', 'ms']; // https://www.w3.org/TR/css-values-3/#time
const frequency = ['hz', 'khz']; // https://www.w3.org/TR/css-values-3/#frequency
const resolution = ['dpi', 'dpcm', 'dppx', 'x']; // https://www.w3.org/TR/css-values-3/#resolution
const flex = ['fr']; // https://drafts.csswg.org/css-grid/#fr-unit
const decibel = ['db']; // https://www.w3.org/TR/css3-speech/#mixing-props-voice-volume
const semitones = ['st']; // https://www.w3.org/TR/css3-speech/#voice-props-voice-pitch
exports.angle = angle;
exports.decibel = decibel;
exports.flex = flex;
exports.frequency = frequency;
exports.length = length;
exports.resolution = resolution;
exports.semitones = semitones;
exports.time = time;

69
node_modules/css-tree/cjs/parser/SyntaxError.cjs generated vendored Normal file
View File

@ -0,0 +1,69 @@
'use strict';
const createCustomError = require('../utils/create-custom-error.cjs');
const MAX_LINE_LENGTH = 100;
const OFFSET_CORRECTION = 60;
const TAB_REPLACEMENT = ' ';
function sourceFragment({ source, line, column }, extraLines) {
function processLines(start, end) {
return lines
.slice(start, end)
.map((line, idx) =>
String(start + idx + 1).padStart(maxNumLength) + ' |' + line
).join('\n');
}
const lines = source.split(/\r\n?|\n|\f/);
const startLine = Math.max(1, line - extraLines) - 1;
const endLine = Math.min(line + extraLines, lines.length + 1);
const maxNumLength = Math.max(4, String(endLine).length) + 1;
let cutLeft = 0;
// column correction according to replaced tab before column
// adjust the column to account for tab characters replaced before it
column += (TAB_REPLACEMENT.length - 1) * (lines[line - 1].substr(0, column - 1).match(/\t/g) || []).length;
if (column > MAX_LINE_LENGTH) {
cutLeft = column - OFFSET_CORRECTION + 3;
column = OFFSET_CORRECTION - 2;
}
for (let i = startLine; i <= endLine; i++) {
if (i >= 0 && i < lines.length) {
lines[i] = lines[i].replace(/\t/g, TAB_REPLACEMENT);
lines[i] =
(cutLeft > 0 && lines[i].length > cutLeft ? '\u2026' : '') +
lines[i].substr(cutLeft, MAX_LINE_LENGTH - 2) +
(lines[i].length > cutLeft + MAX_LINE_LENGTH - 1 ? '\u2026' : '');
}
}
return [
processLines(startLine, line),
new Array(column + maxNumLength + 2).join('-') + '^',
processLines(line, endLine)
].filter(Boolean).join('\n');
}
function SyntaxError(message, source, offset, line, column) {
const error = Object.assign(createCustomError.createCustomError('SyntaxError', message), {
source,
offset,
line,
column,
sourceFragment(extraLines) {
return sourceFragment({ source, line, column }, isNaN(extraLines) ? 0 : extraLines);
},
get formattedMessage() {
return (
`Parse error: ${message}\n` +
sourceFragment({ source, line, column }, 2)
);
}
});
return error;
}
exports.SyntaxError = SyntaxError;
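// Usage sketch (hypothetical values): the parser raises this when consuming a token fails.
//
//   const err = SyntaxError('Semicolon is expected', 'a { color red }', 11, 1, 12);
//   err.formattedMessage
//   // 'Parse error: Semicolon is expected' plus a numbered source fragment with a
//   // `----^` pointer under the reported column (see sourceFragment() above)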

336
node_modules/css-tree/cjs/parser/create.cjs generated vendored Normal file
View File

@ -0,0 +1,336 @@
'use strict';
const List = require('../utils/List.cjs');
const SyntaxError = require('./SyntaxError.cjs');
const index = require('../tokenizer/index.cjs');
const sequence = require('./sequence.cjs');
const OffsetToLocation = require('../tokenizer/OffsetToLocation.cjs');
const TokenStream = require('../tokenizer/TokenStream.cjs');
const utils = require('../tokenizer/utils.cjs');
const types = require('../tokenizer/types.cjs');
const names = require('../tokenizer/names.cjs');
const NOOP = () => {};
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const SEMICOLON = 0x003B; // U+003B SEMICOLON (;)
const LEFTCURLYBRACKET = 0x007B; // U+007B LEFT CURLY BRACKET ({)
const NULL = 0;
function createParseContext(name) {
return function() {
return this[name]();
};
}
function fetchParseValues(dict) {
const result = Object.create(null);
for (const name in dict) {
const item = dict[name];
const fn = item.parse || item;
if (fn) {
result[name] = fn;
}
}
return result;
}
function processConfig(config) {
const parseConfig = {
context: Object.create(null),
scope: Object.assign(Object.create(null), config.scope),
atrule: fetchParseValues(config.atrule),
pseudo: fetchParseValues(config.pseudo),
node: fetchParseValues(config.node)
};
for (const name in config.parseContext) {
switch (typeof config.parseContext[name]) {
case 'function':
parseConfig.context[name] = config.parseContext[name];
break;
case 'string':
parseConfig.context[name] = createParseContext(config.parseContext[name]);
break;
}
}
return {
config: parseConfig,
...parseConfig,
...parseConfig.node
};
}
function createParser(config) {
let source = '';
let filename = '<unknown>';
let needPositions = false;
let onParseError = NOOP;
let onParseErrorThrow = false;
const locationMap = new OffsetToLocation.OffsetToLocation();
const parser = Object.assign(new TokenStream.TokenStream(), processConfig(config || {}), {
parseAtrulePrelude: true,
parseRulePrelude: true,
parseValue: true,
parseCustomProperty: false,
readSequence: sequence.readSequence,
consumeUntilBalanceEnd: () => 0,
consumeUntilLeftCurlyBracket(code) {
return code === LEFTCURLYBRACKET ? 1 : 0;
},
consumeUntilLeftCurlyBracketOrSemicolon(code) {
return code === LEFTCURLYBRACKET || code === SEMICOLON ? 1 : 0;
},
consumeUntilExclamationMarkOrSemicolon(code) {
return code === EXCLAMATIONMARK || code === SEMICOLON ? 1 : 0;
},
consumeUntilSemicolonIncluded(code) {
return code === SEMICOLON ? 2 : 0;
},
createList() {
return new List.List();
},
createSingleNodeList(node) {
return new List.List().appendData(node);
},
getFirstListNode(list) {
return list && list.first;
},
getLastListNode(list) {
return list && list.last;
},
parseWithFallback(consumer, fallback) {
const startToken = this.tokenIndex;
try {
return consumer.call(this);
} catch (e) {
if (onParseErrorThrow) {
throw e;
}
const fallbackNode = fallback.call(this, startToken);
onParseErrorThrow = true;
onParseError(e, fallbackNode);
onParseErrorThrow = false;
return fallbackNode;
}
},
lookupNonWSType(offset) {
let type;
do {
type = this.lookupType(offset++);
if (type !== types.WhiteSpace) {
return type;
}
} while (type !== NULL);
return NULL;
},
charCodeAt(offset) {
return offset >= 0 && offset < source.length ? source.charCodeAt(offset) : 0;
},
substring(offsetStart, offsetEnd) {
return source.substring(offsetStart, offsetEnd);
},
substrToCursor(start) {
return this.source.substring(start, this.tokenStart);
},
cmpChar(offset, charCode) {
return utils.cmpChar(source, offset, charCode);
},
cmpStr(offsetStart, offsetEnd, str) {
return utils.cmpStr(source, offsetStart, offsetEnd, str);
},
consume(tokenType) {
const start = this.tokenStart;
this.eat(tokenType);
return this.substrToCursor(start);
},
consumeFunctionName() {
const name = source.substring(this.tokenStart, this.tokenEnd - 1);
this.eat(types.Function);
return name;
},
consumeNumber(type) {
const number = source.substring(this.tokenStart, utils.consumeNumber(source, this.tokenStart));
this.eat(type);
return number;
},
eat(tokenType) {
if (this.tokenType !== tokenType) {
const tokenName = names[tokenType].slice(0, -6).replace(/-/g, ' ').replace(/^./, m => m.toUpperCase());
let message = `${/[[\](){}]/.test(tokenName) ? `"${tokenName}"` : tokenName} is expected`;
let offset = this.tokenStart;
// tweak message and offset
switch (tokenType) {
case types.Ident:
// when identifier is expected but there is a function or url
if (this.tokenType === types.Function || this.tokenType === types.Url) {
offset = this.tokenEnd - 1;
message = 'Identifier is expected but function found';
} else {
message = 'Identifier is expected';
}
break;
case types.Hash:
if (this.isDelim(NUMBERSIGN)) {
this.next();
offset++;
message = 'Name is expected';
}
break;
case types.Percentage:
if (this.tokenType === types.Number) {
offset = this.tokenEnd;
message = 'Percent sign is expected';
}
break;
}
this.error(message, offset);
}
this.next();
},
eatIdent(name) {
if (this.tokenType !== types.Ident || this.lookupValue(0, name) === false) {
this.error(`Identifier "${name}" is expected`);
}
this.next();
},
eatDelim(code) {
if (!this.isDelim(code)) {
this.error(`Delim "${String.fromCharCode(code)}" is expected`);
}
this.next();
},
getLocation(start, end) {
if (needPositions) {
return locationMap.getLocationRange(
start,
end,
filename
);
}
return null;
},
getLocationFromList(list) {
if (needPositions) {
const head = this.getFirstListNode(list);
const tail = this.getLastListNode(list);
return locationMap.getLocationRange(
head !== null ? head.loc.start.offset - locationMap.startOffset : this.tokenStart,
tail !== null ? tail.loc.end.offset - locationMap.startOffset : this.tokenStart,
filename
);
}
return null;
},
error(message, offset) {
const location = typeof offset !== 'undefined' && offset < source.length
? locationMap.getLocation(offset)
: this.eof
? locationMap.getLocation(utils.findWhiteSpaceStart(source, source.length - 1))
: locationMap.getLocation(this.tokenStart);
throw new SyntaxError.SyntaxError(
message || 'Unexpected input',
source,
location.offset,
location.line,
location.column
);
}
});
const parse = function(source_, options) {
source = source_;
options = options || {};
parser.setSource(source, index.tokenize);
locationMap.setSource(
source,
options.offset,
options.line,
options.column
);
filename = options.filename || '<unknown>';
needPositions = Boolean(options.positions);
onParseError = typeof options.onParseError === 'function' ? options.onParseError : NOOP;
onParseErrorThrow = false;
parser.parseAtrulePrelude = 'parseAtrulePrelude' in options ? Boolean(options.parseAtrulePrelude) : true;
parser.parseRulePrelude = 'parseRulePrelude' in options ? Boolean(options.parseRulePrelude) : true;
parser.parseValue = 'parseValue' in options ? Boolean(options.parseValue) : true;
parser.parseCustomProperty = 'parseCustomProperty' in options ? Boolean(options.parseCustomProperty) : false;
const { context = 'default', onComment } = options;
if (context in parser.context === false) {
throw new Error('Unknown context `' + context + '`');
}
if (typeof onComment === 'function') {
parser.forEachToken((type, start, end) => {
if (type === types.Comment) {
const loc = parser.getLocation(start, end);
const value = utils.cmpStr(source, end - 2, end, '*/')
? source.slice(start + 2, end - 2)
: source.slice(start + 2, end);
onComment(value, loc);
}
});
}
const ast = parser.context[context].call(parser, options);
if (!parser.eof) {
parser.error();
}
return ast;
};
return Object.assign(parse, {
SyntaxError: SyntaxError.SyntaxError,
config: parser.config
});
}
exports.createParser = createParser;
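A minimal usage sketch of the factory above (not part of the vendored file): it wires createParser to the default parser config and exercises the options read inside the parse closure (positions, filename, onParseError, onComment). The require paths and option values are assumptions based on the file layout in this diff.
// Hypothetical usage; paths and values are illustrative only.
const { createParser } = require('./create.cjs');
const parserConfig = require('../syntax/config/parser.cjs');

const parse = createParser(parserConfig);
const ast = parse('.a { color: red; }', {
    positions: true,         // fill node.loc via getLocation()
    filename: 'example.css', // reported in loc.source
    onParseError(error, fallbackNode) {
        // called instead of throwing; fallbackNode is whatever the fallback consumer produced
        console.error(error.message);
    },
    onComment(value, loc) {
        console.log('comment:', value, loc);
    }
});

console.log(ast.type); // 'StyleSheet', the default context in the parser config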

8
node_modules/css-tree/cjs/parser/index.cjs generated vendored Normal file
View File

@ -0,0 +1,8 @@
'use strict';
const create = require('./create.cjs');
const parser = require('../syntax/config/parser.cjs');
const index = create.createParser(parser);
module.exports = index;

8
node_modules/css-tree/cjs/parser/parse-selector.cjs generated vendored Normal file
View File

@ -0,0 +1,8 @@
'use strict';
const create = require('./create.cjs');
const parserSelector = require('../syntax/config/parser-selector.cjs');
const parseSelector = create.createParser(parserSelector);
module.exports = parseSelector;
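A short sketch of the selector-only parser defined above; per the parser-selector config later in this diff, its default context is 'SelectorList', so bare selector strings parse without a surrounding rule. The require path is an assumption.
// Hypothetical usage.
const parseSelector = require('./parse-selector.cjs');

const ast = parseSelector('a[href^="https"]:hover, .btn');
console.log(ast.type);                // 'SelectorList'
console.log(ast.children.first.type); // 'Selector'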

47
node_modules/css-tree/cjs/parser/sequence.cjs generated vendored Normal file
View File

@ -0,0 +1,47 @@
'use strict';
const types = require('../tokenizer/types.cjs');
function readSequence(recognizer) {
const children = this.createList();
let space = false;
const context = {
recognizer
};
while (!this.eof) {
switch (this.tokenType) {
case types.Comment:
this.next();
continue;
case types.WhiteSpace:
space = true;
this.next();
continue;
}
let child = recognizer.getNode.call(this, context);
if (child === undefined) {
break;
}
if (space) {
if (recognizer.onWhiteSpace) {
recognizer.onWhiteSpace.call(this, child, children, context);
}
space = false;
}
children.push(child);
}
if (space && recognizer.onWhiteSpace) {
recognizer.onWhiteSpace.call(this, null, children, context);
}
return children;
}
exports.readSequence = readSequence;

12
node_modules/css-tree/cjs/syntax/atrule/font-face.cjs generated vendored Normal file
View File

@ -0,0 +1,12 @@
'use strict';
const fontFace = {
parse: {
prelude: null,
block() {
return this.Block(true);
}
}
};
module.exports = fontFace;

37
node_modules/css-tree/cjs/syntax/atrule/import.cjs generated vendored Normal file
View File

@ -0,0 +1,37 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const importAtrule = {
parse: {
prelude() {
const children = this.createList();
this.skipSC();
switch (this.tokenType) {
case types.String:
children.push(this.String());
break;
case types.Url:
case types.Function:
children.push(this.Url());
break;
default:
this.error('String or url() is expected');
}
if (this.lookupNonWSType(0) === types.Ident ||
this.lookupNonWSType(0) === types.LeftParenthesis) {
children.push(this.MediaQueryList());
}
return children;
},
block: null
}
};
module.exports = importAtrule;
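The prelude consumer above accepts a string or url() and, when the next non-whitespace token is an identifier or an opening parenthesis, a trailing media query list. A sketch of the resulting nodes, assuming the default parser built elsewhere in this diff; the require path is an assumption.
// Hypothetical usage.
const parse = require('../../parser/index.cjs');

const atrule = parse('@import url("theme.css") screen and (min-width: 40em);').children.first;
// atrule.name === 'import'
// prelude children: a Url node followed by a MediaQueryList node
atrule.prelude.children.forEach(node => console.log(node.type));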

19
node_modules/css-tree/cjs/syntax/atrule/index.cjs generated vendored Normal file
View File

@ -0,0 +1,19 @@
'use strict';
const fontFace = require('./font-face.cjs');
const _import = require('./import.cjs');
const media = require('./media.cjs');
const nest = require('./nest.cjs');
const page = require('./page.cjs');
const supports = require('./supports.cjs');
const atrule = {
'font-face': fontFace,
'import': _import,
media,
nest,
page,
supports
};
module.exports = atrule;

16
node_modules/css-tree/cjs/syntax/atrule/media.cjs generated vendored Normal file
View File

@ -0,0 +1,16 @@
'use strict';
const media = {
parse: {
prelude() {
return this.createSingleNodeList(
this.MediaQueryList()
);
},
block(isStyleBlock = false) {
return this.Block(isStyleBlock);
}
}
};
module.exports = media;

16
node_modules/css-tree/cjs/syntax/atrule/nest.cjs generated vendored Normal file
View File

@ -0,0 +1,16 @@
'use strict';
const nest = {
parse: {
prelude() {
return this.createSingleNodeList(
this.SelectorList()
);
},
block() {
return this.Block(true);
}
}
};
module.exports = nest;

16
node_modules/css-tree/cjs/syntax/atrule/page.cjs generated vendored Normal file
View File

@ -0,0 +1,16 @@
'use strict';
const page = {
parse: {
prelude() {
return this.createSingleNodeList(
this.SelectorList()
);
},
block() {
return this.Block(true);
}
}
};
module.exports = page;

77
node_modules/css-tree/cjs/syntax/atrule/supports.cjs generated vendored Normal file
View File

@ -0,0 +1,77 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
function consumeRaw() {
return this.createSingleNodeList(
this.Raw(this.tokenIndex, null, false)
);
}
function parentheses() {
this.skipSC();
if (this.tokenType === types.Ident &&
this.lookupNonWSType(1) === types.Colon) {
return this.createSingleNodeList(
this.Declaration()
);
}
return readSequence.call(this);
}
function readSequence() {
const children = this.createList();
let child;
this.skipSC();
scan:
while (!this.eof) {
switch (this.tokenType) {
case types.Comment:
case types.WhiteSpace:
this.next();
continue;
case types.Function:
child = this.Function(consumeRaw, this.scope.AtrulePrelude);
break;
case types.Ident:
child = this.Identifier();
break;
case types.LeftParenthesis:
child = this.Parentheses(parentheses, this.scope.AtrulePrelude);
break;
default:
break scan;
}
children.push(child);
}
return children;
}
const supports = {
parse: {
prelude() {
const children = readSequence.call(this);
if (this.getFirstListNode(children) === null) {
this.error('Condition is expected');
}
return children;
},
block(isStyleBlock = false) {
return this.Block(isStyleBlock);
}
}
};
module.exports = supports;

View File

@ -0,0 +1,9 @@
'use strict';
const indexGenerate = require('../node/index-generate.cjs');
const config = {
node: indexGenerate
};
module.exports = config;

12
node_modules/css-tree/cjs/syntax/config/lexer.cjs generated vendored Normal file
View File

@ -0,0 +1,12 @@
'use strict';
const data = require('../../data.cjs');
const index = require('../node/index.cjs');
const lexerConfig = {
generic: true,
...data,
node: index
};
module.exports = lexerConfig;

119
node_modules/css-tree/cjs/syntax/config/mix.cjs generated vendored Normal file
View File

@ -0,0 +1,119 @@
'use strict';
function appendOrSet(a, b) {
if (typeof b === 'string' && /^\s*\|/.test(b)) {
return typeof a === 'string'
? a + b
: b.replace(/^\s*\|\s*/, '');
}
return b || null;
}
function sliceProps(obj, props) {
const result = Object.create(null);
for (const [key, value] of Object.entries(obj)) {
if (value) {
result[key] = {};
for (const prop of Object.keys(value)) {
if (props.includes(prop)) {
result[key][prop] = value[prop];
}
}
}
}
return result;
}
function mix(dest, src) {
const result = { ...dest };
for (const [prop, value] of Object.entries(src)) {
switch (prop) {
case 'generic':
result[prop] = Boolean(value);
break;
case 'units':
result[prop] = { ...dest[prop] };
for (const [name, patch] of Object.entries(value)) {
result[prop][name] = Array.isArray(patch) ? patch : [];
}
break;
case 'atrules':
result[prop] = { ...dest[prop] };
for (const [name, atrule] of Object.entries(value)) {
const exists = result[prop][name] || {};
const current = result[prop][name] = {
prelude: exists.prelude || null,
descriptors: {
...exists.descriptors
}
};
if (!atrule) {
continue;
}
current.prelude = atrule.prelude
? appendOrSet(current.prelude, atrule.prelude)
: current.prelude || null;
for (const [descriptorName, descriptorValue] of Object.entries(atrule.descriptors || {})) {
current.descriptors[descriptorName] = descriptorValue
? appendOrSet(current.descriptors[descriptorName], descriptorValue)
: null;
}
if (!Object.keys(current.descriptors).length) {
current.descriptors = null;
}
}
break;
case 'types':
case 'properties':
result[prop] = { ...dest[prop] };
for (const [name, syntax] of Object.entries(value)) {
result[prop][name] = appendOrSet(result[prop][name], syntax);
}
break;
case 'scope':
result[prop] = { ...dest[prop] };
for (const [name, props] of Object.entries(value)) {
result[prop][name] = { ...result[prop][name], ...props };
}
break;
case 'parseContext':
result[prop] = {
...dest[prop],
...value
};
break;
case 'atrule':
case 'pseudo':
result[prop] = {
...dest[prop],
...sliceProps(value, ['parse']) };
break;
case 'node':
result[prop] = {
...dest[prop],
...sliceProps(value, ['name', 'structure', 'parse', 'generate', 'walkContext'])
};
break;
}
}
return result;
}
module.exports = mix;
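mix() merges a base syntax config with a patch; per appendOrSet above, string values that start with "|" are appended to the existing grammar rather than replacing it. A small sketch with made-up property names:
// Hypothetical inputs, for illustration only.
const mix = require('./mix.cjs');

const base = { properties: { color: '<color>' }, types: {} };
const patched = mix(base, {
    properties: {
        color: ' | my-brand-color', // leading "|" appends to the existing syntax
        'brand-gap': '<length>'     // no "|", so the value is set as-is
    }
});

console.log(patched.properties.color);        // '<color> | my-brand-color'
console.log(patched.properties['brand-gap']); // '<length>'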

View File

@ -0,0 +1,19 @@
'use strict';
const index = require('../pseudo/index.cjs');
const indexParseSelector = require('../node/index-parse-selector.cjs');
const selector = require('../scope/selector.cjs');
const config = {
parseContext: {
default: 'SelectorList',
selectorList: 'SelectorList',
selector: 'Selector'
},
scope: { Selector: selector },
atrule: {},
pseudo: index,
node: indexParseSelector
};
module.exports = config;

34
node_modules/css-tree/cjs/syntax/config/parser.cjs generated vendored Normal file
View File

@ -0,0 +1,34 @@
'use strict';
const index = require('../scope/index.cjs');
const index$1 = require('../atrule/index.cjs');
const index$2 = require('../pseudo/index.cjs');
const indexParse = require('../node/index-parse.cjs');
const config = {
parseContext: {
default: 'StyleSheet',
stylesheet: 'StyleSheet',
atrule: 'Atrule',
atrulePrelude(options) {
return this.AtrulePrelude(options.atrule ? String(options.atrule) : null);
},
mediaQueryList: 'MediaQueryList',
mediaQuery: 'MediaQuery',
rule: 'Rule',
selectorList: 'SelectorList',
selector: 'Selector',
block() {
return this.Block(true);
},
declarationList: 'DeclarationList',
declaration: 'Declaration',
value: 'Value'
},
scope: index,
atrule: index$1,
pseudo: index$2,
node: indexParse
};
module.exports = config;

9
node_modules/css-tree/cjs/syntax/config/walker.cjs generated vendored Normal file
View File

@ -0,0 +1,9 @@
'use strict';
const index = require('../node/index.cjs');
const config = {
node: index
};
module.exports = config;

57
node_modules/css-tree/cjs/syntax/create.cjs generated vendored Normal file
View File

@ -0,0 +1,57 @@
'use strict';
const index = require('../tokenizer/index.cjs');
const create = require('../parser/create.cjs');
const create$2 = require('../generator/create.cjs');
const create$3 = require('../convertor/create.cjs');
const create$1 = require('../walker/create.cjs');
const Lexer = require('../lexer/Lexer.cjs');
const mix = require('./config/mix.cjs');
function createSyntax(config) {
const parse = create.createParser(config);
const walk = create$1.createWalker(config);
const generate = create$2.createGenerator(config);
const { fromPlainObject, toPlainObject } = create$3.createConvertor(walk);
const syntax = {
lexer: null,
createLexer: config => new Lexer.Lexer(config, syntax, syntax.lexer.structure),
tokenize: index.tokenize,
parse,
generate,
walk,
find: walk.find,
findLast: walk.findLast,
findAll: walk.findAll,
fromPlainObject,
toPlainObject,
fork(extension) {
const base = mix({}, config); // copy of config
return createSyntax(
typeof extension === 'function'
? extension(base, Object.assign)
: mix(base, extension)
);
}
};
syntax.lexer = new Lexer.Lexer({
generic: true,
units: config.units,
types: config.types,
atrules: config.atrules,
properties: config.properties,
node: config.node
}, syntax);
return syntax;
}
const createSyntax$1 = config => createSyntax(mix({}, config));
module.exports = createSyntax$1;
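fork() above copies the current config with mix() and builds a fresh syntax from the copy, so extensions never mutate the original instance. A sketch, assuming the bundled default syntax exported from ./index.cjs; the added property name is made up.
// Hypothetical usage.
const syntax = require('./index.cjs');

const forked = syntax.fork({
    properties: {
        '-x-demo': '<length> | <percentage>' // extend the lexer grammar in the fork only
    }
});

const ast = forked.parse('.a { -x-demo: 10px; }');
console.log(forked.generate(ast)); // roughly '.a{-x-demo:10px}'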

View File

@ -0,0 +1,11 @@
'use strict';
// legacy IE function
// expression( <any-value> )
function expressionFn() {
return this.createSingleNodeList(
this.Raw(this.tokenIndex, null, false)
);
}
module.exports = expressionFn;

43
node_modules/css-tree/cjs/syntax/function/var.cjs generated vendored Normal file
View File

@ -0,0 +1,43 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
// var( <ident> , <value>? )
function varFn() {
const children = this.createList();
this.skipSC();
// NOTE: only check that the first argument is an ident; the remaining checks are left to the lexer

children.push(this.Identifier());
this.skipSC();
if (this.tokenType === types.Comma) {
children.push(this.Operator());
const startIndex = this.tokenIndex;
const value = this.parseCustomProperty
? this.Value(null)
: this.Raw(this.tokenIndex, this.consumeUntilExclamationMarkOrSemicolon, false);
if (value.type === 'Value' && value.children.isEmpty) {
for (let offset = startIndex - this.tokenIndex; offset <= 0; offset++) {
if (this.lookupType(offset) === types.WhiteSpace) {
value.children.appendData({
type: 'WhiteSpace',
loc: null,
value: ' '
});
break;
}
}
}
children.push(value);
}
return children;
}
module.exports = varFn;
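Whether the fallback after the comma is parsed in detail or kept as a Raw node depends on the parseCustomProperty flag read above (it defaults to false in the parser factory). A sketch, assuming the default value scope routes var() to this consumer as in the stock config; the require path is an assumption.
// Hypothetical usage.
const parse = require('../../parser/index.cjs');

const css = 'color: var(--main, #fff)';
const coarse = parse(css, { context: 'declaration' });
const fine = parse(css, { context: 'declaration', parseCustomProperty: true });

// children of the var() Function node:
//   default (false):            Identifier, Operator, Raw
//   parseCustomProperty: true:  Identifier, Operator, Value
coarse.value.children.first.children.forEach(node => console.log(node.type));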

14
node_modules/css-tree/cjs/syntax/index.cjs generated vendored Normal file
View File

@ -0,0 +1,14 @@
'use strict';
const create = require('./create.cjs');
const lexer = require('./config/lexer.cjs');
const parser = require('./config/parser.cjs');
const walker = require('./config/walker.cjs');
const syntax = create({
...lexer,
...parser,
...walker
});
module.exports = syntax;

293
node_modules/css-tree/cjs/syntax/node/AnPlusB.cjs generated vendored Normal file
View File

@ -0,0 +1,293 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const charCodeDefinitions = require('../../tokenizer/char-code-definitions.cjs');
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
const DISALLOW_SIGN = true;
const ALLOW_SIGN = false;
function checkInteger(offset, disallowSign) {
let pos = this.tokenStart + offset;
const code = this.charCodeAt(pos);
if (code === PLUSSIGN || code === HYPHENMINUS) {
if (disallowSign) {
this.error('Number sign is not allowed');
}
pos++;
}
for (; pos < this.tokenEnd; pos++) {
if (!charCodeDefinitions.isDigit(this.charCodeAt(pos))) {
this.error('Integer is expected', pos);
}
}
}
function checkTokenIsInteger(disallowSign) {
return checkInteger.call(this, 0, disallowSign);
}
function expectCharCode(offset, code) {
if (!this.cmpChar(this.tokenStart + offset, code)) {
let msg = '';
switch (code) {
case N:
msg = 'N is expected';
break;
case HYPHENMINUS:
msg = 'HyphenMinus is expected';
break;
}
this.error(msg, this.tokenStart + offset);
}
}
// ... <signed-integer>
// ... ['+' | '-'] <signless-integer>
function consumeB() {
let offset = 0;
let sign = 0;
let type = this.tokenType;
while (type === types.WhiteSpace || type === types.Comment) {
type = this.lookupType(++offset);
}
if (type !== types.Number) {
if (this.isDelim(PLUSSIGN, offset) ||
this.isDelim(HYPHENMINUS, offset)) {
sign = this.isDelim(PLUSSIGN, offset) ? PLUSSIGN : HYPHENMINUS;
do {
type = this.lookupType(++offset);
} while (type === types.WhiteSpace || type === types.Comment);
if (type !== types.Number) {
this.skip(offset);
checkTokenIsInteger.call(this, DISALLOW_SIGN);
}
} else {
return null;
}
}
if (offset > 0) {
this.skip(offset);
}
if (sign === 0) {
type = this.charCodeAt(this.tokenStart);
if (type !== PLUSSIGN && type !== HYPHENMINUS) {
this.error('Number sign is expected');
}
}
checkTokenIsInteger.call(this, sign !== 0);
return sign === HYPHENMINUS ? '-' + this.consume(types.Number) : this.consume(types.Number);
}
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
const name = 'AnPlusB';
const structure = {
a: [String, null],
b: [String, null]
};
function parse() {
/* eslint-disable brace-style*/
const start = this.tokenStart;
let a = null;
let b = null;
// <integer>
if (this.tokenType === types.Number) {
checkTokenIsInteger.call(this, ALLOW_SIGN);
b = this.consume(types.Number);
}
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
// -n- <signless-integer>
// <dashndashdigit-ident>
else if (this.tokenType === types.Ident && this.cmpChar(this.tokenStart, HYPHENMINUS)) {
a = '-1';
expectCharCode.call(this, 1, N);
switch (this.tokenEnd - this.tokenStart) {
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
case 2:
this.next();
b = consumeB.call(this);
break;
// -n- <signless-integer>
case 3:
expectCharCode.call(this, 2, HYPHENMINUS);
this.next();
this.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(types.Number);
break;
// <dashndashdigit-ident>
default:
expectCharCode.call(this, 2, HYPHENMINUS);
checkInteger.call(this, 3, DISALLOW_SIGN);
this.next();
b = this.substrToCursor(start + 2);
}
}
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
// '+'? n- <signless-integer>
// '+'? <ndashdigit-ident>
else if (this.tokenType === types.Ident || (this.isDelim(PLUSSIGN) && this.lookupType(1) === types.Ident)) {
let sign = 0;
a = '1';
// just ignore a plus
if (this.isDelim(PLUSSIGN)) {
sign = 1;
this.next();
}
expectCharCode.call(this, 0, N);
switch (this.tokenEnd - this.tokenStart) {
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
case 1:
this.next();
b = consumeB.call(this);
break;
// '+'? n- <signless-integer>
case 2:
expectCharCode.call(this, 1, HYPHENMINUS);
this.next();
this.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(types.Number);
break;
// '+'? <ndashdigit-ident>
default:
expectCharCode.call(this, 1, HYPHENMINUS);
checkInteger.call(this, 2, DISALLOW_SIGN);
this.next();
b = this.substrToCursor(start + sign + 1);
}
}
// <ndashdigit-dimension>
// <ndash-dimension> <signless-integer>
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
else if (this.tokenType === types.Dimension) {
const code = this.charCodeAt(this.tokenStart);
const sign = code === PLUSSIGN || code === HYPHENMINUS;
let i = this.tokenStart + sign;
for (; i < this.tokenEnd; i++) {
if (!charCodeDefinitions.isDigit(this.charCodeAt(i))) {
break;
}
}
if (i === this.tokenStart + sign) {
this.error('Integer is expected', this.tokenStart + sign);
}
expectCharCode.call(this, i - this.tokenStart, N);
a = this.substring(start, i);
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
if (i + 1 === this.tokenEnd) {
this.next();
b = consumeB.call(this);
} else {
expectCharCode.call(this, i - this.tokenStart + 1, HYPHENMINUS);
// <ndash-dimension> <signless-integer>
if (i + 2 === this.tokenEnd) {
this.next();
this.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(types.Number);
}
// <ndashdigit-dimension>
else {
checkInteger.call(this, i - this.tokenStart + 2, DISALLOW_SIGN);
this.next();
b = this.substrToCursor(i + 1);
}
}
} else {
this.error();
}
if (a !== null && a.charCodeAt(0) === PLUSSIGN) {
a = a.substr(1);
}
if (b !== null && b.charCodeAt(0) === PLUSSIGN) {
b = b.substr(1);
}
return {
type: 'AnPlusB',
loc: this.getLocation(start, this.tokenStart),
a,
b
};
}
function generate(node) {
if (node.a) {
const a =
node.a === '+1' && 'n' ||
node.a === '1' && 'n' ||
node.a === '-1' && '-n' ||
node.a + 'n';
if (node.b) {
const b = node.b[0] === '-' || node.b[0] === '+'
? node.b
: '+' + node.b;
this.tokenize(a + b);
} else {
this.tokenize(a);
}
} else {
this.tokenize(node.b);
}
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
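The parser above normalizes the many surface forms of the An+B microsyntax into plain string a/b fields (a leading "+" is stripped, bare "n" becomes a: '1', "-n" becomes a: '-1'). A sketch of the resulting fields, assuming the selector parser from this diff and the stock wiring of :nth-child() preludes to Nth/AnPlusB; the require path is an assumption.
// Hypothetical usage.
const parseSelector = require('../../parser/parse-selector.cjs');

for (const selector of [':nth-child(2n+1)', ':nth-child(-n+3)', ':nth-child(7)']) {
    const pseudo = parseSelector(selector, { context: 'selector' }).children.first;
    const nth = pseudo.children.first; // Nth node
    console.log(selector, nth.nth.a, nth.nth.b);
}
// ':nth-child(2n+1)'  a: '2'   b: '1'
// ':nth-child(-n+3)'  a: '-1'  b: '3'
// ':nth-child(7)'     a: null  b: '7'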

103
node_modules/css-tree/cjs/syntax/node/Atrule.cjs generated vendored Normal file
View File

@ -0,0 +1,103 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
function consumeRaw(startToken) {
return this.Raw(startToken, this.consumeUntilLeftCurlyBracketOrSemicolon, true);
}
function isDeclarationBlockAtrule() {
for (let offset = 1, type; type = this.lookupType(offset); offset++) {
if (type === types.RightCurlyBracket) {
return true;
}
if (type === types.LeftCurlyBracket ||
type === types.AtKeyword) {
return false;
}
}
return false;
}
const name = 'Atrule';
const walkContext = 'atrule';
const structure = {
name: String,
prelude: ['AtrulePrelude', 'Raw', null],
block: ['Block', null]
};
function parse(isDeclaration = false) {
const start = this.tokenStart;
let name;
let nameLowerCase;
let prelude = null;
let block = null;
this.eat(types.AtKeyword);
name = this.substrToCursor(start + 1);
nameLowerCase = name.toLowerCase();
this.skipSC();
// parse prelude
if (this.eof === false &&
this.tokenType !== types.LeftCurlyBracket &&
this.tokenType !== types.Semicolon) {
if (this.parseAtrulePrelude) {
prelude = this.parseWithFallback(this.AtrulePrelude.bind(this, name, isDeclaration), consumeRaw);
} else {
prelude = consumeRaw.call(this, this.tokenIndex);
}
this.skipSC();
}
switch (this.tokenType) {
case types.Semicolon:
this.next();
break;
case types.LeftCurlyBracket:
if (hasOwnProperty.call(this.atrule, nameLowerCase) &&
typeof this.atrule[nameLowerCase].block === 'function') {
block = this.atrule[nameLowerCase].block.call(this, isDeclaration);
} else {
// TODO: should consume block content as Raw?
block = this.Block(isDeclarationBlockAtrule.call(this));
}
break;
}
return {
type: 'Atrule',
loc: this.getLocation(start, this.tokenStart),
name,
prelude,
block
};
}
function generate(node) {
this.token(types.AtKeyword, '@' + node.name);
if (node.prelude !== null) {
this.node(node.prelude);
}
if (node.block) {
this.node(node.block);
} else {
this.token(types.Semicolon, ';');
}
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;

View File

@ -0,0 +1,52 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'AtrulePrelude';
const walkContext = 'atrulePrelude';
const structure = {
children: [[]]
};
function parse(name) {
let children = null;
if (name !== null) {
name = name.toLowerCase();
}
this.skipSC();
if (hasOwnProperty.call(this.atrule, name) &&
typeof this.atrule[name].prelude === 'function') {
// custom consumer
children = this.atrule[name].prelude.call(this);
} else {
// default consumer
children = this.readSequence(this.scope.AtrulePrelude);
}
this.skipSC();
if (this.eof !== true &&
this.tokenType !== types.LeftCurlyBracket &&
this.tokenType !== types.Semicolon) {
this.error('Semicolon or block is expected');
}
return {
type: 'AtrulePrelude',
loc: this.getLocationFromList(children),
children
};
}
function generate(node) {
this.children(node);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;

View File

@ -0,0 +1,148 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const EQUALSSIGN = 0x003D; // U+003D EQUALS SIGN (=)
const CIRCUMFLEXACCENT = 0x005E; // U+005E (^)
const VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
const TILDE = 0x007E; // U+007E TILDE (~)
function getAttributeName() {
if (this.eof) {
this.error('Unexpected end of input');
}
const start = this.tokenStart;
let expectIdent = false;
if (this.isDelim(ASTERISK)) {
expectIdent = true;
this.next();
} else if (!this.isDelim(VERTICALLINE)) {
this.eat(types.Ident);
}
if (this.isDelim(VERTICALLINE)) {
if (this.charCodeAt(this.tokenStart + 1) !== EQUALSSIGN) {
this.next();
this.eat(types.Ident);
} else if (expectIdent) {
this.error('Identifier is expected', this.tokenEnd);
}
} else if (expectIdent) {
this.error('Vertical line is expected');
}
return {
type: 'Identifier',
loc: this.getLocation(start, this.tokenStart),
name: this.substrToCursor(start)
};
}
function getOperator() {
const start = this.tokenStart;
const code = this.charCodeAt(start);
if (code !== EQUALSSIGN && // =
code !== TILDE && // ~=
code !== CIRCUMFLEXACCENT && // ^=
code !== DOLLARSIGN && // $=
code !== ASTERISK && // *=
code !== VERTICALLINE // |=
) {
this.error('Attribute selector (=, ~=, ^=, $=, *=, |=) is expected');
}
this.next();
if (code !== EQUALSSIGN) {
if (!this.isDelim(EQUALSSIGN)) {
this.error('Equal sign is expected');
}
this.next();
}
return this.substrToCursor(start);
}
// '[' <wq-name> ']'
// '[' <wq-name> <attr-matcher> [ <string-token> | <ident-token> ] <attr-modifier>? ']'
const name = 'AttributeSelector';
const structure = {
name: 'Identifier',
matcher: [String, null],
value: ['String', 'Identifier', null],
flags: [String, null]
};
function parse() {
const start = this.tokenStart;
let name;
let matcher = null;
let value = null;
let flags = null;
this.eat(types.LeftSquareBracket);
this.skipSC();
name = getAttributeName.call(this);
this.skipSC();
if (this.tokenType !== types.RightSquareBracket) {
// avoid case `[name i]`
if (this.tokenType !== types.Ident) {
matcher = getOperator.call(this);
this.skipSC();
value = this.tokenType === types.String
? this.String()
: this.Identifier();
this.skipSC();
}
// attribute flags
if (this.tokenType === types.Ident) {
flags = this.consume(types.Ident);
this.skipSC();
}
}
this.eat(types.RightSquareBracket);
return {
type: 'AttributeSelector',
loc: this.getLocation(start, this.tokenStart),
name,
matcher,
value,
flags
};
}
function generate(node) {
this.token(types.Delim, '[');
this.node(node.name);
if (node.matcher !== null) {
this.tokenize(node.matcher);
this.node(node.value);
}
if (node.flags !== null) {
this.token(types.Ident, node.flags);
}
this.token(types.Delim, ']');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
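A sketch of how the pieces read above (name, optional matcher and value, trailing flags) land in the node, using the selector parser and the stock selector scope from this package; the require path is an assumption.
// Hypothetical usage.
const parseSelector = require('../../parser/parse-selector.cjs');

const attr = parseSelector('[data-href^="https" i]', { context: 'selector' }).children.first;
console.log(attr.type);       // 'AttributeSelector'
console.log(attr.name.name);  // 'data-href'
console.log(attr.matcher);    // '^='
console.log(attr.value.type); // 'String'
console.log(attr.flags);      // 'i'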

96
node_modules/css-tree/cjs/syntax/node/Block.cjs generated vendored Normal file
View File

@ -0,0 +1,96 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
function consumeRaw(startToken) {
return this.Raw(startToken, null, true);
}
function consumeRule() {
return this.parseWithFallback(this.Rule, consumeRaw);
}
function consumeRawDeclaration(startToken) {
return this.Raw(startToken, this.consumeUntilSemicolonIncluded, true);
}
function consumeDeclaration() {
if (this.tokenType === types.Semicolon) {
return consumeRawDeclaration.call(this, this.tokenIndex);
}
const node = this.parseWithFallback(this.Declaration, consumeRawDeclaration);
if (this.tokenType === types.Semicolon) {
this.next();
}
return node;
}
const name = 'Block';
const walkContext = 'block';
const structure = {
children: [[
'Atrule',
'Rule',
'Declaration'
]]
};
function parse(isStyleBlock) {
const consumer = isStyleBlock ? consumeDeclaration : consumeRule;
const start = this.tokenStart;
let children = this.createList();
this.eat(types.LeftCurlyBracket);
scan:
while (!this.eof) {
switch (this.tokenType) {
case types.RightCurlyBracket:
break scan;
case types.WhiteSpace:
case types.Comment:
this.next();
break;
case types.AtKeyword:
children.push(this.parseWithFallback(this.Atrule.bind(this, isStyleBlock), consumeRaw));
break;
default:
if (isStyleBlock && this.isDelim(AMPERSAND)) {
children.push(consumeRule.call(this));
} else {
children.push(consumer.call(this));
}
}
}
if (!this.eof) {
this.eat(types.RightCurlyBracket);
}
return {
type: 'Block',
loc: this.getLocation(start, this.tokenStart),
children
};
}
function generate(node) {
this.token(types.LeftCurlyBracket, '{');
this.children(node, prev => {
if (prev.type === 'Declaration') {
this.token(types.Semicolon, ';');
}
});
this.token(types.RightCurlyBracket, '}');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;
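When isStyleBlock is true the default branch above treats a leading "&" as the start of a nested rule instead of a declaration. A sketch, assuming rule blocks are parsed as style blocks as in the stock config; the require path is an assumption.
// Hypothetical usage.
const parse = require('../../parser/index.cjs');

const rule = parse('.a { color: red; &:hover { color: blue } }').children.first;
// the block's children mix Declaration nodes and, via the "&" branch, nested Rule nodes
rule.block.children.forEach(node => console.log(node.type)); // 'Declaration', 'Rule'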

38
node_modules/css-tree/cjs/syntax/node/Brackets.cjs generated vendored Normal file
View File

@ -0,0 +1,38 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'Brackets';
const structure = {
children: [[]]
};
function parse(readSequence, recognizer) {
const start = this.tokenStart;
let children = null;
this.eat(types.LeftSquareBracket);
children = readSequence.call(this, recognizer);
if (!this.eof) {
this.eat(types.RightSquareBracket);
}
return {
type: 'Brackets',
loc: this.getLocation(start, this.tokenStart),
children
};
}
function generate(node) {
this.token(types.Delim, '[');
this.children(node);
this.token(types.Delim, ']');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

26
node_modules/css-tree/cjs/syntax/node/CDC.cjs generated vendored Normal file
View File

@ -0,0 +1,26 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'CDC';
const structure = [];
function parse() {
const start = this.tokenStart;
this.eat(types.CDC); // -->
return {
type: 'CDC',
loc: this.getLocation(start, this.tokenStart)
};
}
function generate() {
this.token(types.CDC, '-->');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

26
node_modules/css-tree/cjs/syntax/node/CDO.cjs generated vendored Normal file
View File

@ -0,0 +1,26 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'CDO';
const structure = [];
function parse() {
const start = this.tokenStart;
this.eat(types.CDO); // <!--
return {
type: 'CDO',
loc: this.getLocation(start, this.tokenStart)
};
}
function generate() {
this.token(types.CDO, '<!--');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

View File

@ -0,0 +1,31 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
// '.' ident
const name = 'ClassSelector';
const structure = {
name: String
};
function parse() {
this.eatDelim(FULLSTOP);
return {
type: 'ClassSelector',
loc: this.getLocation(this.tokenStart - 1, this.tokenEnd),
name: this.consume(types.Ident)
};
}
function generate(node) {
this.token(types.Delim, '.');
this.token(types.Ident, node.name);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

61
node_modules/css-tree/cjs/syntax/node/Combinator.cjs generated vendored Normal file
View File

@ -0,0 +1,61 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const GREATERTHANSIGN = 0x003E; // U+003E GREATER-THAN SIGN (>)
const TILDE = 0x007E; // U+007E TILDE (~)
const name = 'Combinator';
const structure = {
name: String
};
// + | > | ~ | /deep/
function parse() {
const start = this.tokenStart;
let name;
switch (this.tokenType) {
case types.WhiteSpace:
name = ' ';
break;
case types.Delim:
switch (this.charCodeAt(this.tokenStart)) {
case GREATERTHANSIGN:
case PLUSSIGN:
case TILDE:
this.next();
break;
case SOLIDUS:
this.next();
this.eatIdent('deep');
this.eatDelim(SOLIDUS);
break;
default:
this.error('Combinator is expected');
}
name = this.substrToCursor(start);
break;
}
return {
type: 'Combinator',
loc: this.getLocation(start, this.tokenStart),
name
};
}
function generate(node) {
this.tokenize(node.name);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

40
node_modules/css-tree/cjs/syntax/node/Comment.cjs generated vendored Normal file
View File

@ -0,0 +1,40 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const name = 'Comment';
const structure = {
value: String
};
function parse() {
const start = this.tokenStart;
let end = this.tokenEnd;
this.eat(types.Comment);
if ((end - start + 2) >= 2 &&
this.charCodeAt(end - 2) === ASTERISK &&
this.charCodeAt(end - 1) === SOLIDUS) {
end -= 2;
}
return {
type: 'Comment',
loc: this.getLocation(start, this.tokenStart),
value: this.substring(start + 2, end)
};
}
function generate(node) {
this.token(types.Comment, '/*' + node.value + '*/');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

127
node_modules/css-tree/cjs/syntax/node/Condition.cjs generated vendored Normal file
View File

@ -0,0 +1,127 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const MediaFeatureToken = new Set([types.Colon, types.RightParenthesis, types.EOF]);
const SupportsFeatureToken = new Set([types.Colon, types.EOF]);
const name = 'Condition';
const structure = {
kind: String,
children: [[
'Identifier',
'Feature',
'FeatureRange'
]]
};
const conditions = {
media() {
if (this.tokenType === types.LeftParenthesis) {
const firstToken = this.lookupTypeNonSC(1);
if (firstToken === types.Ident && MediaFeatureToken.has(this.lookupTypeNonSC(2))) {
return this.Feature('media');
} else if (firstToken !== types.LeftParenthesis) {
return this.parseWithFallback(() => this.FeatureRange('media'), (startIndex) => {
this.skip(startIndex - this.tokenIndex);
});
}
}
},
supports() {
if (this.tokenType === types.LeftParenthesis) {
if (this.lookupTypeNonSC(1) === types.Ident && SupportsFeatureToken.has(this.lookupTypeNonSC(2))) {
return this.Declaration();
}
}
},
container() {
if (this.tokenType === types.LeftParenthesis) {
if (this.lookupTypeNonSC(1) === types.Ident && MediaFeatureToken.has(this.lookupTypeNonSC(2))) {
return this.Feature('size');
} else if (this.lookupTypeNonSC(1) !== types.LeftParenthesis) {
return this.FeatureRange('size');
}
}
}
};
function parse(kind = 'media') {
const children = this.createList();
const termParser = conditions[kind];
scan: while (!this.eof) {
switch (this.tokenType) {
case types.Comment:
case types.WhiteSpace:
this.next();
continue;
case types.Ident:
children.push(this.Identifier());
break;
case types.LeftParenthesis: {
let term = termParser.call(this);
if (!term) {
term = this.parseWithFallback(() => {
this.next();
const res = this.Condition(kind);
this.eat(types.RightParenthesis);
return res;
}, (startIndex) => {
this.skip(startIndex - this.tokenIndex);
return this.GeneralEnclosed();
});
}
children.push(term);
break;
}
case types.Function: {
let term = termParser.call(this);
if (!term) {
term = this.GeneralEnclosed();
}
children.push(term);
break;
}
default:
break scan;
}
}
if (children.isEmpty) {
this.error('Condition can\'t be empty');
}
return {
type: 'Condition',
loc: this.getLocationFromList(children),
kind,
children
};
}
function generate(node) {
node.children.forEach(child => {
if (child.type === 'Condition') {
this.token(types.LeftParenthesis, '(');
this.node(child);
this.token(types.RightParenthesis, ')');
} else {
this.node(child);
}
});
}
exports.conditions = conditions;
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

166
node_modules/css-tree/cjs/syntax/node/Declaration.cjs generated vendored Normal file
View File

@ -0,0 +1,166 @@
'use strict';
const names = require('../../utils/names.cjs');
const types = require('../../tokenizer/types.cjs');
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
function consumeValueRaw(startToken) {
return this.Raw(startToken, this.consumeUntilExclamationMarkOrSemicolon, true);
}
function consumeCustomPropertyRaw(startToken) {
return this.Raw(startToken, this.consumeUntilExclamationMarkOrSemicolon, false);
}
function consumeValue() {
const startValueToken = this.tokenIndex;
const value = this.Value();
if (value.type !== 'Raw' &&
this.eof === false &&
this.tokenType !== types.Semicolon &&
this.isDelim(EXCLAMATIONMARK) === false &&
this.isBalanceEdge(startValueToken) === false) {
this.error();
}
return value;
}
const name = 'Declaration';
const walkContext = 'declaration';
const structure = {
important: [Boolean, String],
property: String,
value: ['Value', 'Raw']
};
function parse() {
const start = this.tokenStart;
const startToken = this.tokenIndex;
const property = readProperty.call(this);
const customProperty = names.isCustomProperty(property);
const parseValue = customProperty ? this.parseCustomProperty : this.parseValue;
const consumeRaw = customProperty ? consumeCustomPropertyRaw : consumeValueRaw;
let important = false;
let value;
this.skipSC();
this.eat(types.Colon);
const valueStart = this.tokenIndex;
if (!customProperty) {
this.skipSC();
}
if (parseValue) {
value = this.parseWithFallback(consumeValue, consumeRaw);
} else {
value = consumeRaw.call(this, this.tokenIndex);
}
if (customProperty && value.type === 'Value' && value.children.isEmpty) {
for (let offset = valueStart - this.tokenIndex; offset <= 0; offset++) {
if (this.lookupType(offset) === types.WhiteSpace) {
value.children.appendData({
type: 'WhiteSpace',
loc: null,
value: ' '
});
break;
}
}
}
if (this.isDelim(EXCLAMATIONMARK)) {
important = getImportant.call(this);
this.skipSC();
}
// Do not include the semicolon in the range, per the spec
// https://drafts.csswg.org/css-syntax/#declaration-diagram
if (this.eof === false &&
this.tokenType !== types.Semicolon &&
this.isBalanceEdge(startToken) === false) {
this.error();
}
return {
type: 'Declaration',
loc: this.getLocation(start, this.tokenStart),
important,
property,
value
};
}
function generate(node) {
this.token(types.Ident, node.property);
this.token(types.Colon, ':');
this.node(node.value);
if (node.important) {
this.token(types.Delim, '!');
this.token(types.Ident, node.important === true ? 'important' : node.important);
}
}
function readProperty() {
const start = this.tokenStart;
// hacks
if (this.tokenType === types.Delim) {
switch (this.charCodeAt(this.tokenStart)) {
case ASTERISK:
case DOLLARSIGN:
case PLUSSIGN:
case NUMBERSIGN:
case AMPERSAND:
this.next();
break;
// TODO: not sure we should support this hack
case SOLIDUS:
this.next();
if (this.isDelim(SOLIDUS)) {
this.next();
}
break;
}
}
if (this.tokenType === types.Hash) {
this.eat(types.Hash);
} else {
this.eat(types.Ident);
}
return this.substrToCursor(start);
}
// ! ws* important
function getImportant() {
this.eat(types.Delim);
this.skipSC();
const important = this.consume(types.Ident);
// store the original value in case it differs from `important`
// for better original source restoring and hacks like `!ie` support
return important === 'important' ? true : important;
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;
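getImportant above keeps the original identifier when it is not literally "important", so hacks like "!ie" survive a parse/generate round trip. A sketch using the declaration context; the require path is an assumption.
// Hypothetical usage.
const parse = require('../../parser/index.cjs');

const decl = css => parse(css, { context: 'declaration' });

console.log(decl('color: red !important').important); // true
console.log(decl('color: red !ie').important);        // 'ie' (original ident preserved)
console.log(decl('color: red').important);            // false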

View File

@ -0,0 +1,62 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
function consumeRaw(startToken) {
return this.Raw(startToken, this.consumeUntilSemicolonIncluded, true);
}
const name = 'DeclarationList';
const structure = {
children: [[
'Declaration',
'Atrule',
'Rule'
]]
};
function parse() {
const children = this.createList();
while (!this.eof) {
switch (this.tokenType) {
case types.WhiteSpace:
case types.Comment:
case types.Semicolon:
this.next();
break;
case types.AtKeyword:
children.push(this.parseWithFallback(this.Atrule.bind(this, true), consumeRaw));
break;
default:
if (this.isDelim(AMPERSAND)) {
children.push(this.parseWithFallback(this.Rule, consumeRaw));
} else {
children.push(this.parseWithFallback(this.Declaration, consumeRaw));
}
}
}
return {
type: 'DeclarationList',
loc: this.getLocationFromList(children),
children
};
}
function generate(node) {
this.children(node, prev => {
if (prev.type === 'Declaration') {
this.token(types.Semicolon, ';');
}
});
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

30
node_modules/css-tree/cjs/syntax/node/Dimension.cjs generated vendored Normal file
View File

@ -0,0 +1,30 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'Dimension';
const structure = {
value: String,
unit: String
};
function parse() {
const start = this.tokenStart;
const value = this.consumeNumber(types.Dimension);
return {
type: 'Dimension',
loc: this.getLocation(start, this.tokenStart),
value,
unit: this.substring(start + value.length, this.tokenStart)
};
}
function generate(node) {
this.token(types.Dimension, node.value + node.unit);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

78
node_modules/css-tree/cjs/syntax/node/Feature.cjs generated vendored Normal file
View File

@ -0,0 +1,78 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'Feature';
const structure = {
kind: String,
name: String,
value: ['Identifier', 'Number', 'Dimension', 'Ratio', null]
};
function parse(kind = 'unknown') {
const start = this.tokenStart;
let name;
let value = null;
this.eat(types.LeftParenthesis);
this.skipSC();
name = this.consume(types.Ident);
this.skipSC();
if (this.tokenType !== types.RightParenthesis) {
this.eat(types.Colon);
this.skipSC();
switch (this.tokenType) {
case types.Number:
if (this.lookupNonWSType(1) === types.Delim) {
value = this.Ratio();
} else {
value = this.Number();
}
break;
case types.Dimension:
value = this.Dimension();
break;
case types.Ident:
value = this.Identifier();
break;
default:
this.error('Number, dimension, ratio or identifier is expected');
}
this.skipSC();
}
this.eat(types.RightParenthesis);
return {
type: 'Feature',
loc: this.getLocation(start, this.tokenStart),
kind,
name,
value
};
}
function generate(node) {
this.token(types.LeftParenthesis, '(');
this.token(types.Ident, node.name);
if (node.value !== null) {
this.token(types.Colon, ':');
this.node(node.value);
}
this.token(types.RightParenthesis, ')');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

114
node_modules/css-tree/cjs/syntax/node/FeatureRange.cjs generated vendored Normal file
View File

@ -0,0 +1,114 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const LESSTHANSIGN = 60; // <
const EQUALSIGN = 61; // =
const GREATERTHANSIGN = 62; // >
const name = 'FeatureRange';
const structure = {
kind: String,
left: ['Identifier', 'Number', 'Dimension', 'Ratio'],
leftComparison: String,
middle: ['Identifier', 'Number', 'Dimension', 'Ratio'],
rightComparison: [String, null],
right: ['Identifier', 'Number', 'Dimension', 'Ratio', null]
};
function readTerm() {
this.skipSC();
switch (this.tokenType) {
case types.Number:
if (this.lookupNonWSType(1) === types.Delim) {
return this.Ratio();
} else {
return this.Number();
}
case types.Dimension:
return this.Dimension();
case types.Ident:
return this.Identifier();
default:
this.error('Number, dimension, ratio or identifier is expected');
}
}
function readComparison(expectColon) {
this.skipSC();
if (this.isDelim(LESSTHANSIGN) ||
this.isDelim(GREATERTHANSIGN)) {
const value = this.source[this.tokenStart];
this.next();
if (this.isDelim(EQUALSIGN)) {
this.next();
return value + '=';
}
return value;
}
if (this.isDelim(EQUALSIGN)) {
// consume the "=" delim so the next term starts at the right token
this.next();
return '=';
}
this.error(`Expected ${expectColon ? '":", ' : ''}"<", ">", "=" or ")"`);
}
function parse(kind = 'unknown') {
const start = this.tokenStart;
this.skipSC();
this.eat(types.LeftParenthesis);
const left = readTerm.call(this);
const leftComparison = readComparison.call(this, left.type === 'Identifier');
const middle = readTerm.call(this);
let rightComparison = null;
let right = null;
if (this.lookupNonWSType(0) !== types.RightParenthesis) {
rightComparison = readComparison.call(this);
right = readTerm.call(this);
}
this.skipSC();
this.eat(types.RightParenthesis);
return {
type: 'FeatureRange',
loc: this.getLocation(start, this.tokenStart),
kind,
left,
leftComparison,
middle,
rightComparison,
right
};
}
function generate(node) {
this.token(types.LeftParenthesis, '(');
this.node(node.left);
this.tokenize(node.leftComparison);
this.node(node.middle);
if (node.right) {
this.tokenize(node.rightComparison);
this.node(node.right);
}
this.token(types.RightParenthesis, ')');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

45
node_modules/css-tree/cjs/syntax/node/Function.cjs generated vendored Normal file
View File

@ -0,0 +1,45 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'Function';
const walkContext = 'function';
const structure = {
name: String,
children: [[]]
};
// <function-token> <sequence> )
function parse(readSequence, recognizer) {
const start = this.tokenStart;
const name = this.consumeFunctionName();
const nameLowerCase = name.toLowerCase();
let children;
children = recognizer.hasOwnProperty(nameLowerCase)
? recognizer[nameLowerCase].call(this, recognizer)
: readSequence.call(this, recognizer);
if (!this.eof) {
this.eat(types.RightParenthesis);
}
return {
type: 'Function',
loc: this.getLocation(start, this.tokenStart),
name,
children
};
}
function generate(node) {
this.token(types.Function, node.name + '(');
this.children(node);
this.token(types.RightParenthesis, ')');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;

View File

@ -0,0 +1,51 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'GeneralEnclosed';
const structure = {
function: [String, null],
children: [[]]
};
// <function-token> <any-value> )
// ( <any-value> )
function parse() {
const start = this.tokenStart;
let functionName = null;
if (this.tokenType === types.Function) {
functionName = this.consumeFunctionName();
} else {
this.eat(types.LeftParenthesis);
}
const children = this.readSequence(this.scope.Value);
if (!this.eof) {
this.eat(types.RightParenthesis);
}
return {
type: 'GeneralEnclosed',
loc: this.getLocation(start, this.tokenStart),
function: functionName,
children
};
}
function generate(node) {
if (node.function) {
this.token(types.Function, node.function + '(');
} else {
this.token(types.LeftParenthesis, '(');
}
this.children(node);
this.token(types.RightParenthesis, ')');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

30
node_modules/css-tree/cjs/syntax/node/Hash.cjs generated vendored Normal file
View File

@ -0,0 +1,30 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
// '#' ident
const xxx = 'XXX';
const name = 'Hash';
const structure = {
value: String
};
function parse() {
const start = this.tokenStart;
this.eat(types.Hash);
return {
type: 'Hash',
loc: this.getLocation(start, this.tokenStart),
value: this.substrToCursor(start + 1)
};
}
function generate(node) {
this.token(types.Hash, '#' + node.value);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.xxx = xxx;

33
node_modules/css-tree/cjs/syntax/node/IdSelector.cjs generated vendored Normal file
View File

@ -0,0 +1,33 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'IdSelector';
const structure = {
name: String
};
function parse() {
const start = this.tokenStart;
// TODO: check value is an ident
this.eat(types.Hash);
return {
type: 'IdSelector',
loc: this.getLocation(start, this.tokenStart),
name: this.substrToCursor(start + 1)
};
}
function generate(node) {
// Using Delim instead of Hash is a hack to avoid emitting a whitespace between an ident and an id-selector
// in safe mode (e.g. "a#id"), because IE11 doesn't allow an <ident-token> <hash-token> sequence
// without a whitespace in values (e.g. "1px solid#000")
this.token(types.Delim, '#' + node.name);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

25
node_modules/css-tree/cjs/syntax/node/Identifier.cjs generated vendored Normal file
View File

@ -0,0 +1,25 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'Identifier';
const structure = {
name: String
};
function parse() {
return {
type: 'Identifier',
loc: this.getLocation(this.tokenStart, this.tokenEnd),
name: this.consume(types.Ident)
};
}
function generate(node) {
this.token(types.Ident, node.name);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

View File

@ -0,0 +1,70 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const MediaFeatureToken = new Set([types.Colon, types.RightParenthesis, types.EOF]);
const name = 'MediaCondition';
const structure = {
children: [[
'Identifier',
'MediaFeature',
'MediaFeatureRange'
]]
};
function parse() {
const children = this.createList();
scan: while (!this.eof) {
switch (this.tokenType) {
case types.Comment:
case types.WhiteSpace:
this.next();
continue;
case types.Ident:
children.push(this.Identifier());
break;
case types.LeftParenthesis:
if (this.lookupTypeNonSC(1) === types.Ident && MediaFeatureToken.has(this.lookupTypeNonSC(2))) {
children.push(this.MediaFeature());
} else if (this.lookupTypeNonSC(1) === types.LeftParenthesis || this.lookupTypeNonSC(2) === types.LeftParenthesis) {
this.next();
children.push(this.MediaCondition());
this.eat(types.RightParenthesis);
} else {
children.push(this.MediaFeatureRange());
}
break;
default:
break scan;
}
}
return {
type: 'MediaCondition',
loc: this.getLocationFromList(children),
children
};
}
function generate(node) {
node.children.forEach(child => {
if (child.type === 'MediaCondition') {
this.token(types.LeftParenthesis, '(');
this.node(child);
this.token(types.RightParenthesis, ')');
} else {
this.node(child);
}
});
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

76
node_modules/css-tree/cjs/syntax/node/MediaFeature.cjs generated vendored Normal file
View File

@ -0,0 +1,76 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'MediaFeature';
const structure = {
name: String,
value: ['Identifier', 'Number', 'Dimension', 'Ratio', null]
};
function parse() {
const start = this.tokenStart;
let name;
let value = null;
this.eat(types.LeftParenthesis);
this.skipSC();
name = this.consume(types.Ident);
this.skipSC();
if (this.tokenType !== types.RightParenthesis) {
this.eat(types.Colon);
this.skipSC();
switch (this.tokenType) {
case types.Number:
if (this.lookupNonWSType(1) === types.Delim) {
value = this.Ratio();
} else {
value = this.Number();
}
break;
case types.Dimension:
value = this.Dimension();
break;
case types.Ident:
value = this.Identifier();
break;
default:
this.error('Number, dimension, ratio or identifier is expected');
}
this.skipSC();
}
this.eat(types.RightParenthesis);
return {
type: 'MediaFeature',
loc: this.getLocation(start, this.tokenStart),
name,
value
};
}
function generate(node) {
this.token(types.LeftParenthesis, '(');
this.token(types.Ident, node.name);
if (node.value !== null) {
this.token(types.Colon, ':');
this.node(node.value);
}
this.token(types.RightParenthesis, ')');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

View File

@ -0,0 +1,11 @@
'use strict';
const featureRange = require('./common/feature-range.cjs');
const name = 'MediaFeatureRange';
const parse = featureRange.createParse(name);
exports.generate = featureRange.generate;
exports.structure = featureRange.structure;
exports.name = name;
exports.parse = parse;

61
node_modules/css-tree/cjs/syntax/node/MediaQuery.cjs generated vendored Normal file
View File

@ -0,0 +1,61 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'MediaQuery';
const structure = {
children: [[
'Identifier',
'MediaFeature',
'WhiteSpace'
]]
};
function parse() {
const children = this.createList();
let child = null;
this.skipSC();
scan:
while (!this.eof) {
switch (this.tokenType) {
case types.Comment:
case types.WhiteSpace:
this.next();
continue;
case types.Ident:
child = this.Identifier();
break;
case types.LeftParenthesis:
child = this.MediaFeature();
break;
default:
break scan;
}
children.push(child);
}
if (child === null) {
this.error('Identifier or parenthesis is expected');
}
return {
type: 'MediaQuery',
loc: this.getLocationFromList(children),
children
};
}
function generate(node) {
this.children(node);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

View File

@ -0,0 +1,41 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'MediaQueryList';
const structure = {
children: [[
'MediaQuery'
]]
};
function parse() {
const children = this.createList();
this.skipSC();
while (!this.eof) {
children.push(this.MediaQuery());
if (this.tokenType !== types.Comma) {
break;
}
this.next();
}
return {
type: 'MediaQueryList',
loc: this.getLocationFromList(children),
children
};
}
function generate(node) {
this.children(node, () => this.token(types.Comma, ','));
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

View File

@ -0,0 +1,29 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
const name = 'NestingSelector';
const structure = {
};
function parse() {
const start = this.tokenStart;
this.eatDelim(AMPERSAND);
return {
type: 'NestingSelector',
loc: this.getLocation(start, this.tokenStart)
};
}
function generate() {
this.token(types.Delim, '&');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

54
node_modules/css-tree/cjs/syntax/node/Nth.cjs generated vendored Normal file
View File

@ -0,0 +1,54 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'Nth';
const structure = {
nth: ['AnPlusB', 'Identifier'],
selector: ['SelectorList', null]
};
function parse() {
this.skipSC();
const start = this.tokenStart;
let end = start;
let selector = null;
let nth;
if (this.lookupValue(0, 'odd') || this.lookupValue(0, 'even')) {
nth = this.Identifier();
} else {
nth = this.AnPlusB();
}
end = this.tokenStart;
this.skipSC();
if (this.lookupValue(0, 'of')) {
this.next();
selector = this.SelectorList();
end = this.tokenStart;
}
return {
type: 'Nth',
loc: this.getLocation(start, end),
nth,
selector
};
}
function generate(node) {
this.node(node.nth);
if (node.selector !== null) {
this.token(types.Ident, 'of');
this.node(node.selector);
}
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
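
A hedged sketch of the `of <selector>` clause handled above, assuming the stock pseudo-class config where :nth-child parses its argument with this Nth node:

const csstree = require('css-tree');

const selector = csstree.parse('li:nth-child(2n+1 of .visible)', { context: 'selector' });

csstree.walk(selector, {
    visit: 'Nth',
    enter(node) {
        console.log(node.nth.type);      // 'AnPlusB'
        console.log(node.selector.type); // 'SelectorList' (the "of .visible" part)
    }
});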

25
node_modules/css-tree/cjs/syntax/node/Number.cjs generated vendored Normal file
View File

@ -0,0 +1,25 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'Number';
const structure = {
value: String
};
function parse() {
return {
type: 'Number',
loc: this.getLocation(this.tokenStart, this.tokenEnd),
value: this.consume(types.Number)
};
}
function generate(node) {
this.token(types.Number, node.value);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

28
node_modules/css-tree/cjs/syntax/node/Operator.cjs generated vendored Normal file
View File

@ -0,0 +1,28 @@
'use strict';
// '/' | '*' | ',' | ':' | '+' | '-'
const name = 'Operator';
const structure = {
value: String
};
function parse() {
const start = this.tokenStart;
this.next();
return {
type: 'Operator',
loc: this.getLocation(start, this.tokenStart),
value: this.substrToCursor(start)
};
}
function generate(node) {
this.tokenize(node.value);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
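
A small sketch showing where Operator nodes typically appear in a parsed value (the exact node layout inside calc() is an expectation, not guaranteed by this file alone):

const csstree = require('css-tree');

const value = csstree.parse('calc(100% - 20px), 5px', { context: 'value' });

csstree.walk(value, {
    visit: 'Operator',
    enter(node) {
        console.log(JSON.stringify(node.value)); // e.g. "-" (inside calc), then ","
    }
});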

38
node_modules/css-tree/cjs/syntax/node/Parentheses.cjs generated vendored Normal file
View File

@ -0,0 +1,38 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'Parentheses';
const structure = {
children: [[]]
};
function parse(readSequence, recognizer) {
const start = this.tokenStart;
let children = null;
this.eat(types.LeftParenthesis);
children = readSequence.call(this, recognizer);
if (!this.eof) {
this.eat(types.RightParenthesis);
}
return {
type: 'Parentheses',
loc: this.getLocation(start, this.tokenStart),
children
};
}
function generate(node) {
this.token(types.LeftParenthesis, '(');
this.children(node);
this.token(types.RightParenthesis, ')');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

25
node_modules/css-tree/cjs/syntax/node/Percentage.cjs generated vendored Normal file
View File

@ -0,0 +1,25 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'Percentage';
const structure = {
value: String
};
function parse() {
return {
type: 'Percentage',
loc: this.getLocation(this.tokenStart, this.tokenEnd),
value: this.consumeNumber(types.Percentage)
};
}
function generate(node) {
this.token(types.Percentage, node.value + '%');
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

65
node_modules/css-tree/cjs/syntax/node/PseudoClassSelector.cjs generated vendored Normal file
View File

@ -0,0 +1,65 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'PseudoClassSelector';
const walkContext = 'function';
const structure = {
name: String,
children: [['Raw'], null]
};
// : [ <ident> | <function-token> <any-value>? ) ]
function parse() {
const start = this.tokenStart;
let children = null;
let name;
let nameLowerCase;
this.eat(types.Colon);
if (this.tokenType === types.Function) {
name = this.consumeFunctionName();
nameLowerCase = name.toLowerCase();
if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
this.skipSC();
children = this.pseudo[nameLowerCase].call(this);
this.skipSC();
} else {
children = this.createList();
children.push(
this.Raw(this.tokenIndex, null, false)
);
}
this.eat(types.RightParenthesis);
} else {
name = this.consume(types.Ident);
}
return {
type: 'PseudoClassSelector',
loc: this.getLocation(start, this.tokenStart),
name,
children
};
}
function generate(node) {
this.token(types.Colon, ':');
if (node.children === null) {
this.token(types.Ident, node.name);
} else {
this.token(types.Function, node.name + '(');
this.children(node);
this.token(types.RightParenthesis, ')');
}
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;
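
A sketch of the two branches in parse() above: a functional pseudo known to this.pseudo gets fully parsed children, while anything else is preserved as a single Raw child (':foo-bar' is a made-up, unregistered pseudo used only for illustration):

const csstree = require('css-tree');

const known = csstree.parse(':not(.a, .b)', { context: 'selector' });
const unknown = csstree.parse(':foo-bar(anything 123)', { context: 'selector' });

// helper: first item of a css-tree List
const firstChild = node => node.children.toArray()[0];

console.log(firstChild(firstChild(known)).type);   // 'SelectorList'
console.log(firstChild(firstChild(unknown)).type); // 'Raw'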

67
node_modules/css-tree/cjs/syntax/node/PseudoElementSelector.cjs generated vendored Normal file
View File

@ -0,0 +1,67 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'PseudoElementSelector';
const walkContext = 'function';
const structure = {
name: String,
children: [['Raw'], null]
};
// :: [ <ident> | <function-token> <any-value>? ) ]
function parse() {
const start = this.tokenStart;
let children = null;
let name;
let nameLowerCase;
this.eat(types.Colon);
this.eat(types.Colon);
if (this.tokenType === types.Function) {
name = this.consumeFunctionName();
nameLowerCase = name.toLowerCase();
if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
this.skipSC();
children = this.pseudo[nameLowerCase].call(this);
this.skipSC();
} else {
children = this.createList();
children.push(
this.Raw(this.tokenIndex, null, false)
);
}
this.eat(types.RightParenthesis);
} else {
name = this.consume(types.Ident);
}
return {
type: 'PseudoElementSelector',
loc: this.getLocation(start, this.tokenStart),
name,
children
};
}
function generate(node) {
this.token(types.Colon, ':');
this.token(types.Colon, ':');
if (node.children === null) {
this.token(types.Ident, node.name);
} else {
this.token(types.Function, node.name + '(');
this.children(node);
this.token(types.RightParenthesis, ')');
}
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;

67
node_modules/css-tree/cjs/syntax/node/Ratio.cjs generated vendored Normal file
View File

@ -0,0 +1,67 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const charCodeDefinitions = require('../../tokenizer/char-code-definitions.cjs');
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
// Terms of <ratio> should be positive numbers (not zero or negative)
// (see https://drafts.csswg.org/mediaqueries-3/#values)
// However, -o-min-device-pixel-ratio takes fractional values as a ratio term,
// and this is used by various sites. Therefore the check is relaxed on parse
// to only test that a term is an unsigned number without an exponent part.
// Additional checking may be applied during lexer validation.
function consumeNumber() {
this.skipSC();
const value = this.consume(types.Number);
for (let i = 0; i < value.length; i++) {
const code = value.charCodeAt(i);
if (!charCodeDefinitions.isDigit(code) && code !== FULLSTOP) {
this.error('Unsigned number is expected', this.tokenStart - value.length + i);
}
}
if (Number(value) === 0) {
this.error('Zero number is not allowed', this.tokenStart - value.length);
}
return value;
}
const name = 'Ratio';
const structure = {
left: String,
right: String
};
// <positive-integer> S* '/' S* <positive-integer>
function parse() {
const start = this.tokenStart;
const left = consumeNumber.call(this);
let right;
this.skipSC();
this.eatDelim(SOLIDUS);
right = consumeNumber.call(this);
return {
type: 'Ratio',
loc: this.getLocation(start, this.tokenStart),
left,
right
};
}
function generate(node) {
this.token(types.Number, node.left);
this.token(types.Delim, '/');
this.token(types.Number, node.right);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
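
A minimal sketch of where Ratio shows up in practice, via the MediaFeature parser earlier in this diff:

const csstree = require('css-tree');

const ast = csstree.parse('(min-aspect-ratio: 16/9)', { context: 'mediaQueryList' });

csstree.walk(ast, {
    visit: 'Ratio',
    enter(node) {
        console.log(node.left, node.right); // '16' '9' (both kept as strings)
    }
});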

48
node_modules/css-tree/cjs/syntax/node/Raw.cjs generated vendored Normal file
View File

@ -0,0 +1,48 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
function getOffsetExcludeWS() {
if (this.tokenIndex > 0) {
if (this.lookupType(-1) === types.WhiteSpace) {
return this.tokenIndex > 1
? this.getTokenStart(this.tokenIndex - 1)
: this.firstCharOffset;
}
}
return this.tokenStart;
}
const name = 'Raw';
const structure = {
value: String
};
function parse(startToken, consumeUntil, excludeWhiteSpace) {
const startOffset = this.getTokenStart(startToken);
let endOffset;
this.skipUntilBalanced(startToken, consumeUntil || this.consumeUntilBalanceEnd);
if (excludeWhiteSpace && this.tokenStart > startOffset) {
endOffset = getOffsetExcludeWS.call(this);
} else {
endOffset = this.tokenStart;
}
return {
type: 'Raw',
loc: this.getLocation(startOffset, endOffset),
value: this.substring(startOffset, endOffset)
};
}
function generate(node) {
this.tokenize(node.value);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
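
A sketch of the most common way Raw nodes are produced: lowering the parsing detail level so declaration values are captured as raw text instead of detailed value nodes (parseValue is one of the documented parser options):

const csstree = require('css-tree');

const ast = csstree.parse('.btn { background: url(x.png) no-repeat 0 0 }', {
    parseValue: false
});

csstree.walk(ast, {
    visit: 'Declaration',
    enter(node) {
        console.log(node.value.type, JSON.stringify(node.value.value));
        // roughly: Raw "url(x.png) no-repeat 0 0"
    }
});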

58
node_modules/css-tree/cjs/syntax/node/Rule.cjs generated vendored Normal file
View File

@ -0,0 +1,58 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
function consumeRaw(startToken) {
return this.Raw(startToken, this.consumeUntilLeftCurlyBracket, true);
}
function consumePrelude() {
const prelude = this.SelectorList();
if (prelude.type !== 'Raw' &&
this.eof === false &&
this.tokenType !== types.LeftCurlyBracket) {
this.error();
}
return prelude;
}
const name = 'Rule';
const walkContext = 'rule';
const structure = {
prelude: ['SelectorList', 'Raw'],
block: ['Block']
};
function parse() {
const startToken = this.tokenIndex;
const startOffset = this.tokenStart;
let prelude;
let block;
if (this.parseRulePrelude) {
prelude = this.parseWithFallback(consumePrelude, consumeRaw);
} else {
prelude = consumeRaw.call(this, startToken);
}
block = this.Block(true);
return {
type: 'Rule',
loc: this.getLocation(startOffset, this.tokenStart),
prelude,
block
};
}
function generate(node) {
this.node(node.prelude);
this.node(node.block);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;
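
A sketch of the parseRulePrelude switch in parse() above: with the option disabled, the prelude branch always takes consumeRaw(), so selectors stay as raw text:

const csstree = require('css-tree');

const ast = csstree.parse('a:hover > .x { color: red }', { parseRulePrelude: false });

csstree.walk(ast, {
    visit: 'Rule',
    enter(node) {
        console.log(node.prelude.type, JSON.stringify(node.prelude.value));
        // Raw "a:hover > .x"
    }
});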

39
node_modules/css-tree/cjs/syntax/node/Selector.cjs generated vendored Normal file
View File

@ -0,0 +1,39 @@
'use strict';
const name = 'Selector';
const structure = {
children: [[
'TypeSelector',
'IdSelector',
'ClassSelector',
'AttributeSelector',
'PseudoClassSelector',
'PseudoElementSelector',
'Combinator',
'WhiteSpace'
]]
};
function parse() {
const children = this.readSequence(this.scope.Selector);
// nothing was consumed
if (this.getFirstListNode(children) === null) {
this.error('Selector is expected');
}
return {
type: 'Selector',
loc: this.getLocationFromList(children),
children
};
}
function generate(node) {
this.children(node);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

43
node_modules/css-tree/cjs/syntax/node/SelectorList.cjs generated vendored Normal file
View File

@ -0,0 +1,43 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const name = 'SelectorList';
const walkContext = 'selector';
const structure = {
children: [[
'Selector',
'Raw'
]]
};
function parse() {
const children = this.createList();
while (!this.eof) {
children.push(this.Selector());
if (this.tokenType === types.Comma) {
this.next();
continue;
}
break;
}
return {
type: 'SelectorList',
loc: this.getLocationFromList(children),
children
};
}
function generate(node) {
this.children(node, () => this.token(types.Comma, ','));
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;
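
A minimal sketch of round-tripping a selector list through this node:

const csstree = require('css-tree');

const list = csstree.parse('.a, .b > .c', { context: 'selectorList' });

console.log(list.children.toArray().map(s => s.type)); // [ 'Selector', 'Selector' ]
console.log(csstree.generate(list));                   // roughly '.a,.b>.c' (the generator emits compact CSS)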

26
node_modules/css-tree/cjs/syntax/node/String.cjs generated vendored Normal file
View File

@ -0,0 +1,26 @@
'use strict';
const string = require('../../utils/string.cjs');
const types = require('../../tokenizer/types.cjs');
const name = 'String';
const structure = {
value: String
};
function parse() {
return {
type: 'String',
loc: this.getLocation(this.tokenStart, this.tokenEnd),
value: string.decode(this.consume(types.String))
};
}
function generate(node) {
this.token(types.String, string.encode(node.value));
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

83
node_modules/css-tree/cjs/syntax/node/StyleSheet.cjs generated vendored Normal file
View File

@ -0,0 +1,83 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
function consumeRaw(startToken) {
return this.Raw(startToken, null, false);
}
const name = 'StyleSheet';
const walkContext = 'stylesheet';
const structure = {
children: [[
'Comment',
'CDO',
'CDC',
'Atrule',
'Rule',
'Raw'
]]
};
function parse() {
const start = this.tokenStart;
const children = this.createList();
let child;
while (!this.eof) {
switch (this.tokenType) {
case types.WhiteSpace:
this.next();
continue;
case types.Comment:
// ignore comments except exclamation comments (i.e. /*! .. */) at the top level
if (this.charCodeAt(this.tokenStart + 2) !== EXCLAMATIONMARK) {
this.next();
continue;
}
child = this.Comment();
break;
case types.CDO: // <!--
child = this.CDO();
break;
case types.CDC: // -->
child = this.CDC();
break;
// CSS Syntax Module Level 3
// §2.2 Error handling
// At the "top level" of a stylesheet, an <at-keyword-token> starts an at-rule.
case types.AtKeyword:
child = this.parseWithFallback(this.Atrule, consumeRaw);
break;
// Anything else starts a qualified rule ...
default:
child = this.parseWithFallback(this.Rule, consumeRaw);
}
children.push(child);
}
return {
type: 'StyleSheet',
loc: this.getLocation(start, this.tokenStart),
children
};
}
function generate(node) {
this.children(node);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
exports.walkContext = walkContext;
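
A sketch of the top-level comment handling above: only exclamation comments (/*! ... */) survive as Comment nodes:

const csstree = require('css-tree');

const ast = csstree.parse('/*! license */ /* note */ a { color: red }');

console.log(ast.children.toArray().map(node => node.type));
// [ 'Comment', 'Rule' ] -- the plain comment is skipped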

59
node_modules/css-tree/cjs/syntax/node/TypeSelector.cjs generated vendored Normal file
View File

@ -0,0 +1,59 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
function eatIdentifierOrAsterisk() {
if (this.tokenType !== types.Ident &&
this.isDelim(ASTERISK) === false) {
this.error('Identifier or asterisk is expected');
}
this.next();
}
const name = 'TypeSelector';
const structure = {
name: String
};
// ident
// ident|ident
// ident|*
// *
// *|ident
// *|*
// |ident
// |*
function parse() {
const start = this.tokenStart;
if (this.isDelim(VERTICALLINE)) {
this.next();
eatIdentifierOrAsterisk.call(this);
} else {
eatIdentifierOrAsterisk.call(this);
if (this.isDelim(VERTICALLINE)) {
this.next();
eatIdentifierOrAsterisk.call(this);
}
}
return {
type: 'TypeSelector',
loc: this.getLocation(start, this.tokenStart),
name: this.substrToCursor(start)
};
}
function generate(node) {
this.tokenize(node.name);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

158
node_modules/css-tree/cjs/syntax/node/UnicodeRange.cjs generated vendored Normal file
View File

@ -0,0 +1,158 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const charCodeDefinitions = require('../../tokenizer/char-code-definitions.cjs');
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const QUESTIONMARK = 0x003F; // U+003F QUESTION MARK (?)
function eatHexSequence(offset, allowDash) {
let len = 0;
for (let pos = this.tokenStart + offset; pos < this.tokenEnd; pos++) {
const code = this.charCodeAt(pos);
if (code === HYPHENMINUS && allowDash && len !== 0) {
eatHexSequence.call(this, offset + len + 1, false);
return -1;
}
if (!charCodeDefinitions.isHexDigit(code)) {
this.error(
allowDash && len !== 0
? 'Hyphen minus' + (len < 6 ? ' or hex digit' : '') + ' is expected'
: (len < 6 ? 'Hex digit is expected' : 'Unexpected input'),
pos
);
}
if (++len > 6) {
this.error('Too many hex digits', pos);
}
}
this.next();
return len;
}
function eatQuestionMarkSequence(max) {
let count = 0;
while (this.isDelim(QUESTIONMARK)) {
if (++count > max) {
this.error('Too many question marks');
}
this.next();
}
}
function startsWith(code) {
if (this.charCodeAt(this.tokenStart) !== code) {
this.error((code === PLUSSIGN ? 'Plus sign' : 'Hyphen minus') + ' is expected');
}
}
// https://drafts.csswg.org/css-syntax/#urange
// Informally, the <urange> production has three forms:
// U+0001
// Defines a range consisting of a single code point, in this case the code point "1".
// U+0001-00ff
// Defines a range of codepoints between the first and the second value, in this case
// the range between "1" and "ff" (255 in decimal) inclusive.
// U+00??
// Defines a range of codepoints where the "?" characters range over all hex digits,
// in this case defining the same as the value U+0000-00ff.
// In each form, a maximum of 6 digits is allowed for each hexadecimal number (if you treat "?" as a hexadecimal digit).
//
// <urange> =
// u '+' <ident-token> '?'* |
// u <dimension-token> '?'* |
// u <number-token> '?'* |
// u <number-token> <dimension-token> |
// u <number-token> <number-token> |
// u '+' '?'+
function scanUnicodeRange() {
let hexLength = 0;
switch (this.tokenType) {
case types.Number:
// u <number-token> '?'*
// u <number-token> <dimension-token>
// u <number-token> <number-token>
hexLength = eatHexSequence.call(this, 1, true);
if (this.isDelim(QUESTIONMARK)) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
break;
}
if (this.tokenType === types.Dimension ||
this.tokenType === types.Number) {
startsWith.call(this, HYPHENMINUS);
eatHexSequence.call(this, 1, false);
break;
}
break;
case types.Dimension:
// u <dimension-token> '?'*
hexLength = eatHexSequence.call(this, 1, true);
if (hexLength > 0) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
}
break;
default:
// u '+' <ident-token> '?'*
// u '+' '?'+
this.eatDelim(PLUSSIGN);
if (this.tokenType === types.Ident) {
hexLength = eatHexSequence.call(this, 0, true);
if (hexLength > 0) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
}
break;
}
if (this.isDelim(QUESTIONMARK)) {
this.next();
eatQuestionMarkSequence.call(this, 5);
break;
}
this.error('Hex digit or question mark is expected');
}
}
const name = 'UnicodeRange';
const structure = {
value: String
};
function parse() {
const start = this.tokenStart;
// U or u
this.eatIdent('u');
scanUnicodeRange.call(this);
return {
type: 'UnicodeRange',
loc: this.getLocation(start, this.tokenStart),
value: this.substrToCursor(start)
};
}
function generate(node) {
this.tokenize(node.value);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
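
A hedged sketch of how this node is reached from a declaration value (value-context recognition of the leading "u"/"U" is an assumption about the value scope, not shown in this file):

const csstree = require('css-tree');

const value = csstree.parse('U+0025-00FF, U+4??', { context: 'value' });

csstree.walk(value, {
    visit: 'UnicodeRange',
    enter(node) {
        console.log(node.value); // 'U+0025-00FF', then 'U+4??'
    }
});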

54
node_modules/css-tree/cjs/syntax/node/Url.cjs generated vendored Normal file
View File

@ -0,0 +1,54 @@
'use strict';
const url = require('../../utils/url.cjs');
const string = require('../../utils/string.cjs');
const types = require('../../tokenizer/types.cjs');
const name = 'Url';
const structure = {
value: String
};
// <url-token> | <function-token> <string> )
function parse() {
const start = this.tokenStart;
let value;
switch (this.tokenType) {
case types.Url:
value = url.decode(this.consume(types.Url));
break;
case types.Function:
if (!this.cmpStr(this.tokenStart, this.tokenEnd, 'url(')) {
this.error('Function name must be `url`');
}
this.eat(types.Function);
this.skipSC();
value = string.decode(this.consume(types.String));
this.skipSC();
if (!this.eof) {
this.eat(types.RightParenthesis);
}
break;
default:
this.error('Url or Function is expected');
}
return {
type: 'Url',
loc: this.getLocation(start, this.tokenStart),
value
};
}
function generate(node) {
this.token(types.Url, url.encode(node.value));
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;
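
A small sketch of the two input forms handled by parse() above; in both cases value holds the decoded address without quotes or the url( wrapper:

const csstree = require('css-tree');

const plain = csstree.parse('url(image.png)', { context: 'value' });
const quoted = csstree.parse('url( "image.png" )', { context: 'value' });

csstree.walk(plain,  { visit: 'Url', enter: node => console.log(node.value) }); // 'image.png'
csstree.walk(quoted, { visit: 'Url', enter: node => console.log(node.value) }); // 'image.png'
console.log(csstree.generate(plain)); // 'url(image.png)'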

26
node_modules/css-tree/cjs/syntax/node/Value.cjs generated vendored Normal file
View File

@ -0,0 +1,26 @@
'use strict';
const name = 'Value';
const structure = {
children: [[]]
};
function parse() {
const start = this.tokenStart;
const children = this.readSequence(this.scope.Value);
return {
type: 'Value',
loc: this.getLocation(start, this.tokenStart),
children
};
}
function generate(node) {
this.children(node);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

34
node_modules/css-tree/cjs/syntax/node/WhiteSpace.cjs generated vendored Normal file
View File

@ -0,0 +1,34 @@
'use strict';
const types = require('../../tokenizer/types.cjs');
const SPACE = Object.freeze({
type: 'WhiteSpace',
loc: null,
value: ' '
});
const name = 'WhiteSpace';
const structure = {
value: String
};
function parse() {
this.eat(types.WhiteSpace);
return SPACE;
// return {
// type: 'WhiteSpace',
// loc: this.getLocation(this.tokenStart, this.tokenEnd),
// value: this.consume(WHITESPACE)
// };
}
function generate(node) {
this.token(types.WhiteSpace, node.value);
}
exports.generate = generate;
exports.name = name;
exports.parse = parse;
exports.structure = structure;

Some files were not shown because too many files have changed in this diff.