feat: docker compose maybe
60  node_modules/sucrase/dist/esm/parser/traverser/base.js  generated  vendored  Normal file
@@ -0,0 +1,60 @@
import State from "../tokenizer/state";
import {charCodes} from "../util/charcodes";

export let isJSXEnabled;
export let isTypeScriptEnabled;
export let isFlowEnabled;
export let state;
export let input;
export let nextContextId;

export function getNextContextId() {
  return nextContextId++;
}

// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function augmentError(error) {
  if ("pos" in error) {
    const loc = locationForIndex(error.pos);
    error.message += ` (${loc.line}:${loc.column})`;
    error.loc = loc;
  }
  return error;
}

export class Loc {
  constructor(line, column) {
    this.line = line;
    this.column = column;
  }
}

export function locationForIndex(pos) {
  let line = 1;
  let column = 1;
  for (let i = 0; i < pos; i++) {
    if (input.charCodeAt(i) === charCodes.lineFeed) {
      line++;
      column = 1;
    } else {
      column++;
    }
  }
  return new Loc(line, column);
}

export function initParser(
  inputCode,
  isJSXEnabledArg,
  isTypeScriptEnabledArg,
  isFlowEnabledArg,
) {
  input = inputCode;
  state = new State();
  nextContextId = 1;
  isJSXEnabled = isJSXEnabledArg;
  isTypeScriptEnabled = isTypeScriptEnabledArg;
  isFlowEnabled = isFlowEnabledArg;
}
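For orientation, a minimal sketch of how the pieces of base.js fit together: initParser seeds the shared module-level parser state, and locationForIndex converts a character offset into a 1-based line/column Loc. The deep import path below is an assumption for illustration only (the package's export map may not expose it) and is not sucrase's public API.

// Illustrative sketch, assuming the vendored ESM build is directly importable.
import {initParser, locationForIndex} from "sucrase/dist/esm/parser/traverser/base";

const source = "const x = 1;\nconst y = 2;";
// Seed the module-level parser state; JSX, TypeScript, and Flow all disabled here.
initParser(source, false, false, false);

// Offset 13 is the first character of the second line, so this reports 2:1.
const loc = locationForIndex(13);
console.log(loc.line, loc.column); // 2 1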
1022  node_modules/sucrase/dist/esm/parser/traverser/expression.js  generated  vendored  Normal file
File diff suppressed because it is too large
18  node_modules/sucrase/dist/esm/parser/traverser/index.js  generated  vendored  Normal file
@@ -0,0 +1,18 @@
import {nextToken, skipLineComment} from "../tokenizer/index";
import {charCodes} from "../util/charcodes";
import {input, state} from "./base";
import {parseTopLevel} from "./statement";

export function parseFile() {
  // If enabled, skip leading hashbang line.
  if (
    state.pos === 0 &&
    input.charCodeAt(0) === charCodes.numberSign &&
    input.charCodeAt(1) === charCodes.exclamationMark
  ) {
    skipLineComment(2);
  }
  nextToken();
  return parseTopLevel();
}
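The hashbang guard in parseFile only fires when the very first two characters of the input are "#" and "!". A self-contained restatement of that check, independent of the shared parser state; the helper name is hypothetical.

// Hypothetical standalone mirror of the hashbang check in parseFile above.
function startsWithHashbang(source) {
  return (
    source.charCodeAt(0) === 0x23 /* '#' */ &&
    source.charCodeAt(1) === 0x21 /* '!' */
  );
}

console.log(startsWithHashbang("#!/usr/bin/env node\nmain();")); // true
console.log(startsWithHashbang("main();"));                      // false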
159  node_modules/sucrase/dist/esm/parser/traverser/lval.js  generated  vendored  Normal file
@@ -0,0 +1,159 @@
import {flowParseAssignableListItemTypes} from "../plugins/flow";
import {tsParseAssignableListItemTypes, tsParseModifiers} from "../plugins/typescript";
import {
  eat,
  IdentifierRole,
  match,
  next,
  popTypeContext,
  pushTypeContext,
} from "../tokenizer/index";
import {ContextualKeyword} from "../tokenizer/keywords";
import {TokenType, TokenType as tt} from "../tokenizer/types";
import {isFlowEnabled, isTypeScriptEnabled, state} from "./base";
import {parseIdentifier, parseMaybeAssign, parseObj} from "./expression";
import {expect, unexpected} from "./util";

export function parseSpread() {
  next();
  parseMaybeAssign(false);
}

export function parseRest(isBlockScope) {
  next();
  parseBindingAtom(isBlockScope);
}

export function parseBindingIdentifier(isBlockScope) {
  parseIdentifier();
  markPriorBindingIdentifier(isBlockScope);
}

export function parseImportedIdentifier() {
  parseIdentifier();
  state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ImportDeclaration;
}

export function markPriorBindingIdentifier(isBlockScope) {
  let identifierRole;
  if (state.scopeDepth === 0) {
    identifierRole = IdentifierRole.TopLevelDeclaration;
  } else if (isBlockScope) {
    identifierRole = IdentifierRole.BlockScopedDeclaration;
  } else {
    identifierRole = IdentifierRole.FunctionScopedDeclaration;
  }
  state.tokens[state.tokens.length - 1].identifierRole = identifierRole;
}

// Parses lvalue (assignable) atom.
export function parseBindingAtom(isBlockScope) {
  switch (state.type) {
    case tt._this: {
      // In TypeScript, "this" may be the name of a parameter, so allow it.
      const oldIsType = pushTypeContext(0);
      next();
      popTypeContext(oldIsType);
      return;
    }

    case tt._yield:
    case tt.name: {
      state.type = tt.name;
      parseBindingIdentifier(isBlockScope);
      return;
    }

    case tt.bracketL: {
      next();
      parseBindingList(tt.bracketR, isBlockScope, true /* allowEmpty */);
      return;
    }

    case tt.braceL:
      parseObj(true, isBlockScope);
      return;

    default:
      unexpected();
  }
}

export function parseBindingList(
  close,
  isBlockScope,
  allowEmpty = false,
  allowModifiers = false,
  contextId = 0,
) {
  let first = true;

  let hasRemovedComma = false;
  const firstItemTokenIndex = state.tokens.length;

  while (!eat(close) && !state.error) {
    if (first) {
      first = false;
    } else {
      expect(tt.comma);
      state.tokens[state.tokens.length - 1].contextId = contextId;
      // After a "this" type in TypeScript, we need to set the following comma (if any) to also be
      // a type token so that it will be removed.
      if (!hasRemovedComma && state.tokens[firstItemTokenIndex].isType) {
        state.tokens[state.tokens.length - 1].isType = true;
        hasRemovedComma = true;
      }
    }
    if (allowEmpty && match(tt.comma)) {
      // Empty item; nothing further to parse for this item.
    } else if (eat(close)) {
      break;
    } else if (match(tt.ellipsis)) {
      parseRest(isBlockScope);
      parseAssignableListItemTypes();
      // Support rest element trailing commas allowed by TypeScript <2.9.
      eat(TokenType.comma);
      expect(close);
      break;
    } else {
      parseAssignableListItem(allowModifiers, isBlockScope);
    }
  }
}

function parseAssignableListItem(allowModifiers, isBlockScope) {
  if (allowModifiers) {
    tsParseModifiers([
      ContextualKeyword._public,
      ContextualKeyword._protected,
      ContextualKeyword._private,
      ContextualKeyword._readonly,
      ContextualKeyword._override,
    ]);
  }

  parseMaybeDefault(isBlockScope);
  parseAssignableListItemTypes();
  parseMaybeDefault(isBlockScope, true /* leftAlreadyParsed */);
}

function parseAssignableListItemTypes() {
  if (isFlowEnabled) {
    flowParseAssignableListItemTypes();
  } else if (isTypeScriptEnabled) {
    tsParseAssignableListItemTypes();
  }
}

// Parses assignment pattern around given atom if possible.
export function parseMaybeDefault(isBlockScope, leftAlreadyParsed = false) {
  if (!leftAlreadyParsed) {
    parseBindingAtom(isBlockScope);
  }
  if (!eat(tt.eq)) {
    return;
  }
  const eqIndex = state.tokens.length - 1;
  parseMaybeAssign();
  state.tokens[eqIndex].rhsEndIndex = state.tokens.length;
}
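As a side note, the scope-role selection in markPriorBindingIdentifier above is a pure three-way decision on scope depth and block scoping. A standalone restatement follows; the function name and string labels are hypothetical stand-ins for the IdentifierRole enum.

// Hypothetical standalone mirror of the role selection in markPriorBindingIdentifier.
function bindingRole(scopeDepth, isBlockScope) {
  if (scopeDepth === 0) return "TopLevelDeclaration";
  if (isBlockScope) return "BlockScopedDeclaration";
  return "FunctionScopedDeclaration";
}

console.log(bindingRole(0, true));  // "TopLevelDeclaration" (top level wins regardless)
console.log(bindingRole(2, true));  // "BlockScopedDeclaration"
console.log(bindingRole(2, false)); // "FunctionScopedDeclaration"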
1332  node_modules/sucrase/dist/esm/parser/traverser/statement.js  generated  vendored  Normal file
File diff suppressed because it is too large
104  node_modules/sucrase/dist/esm/parser/traverser/util.js  generated  vendored  Normal file
@@ -0,0 +1,104 @@
import {eat, finishToken, lookaheadTypeAndKeyword, match, nextTokenStart} from "../tokenizer/index";

import {formatTokenType, TokenType as tt} from "../tokenizer/types";
import {charCodes} from "../util/charcodes";
import {input, state} from "./base";

// ## Parser utilities

// Tests whether parsed token is a contextual keyword.
export function isContextual(contextualKeyword) {
  return state.contextualKeyword === contextualKeyword;
}

export function isLookaheadContextual(contextualKeyword) {
  const l = lookaheadTypeAndKeyword();
  return l.type === tt.name && l.contextualKeyword === contextualKeyword;
}

// Consumes contextual keyword if possible.
export function eatContextual(contextualKeyword) {
  return state.contextualKeyword === contextualKeyword && eat(tt.name);
}

// Asserts that following token is given contextual keyword.
export function expectContextual(contextualKeyword) {
  if (!eatContextual(contextualKeyword)) {
    unexpected();
  }
}

// Test whether a semicolon can be inserted at the current position.
export function canInsertSemicolon() {
  return match(tt.eof) || match(tt.braceR) || hasPrecedingLineBreak();
}

export function hasPrecedingLineBreak() {
  const prevToken = state.tokens[state.tokens.length - 1];
  const lastTokEnd = prevToken ? prevToken.end : 0;
  for (let i = lastTokEnd; i < state.start; i++) {
    const code = input.charCodeAt(i);
    if (
      code === charCodes.lineFeed ||
      code === charCodes.carriageReturn ||
      code === 0x2028 ||
      code === 0x2029
    ) {
      return true;
    }
  }
  return false;
}

export function hasFollowingLineBreak() {
  const nextStart = nextTokenStart();
  for (let i = state.end; i < nextStart; i++) {
    const code = input.charCodeAt(i);
    if (
      code === charCodes.lineFeed ||
      code === charCodes.carriageReturn ||
      code === 0x2028 ||
      code === 0x2029
    ) {
      return true;
    }
  }
  return false;
}

export function isLineTerminator() {
  return eat(tt.semi) || canInsertSemicolon();
}

// Consume a semicolon, or, failing that, see if we are allowed to
// pretend that there is a semicolon at this position.
export function semicolon() {
  if (!isLineTerminator()) {
    unexpected('Unexpected token, expected ";"');
  }
}

// Expect a token of a given type. If found, consume it, otherwise,
// raise an unexpected token error at given pos.
export function expect(type) {
  const matched = eat(type);
  if (!matched) {
    unexpected(`Unexpected token, expected "${formatTokenType(type)}"`);
  }
}

/**
 * Transition the parser to an error state. All code needs to be written to naturally unwind in this
 * state, which allows us to backtrack without exceptions and without error plumbing everywhere.
 */
export function unexpected(message = "Unexpected token", pos = state.start) {
  if (state.error) {
    return;
  }
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const err = new SyntaxError(message);
  err.pos = pos;
  state.error = err;
  state.pos = input.length;
  finishToken(tt.eof);
}
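For context, the error-state convention documented above (unexpected never throws; it records a SyntaxError on the shared state and fast-forwards to EOF) is meant to be surfaced by the caller. A hedged sketch of that pattern follows, assuming the vendored deep import paths are reachable; runParse is a hypothetical wrapper for illustration, not sucrase's actual entry point.

// Illustrative only: a hypothetical wrapper showing how a caller would surface
// the error that unexpected() records on the shared parser state.
import {augmentError, initParser, state} from "sucrase/dist/esm/parser/traverser/base";
import {parseFile} from "sucrase/dist/esm/parser/traverser/index";

function runParse(source) {
  initParser(source, false, false, false);
  const result = parseFile();
  if (state.error) {
    // augmentError appends "(line:column)" to the message using error.pos.
    throw augmentError(state.error);
  }
  return result;
}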