feat: docker compose maybe

This commit is contained in:
2023-11-13 16:10:04 -05:00
parent 180b261e40
commit b625ccd8d6
8031 changed files with 2182966 additions and 0 deletions

135
node_modules/sorcery/README.md generated vendored Normal file
View File

@ -0,0 +1,135 @@
# sorcery.js
Sourcemaps are great - if you have a JavaScript file, and you minify it, your minifier can generate a map that lets you debug as though you were looking at the original uncompressed code.
But if you have more than one transformation - say you want to transpile your JavaScript, concatenate several files into one, and minify the result - it gets a little trickier. Each intermediate step needs to be able to both *ingest* a sourcemap and *generate* one, all the time pointing back to the original source.
Most compilers don't do that. ([UglifyJS](https://github.com/mishoo/UglifyJS2) is an honourable exception.) So when you fire up devtools, instead of looking at the original source you find yourself looking at the final intermediate step in the chain of transformations.
**Sorcery aims to fix that.** Given a file at the end of a transformation chain (e.g., your minified JavaScript), it will follow the entire chain back to the original source, and generate a new sourcemap that describes the whole process. How? Magic.
This is a work-in-progress - suitable for playing around with, but don't rely on it to debug air traffic control software or medical equipment. Other than that, it can't do much harm.
## Usage
### As a node module
Install sorcery locally:
```bash
npm install sorcery
```
```js
var sorcery = require( 'sorcery' );
sorcery.load( 'some/generated/code.min.js' ).then( function ( chain ) {
// generate a flattened sourcemap
var map = chain.apply(); // { version: 3, file: 'code.min.js', ... }
// get a JSON representation of the sourcemap
map.toString(); // '{"version":3,"file":"code.min.js",...}'
// get a data URI representation
map.toUrl(); // 'data:application/json;charset=utf-8;base64,eyJ2ZXJ...'
// write to a new file - this will create `output.js` and
// `output.js.map`, and will preserve relative paths. It
// returns a Promise
chain.write( 'output.js' );
// write to a new file but use an absolute path for the
// sourceMappingURL
chain.write( 'output.js', { absolutePath: true });
// write to a new file, but append the flattened sourcemap as a data URI
chain.write( 'output.js', { inline: true });
// overwrite the existing file
chain.write();
chain.write({ inline: true });
// find the origin of line x, column y. Returns an object with
// `source`, `line`, `column` and (if applicable) `name` properties.
// Note - for consistency with other tools, line numbers are always
// one-based, column numbers are always zero-based. It's daft, I know.
var loc = chain.trace( x, y );
});
// You can also use sorcery synchronously:
var chain = sorcery.loadSync( 'some/generated/code.min.js' );
var map = chain.apply();
var loc = chain.trace( x, y );
chain.writeSync();
```
#### Advanced options
You can pass an optional second argument to `sorcery.load()` and `sorcery.loadSync()`, with zero or more of the following properties:
* `content` - a map of `filename: contents` pairs. `filename` will be resolved against the current working directory if needs be
* `sourcemaps` - a map of `filename: sourcemap` pairs, where `filename` is the name of the file the sourcemap is related to. This will override any `sourceMappingURL` comments in the file itself.
For example:
```js
sorcery.load( 'some/generated/code.min.js', {
content: {
'some/minified/code.min.js': '...',
'some/transpiled/code.js': '...',
'some/original/code.js': '...'
},
sourcemaps: {
'some/minified/code.min.js': {...},
'some/transpiled/code.js': {...}
}
}).then( chain => {
/* ... */
});
```
Any files not found will be read from the filesystem as normal.
### On the command line
First, install sorcery globally:
```bash
npm install -g sorcery
```
```
Usage:
sorcery [options]
Options:
-h, --help Show help message
-v, --version Show version
-i, --input <file> Input file
-o, --output <file> Output file (if absent, will overwrite input)
-d, --datauri Append map as a data URI, rather than separate file
-x, --excludeContent Don't populate the sourcesContent array
```
Examples:
```bash
# overwrite sourcemap in place (will write map to
# some/generated/code.min.js.map, and update
# sourceMappingURL comment if necessary)
sorcery -i some/generated/code.min.js
# append flattened sourcemap as an inline data URI
# (will delete existing .map file, if applicable)
sorcery -d -i some/generated/code.min.js
# write to a new file (will create newfile.js and
# newfile.js.map)
sorcery -i some/generated/code.min.js -o newfile.js
```
## License
MIT

22
node_modules/sorcery/bin/help.md generated vendored Normal file
View File

@ -0,0 +1,22 @@
Sorcery version <%= version %>
=====================================
Usage:
sorcery [options]
Options:
-h, --help Show help message
-v, --version Show version
-i, --input <file|folder> Input file
-o, --output <file|folder> Output file (if absent, will overwrite input)
-d, --datauri Append map as a data URI, rather than separate file
-x, --excludeContent Don't populate the sourcesContent array
Examples:
sorcery --input some/generated/code.min.js
sorcery --input tmp --output dist
For more information visit https://github.com/Rich-Harris/sorcery

13
node_modules/sorcery/bin/showHelp.js generated vendored Normal file
View File

@ -0,0 +1,13 @@
var fs = require( 'fs' ),
path = require( 'path' );
module.exports = function ( stream ) {
fs.readFile( path.join( __dirname, 'help.md' ), function ( err, result ) {
var help;
if ( err ) throw err;
help = result.toString().replace( '<%= version %>', require( '../package.json' ).version );
( stream || process.stderr ).write( '\n' + help + '\n' );
});
};

74
node_modules/sorcery/bin/sorcery generated vendored Executable file
View File

@ -0,0 +1,74 @@
#!/usr/bin/env node

// Command-line entry point for sorcery: flattens the sourcemap chain of a
// single file, or of every matching file in a directory, and writes the
// result (optionally as an inline data URI).

var path = require( 'path' );
var minimist = require( 'minimist' );
var sander = require( 'sander' );
var showHelp = require( './showHelp' );
var command;
var sorcery = require( '../' );
// Directory mode only processes files with these extensions.
var validExtensions = { js: true };
// Short flags are aliased to the long option names used below.
command = minimist( process.argv.slice( 2 ), {
alias: {
i: 'input',
o: 'output',
v: 'version',
h: 'help',
d: 'datauri',
x: 'excludeContent'
}
});
if ( command.help ) {
showHelp( process.stdout );
}
// No arguments and an interactive terminal: print usage to stderr.
else if ( process.argv.length <= 2 && process.stdin.isTTY ) {
showHelp( process.stderr );
}
else if ( command.version ) {
console.log( 'Sorcery version ' + require( '../package.json' ).version );
}
else if ( !command.input ) {
console.error( 'Error: You must supply an --input (-i) argument. Type sorcery --help for more info' );
}
else {
sander.stat( command.input ).then( function ( stats ) {
if ( stats.isDirectory() ) {
// Directory mode: recursively list files, keep only valid extensions,
// then process them sequentially by chaining one promise per file.
return sander.lsr( command.input ).then( function ( files ) {
files = files.filter( function ( file ) {
return validExtensions[ path.extname( file ).slice( 1 ) ];
});
return files.reduce( function ( promise, file ) {
return promise.then( function () {
var input = path.join( command.input, file );
var output = path.join( command.output || command.input, file );
// NOTE(review): sorcery.load() resolves to null for files without a
// sourcemap, in which case chain.write would throw — confirm intended.
return sorcery.load( input ).then( function ( chain ) {
return chain.write( output, {
inline: command.datauri,
includeContent: !command.excludeContent
});
});
});
}, Promise.resolve() );
});
}
// Single-file mode: overwrite the input unless --output was given.
return sorcery.load( command.input ).then( function ( chain ) {
return chain.write( command.output || command.input, {
inline: command.datauri,
includeContent: !command.excludeContent
});
});
}).catch( function ( err ) {
// Rethrow outside the promise chain so the process exits non-zero
// with a proper stack trace instead of swallowing the rejection.
setTimeout( function () {
throw err;
});
});
}

737
node_modules/sorcery/dist/sorcery.cjs.js generated vendored Normal file
View File

@ -0,0 +1,737 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var path = require('path');
var sander = require('sander');
// Pick a global object for the UMD shim below (browser `window`,
// Node `global`, else `this`).
var __commonjs_global = typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : this;
// Runs a CommonJS-style factory with a fresh `module`/`exports` pair and
// returns its exports — used to inline bundled dependencies.
function __commonjs(fn, module) { return module = { exports: {} }, fn(module, module.exports, __commonjs_global), module.exports; }
// Inlined UMD build of a sourcemap codec (it registers the global
// `sourcemapCodec`). Encodes/decodes the semicolon/comma-delimited
// base64-VLQ `mappings` string of a version-3 sourcemap.
var sourcemapCodec_umd = __commonjs(function (module, exports, global) {
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
})(__commonjs_global, (function (exports) { 'use strict';
// Lookup tables between the 64 base64 characters and their 6-bit values.
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
// decode(mappings) -> array of lines; each line is an array of segments
// [genColumn(, sourcesIndex, sourceLine, sourceColumn(, namesIndex))].
// `state` holds the five running VLQ deltas; only genColumn (state[0])
// resets per line, the others carry across the whole mappings string.
function decode(mappings) {
const state = new Int32Array(5);
const decoded = [];
let index = 0;
do {
const semi = indexOf(mappings, index);
const line = [];
let sorted = true;
let lastCol = 0;
state[0] = 0;
for (let i = index; i < semi; i++) {
let seg;
i = decodeInteger(mappings, i, state, 0); // genColumn
const col = state[0];
if (col < lastCol)
sorted = false;
lastCol = col;
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
i = decodeInteger(mappings, i, state, 2); // sourceLine
i = decodeInteger(mappings, i, state, 3); // sourceColumn
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 4); // namesIndex
seg = [col, state[1], state[2], state[3], state[4]];
}
else {
seg = [col, state[1], state[2], state[3]];
}
}
else {
seg = [col];
}
line.push(seg);
}
// Segments must be ordered by generated column; repair if needed.
if (!sorted)
sort(line);
decoded.push(line);
index = semi + 1;
} while (index <= mappings.length);
return decoded;
}
// Position of the next ';' at/after `index`, or end-of-string.
function indexOf(mappings, index) {
const idx = mappings.indexOf(';', index);
return idx === -1 ? mappings.length : idx;
}
// Reads one base64-VLQ value starting at `pos`: 5 payload bits per char,
// bit 32 is the continuation flag, and the lowest payload bit is the
// sign. The decoded value is accumulated as a delta into state[j].
function decodeInteger(mappings, pos, state, j) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = mappings.charCodeAt(pos++);
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
state[j] += value;
return pos;
}
// True if another VLQ follows within this line (i.e. the current
// segment has more fields before the next ',' or end of line).
function hasMoreVlq(mappings, i, length) {
if (i >= length)
return false;
return mappings.charCodeAt(i) !== comma;
}
function sort(line) {
line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[0] - b[0];
}
// encode(decoded) -> mappings string. Output is accumulated in a 16KB
// byte buffer, flushed through `td` whenever fewer than 36 bytes remain
// (worst-case segment: 5 VLQs * 7 chars each + 1 comma).
function encode(decoded) {
const state = new Int32Array(5);
const bufLength = 1024 * 16;
const subLength = bufLength - 36;
const buf = new Uint8Array(bufLength);
const sub = buf.subarray(0, subLength);
let pos = 0;
let out = '';
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) {
if (pos === bufLength) {
out += td.decode(buf);
pos = 0;
}
buf[pos++] = semicolon;
}
if (line.length === 0)
continue;
state[0] = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
// We can push up to 5 ints, each int can take at most 7 chars, and we
// may push a comma.
if (pos > subLength) {
out += td.decode(sub);
buf.copyWithin(0, subLength, pos);
pos -= subLength;
}
if (j > 0)
buf[pos++] = comma;
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
if (segment.length === 1)
continue;
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
if (segment.length === 4)
continue;
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
}
}
return out + td.decode(buf.subarray(0, pos));
}
// Writes segment[j] as a delta-encoded base64 VLQ into `buf`.
function encodeInteger(buf, pos, state, segment, j) {
const next = segment[j];
let num = next - state[j];
state[j] = next;
num = num < 0 ? (-num << 1) | 1 : num << 1;
do {
let clamped = num & 0b011111;
num >>>= 5;
if (num > 0)
clamped |= 0b100000;
buf[pos++] = intToChar[clamped];
} while (num > 0);
return pos;
}
exports.decode = decode;
exports.encode = encode;
Object.defineProperty(exports, '__esModule', { value: true });
}));
});
// Unwrap the codec: prefer a `default` export if present (interop with
// transpiled ES modules), otherwise use the namespace object itself.
var codec = (sourcemapCodec_umd && typeof sourcemapCodec_umd === 'object' && 'default' in sourcemapCodec_umd ? sourcemapCodec_umd['default'] : sourcemapCodec_umd);
/**
 * Decodes a base64 string
 * @param {string} base64 - the string to decode
 * @returns {string}
 */
function atob ( base64 ) {
	// Buffer.from replaces `new Buffer(...)`, which has been deprecated
	// since Node 6 (DEP0005) and emits runtime warnings.
	return Buffer.from( base64, 'base64' ).toString( 'utf8' );
}
// this looks ridiculous, but it prevents sourcemap tooling from mistaking
// this for an actual sourceMappingURL — the full literal must never
// appear unbroken anywhere in this bundle
var SOURCEMAPPING_URL = 'sourceMa';
SOURCEMAPPING_URL += 'ppingURL';
var SOURCEMAPPING_URL$1 = SOURCEMAPPING_URL;
// Parses a sourcemap's JSON, rewrapping any syntax error with the URL of
// the offending map so the failure is actionable.
function parseJSON ( json, url ) {
	var parsed;

	try {
		parsed = JSON.parse( json );
	} catch ( err ) {
		throw new Error( "Could not parse sourcemap (" + url + "): " + err.message );
	}

	return parsed;
}
/**
 * Turns a sourceMappingURL into a sourcemap
 * @param {string} url - the sourceMappingURL. Can be a
   base64-encoded data URI
 * @param {string} base - the URL against which relative URLs
   should be resolved
 * @param {boolean} sync - if `true`, the sourcemap is returned
   directly; otherwise a promise for it is returned
 * @returns {object|Promise<object>} - a version 3 sourcemap
 */
function getMapFromUrl ( url, base, sync ) {
if ( /^data:/.test( url ) ) { // TODO beef this up
var match = /base64,(.+)$/.exec( url );
// only base64-encoded data URIs are supported
if ( !match ) {
throw new Error( (SOURCEMAPPING_URL$1 + " is not base64-encoded") );
}
var json = atob( match[1] );
var map = parseJSON( json, ("data URI in " + base) );
return sync ? map : sander.Promise.resolve( map );
}
// otherwise treat the URL as a file path relative to the host file
url = path.resolve( path.dirname( base ), decodeURI( url ) );
if ( sync ) {
return parseJSON( sander.readFileSync( url, { encoding: 'utf-8' }), url );
} else {
return sander.readFile( url, { encoding: 'utf-8' }).then( function (json) { return parseJSON( json, url ); } );
}
}
// Extracts the sourceMappingURL from the end of a file's content, or
// returns null if there is none.
function getSourceMappingUrl ( str ) {
	var index, substring, url, match;

	// assume we want the last occurrence
	index = str.lastIndexOf( ( SOURCEMAPPING_URL$1 + "=" ) );

	if ( index === -1 ) {
		return null;
	}

	// skip past 'sourceMa…ppingURL=' — derive the offset from the constant
	// instead of hard-coding 17, so the two cannot drift apart
	substring = str.substring( index + SOURCEMAPPING_URL$1.length + 1 );
	match = /^[^\r\n]+/.exec( substring );

	url = match ? match[0] : null;

	// possibly a better way to do this, but we don't want to exclude whitespace
	// from the sourceMappingURL because it might not have been correctly encoded
	if ( url && url.slice( -2 ) === '*/' ) {
		url = url.slice( 0, -2 ).trim();
	}

	return url;
}
// Resolves a node's sourcemap. A user-supplied override (sourceMapByPath)
// wins; otherwise the map referenced by the file's sourceMappingURL
// comment is fetched. A node without a sourceMappingURL is marked as
// original source and yields null. `sync` semantics match getMapFromUrl:
// truthy returns the value directly, otherwise a promise.
function getMap ( node, sourceMapByPath, sync ) {
if ( node.file in sourceMapByPath ) {
var map = sourceMapByPath[ node.file ];
return sync ? map : sander.Promise.resolve( map );
}
else {
var url = getSourceMappingUrl( node.content );
if ( !url ) {
node.isOriginalSource = true;
return sync ? null : sander.Promise.resolve( null );
}
return getMapFromUrl( url, node.file, sync );
}
}
// A Node represents one file in the transformation chain. A source must
// provide a file path, inline content, or both.
function Node (ref) {
	var file = ref.file;
	var content = ref.content;

	this.file = file ? path.resolve( file ) : null;
	this.content = content || null; // sometimes exists in sourcesContent, sometimes doesn't

	if ( this.file === null && this.content === null ) {
		throw new Error( 'A source must specify either file or content' );
	}

	// populated later by load()/loadSync()
	this.map = null;
	this.mappings = null;
	this.sources = null;
	this.isOriginalSource = null;

	// per-node timing/diagnostic counters (nanoseconds)
	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Node.prototype = {
// Asynchronously fetches this node's content and sourcemap, then
// recursively loads every source the map references. Content is
// memoised in the shared `sourcesContentByPath` cache. Resolves once
// the whole tree below this node has loaded.
load: function load ( sourcesContentByPath, sourceMapByPath ) {
var this$1 = this;
return getContent( this, sourcesContentByPath ).then( function (content) {
this$1.content = sourcesContentByPath[ this$1.file ] = content;
return getMap( this$1, sourceMapByPath ).then( function (map) {
if ( !map ) return null;
this$1.map = map;
// hrtime tuple -> nanoseconds
var decodingStart = process.hrtime();
this$1.mappings = codec.decode( map.mappings );
var decodingTime = process.hrtime( decodingStart );
this$1._stats.decodingTime = 1e9 * decodingTime[0] + decodingTime[1];
var sourcesContent = map.sourcesContent || [];
// sources are resolved against the map's sourceRoot (if any)
var sourceRoot = path.resolve( path.dirname( this$1.file ), map.sourceRoot || '' );
this$1.sources = map.sources.map( function ( source, i ) {
return new Node({
file: source ? path.resolve( sourceRoot, source ) : null,
content: sourcesContent[i]
});
});
var promises = this$1.sources.map( function (node) { return node.load( sourcesContentByPath, sourceMapByPath ); } );
return sander.Promise.all( promises );
});
});
},
// Synchronous twin of load().
// NOTE(review): unlike load(), the Node created here does not null-guard
// a missing `source` before path.resolve — confirm whether maps with
// null sources can reach this path.
loadSync: function loadSync ( sourcesContentByPath, sourceMapByPath ) {
if ( !this.content ) {
if ( !sourcesContentByPath[ this.file ] ) {
sourcesContentByPath[ this.file ] = sander.readFileSync( this.file, { encoding: 'utf-8' });
}
this.content = sourcesContentByPath[ this.file ];
}
var map = getMap( this, sourceMapByPath, true );
var sourcesContent;
if ( !map ) {
this.isOriginalSource = true;
} else {
this.map = map;
this.mappings = codec.decode( map.mappings );
sourcesContent = map.sourcesContent || [];
var sourceRoot = path.resolve( path.dirname( this.file ), map.sourceRoot || '' );
this.sources = map.sources.map( function ( source, i ) {
var node = new Node({
file: path.resolve( sourceRoot, source ),
content: sourcesContent[i]
});
node.loadSync( sourcesContentByPath, sourceMapByPath );
return node;
});
}
},
/**
 * Traces a segment back to its origin
 * @param {number} lineIndex - the zero-based line index of the
   segment as found in `this`
 * @param {number} columnIndex - the zero-based column index of the
   segment as found in `this`, or null to fall back to a line mapping
 * @param {string|null} name - if specified, the name that should be
   (eventually) returned, as it is closest to the generated code
 * @returns {object|null}
   @property {string} source - the filepath of the source
   @property {number} line - the one-based line index
   @property {number} column - the zero-based column index
   @property {string|null} name - the name corresponding
   to the segment being traced
 */
trace: function trace ( lineIndex, columnIndex, name ) {
var this$1 = this;
// If this node doesn't have a source map, we have
// to assume it is the original source
if ( this.isOriginalSource ) {
return {
source: this.file,
line: lineIndex + 1,
column: columnIndex || 0,
name: name
};
}
// Otherwise, we need to figure out what this position in
// the intermediate file corresponds to in *its* source
var segments = this.mappings[ lineIndex ];
if ( !segments || segments.length === 0 ) {
return null;
}
if ( columnIndex != null ) {
var len = segments.length;
var i;
// segments are sorted by generated column, so stop at the first
// segment past the requested column
for ( i = 0; i < len; i += 1 ) {
var generatedCodeColumn = segments[i][0];
if ( generatedCodeColumn > columnIndex ) {
break;
}
if ( generatedCodeColumn === columnIndex ) {
// a segment without source fields cannot be traced further
if ( segments[i].length < 4 ) return null;
var sourceFileIndex$1 = segments[i][1];
var sourceCodeLine$1 = segments[i][2];
var sourceCodeColumn = segments[i][3];
var nameIndex$1 = segments[i][4];
var parent$1 = this$1.sources[ sourceFileIndex$1 ];
return parent$1.trace( sourceCodeLine$1, sourceCodeColumn, this$1.map.names[ nameIndex$1 ] || name );
}
}
}
// fall back to a line mapping
// NOTE(review): assumes segments[0] carries source fields; a length-1
// segment here would leave `parent` undefined — confirm.
var sourceFileIndex = segments[0][1];
var sourceCodeLine = segments[0][2];
var nameIndex = segments[0][4];
var parent = this.sources[ sourceFileIndex ];
return parent.trace( sourceCodeLine, null, this.map.names[ nameIndex ] || name );
}
};
// Returns a promise for the node's content, preferring (in order): the
// shared cache, content already attached to the node, the filesystem.
function getContent ( node, sourcesContentByPath ) {
	var cached = node.file in sourcesContentByPath;
	if ( cached ) {
		node.content = sourcesContentByPath[ node.file ];
	}

	return node.content ?
		sander.Promise.resolve( node.content ) :
		sander.readFile( node.file, { encoding: 'utf-8' });
}
/**
 * Encodes a string as base64
 * @param {string} str - the string to encode
 * @returns {string}
 */
function btoa ( str ) {
	// Buffer.from replaces `new Buffer(...)`, which has been deprecated
	// since Node 6 (DEP0005) and emits runtime warnings. The default
	// utf8 encoding matches the old constructor's behaviour.
	return Buffer.from( str ).toString( 'base64' );
}
// A minimal version-3 sourcemap object with JSON / data-URI serialisers.
// Assignment order is deliberate: it fixes the key order produced by
// JSON.stringify in toString().
function SourceMap ( properties ) {
	this.version = 3;

	this.file = properties.file;
	this.sources = properties.sources;
	this.sourcesContent = properties.sourcesContent;
	this.names = properties.names;
	this.mappings = properties.mappings;
}

SourceMap.prototype = {
	// JSON representation, e.g. for writing a standalone .map file
	toString: function toString () {
		return JSON.stringify( this );
	},

	// base64 data URI, suitable for an inline sourceMappingURL comment
	toUrl: function toUrl () {
		var prefix = 'data:application/json;charset=utf-8;base64,';
		return prefix + btoa( this.toString() );
	}
};
// Converts Windows backslash separators to forward slashes; any
// non-string value passes through untouched.
function slash ( path ) {
	if ( typeof path !== 'string' ) return path;
	return path.replace( /\\/g, '/' );
}
// Matches a trailing sourcemap comment — the JS `//# ...`/`//@ ...` form
// or the CSS `/*# ... */` form — so it can be stripped before a fresh
// comment is appended. Built with `new RegExp` so the URL constant stays
// split (see SOURCEMAPPING_URL above).
var SOURCEMAP_COMMENT = new RegExp( "\n*(?:" +
"\\/\\/[@#]\\s*" + SOURCEMAPPING_URL$1 + "=([^'\"]+)|" + // js
"\\/\\*#?\\s*" + SOURCEMAPPING_URL$1 + "=([^'\"]+)\\s\\*\\/)" + // css
'\\s*$', 'g' );
// A Chain wraps a fully-loaded root Node and exposes flattening (apply),
// tracing and writing over it.
function Chain ( node, sourcesContentByPath ) {
	this.node = node;
	this.sourcesContentByPath = sourcesContentByPath;

	// Pre-seed the counters that stat() reads and apply() updates.
	// Previously this was `{}`, so `untraceable += 1` in apply() produced
	// NaN and stat() reported NaN before apply() had run.
	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Chain.prototype = {
// Reports timings in milliseconds (the internal counters are in
// nanoseconds, hence / 1e6).
stat: function stat () {
return {
selfDecodingTime: this._stats.decodingTime / 1e6,
totalDecodingTime: ( this._stats.decodingTime + tally( this.node.sources, 'decodingTime' ) ) / 1e6,
encodingTime: this._stats.encodingTime / 1e6,
tracingTime: this._stats.tracingTime / 1e6,
untraceable: this._stats.untraceable
};
},
// Flattens the chain: traces every segment of the root node's mappings
// back to its original source and builds one SourceMap describing the
// whole transformation. `options.includeContent` (default true) embeds
// sourcesContent; `options.base` controls how source paths are relativised.
apply: function apply ( options ) {
var this$1 = this;
if ( options === void 0 ) options = {};
var allNames = [];
var allSources = [];
// Traces a single decoded segment and appends its re-based version to
// `result`. Segments without source fields, or that cannot be traced,
// are dropped (the latter are counted in _stats.untraceable).
var applySegment = function ( segment, result ) {
if ( segment.length < 4 ) return;
var traced = this$1.node.sources[ segment[1] ].trace( // source
segment[2], // source code line
segment[3], // source code column
this$1.node.map.names[ segment[4] ]
);
if ( !traced ) {
// NOTE(review): relies on _stats.untraceable starting as a number;
// `undefined + 1` is NaN
this$1._stats.untraceable += 1;
return;
}
// intern the source path
var sourceIndex = allSources.indexOf( traced.source );
if ( !~sourceIndex ) {
sourceIndex = allSources.length;
allSources.push( traced.source );
}
var newSegment = [
segment[0], // generated code column
sourceIndex,
traced.line - 1,
traced.column
];
if ( traced.name ) {
// intern the name
var nameIndex = allNames.indexOf( traced.name );
if ( !~nameIndex ) {
nameIndex = allNames.length;
allNames.push( traced.name );
}
newSegment[4] = nameIndex;
}
result[ result.length ] = newSegment;
};
// Trace mappings
var tracingStart = process.hrtime();
var i = this.node.mappings.length;
var resolved = new Array( i );
var j, line, result;
while ( i-- ) {
line = this$1.node.mappings[i];
resolved[i] = result = [];
for ( j = 0; j < line.length; j += 1 ) {
applySegment( line[j], result );
}
}
var tracingTime = process.hrtime( tracingStart );
this._stats.tracingTime = 1e9 * tracingTime[0] + tracingTime[1];
// Encode mappings
var encodingStart = process.hrtime();
var mappings = codec.encode( resolved );
var encodingTime = process.hrtime( encodingStart );
this._stats.encodingTime = 1e9 * encodingTime[0] + encodingTime[1];
var includeContent = options.includeContent !== false;
return new SourceMap({
file: path.basename( this.node.file ),
sources: allSources.map( function (source) { return slash( path.relative( options.base || path.dirname( this$1.node.file ), source ) ); } ),
sourcesContent: allSources.map( function (source) { return includeContent ? this$1.sourcesContentByPath[ source ] : null; } ),
names: allNames,
mappings: mappings
});
},
// Traces one position (one-based line, zero-based column) back to its
// origin; see Node.prototype.trace for the returned shape.
trace: function trace ( oneBasedLineIndex, zeroBasedColumnIndex ) {
return this.node.trace( oneBasedLineIndex - 1, zeroBasedColumnIndex, null );
},
// Writes the file (content plus trailing sourcemap comment) and, unless
// `options.inline`, a sibling `<dest>.map` file. With no `dest`, the
// node's own file is overwritten. Returns a promise.
write: function write ( dest, options ) {
if ( typeof dest !== 'string' ) {
options = dest;
dest = this.node.file;
}
options = options || {};
var ref = processWriteOptions( dest, this, options );
var resolved = ref.resolved;
var content = ref.content;
var map = ref.map;
var promises = [ sander.writeFile( resolved, content ) ];
if ( !options.inline ) {
promises.push( sander.writeFile( resolved + '.map', map.toString() ) );
}
return Promise.all( promises );
},
// Blocking variant of write().
writeSync: function writeSync ( dest, options ) {
if ( typeof dest !== 'string' ) {
options = dest;
dest = this.node.file;
}
options = options || {};
var ref = processWriteOptions( dest, this, options );
var resolved = ref.resolved;
var content = ref.content;
var map = ref.map;
sander.writeFileSync( resolved, content );
if ( !options.inline ) {
sander.writeFileSync( resolved + '.map', map.toString() );
}
}
};
// Resolves the destination path, flattens the chain's sourcemap relative
// to it, strips any stale sourcemap comment from the content and appends
// a fresh one (either a data URI or a pointer to `<dest>.map`).
function processWriteOptions ( dest, chain, options ) {
	var resolved = path.resolve( dest );

	var map = chain.apply({
		includeContent: options.includeContent,
		base: options.base ? path.resolve( options.base ) : path.dirname( resolved )
	});

	var url;
	if ( options.inline ) {
		url = map.toUrl();
	} else {
		url = ( options.absolutePath ? resolved : path.basename( resolved ) ) + '.map';
	}
	// TODO shouldn't url be relative?

	var content = chain.node.content.replace( SOURCEMAP_COMMENT, '' ) + sourcemapComment( url, resolved );

	return { resolved: resolved, content: content, map: map };
}
// Sums one named stat across a list of nodes.
function tally ( nodes, stat ) {
	var total = 0;
	for ( var i = 0; i < nodes.length; i += 1 ) {
		total += nodes[i]._stats[ stat ];
	}
	return total;
}
// Builds the trailing sourcemap comment for `dest`: CSS files get a
// block comment, everything else gets a line comment. The URL is
// percent-encoded so it survives inside the comment.
function sourcemapComment ( url, dest ) {
	url = encodeURI( url );

	if ( path.extname( dest ) === '.css' ) {
		return "\n/*# " + SOURCEMAPPING_URL$1 + "=" + url + " */\n";
	}

	return "\n//# " + SOURCEMAPPING_URL$1 + "=" + url + "\n";
}
// Loads `file` and its whole sourcemap chain asynchronously. Resolves to
// a Chain, or to null when the file has no sourcemap (i.e. it already is
// original source). See init() for the supported options.
function load ( file, options ) {
var ref = init( file, options );
var node = ref.node;
var sourcesContentByPath = ref.sourcesContentByPath;
var sourceMapByPath = ref.sourceMapByPath;
return node.load( sourcesContentByPath, sourceMapByPath )
.then( function () { return node.isOriginalSource ? null : new Chain( node, sourcesContentByPath ); } );
}
// Synchronous variant of load(): returns a Chain, or null when `file`
// has no sourcemap.
function loadSync ( file, options ) {
if ( options === void 0 ) options = {};
var ref = init( file, options );
var node = ref.node;
var sourcesContentByPath = ref.sourcesContentByPath;
var sourceMapByPath = ref.sourceMapByPath;
node.loadSync( sourcesContentByPath, sourceMapByPath );
return node.isOriginalSource ? null : new Chain( node, sourcesContentByPath );
}
// Shared setup for load()/loadSync(): creates the root node and builds
// lookup tables of user-supplied content (options.content) and sourcemaps
// (options.sourcemaps), keyed by absolute path.
function init ( file, options ) {
	if ( options === void 0 ) options = {};

	var node = new Node({ file: file });
	var sourcesContentByPath = {};
	var sourceMapByPath = {};

	var content = options.content || {};
	Object.keys( content ).forEach( function (key) {
		sourcesContentByPath[ path.resolve( key ) ] = content[ key ];
	});

	var sourcemaps = options.sourcemaps || {};
	Object.keys( sourcemaps ).forEach( function (key) {
		sourceMapByPath[ path.resolve( key ) ] = sourcemaps[ key ];
	});

	return { node: node, sourcesContentByPath: sourcesContentByPath, sourceMapByPath: sourceMapByPath };
}
// Public API — see README.md for usage.
exports.load = load;
exports.loadSync = loadSync;
//# sourceMappingURL=sorcery.cjs.js.map

1
node_modules/sorcery/dist/sorcery.cjs.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

732
node_modules/sorcery/dist/sorcery.es6.js generated vendored Normal file
View File

@ -0,0 +1,732 @@
import { resolve, dirname, relative, basename, extname } from 'path';
import { readFileSync, Promise as Promise$1, readFile, writeFileSync, writeFile } from 'sander';
// Pick a global object for the UMD shim below (browser `window`,
// Node `global`, else `this`).
var __commonjs_global = typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : this;
// Runs a CommonJS-style factory with a fresh `module`/`exports` pair and
// returns its exports — used to inline bundled dependencies.
function __commonjs(fn, module) { return module = { exports: {} }, fn(module, module.exports, __commonjs_global), module.exports; }
// Inlined UMD build of a sourcemap codec (it registers the global
// `sourcemapCodec`). Encodes/decodes the semicolon/comma-delimited
// base64-VLQ `mappings` string of a version-3 sourcemap.
var sourcemapCodec_umd = __commonjs(function (module, exports, global) {
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
})(__commonjs_global, (function (exports) { 'use strict';
// Lookup tables between the 64 base64 characters and their 6-bit values.
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
// decode(mappings) -> array of lines; each line is an array of segments
// [genColumn(, sourcesIndex, sourceLine, sourceColumn(, namesIndex))].
// `state` holds the five running VLQ deltas; only genColumn (state[0])
// resets per line, the others carry across the whole mappings string.
function decode(mappings) {
const state = new Int32Array(5);
const decoded = [];
let index = 0;
do {
const semi = indexOf(mappings, index);
const line = [];
let sorted = true;
let lastCol = 0;
state[0] = 0;
for (let i = index; i < semi; i++) {
let seg;
i = decodeInteger(mappings, i, state, 0); // genColumn
const col = state[0];
if (col < lastCol)
sorted = false;
lastCol = col;
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
i = decodeInteger(mappings, i, state, 2); // sourceLine
i = decodeInteger(mappings, i, state, 3); // sourceColumn
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 4); // namesIndex
seg = [col, state[1], state[2], state[3], state[4]];
}
else {
seg = [col, state[1], state[2], state[3]];
}
}
else {
seg = [col];
}
line.push(seg);
}
// Segments must be ordered by generated column; repair if needed.
if (!sorted)
sort(line);
decoded.push(line);
index = semi + 1;
} while (index <= mappings.length);
return decoded;
}
// Position of the next ';' at/after `index`, or end-of-string.
function indexOf(mappings, index) {
const idx = mappings.indexOf(';', index);
return idx === -1 ? mappings.length : idx;
}
// Reads one base64-VLQ value starting at `pos`: 5 payload bits per char,
// bit 32 is the continuation flag, and the lowest payload bit is the
// sign. The decoded value is accumulated as a delta into state[j].
function decodeInteger(mappings, pos, state, j) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = mappings.charCodeAt(pos++);
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
state[j] += value;
return pos;
}
// True if another VLQ follows within this line (i.e. the current
// segment has more fields before the next ',' or end of line).
function hasMoreVlq(mappings, i, length) {
if (i >= length)
return false;
return mappings.charCodeAt(i) !== comma;
}
function sort(line) {
line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[0] - b[0];
}
// encode(decoded) -> mappings string. Output is accumulated in a 16KB
// byte buffer, flushed through `td` whenever fewer than 36 bytes remain
// (worst-case segment: 5 VLQs * 7 chars each + 1 comma).
function encode(decoded) {
const state = new Int32Array(5);
const bufLength = 1024 * 16;
const subLength = bufLength - 36;
const buf = new Uint8Array(bufLength);
const sub = buf.subarray(0, subLength);
let pos = 0;
let out = '';
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) {
if (pos === bufLength) {
out += td.decode(buf);
pos = 0;
}
buf[pos++] = semicolon;
}
if (line.length === 0)
continue;
state[0] = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
// We can push up to 5 ints, each int can take at most 7 chars, and we
// may push a comma.
if (pos > subLength) {
out += td.decode(sub);
buf.copyWithin(0, subLength, pos);
pos -= subLength;
}
if (j > 0)
buf[pos++] = comma;
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
if (segment.length === 1)
continue;
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
if (segment.length === 4)
continue;
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
}
}
return out + td.decode(buf.subarray(0, pos));
}
// Writes segment[j] as a delta-encoded base64 VLQ into `buf`.
function encodeInteger(buf, pos, state, segment, j) {
const next = segment[j];
let num = next - state[j];
state[j] = next;
num = num < 0 ? (-num << 1) | 1 : num << 1;
do {
let clamped = num & 0b011111;
num >>>= 5;
if (num > 0)
clamped |= 0b100000;
buf[pos++] = intToChar[clamped];
} while (num > 0);
return pos;
}
exports.decode = decode;
exports.encode = encode;
Object.defineProperty(exports, '__esModule', { value: true });
}));
});
// Unwrap the codec: prefer a `default` export if present (interop with
// transpiled ES modules), otherwise use the namespace object itself.
var codec = (sourcemapCodec_umd && typeof sourcemapCodec_umd === 'object' && 'default' in sourcemapCodec_umd ? sourcemapCodec_umd['default'] : sourcemapCodec_umd);
/**
 * Decodes a base64 string
 * @param {string} base64 - the string to decode
 * @returns {string}
 */
function atob ( base64 ) {
	// Buffer.from replaces `new Buffer(...)`, which has been deprecated
	// since Node 6 (DEP0005) and emits runtime warnings.
	return Buffer.from( base64, 'base64' ).toString( 'utf8' );
}
// this looks ridiculous, but it prevents sourcemap tooling from mistaking
// this for an actual sourceMappingURL
// (the string is assembled at runtime from two halves for exactly that reason)
var SOURCEMAPPING_URL = 'sourceMa';
SOURCEMAPPING_URL += 'ppingURL';
var SOURCEMAPPING_URL$1 = SOURCEMAPPING_URL;
// JSON.parse wrapper whose error message identifies the offending sourcemap.
function parseJSON ( json, url ) {
	var parsed;
	try {
		parsed = JSON.parse( json );
	} catch ( err ) {
		throw new Error( ("Could not parse sourcemap (" + url + "): " + (err.message)) );
	}
	return parsed;
}
/**
 * Turns a sourceMappingURL into a sourcemap
 * @param {string} url - the sourceMappingURL. Can be a
   base64-encoded data URI
 * @param {string} base - the URL against which relative URLS
   should be resolved
 * @param {boolean} sync - if `true`, return the sourcemap directly;
   otherwise return a promise for it
 * @returns {object} - a version 3 sourcemap
 */
function getMapFromUrl ( url, base, sync ) {
	// inline data URIs carry the map themselves — decode and parse in place
	if ( /^data:/.test( url ) ) { // TODO beef this up
		var match = /base64,(.+)$/.exec( url );
		if ( !match ) {
			throw new Error( (SOURCEMAPPING_URL$1 + " is not base64-encoded") );
		}
		var json = atob( match[1] );
		var map = parseJSON( json, ("data URI in " + base) );
		return sync ? map : Promise$1.resolve( map );
	}
	// otherwise resolve the URL relative to the host file and read it from disk
	url = resolve( dirname( base ), decodeURI( url ) );
	if ( sync ) {
		return parseJSON( readFileSync( url, { encoding: 'utf-8' }), url );
	} else {
		return readFile( url, { encoding: 'utf-8' }).then( function (json) { return parseJSON( json, url ); } );
	}
}
/**
 * Extracts the last sourceMappingURL from a file's contents
 * @param {string} str - the source text to scan
 * @returns {string|null} the URL, or null if none was found
 */
function getSourceMappingUrl ( str ) {
	// assume we want the last occurrence
	var index = str.lastIndexOf( (SOURCEMAPPING_URL$1 + "=") );
	if ( index === -1 ) {
		return null;
	}
	// skip past "sourceMappingURL=" — derived from the constant instead of
	// the previous hard-coded magic number 17
	var substring = str.substring( index + SOURCEMAPPING_URL$1.length + 1 );
	var match = /^[^\r\n]+/.exec( substring );
	var url = match ? match[0] : null;
	// possibly a better way to do this, but we don't want to exclude whitespace
	// from the sourceMappingURL because it might not have been correctly encoded
	if ( url && url.slice( -2 ) === '*/' ) {
		url = url.slice( 0, -2 ).trim();
	}
	return url;
}
// Finds the sourcemap for a node: an explicitly supplied map wins, then a
// sourceMappingURL comment in the content; otherwise the node is an original
// source. Returns the map (or null) directly when `sync`, else a promise.
function getMap ( node, sourceMapByPath, sync ) {
	if ( node.file in sourceMapByPath ) {
		var map = sourceMapByPath[ node.file ];
		return sync ? map : Promise$1.resolve( map );
	}
	var url = getSourceMappingUrl( node.content );
	if ( url ) {
		return getMapFromUrl( url, node.file, sync );
	}
	// no sourceMappingURL comment — nothing upstream of this node
	node.isOriginalSource = true;
	return sync ? null : Promise$1.resolve( null );
}
// A node in the sourcemap chain: one file (or inline source) plus the
// mappings that point at its own upstream sources.
function Node (opts) {
	this.file = opts.file ? resolve( opts.file ) : null;
	this.content = opts.content || null; // sometimes exists in sourcesContent, sometimes doesn't
	// at least one of the two is required to do anything useful
	if ( this.file === null && this.content === null ) {
		throw new Error( 'A source must specify either file or content' );
	}
	// these get filled in later by load/loadSync
	this.map = null;
	this.mappings = null;
	this.sources = null;
	this.isOriginalSource = null;
	// profiling counters (nanoseconds / counts)
	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Node.prototype = {
	// Asynchronously reads this node's content and sourcemap, then recursively
	// loads every source the map refers to. Resolves once the whole tree is loaded.
	load: function load ( sourcesContentByPath, sourceMapByPath ) {
		var this$1 = this;
		return getContent( this, sourcesContentByPath ).then( function (content) {
			// cache the content so sibling nodes referencing the same file reuse it
			this$1.content = sourcesContentByPath[ this$1.file ] = content;
			return getMap( this$1, sourceMapByPath ).then( function (map) {
				if ( !map ) return null; // original source — nothing upstream
				this$1.map = map;
				var decodingStart = process.hrtime();
				this$1.mappings = codec.decode( map.mappings );
				var decodingTime = process.hrtime( decodingStart );
				this$1._stats.decodingTime = 1e9 * decodingTime[0] + decodingTime[1];
				var sourcesContent = map.sourcesContent || [];
				// sourceRoot (if any) is resolved against this file's directory
				var sourceRoot = resolve( dirname( this$1.file ), map.sourceRoot || '' );
				this$1.sources = map.sources.map( function ( source, i ) {
					return new Node({
						file: source ? resolve( sourceRoot, source ) : null,
						content: sourcesContent[i]
					});
				});
				var promises = this$1.sources.map( function (node) { return node.load( sourcesContentByPath, sourceMapByPath ); } );
				return Promise$1.all( promises );
			});
		});
	},
	// Synchronous counterpart of load(): same caching and recursion, using
	// readFileSync and the `sync` flavour of getMap.
	loadSync: function loadSync ( sourcesContentByPath, sourceMapByPath ) {
		if ( !this.content ) {
			if ( !sourcesContentByPath[ this.file ] ) {
				sourcesContentByPath[ this.file ] = readFileSync( this.file, { encoding: 'utf-8' });
			}
			this.content = sourcesContentByPath[ this.file ];
		}
		var map = getMap( this, sourceMapByPath, true );
		var sourcesContent;
		if ( !map ) {
			this.isOriginalSource = true;
		} else {
			this.map = map;
			this.mappings = codec.decode( map.mappings );
			sourcesContent = map.sourcesContent || [];
			var sourceRoot = resolve( dirname( this.file ), map.sourceRoot || '' );
			this.sources = map.sources.map( function ( source, i ) {
				var node = new Node({
					file: resolve( sourceRoot, source ),
					content: sourcesContent[i]
				});
				node.loadSync( sourcesContentByPath, sourceMapByPath );
				return node;
			});
		}
	},
	/**
	 * Traces a segment back to its origin
	 * @param {number} lineIndex - the zero-based line index of the
	   segment as found in `this`
	 * @param {number} columnIndex - the zero-based column index of the
	   segment as found in `this`
	 * @param {string || null} - if specified, the name that should be
	   (eventually) returned, as it is closest to the generated code
	 * @returns {object}
	     @property {string} source - the filepath of the source
	     @property {number} line - the one-based line index
	     @property {number} column - the zero-based column index
	     @property {string || null} name - the name corresponding
	       to the segment being traced
	 */
	trace: function trace ( lineIndex, columnIndex, name ) {
		var this$1 = this;
		// If this node doesn't have a source map, we have
		// to assume it is the original source
		if ( this.isOriginalSource ) {
			return {
				source: this.file,
				line: lineIndex + 1,
				column: columnIndex || 0,
				name: name
			};
		}
		// Otherwise, we need to figure out what this position in
		// the intermediate file corresponds to in *its* source
		var segments = this.mappings[ lineIndex ];
		if ( !segments || segments.length === 0 ) {
			return null;
		}
		// look for a segment at exactly this column; segments are column-sorted,
		// so we can stop as soon as we pass it
		if ( columnIndex != null ) {
			var len = segments.length;
			var i;
			for ( i = 0; i < len; i += 1 ) {
				var generatedCodeColumn = segments[i][0];
				if ( generatedCodeColumn > columnIndex ) {
					break;
				}
				if ( generatedCodeColumn === columnIndex ) {
					if ( segments[i].length < 4 ) return null;
					var sourceFileIndex$1 = segments[i][1];
					var sourceCodeLine$1 = segments[i][2];
					var sourceCodeColumn = segments[i][3];
					var nameIndex$1 = segments[i][4];
					var parent$1 = this$1.sources[ sourceFileIndex$1 ];
					// recurse into the next node up the chain
					return parent$1.trace( sourceCodeLine$1, sourceCodeColumn, this$1.map.names[ nameIndex$1 ] || name );
				}
			}
		}
		// fall back to a line mapping (first segment on the line)
		var sourceFileIndex = segments[0][1];
		var sourceCodeLine = segments[0][2];
		var nameIndex = segments[0][4];
		var parent = this.sources[ sourceFileIndex ];
		return parent.trace( sourceCodeLine, null, this.map.names[ nameIndex ] || name );
	}
};
// Resolves a node's content: the supplied cache wins, then anything already
// on the node, and only then is the file read from disk.
function getContent ( node, sourcesContentByPath ) {
	if ( node.file in sourcesContentByPath ) {
		node.content = sourcesContentByPath[ node.file ];
	}
	return node.content
		? Promise$1.resolve( node.content )
		: readFile( node.file, { encoding: 'utf-8' });
}
/**
 * Encodes a string as base64
 * @param {string} str - the string to encode
 * @returns {string} the base64 representation
 */
function btoa ( str ) {
	// Buffer.from replaces the deprecated (and type-unsafe) `new Buffer(...)`;
	// utf8 is made explicit (it was the implicit default before)
	return Buffer.from( str, 'utf8' ).toString( 'base64' );
}
// A v3 sourcemap object; serialisable via toString()/toUrl() on the prototype.
function SourceMap ( properties ) {
	this.version = 3; // always emit a version 3 map
	var self = this;
	[ 'file', 'sources', 'sourcesContent', 'names', 'mappings' ].forEach( function ( key ) {
		self[ key ] = properties[ key ];
	});
}
SourceMap.prototype = {
	// JSON serialisation of the map
	toString: function toString () {
		return JSON.stringify( this );
	},
	// Base64 data URI form, suitable for an inline sourceMappingURL comment
	toUrl: function toUrl () {
		var prefix = 'data:application/json;charset=utf-8;base64,';
		return prefix + btoa( this.toString() );
	}
};
// Normalises Windows-style backslashes to forward slashes; non-strings pass through.
function slash ( path ) {
	if ( typeof path !== 'string' ) return path;
	return path.replace( /\\/g, '/' );
}
// Matches a trailing sourceMappingURL comment — JS line-comment form or CSS
// block-comment form — so it can be stripped from content before a fresh
// comment is appended on write.
var SOURCEMAP_COMMENT = new RegExp( "\n*(?:" +
	"\\/\\/[@#]\\s*" + SOURCEMAPPING_URL$1 + "=([^'\"]+)|" + // js
	"\\/\\*#?\\s*" + SOURCEMAPPING_URL$1 + "=([^'\"]+)\\s\\*\\/)" + // css
	'\\s*$', 'g' );
// A resolved chain of sourcemaps rooted at `node`.
function Chain ( node, sourcesContentByPath ) {
	this.node = node;
	this.sourcesContentByPath = sourcesContentByPath;
	// Initialise every counter that stat()/apply() read or increment. With the
	// previous bare `{}`, `this._stats.untraceable += 1` produced NaN and
	// stat() reported NaN for timings recorded before apply() ran.
	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Chain.prototype = {
	// Reports profiling data; raw counters are nanoseconds, reported in ms.
	stat: function stat () {
		return {
			selfDecodingTime: this._stats.decodingTime / 1e6,
			totalDecodingTime: ( this._stats.decodingTime + tally( this.node.sources, 'decodingTime' ) ) / 1e6,
			encodingTime: this._stats.encodingTime / 1e6,
			tracingTime: this._stats.tracingTime / 1e6,
			untraceable: this._stats.untraceable
		};
	},
	// Flattens the whole chain into a single SourceMap whose segments point
	// straight at the original sources.
	apply: function apply ( options ) {
		var this$1 = this;
		if ( options === void 0 ) options = {};
		var allNames = [];
		var allSources = [];
		// Traces one segment to its origin and appends the retargeted segment
		// (if traceable) to `result`.
		var applySegment = function ( segment, result ) {
			if ( segment.length < 4 ) return; // no source info to trace
			var traced = this$1.node.sources[ segment[1] ].trace( // source
				segment[2], // source code line
				segment[3], // source code column
				this$1.node.map.names[ segment[4] ]
			);
			if ( !traced ) {
				this$1._stats.untraceable += 1;
				return;
			}
			// intern the source path
			var sourceIndex = allSources.indexOf( traced.source );
			if ( !~sourceIndex ) {
				sourceIndex = allSources.length;
				allSources.push( traced.source );
			}
			var newSegment = [
				segment[0], // generated code column
				sourceIndex,
				traced.line - 1,
				traced.column
			];
			if ( traced.name ) {
				// intern the name
				var nameIndex = allNames.indexOf( traced.name );
				if ( !~nameIndex ) {
					nameIndex = allNames.length;
					allNames.push( traced.name );
				}
				newSegment[4] = nameIndex;
			}
			result[ result.length ] = newSegment;
		};
		// Trace mappings
		var tracingStart = process.hrtime();
		var i = this.node.mappings.length;
		var resolved = new Array( i );
		var j, line, result;
		while ( i-- ) {
			line = this$1.node.mappings[i];
			resolved[i] = result = [];
			for ( j = 0; j < line.length; j += 1 ) {
				applySegment( line[j], result );
			}
		}
		var tracingTime = process.hrtime( tracingStart );
		this._stats.tracingTime = 1e9 * tracingTime[0] + tracingTime[1];
		// Encode mappings
		var encodingStart = process.hrtime();
		var mappings = codec.encode( resolved );
		var encodingTime = process.hrtime( encodingStart );
		this._stats.encodingTime = 1e9 * encodingTime[0] + encodingTime[1];
		var includeContent = options.includeContent !== false; // default: include
		return new SourceMap({
			file: basename( this.node.file ),
			sources: allSources.map( function (source) { return slash( relative( options.base || dirname( this$1.node.file ), source ) ); } ),
			sourcesContent: allSources.map( function (source) { return includeContent ? this$1.sourcesContentByPath[ source ] : null; } ),
			names: allNames,
			mappings: mappings
		});
	},
	// Traces a single (1-based line, 0-based column) position to its origin.
	trace: function trace ( oneBasedLineIndex, zeroBasedColumnIndex ) {
		return this.node.trace( oneBasedLineIndex - 1, zeroBasedColumnIndex, null );
	},
	// Writes the generated file (and, unless options.inline, its .map) to disk.
	// `dest` defaults to the node's own file when omitted.
	write: function write ( dest, options ) {
		if ( typeof dest !== 'string' ) {
			options = dest;
			dest = this.node.file;
		}
		options = options || {};
		var ref = processWriteOptions( dest, this, options );
		var resolved = ref.resolved;
		var content = ref.content;
		var map = ref.map;
		var promises = [ writeFile( resolved, content ) ];
		if ( !options.inline ) {
			promises.push( writeFile( resolved + '.map', map.toString() ) );
		}
		return Promise.all( promises );
	},
	// Synchronous counterpart of write().
	writeSync: function writeSync ( dest, options ) {
		if ( typeof dest !== 'string' ) {
			options = dest;
			dest = this.node.file;
		}
		options = options || {};
		var ref = processWriteOptions( dest, this, options );
		var resolved = ref.resolved;
		var content = ref.content;
		var map = ref.map;
		writeFileSync( resolved, content );
		if ( !options.inline ) {
			writeFileSync( resolved + '.map', map.toString() );
		}
	}
};
// Prepares everything write()/writeSync() need: the resolved destination path,
// the flattened map, and the content with its sourceMappingURL comment
// stripped and re-appended (inline data URI or sibling .map reference).
function processWriteOptions ( dest, chain, options ) {
	var resolved = resolve( dest );
	var map = chain.apply({
		includeContent: options.includeContent,
		base: options.base ? resolve( options.base ) : dirname( resolved )
	});
	var url = options.inline ? map.toUrl() : ( options.absolutePath ? resolved : basename( resolved ) ) + '.map';
	// TODO shouldn't url be relative?
	var content = chain.node.content.replace( SOURCEMAP_COMMENT, '' ) + sourcemapComment( url, resolved );
	return { resolved: resolved, content: content, map: map };
}
// Sums one named _stats counter across a list of nodes.
function tally ( nodes, stat ) {
	var total = 0;
	for ( var i = 0; i < nodes.length; i += 1 ) {
		total += nodes[i]._stats[ stat ];
	}
	return total;
}
// Builds the trailing sourceMappingURL comment: CSS files get the block-comment
// form, everything else the line-comment form.
function sourcemapComment ( url, dest ) {
	url = encodeURI( url );
	var isCss = extname( dest ) === '.css';
	if ( isCss ) {
		return ("\n/*# " + SOURCEMAPPING_URL$1 + "=" + url + " */\n");
	}
	return ("\n//# " + SOURCEMAPPING_URL$1 + "=" + url + "\n");
}
// Asynchronously loads the sourcemap chain rooted at `file`. Resolves to a
// Chain, or null when the file has no sourcemap (it IS the original source).
function load ( file, options ) {
	var ref = init( file, options );
	var node = ref.node;
	return node.load( ref.sourcesContentByPath, ref.sourceMapByPath ).then( function () {
		if ( node.isOriginalSource ) return null;
		return new Chain( node, ref.sourcesContentByPath );
	});
}
// Synchronous counterpart of load(): returns a Chain, or null when the file
// has no sourcemap of its own.
function loadSync ( file, options ) {
	if ( options === void 0 ) options = {};
	var ref = init( file, options );
	var node = ref.node;
	node.loadSync( ref.sourcesContentByPath, ref.sourceMapByPath );
	if ( node.isOriginalSource ) return null;
	return new Chain( node, ref.sourcesContentByPath );
}
// Creates the root node and seeds the content/sourcemap caches with any
// user-supplied values, keyed by absolute path.
function init ( file, options ) {
	if ( options === void 0 ) options = {};
	var node = new Node({ file: file });
	var sourcesContentByPath = {};
	var sourceMapByPath = {};
	var seed = function ( input, cache ) {
		if ( !input ) return;
		Object.keys( input ).forEach( function (key) {
			cache[ resolve( key ) ] = input[ key ];
		});
	};
	seed( options.content, sourcesContentByPath );
	seed( options.sourcemaps, sourceMapByPath );
	return { node: node, sourcesContentByPath: sourcesContentByPath, sourceMapByPath: sourceMapByPath };
}
export { load, loadSync };
//# sourceMappingURL=sorcery.es6.js.map

1
node_modules/sorcery/dist/sorcery.es6.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

56
node_modules/sorcery/package.json generated vendored Normal file
View File

@ -0,0 +1,56 @@
{
"name": "sorcery",
"description": "Resolve a chain of sourcemaps back to the original source",
"version": "0.11.0",
"author": "Rich Harris",
"repository": "https://github.com/Rich-Harris/sorcery",
"main": "dist/sorcery.cjs.js",
"jsnext:main": "dist/sorcery.es6.js",
"license": "MIT",
"dependencies": {
"buffer-crc32": "^0.2.5",
"minimist": "^1.2.0",
"sander": "^0.5.0",
"@jridgewell/sourcemap-codec": "^1.4.14"
},
"devDependencies": {
"buble": "^0.10.4",
"codecov.io": "^0.1.6",
"coffee-script": "^1.10.0",
"eslint": "^2.8.0",
"glob": "^7.0.3",
"istanbul": "^0.4.3",
"less": "^2.6.1",
"mocha": "^2.4.5",
"promise-map-series": "^0.2.2",
"remap-istanbul": "^0.6.3",
"rollup": "^0.31.0",
"rollup-plugin-buble": "^0.10.0",
"rollup-plugin-commonjs": "^2.2.1",
"rollup-plugin-npm": "^1.4.0",
"source-map": "^0.5.3",
"source-map-support": "^0.4.0",
"uglify-js": "^2.6.2"
},
"bin": {
"sorcery": "bin/sorcery"
},
"scripts": {
"build": "rm -rf dist && rollup -c -f cjs -o dist/sorcery.cjs.js && rollup -c -f es6 -o dist/sorcery.es6.js",
"pretest": "npm run build",
"prepare-tests": "node test/samples/prepare-tests.js",
"test": "mocha --compilers js:buble/register",
"prepublish": "npm test",
"lint": "eslint src",
"pretest-coverage": "npm run build",
"test-coverage": "rm -rf coverage/* && istanbul cover --report json node_modules/.bin/_mocha -- -u exports -R spec test/test.js",
"posttest-coverage": "remap-istanbul -i coverage/coverage-final.json -o coverage/coverage-remapped.json -b dist && remap-istanbul -i coverage/coverage-final.json -o coverage/coverage-remapped.lcov -t lcovonly -b dist && remap-istanbul -i coverage/coverage-final.json -o coverage/coverage-remapped -t html -b dist",
"ci": "npm run test-coverage && codecov < coverage/coverage-remapped.lcov"
},
"files": [
"src/",
"dist/",
"bin/",
"README.md"
]
}

186
node_modules/sorcery/src/Chain.js generated vendored Normal file
View File

@ -0,0 +1,186 @@
import { basename, dirname, extname, relative, resolve } from 'path';
import { writeFile, writeFileSync } from 'sander';
import codec from '@jridgewell/sourcemap-codec';
import SourceMap from './SourceMap.js';
import slash from './utils/slash.js';
import SOURCEMAPPING_URL from './utils/sourceMappingURL.js';
// Matches a trailing sourceMappingURL comment — JS line-comment form or CSS
// block-comment form — so it can be stripped from content before a fresh
// comment is appended on write.
const SOURCEMAP_COMMENT = new RegExp( `\n*(?:` +
	`\\/\\/[@#]\\s*${SOURCEMAPPING_URL}=([^'"]+)|` + // js
	`\\/\\*#?\\s*${SOURCEMAPPING_URL}=([^'"]+)\\s\\*\\/)` + // css
	'\\s*$', 'g' );
// A resolved chain of sourcemaps rooted at `node`.
export default function Chain ( node, sourcesContentByPath ) {
	this.node = node;
	this.sourcesContentByPath = sourcesContentByPath;
	// Initialise every counter that stat()/apply() read or increment. With the
	// previous bare `{}`, `this._stats.untraceable += 1` produced NaN and
	// stat() reported NaN for timings recorded before apply() ran.
	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Chain.prototype = {
	// Reports profiling data; raw counters are nanoseconds, reported in ms.
	stat () {
		return {
			selfDecodingTime: this._stats.decodingTime / 1e6,
			totalDecodingTime: ( this._stats.decodingTime + tally( this.node.sources, 'decodingTime' ) ) / 1e6,
			encodingTime: this._stats.encodingTime / 1e6,
			tracingTime: this._stats.tracingTime / 1e6,
			untraceable: this._stats.untraceable
		};
	},
	// Flattens the whole chain into a single SourceMap whose segments point
	// straight at the original sources.
	apply ( options = {} ) {
		let allNames = [];
		let allSources = [];
		// Traces one segment to its origin and appends the retargeted segment
		// (if traceable) to `result`.
		const applySegment = ( segment, result ) => {
			if ( segment.length < 4 ) return; // no source info to trace
			const traced = this.node.sources[ segment[1] ].trace( // source
				segment[2], // source code line
				segment[3], // source code column
				this.node.map.names[ segment[4] ]
			);
			if ( !traced ) {
				this._stats.untraceable += 1;
				return;
			}
			// intern the source path
			let sourceIndex = allSources.indexOf( traced.source );
			if ( !~sourceIndex ) {
				sourceIndex = allSources.length;
				allSources.push( traced.source );
			}
			let newSegment = [
				segment[0], // generated code column
				sourceIndex,
				traced.line - 1,
				traced.column
			];
			if ( traced.name ) {
				// intern the name
				let nameIndex = allNames.indexOf( traced.name );
				if ( !~nameIndex ) {
					nameIndex = allNames.length;
					allNames.push( traced.name );
				}
				newSegment[4] = nameIndex;
			}
			result[ result.length ] = newSegment;
		};
		// Trace mappings
		let tracingStart = process.hrtime();
		let i = this.node.mappings.length;
		let resolved = new Array( i );
		let j, line, result;
		while ( i-- ) {
			line = this.node.mappings[i];
			resolved[i] = result = [];
			for ( j = 0; j < line.length; j += 1 ) {
				applySegment( line[j], result );
			}
		}
		let tracingTime = process.hrtime( tracingStart );
		this._stats.tracingTime = 1e9 * tracingTime[0] + tracingTime[1];
		// Encode mappings
		let encodingStart = process.hrtime();
		let mappings = codec.encode( resolved );
		let encodingTime = process.hrtime( encodingStart );
		this._stats.encodingTime = 1e9 * encodingTime[0] + encodingTime[1];
		let includeContent = options.includeContent !== false; // default: include
		return new SourceMap({
			file: basename( this.node.file ),
			sources: allSources.map( source => slash( relative( options.base || dirname( this.node.file ), source ) ) ),
			sourcesContent: allSources.map( source => includeContent ? this.sourcesContentByPath[ source ] : null ),
			names: allNames,
			mappings
		});
	},
	// Traces a single (1-based line, 0-based column) position to its origin.
	trace ( oneBasedLineIndex, zeroBasedColumnIndex ) {
		return this.node.trace( oneBasedLineIndex - 1, zeroBasedColumnIndex, null );
	},
	// Writes the generated file (and, unless options.inline, its .map) to disk.
	// `dest` defaults to the node's own file when omitted.
	write ( dest, options ) {
		if ( typeof dest !== 'string' ) {
			options = dest;
			dest = this.node.file;
		}
		options = options || {};
		const { resolved, content, map } = processWriteOptions( dest, this, options );
		let promises = [ writeFile( resolved, content ) ];
		if ( !options.inline ) {
			promises.push( writeFile( resolved + '.map', map.toString() ) );
		}
		return Promise.all( promises );
	},
	// Synchronous counterpart of write().
	writeSync ( dest, options ) {
		if ( typeof dest !== 'string' ) {
			options = dest;
			dest = this.node.file;
		}
		options = options || {};
		const { resolved, content, map } = processWriteOptions( dest, this, options );
		writeFileSync( resolved, content );
		if ( !options.inline ) {
			writeFileSync( resolved + '.map', map.toString() );
		}
	}
};
// Prepares everything write()/writeSync() need: the resolved destination path,
// the flattened map, and the content with its sourceMappingURL comment
// stripped and re-appended (inline data URI or sibling .map reference).
function processWriteOptions ( dest, chain, options ) {
	const resolved = resolve( dest );
	const map = chain.apply({
		includeContent: options.includeContent,
		base: options.base ? resolve( options.base ) : dirname( resolved )
	});
	const url = options.inline ? map.toUrl() : ( options.absolutePath ? resolved : basename( resolved ) ) + '.map';
	// TODO shouldn't url be relative?
	const content = chain.node.content.replace( SOURCEMAP_COMMENT, '' ) + sourcemapComment( url, resolved );
	return { resolved, content, map };
}
// Sums one named _stats counter across a list of nodes.
function tally ( nodes, stat ) {
	let total = 0;
	for ( const node of nodes ) {
		total += node._stats[ stat ];
	}
	return total;
}
// Builds the trailing sourceMappingURL comment: CSS files get the block-comment
// form, everything else the line-comment form.
function sourcemapComment ( url, dest ) {
	url = encodeURI( url );
	if ( extname( dest ) === '.css' ) {
		return `\n/*# ${SOURCEMAPPING_URL}=${url} */\n`;
	}
	return `\n//# ${SOURCEMAPPING_URL}=${url}\n`;
}

175
node_modules/sorcery/src/Node.js generated vendored Normal file
View File

@ -0,0 +1,175 @@
import { dirname, resolve } from 'path';
import { readFile, readFileSync, Promise } from 'sander';
import codec from '@jridgewell/sourcemap-codec';
import getMap from './utils/getMap.js';
// A node in the sourcemap chain: one file (or inline source) plus the
// mappings that point at its own upstream sources.
export default function Node ({ file, content }) {
	this.file = file ? resolve( file ) : null;
	this.content = content || null; // sometimes exists in sourcesContent, sometimes doesn't
	// at least one of the two is required to do anything useful
	if ( this.file === null && this.content === null ) {
		throw new Error( 'A source must specify either file or content' );
	}
	// these get filled in later by load/loadSync
	this.map = null;
	this.mappings = null;
	this.sources = null;
	this.isOriginalSource = null;
	// profiling counters (nanoseconds / counts)
	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Node.prototype = {
	// Asynchronously reads this node's content and sourcemap, then recursively
	// loads every source the map refers to. Resolves once the whole tree is loaded.
	load ( sourcesContentByPath, sourceMapByPath ) {
		return getContent( this, sourcesContentByPath ).then( content => {
			// cache the content so sibling nodes referencing the same file reuse it
			this.content = sourcesContentByPath[ this.file ] = content;
			return getMap( this, sourceMapByPath ).then( map => {
				if ( !map ) return null; // original source — nothing upstream
				this.map = map;
				let decodingStart = process.hrtime();
				this.mappings = codec.decode( map.mappings );
				let decodingTime = process.hrtime( decodingStart );
				this._stats.decodingTime = 1e9 * decodingTime[0] + decodingTime[1];
				const sourcesContent = map.sourcesContent || [];
				// sourceRoot (if any) is resolved against this file's directory
				const sourceRoot = resolve( dirname( this.file ), map.sourceRoot || '' );
				this.sources = map.sources.map( ( source, i ) => {
					return new Node({
						file: source ? resolve( sourceRoot, source ) : null,
						content: sourcesContent[i]
					});
				});
				const promises = this.sources.map( node => node.load( sourcesContentByPath, sourceMapByPath ) );
				return Promise.all( promises );
			});
		});
	},
	// Synchronous counterpart of load(): same caching and recursion, using
	// readFileSync and the `sync` flavour of getMap.
	loadSync ( sourcesContentByPath, sourceMapByPath ) {
		if ( !this.content ) {
			if ( !sourcesContentByPath[ this.file ] ) {
				sourcesContentByPath[ this.file ] = readFileSync( this.file, { encoding: 'utf-8' });
			}
			this.content = sourcesContentByPath[ this.file ];
		}
		const map = getMap( this, sourceMapByPath, true );
		let sourcesContent;
		if ( !map ) {
			this.isOriginalSource = true;
		} else {
			this.map = map;
			this.mappings = codec.decode( map.mappings );
			sourcesContent = map.sourcesContent || [];
			const sourceRoot = resolve( dirname( this.file ), map.sourceRoot || '' );
			this.sources = map.sources.map( ( source, i ) => {
				const node = new Node({
					file: resolve( sourceRoot, source ),
					content: sourcesContent[i]
				});
				node.loadSync( sourcesContentByPath, sourceMapByPath );
				return node;
			});
		}
	},
	/**
	 * Traces a segment back to its origin
	 * @param {number} lineIndex - the zero-based line index of the
	   segment as found in `this`
	 * @param {number} columnIndex - the zero-based column index of the
	   segment as found in `this`
	 * @param {string || null} - if specified, the name that should be
	   (eventually) returned, as it is closest to the generated code
	 * @returns {object}
	     @property {string} source - the filepath of the source
	     @property {number} line - the one-based line index
	     @property {number} column - the zero-based column index
	     @property {string || null} name - the name corresponding
	       to the segment being traced
	 */
	trace ( lineIndex, columnIndex, name ) {
		// If this node doesn't have a source map, we have
		// to assume it is the original source
		if ( this.isOriginalSource ) {
			return {
				source: this.file,
				line: lineIndex + 1,
				column: columnIndex || 0,
				name: name
			};
		}
		// Otherwise, we need to figure out what this position in
		// the intermediate file corresponds to in *its* source
		const segments = this.mappings[ lineIndex ];
		if ( !segments || segments.length === 0 ) {
			return null;
		}
		// look for a segment at exactly this column; segments are column-sorted,
		// so we can stop as soon as we pass it
		if ( columnIndex != null ) {
			let len = segments.length;
			let i;
			for ( i = 0; i < len; i += 1 ) {
				let generatedCodeColumn = segments[i][0];
				if ( generatedCodeColumn > columnIndex ) {
					break;
				}
				if ( generatedCodeColumn === columnIndex ) {
					if ( segments[i].length < 4 ) return null;
					let sourceFileIndex = segments[i][1];
					let sourceCodeLine = segments[i][2];
					let sourceCodeColumn = segments[i][3];
					let nameIndex = segments[i][4];
					let parent = this.sources[ sourceFileIndex ];
					// recurse into the next node up the chain
					return parent.trace( sourceCodeLine, sourceCodeColumn, this.map.names[ nameIndex ] || name );
				}
			}
		}
		// fall back to a line mapping (first segment on the line)
		let sourceFileIndex = segments[0][1];
		let sourceCodeLine = segments[0][2];
		let nameIndex = segments[0][4];
		let parent = this.sources[ sourceFileIndex ];
		return parent.trace( sourceCodeLine, null, this.map.names[ nameIndex ] || name );
	}
};
// Resolves a node's content: the supplied cache wins, then anything already
// on the node, and only then is the file read from disk.
function getContent ( node, sourcesContentByPath ) {
	if ( node.file in sourcesContentByPath ) {
		node.content = sourcesContentByPath[ node.file ];
	}
	return node.content
		? Promise.resolve( node.content )
		: readFile( node.file, { encoding: 'utf-8' });
}

21
node_modules/sorcery/src/SourceMap.js generated vendored Normal file
View File

@ -0,0 +1,21 @@
import btoa from './utils/btoa.js';
// A v3 sourcemap object; serialisable via toString()/toUrl() on the prototype.
export default function SourceMap ( properties ) {
	this.version = 3; // always emit a version 3 map
	for ( const key of [ 'file', 'sources', 'sourcesContent', 'names', 'mappings' ] ) {
		this[ key ] = properties[ key ];
	}
}
SourceMap.prototype = {
	// JSON serialisation of the map
	toString () {
		return JSON.stringify( this );
	},
	// Base64 data URI form, suitable for an inline sourceMappingURL comment
	toUrl () {
		const prefix = 'data:application/json;charset=utf-8;base64,';
		return prefix + btoa( this.toString() );
	}
};

38
node_modules/sorcery/src/index.js generated vendored Normal file
View File

@ -0,0 +1,38 @@
import { resolve } from 'path';
import Node from './Node.js';
import Chain from './Chain.js';
// Asynchronously loads the sourcemap chain rooted at `file`. Resolves to a
// Chain, or null when the file has no sourcemap (it IS the original source).
export function load ( file, options ) {
	const { node, sourcesContentByPath, sourceMapByPath } = init( file, options );
	return node.load( sourcesContentByPath, sourceMapByPath ).then( () => {
		if ( node.isOriginalSource ) return null;
		return new Chain( node, sourcesContentByPath );
	});
}
// Synchronous counterpart of load(): returns a Chain, or null when the file
// has no sourcemap of its own.
export function loadSync ( file, options = {} ) {
	const { node, sourcesContentByPath, sourceMapByPath } = init( file, options );
	node.loadSync( sourcesContentByPath, sourceMapByPath );
	if ( node.isOriginalSource ) return null;
	return new Chain( node, sourcesContentByPath );
}
// Creates the root node and seeds the content/sourcemap caches with any
// user-supplied values, keyed by absolute path.
function init ( file, options = {} ) {
	const node = new Node({ file });
	const sourcesContentByPath = {};
	const sourceMapByPath = {};
	const seed = ( input, cache ) => {
		if ( !input ) return;
		for ( const key of Object.keys( input ) ) {
			cache[ resolve( key ) ] = input[ key ];
		}
	};
	seed( options.content, sourcesContentByPath );
	seed( options.sourcemaps, sourceMapByPath );
	return { node, sourcesContentByPath, sourceMapByPath };
}

8
node_modules/sorcery/src/utils/atob.js generated vendored Normal file
View File

@ -0,0 +1,8 @@
/**
 * Decodes a base64 string
 * @param {string} base64 - the string to decode
 * @returns {string} the decoded UTF-8 text
 */
export default function atob ( base64 ) {
	// Buffer.from replaces the deprecated (and type-unsafe) `new Buffer(...)`
	return Buffer.from( base64, 'base64' ).toString( 'utf8' );
}

8
node_modules/sorcery/src/utils/btoa.js generated vendored Normal file
View File

@ -0,0 +1,8 @@
/**
* Encodes a string as base64
* @param {string} str - the string to encode
* @returns {string}
*/
export default function btoa ( str ) {
return new Buffer( str ).toString( 'base64' );
}

21
node_modules/sorcery/src/utils/getMap.js generated vendored Normal file
View File

@ -0,0 +1,21 @@
import { Promise } from 'sander';
import getMapFromUrl from './getMapFromUrl.js';
import getSourceMappingUrl from './getSourceMappingUrl.js';
// Finds the sourcemap for a node: an explicitly supplied map wins, then a
// sourceMappingURL comment in the content; otherwise the node is an original
// source. Returns the map (or null) directly when `sync`, else a promise.
export default function getMap ( node, sourceMapByPath, sync ) {
	if ( node.file in sourceMapByPath ) {
		const map = sourceMapByPath[ node.file ];
		return sync ? map : Promise.resolve( map );
	}
	const url = getSourceMappingUrl( node.content );
	if ( url ) {
		return getMapFromUrl( url, node.file, sync );
	}
	// no sourceMappingURL comment — nothing upstream of this node
	node.isOriginalSource = true;
	return sync ? null : Promise.resolve( null );
}

44
node_modules/sorcery/src/utils/getMapFromUrl.js generated vendored Normal file
View File

@ -0,0 +1,44 @@
import { dirname, resolve } from 'path';
import { readFile, readFileSync, Promise } from 'sander';
import atob from './atob.js';
import SOURCEMAPPING_URL from './sourceMappingURL.js';
// JSON.parse wrapper whose error message identifies the offending sourcemap.
function parseJSON ( json, url ) {
	let parsed;
	try {
		parsed = JSON.parse( json );
	} catch ( err ) {
		throw new Error( `Could not parse sourcemap (${url}): ${err.message}` );
	}
	return parsed;
}
/**
 * Turns a sourceMappingURL into a sourcemap
 * @param {string} url - the sourceMappingURL. Can be a
   base64-encoded data URI
 * @param {string} base - the URL against which relative URLS
   should be resolved
 * @param {boolean} sync - if `true`, return the sourcemap directly;
   otherwise return a promise for it
 * @returns {object} - a version 3 sourcemap
 */
export default function getMapFromUrl ( url, base, sync ) {
	// inline data URIs carry the map themselves — decode and parse in place
	if ( /^data:/.test( url ) ) { // TODO beef this up
		const match = /base64,(.+)$/.exec( url );
		if ( !match ) {
			throw new Error( `${SOURCEMAPPING_URL} is not base64-encoded` );
		}
		const json = atob( match[1] );
		const map = parseJSON( json, `data URI in ${base}` );
		return sync ? map : Promise.resolve( map );
	}
	// otherwise resolve the URL relative to the host file and read it from disk
	url = resolve( dirname( base ), decodeURI( url ) );
	if ( sync ) {
		return parseJSON( readFileSync( url, { encoding: 'utf-8' }), url );
	} else {
		return readFile( url, { encoding: 'utf-8' }).then( json => parseJSON( json, url ) );
	}
}

25
node_modules/sorcery/src/utils/getSourceMappingUrl.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
import SOURCEMAPPING_URL from './sourceMappingURL.js';
/**
 * Extracts the last sourceMappingURL from a file's contents
 * @param {string} str - the source text to scan
 * @returns {string|null} the URL, or null if none was found
 */
export default function getSourceMappingUrl ( str ) {
	// assume we want the last occurrence
	const index = str.lastIndexOf( `${SOURCEMAPPING_URL}=` );
	if ( index === -1 ) {
		return null;
	}
	// skip past "sourceMappingURL=" — derived from the constant instead of
	// the previous hard-coded magic number 17
	const substring = str.substring( index + SOURCEMAPPING_URL.length + 1 );
	const match = /^[^\r\n]+/.exec( substring );
	let url = match ? match[0] : null;
	// possibly a better way to do this, but we don't want to exclude whitespace
	// from the sourceMappingURL because it might not have been correctly encoded
	if ( url && url.slice( -2 ) === '*/' ) {
		url = url.slice( 0, -2 ).trim();
	}
	return url;
}

5
node_modules/sorcery/src/utils/slash.js generated vendored Normal file
View File

@ -0,0 +1,5 @@
// Normalises Windows-style backslashes to forward slashes; non-strings pass through.
export default function slash ( path ) {
	if ( typeof path !== 'string' ) return path;
	return path.replace( /\\/g, '/' );
}

6
node_modules/sorcery/src/utils/sourceMappingURL.js generated vendored Normal file
View File

@ -0,0 +1,6 @@
// this looks ridiculous, but it prevents sourcemap tooling from mistaking
// this for an actual sourceMappingURL
let SOURCEMAPPING_URL = 'sourceMa';
SOURCEMAPPING_URL += 'ppingURL';
export default SOURCEMAPPING_URL;