first commit
186  node_modules/sorcery/src/Chain.js  (generated, vendored, new file)
@@ -0,0 +1,186 @@
import { basename, dirname, extname, relative, resolve } from 'path';
import { writeFile, writeFileSync } from 'sander';
import { encode } from '@jridgewell/sourcemap-codec';
import SourceMap from './SourceMap.js';
import slash from './utils/slash.js';
import SOURCEMAPPING_URL from './utils/sourceMappingURL.js';

const SOURCEMAP_COMMENT = new RegExp( `\n*(?:` +
  `\\/\\/[@#]\\s*${SOURCEMAPPING_URL}=([^\n]+)|` + // js
  `\\/\\*#?\\s*${SOURCEMAPPING_URL}=([^'"]+)\\s\\*\\/)` + // css
  '\\s*$', 'g' );

export default function Chain ( node, sourcesContentByPath ) {
  this.node = node;
  this.sourcesContentByPath = sourcesContentByPath;

  this._stats = {};
}

Chain.prototype = {
  stat () {
    return {
      selfDecodingTime: this._stats.decodingTime / 1e6,
      totalDecodingTime: ( this._stats.decodingTime + tally( this.node.sources, 'decodingTime' ) ) / 1e6,

      encodingTime: this._stats.encodingTime / 1e6,
      tracingTime: this._stats.tracingTime / 1e6,

      untraceable: this._stats.untraceable
    };
  },

  apply ( options = {} ) {
    let allNames = [];
    let allSources = [];

    const applySegment = ( segment, result ) => {
      if ( segment.length < 4 ) return;

      const traced = this.node.sources[ segment[1] ].trace( // source
        segment[2], // source code line
        segment[3], // source code column
        this.node.map.names[ segment[4] ]
      );

      if ( !traced ) {
        this._stats.untraceable += 1;
        return;
      }

      let sourceIndex = allSources.indexOf( traced.source );
      if ( !~sourceIndex ) {
        sourceIndex = allSources.length;
        allSources.push( traced.source );
      }

      let newSegment = [
        segment[0], // generated code column
        sourceIndex,
        traced.line - 1,
        traced.column
      ];

      if ( traced.name ) {
        let nameIndex = allNames.indexOf( traced.name );
        if ( !~nameIndex ) {
          nameIndex = allNames.length;
          allNames.push( traced.name );
        }

        newSegment[4] = nameIndex;
      }

      result[ result.length ] = newSegment;
    };

    // Trace mappings
    let tracingStart = process.hrtime();

    let i = this.node.mappings.length;
    let resolved = new Array( i );

    let j, line, result;

    while ( i-- ) {
      line = this.node.mappings[i];
      resolved[i] = result = [];

      for ( j = 0; j < line.length; j += 1 ) {
        applySegment( line[j], result );
      }
    }

    let tracingTime = process.hrtime( tracingStart );
    this._stats.tracingTime = 1e9 * tracingTime[0] + tracingTime[1];

    // Encode mappings
    let encodingStart = process.hrtime();
    let mappings = encode( resolved );
    let encodingTime = process.hrtime( encodingStart );
    this._stats.encodingTime = 1e9 * encodingTime[0] + encodingTime[1];

    let includeContent = options.includeContent !== false;

    return new SourceMap({
      file: basename( this.node.file ),
      sources: allSources.map( source => slash( relative( options.base || dirname( this.node.file ), source ) ) ),
      sourcesContent: allSources.map( source => includeContent ? this.sourcesContentByPath[ source ] : null ),
      names: allNames,
      mappings
    });
  },

  trace ( oneBasedLineIndex, zeroBasedColumnIndex ) {
    return this.node.trace( oneBasedLineIndex - 1, zeroBasedColumnIndex, null );
  },

  write ( dest, options ) {
    if ( typeof dest !== 'string' ) {
      options = dest;
      dest = this.node.file;
    }

    options = options || {};

    const { resolved, content, map } = processWriteOptions( dest, this, options );

    let promises = [ writeFile( resolved, content ) ];

    if ( !options.inline ) {
      promises.push( writeFile( resolved + '.map', map.toString() ) );
    }

    return Promise.all( promises );
  },

  writeSync ( dest, options ) {
    if ( typeof dest !== 'string' ) {
      options = dest;
      dest = this.node.file;
    }

    options = options || {};

    const { resolved, content, map } = processWriteOptions( dest, this, options );

    writeFileSync( resolved, content );

    if ( !options.inline ) {
      writeFileSync( resolved + '.map', map.toString() );
    }
  }
};

function processWriteOptions ( dest, chain, options ) {
  const resolved = resolve( dest );

  const map = chain.apply({
    includeContent: options.includeContent,
    base: options.base ? resolve( options.base ) : dirname( resolved )
  });

  const url = options.inline ? map.toUrl() : ( options.absolutePath ? resolved : basename( resolved ) ) + '.map';

  // TODO shouldn't url be relative?
  const content = chain.node.content.replace( SOURCEMAP_COMMENT, '' ) + sourcemapComment( url, resolved );

  return { resolved, content, map };
}

function tally ( nodes, stat ) {
  return nodes.reduce( ( total, node ) => {
    return total + node._stats[ stat ];
  }, 0 );
}

function sourcemapComment ( url, dest ) {
  const ext = extname( dest );
  url = encodeURI( url );

  if ( ext === '.css' ) {
    return `\n/*# ${SOURCEMAPPING_URL}=${url} */\n`;
  }

  return `\n//# ${SOURCEMAPPING_URL}=${url}\n`;
}
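Usage sketch (not part of the vendored file): a Chain wraps the loaded node graph; apply() flattens every intermediate sourcemap into one v3 map, trace() resolves a generated position back to the original source, and write()/writeSync() emit the file together with the flattened map. A minimal sketch, assuming a hypothetical build/min.js whose intermediate maps exist on disk:

import { load } from 'sorcery'; // public entry point, see src/index.js below

load( 'build/min.js' ).then( chain => {
  if ( !chain ) return; // null means the file has no sourcemap to flatten

  // one-based line, zero-based column, resolved through every intermediate map
  const { source, line, column, name } = chain.trace( 1, 31 );
  console.log( source, line, column, name );

  // rewrites the sourceMappingURL comment and writes build/min.js plus build/min.js.map
  return chain.write( 'build/min.js' );
});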
177  node_modules/sorcery/src/Node.js  (generated, vendored, new file)
@@ -0,0 +1,177 @@
import { dirname, resolve } from 'path';
import { readFile, readFileSync, Promise } from 'sander';
import { decode } from '@jridgewell/sourcemap-codec';
import getMap from './utils/getMap.js';

export default function Node ({ file, content }) {
  this.file = file ? resolve( file ) : null;
  this.content = content || null; // sometimes exists in sourcesContent, sometimes doesn't

  if ( !this.file && this.content === null ) {
    throw new Error( 'A source must specify either file or content' );
  }

  // these get filled in later
  this.map = null;
  this.mappings = null;
  this.sources = null;
  this.isOriginalSource = null;

  this._stats = {
    decodingTime: 0,
    encodingTime: 0,
    tracingTime: 0,

    untraceable: 0
  };
}

Node.prototype = {
  load ( sourcesContentByPath, sourceMapByPath ) {
    return getContent( this, sourcesContentByPath ).then( content => {
      this.content = sourcesContentByPath[ this.file ] = content;

      return getMap( this, sourceMapByPath ).then( map => {
        if ( !map ) return null;

        this.map = map;

        let decodingStart = process.hrtime();
        this.mappings = decode( map.mappings );
        let decodingTime = process.hrtime( decodingStart );
        this._stats.decodingTime = 1e9 * decodingTime[0] + decodingTime[1];

        const sourcesContent = map.sourcesContent || [];

        const sourceRoot = resolve( dirname( this.file || '' ), map.sourceRoot || '' );

        this.sources = map.sources.map( ( source, i ) => {
          return new Node({
            file: source ? resolve( sourceRoot, source ) : null,
            content: sourcesContent[i]
          });
        });

        const promises = this.sources.map( node => node.load( sourcesContentByPath, sourceMapByPath ) );
        return Promise.all( promises );
      });
    });
  },

  loadSync ( sourcesContentByPath, sourceMapByPath ) {
    if ( !this.content ) {
      if ( !sourcesContentByPath[ this.file ] ) {
        sourcesContentByPath[ this.file ] = readFileSync( this.file, { encoding: 'utf-8' });
      }

      this.content = sourcesContentByPath[ this.file ];
    } else {
      sourcesContentByPath[ this.file ] = this.content;
    }

    const map = getMap( this, sourceMapByPath, true );
    let sourcesContent;

    if ( !map ) {
      this.isOriginalSource = true;
    } else {
      this.map = map;
      this.mappings = decode( map.mappings );

      sourcesContent = map.sourcesContent || [];

      const sourceRoot = resolve( dirname( this.file || '' ), map.sourceRoot || '' );

      this.sources = map.sources.map( ( source, i ) => {
        const node = new Node({
          file: resolve( sourceRoot, source ),
          content: sourcesContent[i]
        });

        node.loadSync( sourcesContentByPath, sourceMapByPath );
        return node;
      });
    }
  },

  /**
   * Traces a segment back to its origin
   * @param {number} lineIndex - the zero-based line index of the
     segment as found in `this`
   * @param {number} columnIndex - the zero-based column index of the
     segment as found in `this`
   * @param {string || null} - if specified, the name that should be
     (eventually) returned, as it is closest to the generated code
   * @returns {object}
       @property {string} source - the filepath of the source
       @property {number} line - the one-based line index
       @property {number} column - the zero-based column index
       @property {string || null} name - the name corresponding
       to the segment being traced
   */
  trace ( lineIndex, columnIndex, name ) {
    // If this node doesn't have a source map, we have
    // to assume it is the original source
    if ( this.isOriginalSource ) {
      return {
        source: this.file,
        line: lineIndex + 1,
        column: columnIndex || 0,
        name: name
      };
    }

    // Otherwise, we need to figure out what this position in
    // the intermediate file corresponds to in *its* source
    const segments = this.mappings[ lineIndex ];

    if ( !segments || segments.length === 0 ) {
      return null;
    }

    if ( columnIndex != null ) {
      let len = segments.length;
      let i;

      for ( i = 0; i < len; i += 1 ) {
        let generatedCodeColumn = segments[i][0];

        if ( generatedCodeColumn > columnIndex ) {
          break;
        }

        if ( generatedCodeColumn === columnIndex ) {
          if ( segments[i].length < 4 ) return null;

          let sourceFileIndex = segments[i][1];
          let sourceCodeLine = segments[i][2];
          let sourceCodeColumn = segments[i][3];
          let nameIndex = segments[i][4];

          let parent = this.sources[ sourceFileIndex ];
          return parent.trace( sourceCodeLine, sourceCodeColumn, this.map.names[ nameIndex ] || name );
        }
      }
    }

    // fall back to a line mapping
    let sourceFileIndex = segments[0][1];
    let sourceCodeLine = segments[0][2];
    let nameIndex = segments[0][4];

    let parent = this.sources[ sourceFileIndex ];
    return parent.trace( sourceCodeLine, null, this.map.names[ nameIndex ] || name );
  }
};

function getContent ( node, sourcesContentByPath ) {
  if ( node.file in sourcesContentByPath ) {
    node.content = sourcesContentByPath[ node.file ];
  }

  if ( !node.content ) {
    return readFile( node.file, { encoding: 'utf-8' });
  }

  return Promise.resolve( node.content );
}
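Illustration (not part of the vendored file): each decoded mapping line is an array of segments of the form [ generatedColumn, sourceIndex, sourceLine, sourceColumn, nameIndex? ], and trace() recurses one hop per node until it reaches a node with no map (isOriginalSource). A small sketch of that structure, using the same decode() call as load()/loadSync(); the mappings string is hand-written for the example:

import { decode } from '@jridgewell/sourcemap-codec';

const mappings = decode( 'AAAA,IAAMA' );
// => [ [ [ 0, 0, 0, 0 ], [ 4, 0, 0, 6, 0 ] ] ]
// one generated line, two segments:
// [ generatedColumn, sourceIndex, sourceLine, sourceColumn, nameIndex? ]
//
// With these mappings, node.trace( 0, 4, null ) matches the second segment and
// recurses into node.sources[0].trace( 0, 6, node.map.names[0] ).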
21  node_modules/sorcery/src/SourceMap.js  (generated, vendored, new file)
@@ -0,0 +1,21 @@
import btoa from './utils/btoa.js';

export default function SourceMap ( properties ) {
  this.version = 3;

  this.file = properties.file;
  this.sources = properties.sources;
  this.sourcesContent = properties.sourcesContent;
  this.names = properties.names;
  this.mappings = properties.mappings;
}

SourceMap.prototype = {
  toString () {
    return JSON.stringify( this );
  },

  toUrl () {
    return 'data:application/json;charset=utf-8;base64,' + btoa( this.toString() );
  }
};
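Illustration (not part of the vendored file): the object serialized by toString()/toUrl() is a plain v3 sourcemap. A sketch with invented values:

import SourceMap from './SourceMap.js';

// All values below are made up, purely to show the serialized shape.
const map = new SourceMap({
  file: 'min.js',
  sources: [ 'src/main.js' ],
  sourcesContent: [ 'console.log( 42 );\n' ],
  names: [],
  mappings: 'AAAA'
});

map.toString(); // '{"version":3,"file":"min.js","sources":["src/main.js"],...}'
map.toUrl();    // 'data:application/json;charset=utf-8;base64,' followed by the base64 of toString()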
38  node_modules/sorcery/src/index.js  (generated, vendored, new file)
@@ -0,0 +1,38 @@
import { resolve } from 'path';
import Node from './Node.js';
import Chain from './Chain.js';

export function load ( file, options ) {
  const { node, sourcesContentByPath, sourceMapByPath } = init( file, options );

  return node.load( sourcesContentByPath, sourceMapByPath )
    .then( () => node.isOriginalSource ? null : new Chain( node, sourcesContentByPath ) );
}

export function loadSync ( file, options = {} ) {
  const { node, sourcesContentByPath, sourceMapByPath } = init( file, options );

  node.loadSync( sourcesContentByPath, sourceMapByPath );
  return node.isOriginalSource ? null : new Chain( node, sourcesContentByPath );
}

function init ( file, options = {} ) {
  const node = new Node({ file });

  let sourcesContentByPath = {};
  let sourceMapByPath = {};

  if ( options.content ) {
    Object.keys( options.content ).forEach( key => {
      sourcesContentByPath[ resolve( key ) ] = options.content[ key ];
    });
  }

  if ( options.sourcemaps ) {
    Object.keys( options.sourcemaps ).forEach( key => {
      sourceMapByPath[ resolve( key ) ] = options.sourcemaps[ key ];
    });
  }

  return { node, sourcesContentByPath, sourceMapByPath };
}
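Usage sketch (not part of the vendored file): options.content and options.sourcemaps let callers supply file contents and maps keyed by path, so a chain can be flattened without reading from disk. A sketch using loadSync with purely in-memory inputs; every path and value below is invented:

import { loadSync } from 'sorcery';

const chain = loadSync( 'bundle.js', {
  content: {
    'bundle.js': 'var answer=42;console.log(answer);',
    'src/app.js': 'var answer = 42;\nconsole.log( answer );\n'
  },
  sourcemaps: {
    'bundle.js': {
      version: 3,
      file: 'bundle.js',
      sources: [ 'src/app.js' ],
      names: [],
      mappings: 'AAAA'
    }
  }
});

const flattened = chain.apply(); // a SourceMap instance covering the whole chain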
8  node_modules/sorcery/src/utils/atob.js  (generated, vendored, new file)
@@ -0,0 +1,8 @@
/**
 * Decodes a base64 string
 * @param {string} base64 - the string to decode
 * @returns {string}
 */
export default function atob ( base64 ) {
  return new Buffer( base64, 'base64' ).toString( 'utf8' );
}
8  node_modules/sorcery/src/utils/btoa.js  (generated, vendored, new file)
@@ -0,0 +1,8 @@
/**
 * Encodes a string as base64
 * @param {string} str - the string to encode
 * @returns {string}
 */
export default function btoa ( str ) {
  return new Buffer( str ).toString( 'base64' );
}
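Note (not part of the vendored files): atob.js and btoa.js use the legacy Buffer constructor, which has been replaced by Buffer.from in newer Node versions. A sketch of the modern equivalents and a round trip:

// Modern equivalents of the vendored atob/btoa helpers (illustration only).
const atob = base64 => Buffer.from( base64, 'base64' ).toString( 'utf8' );
const btoa = str => Buffer.from( str, 'utf8' ).toString( 'base64' );

btoa( '{"version":3}' );        // 'eyJ2ZXJzaW9uIjozfQ=='
atob( 'eyJ2ZXJzaW9uIjozfQ==' ); // '{"version":3}'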
21  node_modules/sorcery/src/utils/getMap.js  (generated, vendored, new file)
@@ -0,0 +1,21 @@
import { Promise } from 'sander';
import getMapFromUrl from './getMapFromUrl.js';
import getSourceMappingUrl from './getSourceMappingUrl.js';

export default function getMap ( node, sourceMapByPath, sync ) {
  if ( node.file in sourceMapByPath ) {
    const map = sourceMapByPath[ node.file ];
    return sync ? map : Promise.resolve( map );
  }

  else {
    const url = getSourceMappingUrl( node.content );

    if ( !url ) {
      node.isOriginalSource = true;
      return sync ? null : Promise.resolve( null );
    }

    return getMapFromUrl( url, node.file, sync );
  }
}
44  node_modules/sorcery/src/utils/getMapFromUrl.js  (generated, vendored, new file)
@@ -0,0 +1,44 @@
import { dirname, resolve } from 'path';
import { readFile, readFileSync, Promise } from 'sander';
import atob from './atob.js';
import SOURCEMAPPING_URL from './sourceMappingURL.js';

function parseJSON ( json, url ) {
  try {
    return JSON.parse( json );
  } catch ( err ) {
    throw new Error( `Could not parse sourcemap (${url}): ${err.message}` );
  }
}

/**
 * Turns a sourceMappingURL into a sourcemap
 * @param {string} url - the sourceMappingURL. Can be a
   base64-encoded data URI
 * @param {string} base - the URL against which relative URLs
   should be resolved
 * @param {boolean} sync - if `true`, return the sourcemap, otherwise
   return a promise
 * @returns {object} - a version 3 sourcemap
 */
export default function getMapFromUrl ( url, base, sync ) {
  if ( /^data:/.test( url ) ) { // TODO beef this up
    const match = /base64,(.+)$/.exec( url );

    if ( !match ) {
      throw new Error( `${SOURCEMAPPING_URL} is not base64-encoded` );
    }

    const json = atob( match[1] );
    const map = parseJSON( json, `data URI in ${base}` );
    return sync ? map : Promise.resolve( map );
  }

  url = resolve( dirname( base ), decodeURI( url ) );

  if ( sync ) {
    return parseJSON( readFileSync( url, { encoding: 'utf-8' }), url );
  } else {
    return readFile( url, { encoding: 'utf-8' }).then( json => parseJSON( json, url ) );
  }
}
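Illustration (not part of the vendored file): for inline sourcemaps the URL is a data URI whose base64 payload is the JSON map, handled by the first branch above. A hand-made example; the URI encodes just {"version":3}:

import getMapFromUrl from './getMapFromUrl.js';

// 'eyJ2ZXJzaW9uIjozfQ==' is base64 for '{"version":3}'.
const url = 'data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozfQ==';
const map = getMapFromUrl( url, '/any/file.js', true ); // sync branch returns the parsed map
// => { version: 3 }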
25  node_modules/sorcery/src/utils/getSourceMappingUrl.js  (generated, vendored, new file)
@@ -0,0 +1,25 @@
import SOURCEMAPPING_URL from './sourceMappingURL.js';

export default function getSourceMappingUrl ( str ) {
  var index, substring, url, match;

  // assume we want the last occurrence
  index = str.lastIndexOf( `${SOURCEMAPPING_URL}=` );

  if ( index === -1 ) {
    return null;
  }

  substring = str.substring( index + 17 );
  match = /^[^\r\n]+/.exec( substring );

  url = match ? match[0] : null;

  // possibly a better way to do this, but we don't want to exclude whitespace
  // from the sourceMappingURL because it might not have been correctly encoded
  if ( url && url.slice( -2 ) === '*/' ) {
    url = url.slice( 0, -2 ).trim();
  }

  return url;
}
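Illustration (not part of the vendored file): a sketch of what the extraction returns for the two comment styles handled above, on made-up inputs:

import getSourceMappingUrl from './getSourceMappingUrl.js';

getSourceMappingUrl( 'var a = 1;\n//# sourceMappingURL=bundle.js.map\n' );
// => 'bundle.js.map'

getSourceMappingUrl( 'body{color:red}\n/*# sourceMappingURL=styles.css.map */\n' );
// => 'styles.css.map' (the trailing '*/' is stripped and the result trimmed)

getSourceMappingUrl( 'no sourcemap comment here' );
// => null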
5  node_modules/sorcery/src/utils/slash.js  (generated, vendored, new file)
@@ -0,0 +1,5 @@
export default function slash ( path ) {
  return typeof path === 'string' ?
    path.replace( /\\/g, '/' ) :
    path;
}
6  node_modules/sorcery/src/utils/sourceMappingURL.js  (generated, vendored, new file)
@@ -0,0 +1,6 @@
// this looks ridiculous, but it prevents sourcemap tooling from mistaking
// this for an actual sourceMappingURL
let SOURCEMAPPING_URL = 'sourceMa';
SOURCEMAPPING_URL += 'ppingURL';

export default SOURCEMAPPING_URL;