full site update

This commit is contained in:
2025-07-24 18:46:24 +02:00
parent bfe2b90d8d
commit 37a6e0ab31
6912 changed files with 540482 additions and 361712 deletions

@@ -1,4 +1,4 @@
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

@@ -1,230 +1,292 @@
import { SetArray, put, remove } from '@jridgewell/set-array';
import { encode } from '@jridgewell/sourcemap-codec';
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const NO_NAME = -1;
/**
* Provides the state to generate a sourcemap.
*/
class GenMapping {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
// src/set-array.ts
var SetArray = class {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
};
function cast(set) {
return set;
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map) {
return map;
function get(setarr, key) {
return cast(setarr)._indexes[key];
}
function put(setarr, key) {
const index = get(setarr, key);
if (index !== void 0) return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return indexes[key] = length - 1;
}
function remove(setarr, key) {
const index = get(setarr, key);
if (index === void 0) return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = void 0;
array.pop();
}
// src/gen-mapping.ts
import {
encode
} from "@jridgewell/sourcemap-codec";
import { TraceMap, decodedMappings } from "@jridgewell/trace-mapping";
// src/sourcemap-segment.ts
var COLUMN = 0;
var SOURCES_INDEX = 1;
var SOURCE_LINE = 2;
var SOURCE_COLUMN = 3;
var NAMES_INDEX = 4;
// src/gen-mapping.ts
var NO_NAME = -1;
var GenMapping = class {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
};
function cast2(map) {
return map;
}
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
return addSegmentInternal(
false,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
}
function addMapping(map, mapping) {
return addMappingInternal(false, map, mapping);
return addMappingInternal(false, map, mapping);
}
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(
true,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
};
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
const maybeAddMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping);
var maybeAddMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping);
};
/**
* Adds/removes the content of the source file to the source map.
*/
function setSourceContent(map, source, content) {
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
const index = put(sources, source);
sourcesContent[index] = content;
const {
_sources: sources,
_sourcesContent: sourcesContent
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
const index = put(sources, source);
if (index === sourcesContent.length)
sourcesContent[index] = null;
if (ignore)
put(ignoreList, index);
else
remove(ignoreList, index);
const {
_sources: sources,
_sourcesContent: sourcesContent,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toDecodedMap(map) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || undefined,
names: names.array,
sourceRoot: map.sourceRoot || undefined,
sources: sources.array,
sourcesContent,
mappings,
ignoreList: ignoreList.array,
};
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
// _generatedRanges: generatedRanges,
} = cast2(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || void 0,
names: names.array,
sourceRoot: map.sourceRoot || void 0,
sources: sources.array,
sourcesContent,
mappings,
// originalScopes,
// generatedRanges,
ignoreList: ignoreList.array
};
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toEncodedMap(map) {
const decoded = toDecodedMap(map);
return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
const decoded = toDecodedMap(map);
return Object.assign({}, decoded, {
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
mappings: encode(decoded.mappings)
});
}
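// Minimal end-to-end sketch of the API above (names, positions and content are
// illustrative): build a map for one generated file, then serialize it.
// import { GenMapping, maybeAddMapping, setSourceContent, toEncodedMap } from '@jridgewell/gen-mapping';
const genMap = new GenMapping({ file: 'out.js' });
maybeAddMapping(genMap, {
  generated: { line: 1, column: 0 }, // 1-based line, 0-based column
  source: 'input.js',
  original: { line: 1, column: 0 },
  name: 'foo',
});
setSourceContent(genMap, 'input.js', 'const foo = 1;');
const outMap = toEncodedMap(genMap); // { version: 3, names: ['foo'], sources: ['input.js'], mappings: 'AAAAA', ... }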
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
function fromMap(input) {
const map = new TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast(gen)._names, map.names);
putAll(cast(gen)._sources, map.sources);
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast(gen)._mappings = decodedMappings(map);
if (map.ignoreList)
putAll(cast(gen)._ignoreList, map.ignoreList);
return gen;
const map = new TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast2(gen)._names, map.names);
putAll(cast2(gen)._sources, map.sources);
cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast2(gen)._mappings = decodedMappings(map);
if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);
return gen;
}
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
function allMappings(map) {
const out = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source = undefined;
let original = undefined;
let name = undefined;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5)
name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name });
}
const out = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source = void 0;
let original = void 0;
let name = void 0;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name });
}
return out;
}
return out;
}
// This split declaration is only so that terser can eliminate the static initialization block.
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
const line = getLine(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index))
return;
return insert(line, index, [genColumn]);
}
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length)
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(line, index, name
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names
// _originalScopes: originalScopes,
} = cast2(map);
const line = getIndex(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
assert(sourceLine);
assert(sourceColumn);
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(
line,
index,
name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]
);
}
function getLine(mappings, index) {
for (let i = mappings.length; i <= index; i++) {
mappings[i] = [];
}
return mappings[index];
function assert(_val) {
}
function getIndex(arr, index) {
for (let i = arr.length; i <= index; i++) {
arr[i] = [];
}
return arr[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN])
break;
}
return index;
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0)
break;
}
if (len < length)
mappings.length = len;
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0) break;
}
if (len < length) mappings.length = len;
}
function putAll(setarr, array) {
for (let i = 0; i < array.length; i++)
put(setarr, array[i]);
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
}
function skipSourceless(line, index) {
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
// doesn't generate any useful information.
if (index === 0)
return true;
const prev = line[index - 1];
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
// generate any new information. Else, this segment will end the source/named segment and point to
// a sourceless position, which is useful.
return prev.length === 1;
if (index === 0) return true;
const prev = line[index - 1];
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
// A source/named segment at the start of a line gives position at that genColumn
if (index === 0)
return false;
const prev = line[index - 1];
// If the previous segment is sourceless, then we're transitioning to a source.
if (prev.length === 1)
return false;
// If the previous segment maps to the exact same source position, then this segment doesn't
// provide any new position information.
return (sourcesIndex === prev[SOURCES_INDEX] &&
sourceLine === prev[SOURCE_LINE] &&
sourceColumn === prev[SOURCE_COLUMN] &&
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
if (index === 0) return false;
const prev = line[index - 1];
if (prev.length === 1) return false;
return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
}
function addMappingInternal(skipable, map, mapping) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
}
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
null,
null,
null,
null,
null
);
}
assert(original);
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
source,
original.line - 1,
original.column,
name,
content
);
}
export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setIgnore, setSourceContent, toDecodedMap, toEncodedMap };
export {
GenMapping,
addMapping,
addSegment,
allMappings,
fromMap,
maybeAddMapping,
maybeAddSegment,
setIgnore,
setSourceContent,
toDecodedMap,
toEncodedMap
};
//# sourceMappingURL=gen-mapping.mjs.map

File diff suppressed because one or more lines are too long

@@ -1,246 +1,346 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) :
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping));
})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict';
(function (global, factory, m) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(module, require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) :
typeof define === 'function' && define.amd ? define(['module', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(m = { exports: {} }, global.sourcemapCodec, global.traceMapping), global.genMapping = 'default' in m.exports ? m.exports.default : m.exports);
})(this, (function (module, require_sourcemapCodec, require_traceMapping) {
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function __require() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
// umd:@jridgewell/sourcemap-codec
var require_sourcemap_codec = __commonJS({
"umd:@jridgewell/sourcemap-codec"(exports, module2) {
module2.exports = require_sourcemapCodec;
}
});
const NO_NAME = -1;
/**
* Provides the state to generate a sourcemap.
*/
class GenMapping {
constructor({ file, sourceRoot } = {}) {
this._names = new setArray.SetArray();
this._sources = new setArray.SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new setArray.SetArray();
}
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map) {
return map;
}
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
}
function addMapping(map, mapping) {
return addMappingInternal(false, map, mapping);
}
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
};
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
const maybeAddMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping);
};
/**
* Adds/removes the content of the source file to the source map.
*/
function setSourceContent(map, source, content) {
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
const index = setArray.put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
const index = setArray.put(sources, source);
if (index === sourcesContent.length)
sourcesContent[index] = null;
if (ignore)
setArray.put(ignoreList, index);
else
setArray.remove(ignoreList, index);
}
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toDecodedMap(map) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || undefined,
names: names.array,
sourceRoot: map.sourceRoot || undefined,
sources: sources.array,
sourcesContent,
mappings,
ignoreList: ignoreList.array,
};
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toEncodedMap(map) {
const decoded = toDecodedMap(map);
return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) });
}
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
function fromMap(input) {
const map = new traceMapping.TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast(gen)._names, map.names);
putAll(cast(gen)._sources, map.sources);
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast(gen)._mappings = traceMapping.decodedMappings(map);
if (map.ignoreList)
putAll(cast(gen)._ignoreList, map.ignoreList);
return gen;
}
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
function allMappings(map) {
const out = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source = undefined;
let original = undefined;
let name = undefined;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5)
name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name });
}
}
return out;
}
// This split declaration is only so that terser can eliminate the static initialization block.
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
const line = getLine(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index))
return;
return insert(line, index, [genColumn]);
}
const sourcesIndex = setArray.put(sources, source);
const namesIndex = name ? setArray.put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length)
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(line, index, name
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
}
function getLine(mappings, index) {
for (let i = mappings.length; i <= index; i++) {
mappings[i] = [];
}
return mappings[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN])
break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0)
break;
}
if (len < length)
mappings.length = len;
}
function putAll(setarr, array) {
for (let i = 0; i < array.length; i++)
setArray.put(setarr, array[i]);
}
function skipSourceless(line, index) {
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
// doesn't generate any useful information.
if (index === 0)
return true;
const prev = line[index - 1];
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
// generate any new information. Else, this segment will end the source/named segment and point to
// a sourceless position, which is useful.
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
// A source/named segment at the start of a line gives position at that genColumn
if (index === 0)
return false;
const prev = line[index - 1];
// If the previous segment is sourceless, then we're transitioning to a source.
if (prev.length === 1)
return false;
// If the previous segment maps to the exact same source position, then this segment doesn't
// provide any new position information.
return (sourcesIndex === prev[SOURCES_INDEX] &&
sourceLine === prev[SOURCE_LINE] &&
sourceColumn === prev[SOURCE_COLUMN] &&
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
}
function addMappingInternal(skipable, map, mapping) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
}
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
}
// umd:@jridgewell/trace-mapping
var require_trace_mapping = __commonJS({
"umd:@jridgewell/trace-mapping"(exports, module2) {
module2.exports = require_traceMapping;
}
});
exports.GenMapping = GenMapping;
exports.addMapping = addMapping;
exports.addSegment = addSegment;
exports.allMappings = allMappings;
exports.fromMap = fromMap;
exports.maybeAddMapping = maybeAddMapping;
exports.maybeAddSegment = maybeAddSegment;
exports.setIgnore = setIgnore;
exports.setSourceContent = setSourceContent;
exports.toDecodedMap = toDecodedMap;
exports.toEncodedMap = toEncodedMap;
// src/gen-mapping.ts
var gen_mapping_exports = {};
__export(gen_mapping_exports, {
GenMapping: () => GenMapping,
addMapping: () => addMapping,
addSegment: () => addSegment,
allMappings: () => allMappings,
fromMap: () => fromMap,
maybeAddMapping: () => maybeAddMapping,
maybeAddSegment: () => maybeAddSegment,
setIgnore: () => setIgnore,
setSourceContent: () => setSourceContent,
toDecodedMap: () => toDecodedMap,
toEncodedMap: () => toEncodedMap
});
module.exports = __toCommonJS(gen_mapping_exports);
Object.defineProperty(exports, '__esModule', { value: true });
// src/set-array.ts
var SetArray = class {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
};
function cast(set) {
return set;
}
function get(setarr, key) {
return cast(setarr)._indexes[key];
}
function put(setarr, key) {
const index = get(setarr, key);
if (index !== void 0) return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return indexes[key] = length - 1;
}
function remove(setarr, key) {
const index = get(setarr, key);
if (index === void 0) return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = void 0;
array.pop();
}
// src/gen-mapping.ts
var import_sourcemap_codec = __toESM(require_sourcemap_codec());
var import_trace_mapping = __toESM(require_trace_mapping());
// src/sourcemap-segment.ts
var COLUMN = 0;
var SOURCES_INDEX = 1;
var SOURCE_LINE = 2;
var SOURCE_COLUMN = 3;
var NAMES_INDEX = 4;
// src/gen-mapping.ts
var NO_NAME = -1;
var GenMapping = class {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
};
function cast2(map) {
return map;
}
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
return addSegmentInternal(
false,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
}
function addMapping(map, mapping) {
return addMappingInternal(false, map, mapping);
}
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(
true,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
};
var maybeAddMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping);
};
function setSourceContent(map, source, content) {
const {
_sources: sources,
_sourcesContent: sourcesContent
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
const {
_sources: sources,
_sourcesContent: sourcesContent,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
function toDecodedMap(map) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
// _generatedRanges: generatedRanges,
} = cast2(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || void 0,
names: names.array,
sourceRoot: map.sourceRoot || void 0,
sources: sources.array,
sourcesContent,
mappings,
// originalScopes,
// generatedRanges,
ignoreList: ignoreList.array
};
}
function toEncodedMap(map) {
const decoded = toDecodedMap(map);
return Object.assign({}, decoded, {
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
mappings: (0, import_sourcemap_codec.encode)(decoded.mappings)
});
}
function fromMap(input) {
const map = new import_trace_mapping.TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast2(gen)._names, map.names);
putAll(cast2(gen)._sources, map.sources);
cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast2(gen)._mappings = (0, import_trace_mapping.decodedMappings)(map);
if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);
return gen;
}
function allMappings(map) {
const out = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source = void 0;
let original = void 0;
let name = void 0;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name });
}
}
return out;
}
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names
// _originalScopes: originalScopes,
} = cast2(map);
const line = getIndex(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
assert(sourceLine);
assert(sourceColumn);
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(
line,
index,
name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]
);
}
function assert(_val) {
}
function getIndex(arr, index) {
for (let i = arr.length; i <= index; i++) {
arr[i] = [];
}
return arr[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0) break;
}
if (len < length) mappings.length = len;
}
function putAll(setarr, array) {
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
}
function skipSourceless(line, index) {
if (index === 0) return true;
const prev = line[index - 1];
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
if (index === 0) return false;
const prev = line[index - 1];
if (prev.length === 1) return false;
return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
}
function addMappingInternal(skipable, map, mapping) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
null,
null,
null,
null,
null
);
}
assert(original);
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
source,
original.line - 1,
original.column,
name,
content
);
}
}));
//# sourceMappingURL=gen-mapping.umd.js.map

File diff suppressed because one or more lines are too long

@@ -1,12 +0,0 @@
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};

@@ -1,76 +1,71 @@
{
"name": "@jridgewell/gen-mapping",
"version": "0.3.8",
"version": "0.3.12",
"description": "Generate source maps",
"keywords": [
"source",
"map"
],
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"repository": "https://github.com/jridgewell/gen-mapping",
"main": "dist/gen-mapping.umd.js",
"module": "dist/gen-mapping.mjs",
"types": "dist/types/gen-mapping.d.ts",
"types": "types/gen-mapping.d.cts",
"files": [
"dist",
"src",
"types"
],
"exports": {
".": [
{
"types": "./dist/types/gen-mapping.d.ts",
"browser": "./dist/gen-mapping.umd.js",
"require": "./dist/gen-mapping.umd.js",
"import": "./dist/gen-mapping.mjs"
"import": {
"types": "./types/gen-mapping.d.mts",
"default": "./dist/gen-mapping.mjs"
},
"require": {
"types": "./types/gen-mapping.d.cts",
"default": "./dist/gen-mapping.umd.js"
},
"browser": {
"types": "./types/gen-mapping.d.cts",
"default": "./dist/gen-mapping.umd.js"
}
},
"./dist/gen-mapping.umd.js"
],
"./package.json": "./package.json"
},
"files": [
"dist"
],
"engines": {
"node": ">=6.0.0"
},
"scripts": {
"benchmark": "run-s build:rollup benchmark:*",
"benchmark": "run-s build:code benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node benchmark/index.mjs",
"prebuild": "rm -rf dist",
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"test": "run-s -n test:lint test:only",
"test:debug": "mocha --inspect-brk",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "c8 mocha",
"test:watch": "mocha --watch",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build"
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:code build:types",
"build:code": "node ../../esbuild.mjs gen-mapping.ts",
"build:types": "run-s build:types:force build:types:emit build:types:mts",
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
"build:types:emit": "tsc --project tsconfig.build.json",
"build:types:mts": "node ../../mts-types.mjs",
"clean": "run-s -n clean:code clean:types",
"clean:code": "tsc --build --clean tsconfig.build.json",
"clean:types": "rimraf dist types",
"test": "run-s -n test:types test:only test:format",
"test:format": "prettier --check '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:types": "eslint '{src,test}/**/*.ts'",
"lint": "run-s -n lint:types lint:format",
"lint:format": "npm run test:format -- --write",
"lint:types": "npm run test:types -- --fix",
"prepublishOnly": "npm run-s -n build test"
},
"devDependencies": {
"@rollup/plugin-typescript": "8.3.2",
"@types/mocha": "9.1.1",
"@types/node": "17.0.29",
"@typescript-eslint/eslint-plugin": "5.21.0",
"@typescript-eslint/parser": "5.21.0",
"benchmark": "2.1.4",
"c8": "7.11.2",
"eslint": "8.14.0",
"eslint-config-prettier": "8.5.0",
"mocha": "9.2.2",
"npm-run-all": "4.1.5",
"prettier": "2.6.2",
"rollup": "2.70.2",
"tsx": "4.7.1",
"typescript": "4.6.3"
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/gen-mapping",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/sourcemaps.git",
"directory": "packages/gen-mapping"
},
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"dependencies": {
"@jridgewell/set-array": "^1.2.1",
"@jridgewell/sourcemap-codec": "^1.4.10",
"@jridgewell/sourcemap-codec": "^1.5.0",
"@jridgewell/trace-mapping": "^0.3.24"
}
}
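// Resolution sketch for the conditional "exports" map above (illustrative consumer code):
// the `import` condition resolves to ./dist/gen-mapping.mjs with ./types/gen-mapping.d.mts,
// while `require` and `browser` resolve to ./dist/gen-mapping.umd.js with ./types/gen-mapping.d.cts.
import { GenMapping } from '@jridgewell/gen-mapping';
// const { GenMapping } = require('@jridgewell/gen-mapping'); // CommonJS form, same UMD build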

node_modules/@jridgewell/gen-mapping/src/gen-mapping.ts generated vendored Normal file

@@ -0,0 +1,614 @@
import { SetArray, put, remove } from './set-array';
import {
encode,
// encodeGeneratedRanges,
// encodeOriginalScopes
} from '@jridgewell/sourcemap-codec';
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
import {
COLUMN,
SOURCES_INDEX,
SOURCE_LINE,
SOURCE_COLUMN,
NAMES_INDEX,
} from './sourcemap-segment';
import type { SourceMapInput } from '@jridgewell/trace-mapping';
// import type { OriginalScope, GeneratedRange } from '@jridgewell/sourcemap-codec';
import type { SourceMapSegment } from './sourcemap-segment';
import type {
DecodedSourceMap,
EncodedSourceMap,
Pos,
Mapping,
// BindingExpressionRange,
// OriginalPos,
// OriginalScopeInfo,
// GeneratedRangeInfo,
} from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export type Options = {
file?: string | null;
sourceRoot?: string | null;
};
const NO_NAME = -1;
/**
* Provides the state to generate a sourcemap.
*/
export class GenMapping {
declare private _names: SetArray<string>;
declare private _sources: SetArray<string>;
declare private _sourcesContent: (string | null)[];
declare private _mappings: SourceMapSegment[][];
// private declare _originalScopes: OriginalScope[][];
// private declare _generatedRanges: GeneratedRange[];
declare private _ignoreList: SetArray<number>;
declare file: string | null | undefined;
declare sourceRoot: string | null | undefined;
constructor({ file, sourceRoot }: Options = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
// this._originalScopes = [];
// this._generatedRanges = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
}
interface PublicMap {
_names: GenMapping['_names'];
_sources: GenMapping['_sources'];
_sourcesContent: GenMapping['_sourcesContent'];
_mappings: GenMapping['_mappings'];
// _originalScopes: GenMapping['_originalScopes'];
// _generatedRanges: GenMapping['_generatedRanges'];
_ignoreList: GenMapping['_ignoreList'];
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map: unknown): PublicMap {
return map as any;
}
/**
* A low-level API to associate a generated position with an original source position. Line and
* column here are 0-based, unlike `addMapping`.
*/
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source?: null,
sourceLine?: null,
sourceColumn?: null,
name?: null,
content?: null,
): void;
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source: string,
sourceLine: number,
sourceColumn: number,
name?: null,
content?: string | null,
): void;
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source: string,
sourceLine: number,
sourceColumn: number,
name: string,
content?: string | null,
): void;
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source?: string | null,
sourceLine?: number | null,
sourceColumn?: number | null,
name?: string | null,
content?: string | null,
): void {
return addSegmentInternal(
false,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content,
);
}
/**
* A high-level API to associate a generated position with an original source position. Line is
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
*/
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source?: null;
original?: null;
name?: null;
content?: null;
},
): void;
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source: string;
original: Pos;
name?: null;
content?: string | null;
},
): void;
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source: string;
original: Pos;
name: string;
content?: string | null;
},
): void;
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source?: string | null;
original?: Pos | null;
name?: string | null;
content?: string | null;
},
): void {
return addMappingInternal(false, map, mapping as Parameters<typeof addMappingInternal>[2]);
}
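// Coordinate-convention sketch for the two APIs above (map and positions are
// illustrative): both calls record generated line 1, column 4 -> input.js line 1, column 2.
declare const sketchMap: GenMapping;
addSegment(sketchMap, 0, 4, 'input.js', 0, 2, 'foo'); // 0-based genLine/genColumn and source position
addMapping(sketchMap, {
  generated: { line: 1, column: 4 }, // 1-based line, 0-based column
  source: 'input.js',
  original: { line: 1, column: 2 },
  name: 'foo',
});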
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
export const maybeAddSegment: typeof addSegment = (
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content,
) => {
return addSegmentInternal(
true,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content,
);
};
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
export const maybeAddMapping: typeof addMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping as Parameters<typeof addMappingInternal>[2]);
};
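// Sketch of the "maybe" variants above (positions illustrative): when segments arrive
// in generated order, redundant ones are dropped instead of recorded.
declare const orderedMap: GenMapping;
maybeAddSegment(orderedMap, 0, 0, 'a.js', 0, 0); // recorded: first segment on the line
maybeAddSegment(orderedMap, 0, 5, 'a.js', 0, 0); // skipped: repeats the previous source position
maybeAddSegment(orderedMap, 0, 9);               // recorded: closes the sourced segment with a sourceless one
maybeAddSegment(orderedMap, 0, 12);              // skipped: previous segment is already sourceless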
/**
* Adds/removes the content of the source file to the source map.
*/
export function setSourceContent(map: GenMapping, source: string, content: string | null): void {
const {
_sources: sources,
_sourcesContent: sourcesContent,
// _originalScopes: originalScopes,
} = cast(map);
const index = put(sources, source);
sourcesContent[index] = content;
// if (index === originalScopes.length) originalScopes[index] = [];
}
export function setIgnore(map: GenMapping, source: string, ignore = true) {
const {
_sources: sources,
_sourcesContent: sourcesContent,
_ignoreList: ignoreList,
// _originalScopes: originalScopes,
} = cast(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
// if (index === originalScopes.length) originalScopes[index] = [];
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
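// Sketch for the two source-level helpers above (paths and content illustrative):
declare const contentMap: GenMapping;
setSourceContent(contentMap, 'src/index.ts', 'export const x = 1;'); // inline the original source
setIgnore(contentMap, 'vendor/helpers.js');        // adds the source (if new) and marks it ignored
setIgnore(contentMap, 'vendor/helpers.js', false); // removes it from the ignore list again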
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export function toDecodedMap(map: GenMapping): DecodedSourceMap {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
_ignoreList: ignoreList,
// _originalScopes: originalScopes,
// _generatedRanges: generatedRanges,
} = cast(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || undefined,
names: names.array,
sourceRoot: map.sourceRoot || undefined,
sources: sources.array,
sourcesContent,
mappings,
// originalScopes,
// generatedRanges,
ignoreList: ignoreList.array,
};
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export function toEncodedMap(map: GenMapping): EncodedSourceMap {
const decoded = toDecodedMap(map);
return Object.assign({}, decoded, {
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
mappings: encode(decoded.mappings as SourceMapSegment[][]),
});
}
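// Output-shape sketch for the two serializers above: toDecodedMap keeps mappings as
// segment arrays, toEncodedMap swaps in the VLQ-encoded string (example value illustrative).
declare const shapeMap: GenMapping;
const decodedOut = toDecodedMap(shapeMap); // { version: 3, mappings: SourceMapSegment[][], ... }
const encodedOut = toEncodedMap(shapeMap); // same fields, but mappings: string, e.g. 'AAAA;;CACA'
JSON.stringify(encodedOut);                // ready to be written next to the generated file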
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
export function fromMap(input: SourceMapInput): GenMapping {
const map = new TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast(gen)._names, map.names);
putAll(cast(gen)._sources, map.sources as string[]);
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast(gen)._mappings = decodedMappings(map) as GenMapping['_mappings'];
// TODO: implement originalScopes/generatedRanges
if (map.ignoreList) putAll(cast(gen)._ignoreList, map.ignoreList);
return gen;
}
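// Sketch: seed a GenMapping from an existing map, append to it, and re-serialize
// (the input and the extra mapping are illustrative).
declare const seedInput: SourceMapInput;
const extended = fromMap(seedInput);
maybeAddMapping(extended, {
  generated: { line: 10, column: 0 },
  source: 'extra.js',
  original: { line: 1, column: 0 },
});
const reserialized = toEncodedMap(extended);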
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
export function allMappings(map: GenMapping): Mapping[] {
const out: Mapping[] = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source: string | undefined = undefined;
let original: Pos | undefined = undefined;
let name: string | undefined = undefined;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name } as Mapping);
}
}
return out;
}
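// Sketch of the shape produced above: one high-level object per recorded segment.
declare const iterMap: GenMapping;
for (const { generated, source, original, name } of allMappings(iterMap)) {
  // generated.line is 1-based; source, original and name are undefined for sourceless segments
  console.log(generated.line, generated.column, source, original?.line, name);
}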
// This split declaration is only so that terser can eliminate the static initialization block.
function addSegmentInternal<S extends string | null | undefined>(
skipable: boolean,
map: GenMapping,
genLine: number,
genColumn: number,
source: S,
sourceLine: S extends string ? number : null | undefined,
sourceColumn: S extends string ? number : null | undefined,
name: S extends string ? string | null | undefined : null | undefined,
content: S extends string ? string | null | undefined : null | undefined,
): void {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
// _originalScopes: originalScopes,
} = cast(map);
const line = getIndex(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
// Sigh, TypeScript can't figure out sourceLine and sourceColumn aren't nullish if source
// isn't nullish.
assert<number>(sourceLine);
assert<number>(sourceColumn);
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content ?? null;
// if (sourcesIndex === originalScopes.length) originalScopes[sourcesIndex] = [];
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(
line,
index,
name
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
: [genColumn, sourcesIndex, sourceLine, sourceColumn],
);
}
function assert<T>(_val: unknown): asserts _val is T {
// noop.
}
function getIndex<T>(arr: T[][], index: number): T[] {
for (let i = arr.length; i <= index; i++) {
arr[i] = [];
}
return arr[index];
}
function getColumnIndex(line: SourceMapSegment[], genColumn: number): number {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert<T>(array: T[], index: number, value: T) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings: SourceMapSegment[][]) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0) break;
}
if (len < length) mappings.length = len;
}
function putAll<T extends string | number>(setarr: SetArray<T>, array: T[]) {
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
}
function skipSourceless(line: SourceMapSegment[], index: number): boolean {
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
// doesn't generate any useful information.
if (index === 0) return true;
const prev = line[index - 1];
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
// generate any new information. Else, this segment will end the source/named segment and point to
// a sourceless position, which is useful.
return prev.length === 1;
}
function skipSource(
line: SourceMapSegment[],
index: number,
sourcesIndex: number,
sourceLine: number,
sourceColumn: number,
namesIndex: number,
): boolean {
// A source/named segment at the start of a line gives position at that genColumn
if (index === 0) return false;
const prev = line[index - 1];
// If the previous segment is sourceless, then we're transitioning to a source.
if (prev.length === 1) return false;
// If the previous segment maps to the exact same source position, then this segment doesn't
// provide any new position information.
return (
sourcesIndex === prev[SOURCES_INDEX] &&
sourceLine === prev[SOURCE_LINE] &&
sourceColumn === prev[SOURCE_COLUMN] &&
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)
);
}
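// Worked case for the two skip helpers above (values illustrative). Given
//   line = [[0, 0, 0, 0]]                        // one sourced segment at genColumn 0
// skipSource(line, 1, 0, 0, 0, NO_NAME) is true  (repeats prev's exact source position),
// skipSource(line, 1, 0, 0, 5, NO_NAME) is false (different sourceColumn), and
// skipSourceless(line, 1) is false               (prev is sourced, so a sourceless segment adds info).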
function addMappingInternal<S extends string | null | undefined>(
skipable: boolean,
map: GenMapping,
mapping: {
generated: Pos;
source: S;
original: S extends string ? Pos : null | undefined;
name: S extends string ? string | null | undefined : null | undefined;
content: S extends string ? string | null | undefined : null | undefined;
},
) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
null,
null,
null,
null,
null,
);
}
assert<Pos>(original);
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
source as string,
original.line - 1,
original.column,
name,
content,
);
}
/*
export function addOriginalScope(
map: GenMapping,
data: {
start: Pos;
end: Pos;
source: string;
kind: string;
name?: string;
variables?: string[];
},
): OriginalScopeInfo {
const { start, end, source, kind, name, variables } = data;
const {
_sources: sources,
_sourcesContent: sourcesContent,
_originalScopes: originalScopes,
_names: names,
} = cast(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (index === originalScopes.length) originalScopes[index] = [];
const kindIndex = put(names, kind);
const scope: OriginalScope = name
? [start.line - 1, start.column, end.line - 1, end.column, kindIndex, put(names, name)]
: [start.line - 1, start.column, end.line - 1, end.column, kindIndex];
if (variables) {
scope.vars = variables.map((v) => put(names, v));
}
const len = originalScopes[index].push(scope);
return [index, len - 1, variables];
}
*/
// Generated Ranges
/*
export function addGeneratedRange(
map: GenMapping,
data: {
start: Pos;
isScope: boolean;
originalScope?: OriginalScopeInfo;
callsite?: OriginalPos;
},
): GeneratedRangeInfo {
const { start, isScope, originalScope, callsite } = data;
const {
_originalScopes: originalScopes,
_sources: sources,
_sourcesContent: sourcesContent,
_generatedRanges: generatedRanges,
} = cast(map);
const range: GeneratedRange = [
start.line - 1,
start.column,
0,
0,
originalScope ? originalScope[0] : -1,
originalScope ? originalScope[1] : -1,
];
if (originalScope?.[2]) {
range.bindings = originalScope[2].map(() => [[-1]]);
}
if (callsite) {
const index = put(sources, callsite.source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (index === originalScopes.length) originalScopes[index] = [];
range.callsite = [index, callsite.line - 1, callsite.column];
}
if (isScope) range.isScope = true;
generatedRanges.push(range);
return [range, originalScope?.[2]];
}
export function setEndPosition(range: GeneratedRangeInfo, pos: Pos) {
range[0][2] = pos.line - 1;
range[0][3] = pos.column;
}
export function addBinding(
map: GenMapping,
range: GeneratedRangeInfo,
variable: string,
expression: string | BindingExpressionRange,
) {
const { _names: names } = cast(map);
const bindings = (range[0].bindings ||= []);
const vars = range[1];
const index = vars!.indexOf(variable);
const binding = getIndex(bindings, index);
if (typeof expression === 'string') binding[0] = [put(names, expression)];
else {
const { start } = expression;
binding.push([put(names, expression.expression), start.line - 1, start.column]);
}
}
*/

node_modules/@jridgewell/gen-mapping/src/set-array.ts generated vendored Normal file

@@ -0,0 +1,82 @@
type Key = string | number | symbol;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
export class SetArray<T extends Key = Key> {
declare private _indexes: Record<T, number | undefined>;
declare array: readonly T[];
constructor() {
this._indexes = { __proto__: null } as any;
this.array = [];
}
}
interface PublicSet<T extends Key> {
array: T[];
_indexes: SetArray<T>['_indexes'];
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
* with public access modifiers.
*/
function cast<T extends Key>(set: SetArray<T>): PublicSet<T> {
return set as any;
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
export function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {
return cast(setarr)._indexes[key];
}
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
export function put<T extends Key>(setarr: SetArray<T>, key: T): number {
// The key may or may not be present. If it is present, it's a number.
const index = get(setarr, key);
if (index !== undefined) return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return (indexes[key] = length - 1);
}
/**
* Pops the last added item out of the SetArray.
*/
export function pop<T extends Key>(setarr: SetArray<T>): void {
const { array, _indexes: indexes } = cast(setarr);
if (array.length === 0) return;
const last = array.pop()!;
indexes[last] = undefined;
}
/**
* Removes the key, if it exists in the set.
*/
export function remove<T extends Key>(setarr: SetArray<T>, key: T): void {
const index = get(setarr, key);
if (index === undefined) return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]!--;
}
indexes[key] = undefined;
array.pop();
}

View File

@@ -0,0 +1,16 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
export type SourceMapSegment =
| [GeneratedColumn]
| [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]
| [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export const COLUMN = 0;
export const SOURCES_INDEX = 1;
export const SOURCE_LINE = 2;
export const SOURCE_COLUMN = 3;
export const NAMES_INDEX = 4;

node_modules/@jridgewell/gen-mapping/src/types.ts generated vendored Normal file (61 lines)
View File

@@ -0,0 +1,61 @@
// import type { GeneratedRange, OriginalScope } from '@jridgewell/sourcemap-codec';
import type { SourceMapSegment } from './sourcemap-segment';
export interface SourceMapV3 {
file?: string | null;
names: readonly string[];
sourceRoot?: string;
sources: readonly (string | null)[];
sourcesContent?: readonly (string | null)[];
version: 3;
ignoreList?: readonly number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
// originalScopes: string[];
// generatedRanges: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
mappings: readonly SourceMapSegment[][];
// originalScopes: readonly OriginalScope[][];
// generatedRanges: readonly GeneratedRange[];
}
export interface Pos {
line: number; // 1-based
column: number; // 0-based
}
export interface OriginalPos extends Pos {
source: string;
}
export interface BindingExpressionRange {
start: Pos;
expression: string;
}
// export type OriginalScopeInfo = [number, number, string[] | undefined];
// export type GeneratedRangeInfo = [GeneratedRange, string[] | undefined];
export type Mapping =
| {
generated: Pos;
source: undefined;
original: undefined;
name: undefined;
}
| {
generated: Pos;
source: string;
original: Pos;
name: string;
}
| {
generated: Pos;
source: string;
original: Pos;
name: undefined;
};

View File

@@ -1,7 +1,7 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types.cts';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export declare type Options = {
export type Options = {
file?: string | null;
sourceRoot?: string | null;
};
@@ -86,3 +86,4 @@ export declare function fromMap(input: SourceMapInput): GenMapping;
* passed to the `source-map` library.
*/
export declare function allMappings(map: GenMapping): Mapping[];
//# sourceMappingURL=gen-mapping.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"gen-mapping.d.ts","sourceRoot":"","sources":["../src/gen-mapping.ts"],"names":[],"mappings":"AAgBA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAGhE,OAAO,KAAK,EACV,gBAAgB,EAChB,gBAAgB,EAChB,GAAG,EACH,OAAO,EAKR,MAAM,SAAS,CAAC;AAEjB,YAAY,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,OAAO,EAAE,CAAC;AAE5D,MAAM,MAAM,OAAO,GAAG;IACpB,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5B,CAAC;AAIF;;GAEG;AACH,qBAAa,UAAU;IACrB,QAAgB,MAAM,CAAmB;IACzC,QAAgB,QAAQ,CAAmB;IAC3C,QAAgB,eAAe,CAAoB;IACnD,QAAgB,SAAS,CAAuB;IAGhD,QAAgB,WAAW,CAAmB;IACtC,IAAI,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IAChC,UAAU,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;gBAElC,EAAE,IAAI,EAAE,UAAU,EAAE,GAAE,OAAY;CAW/C;AAoBD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,CAAC,EAAE,IAAI,EACb,UAAU,CAAC,EAAE,IAAI,EACjB,YAAY,CAAC,EAAE,IAAI,EACnB,IAAI,CAAC,EAAE,IAAI,EACX,OAAO,CAAC,EAAE,IAAI,GACb,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,MAAM,EACpB,IAAI,CAAC,EAAE,IAAI,EACX,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GACtB,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,MAAM,EACpB,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GACtB,IAAI,CAAC;AAwBR;;;GAGG;AACH,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,CAAC,EAAE,IAAI,CAAC;IACd,QAAQ,CAAC,EAAE,IAAI,CAAC;IAChB,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,OAAO,CAAC,EAAE,IAAI,CAAC;CAChB,GACA,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB,GACA,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB,GACA,IAAI,CAAC;AAcR;;;;GAIG;AACH,eAAO,MAAM,eAAe,EAAE,OAAO,UAqBpC,CAAC;AAEF;;;;GAIG;AACH,eAAO,MAAM,eAAe,EAAE,OAAO,UAEpC,CAAC;AAEF;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,GAAG,IAAI,CAS9F;AAED,wBAAgB,SAAS,CAAC,GAAG,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,UAAO,QAYvE;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,UAAU,GAAG,gBAAgB,CAwB9D;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,UAAU,GAAG,gBAAgB,CAO9D;AAED;;GAEG;AACH,wBAAgB,OAAO,CAAC,KAAK,EAAE,cAAc,GAAG,UAAU,CAYzD;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,EAAE,CA0BtD"}

View File

@@ -0,0 +1,89 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types.mts';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export type Options = {
file?: string | null;
sourceRoot?: string | null;
};
/**
* Provides the state to generate a sourcemap.
*/
export declare class GenMapping {
private _names;
private _sources;
private _sourcesContent;
private _mappings;
private _ignoreList;
file: string | null | undefined;
sourceRoot: string | null | undefined;
constructor({ file, sourceRoot }?: Options);
}
/**
* A low-level API to associate a generated position with an original source position. Line and
* column here are 0-based, unlike `addMapping`.
*/
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
/**
* A high-level API to associate a generated position with an original source position. Line is
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
*/
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source?: null;
original?: null;
name?: null;
content?: null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name?: null;
content?: string | null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name: string;
content?: string | null;
}): void;
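/*
 * A minimal usage sketch (the file names here are only illustrative): `addSegment`
 * takes 0-based generated line/column numbers, while `addMapping` takes a
 * 1-based `line` and a 0-based `column`, so the two calls below record the
 * same mapping:
 *
 *   const map = new GenMapping({ file: 'out.js' });
 *   addSegment(map, 0, 0, 'input.js', 0, 0);
 *   addMapping(map, {
 *     generated: { line: 1, column: 0 },
 *     source: 'input.js',
 *     original: { line: 1, column: 0 },
 *   });
 */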
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
export declare const maybeAddSegment: typeof addSegment;
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
export declare const maybeAddMapping: typeof addMapping;
/**
* Adds/removes the content of the source file to the source map.
*/
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
export declare function fromMap(input: SourceMapInput): GenMapping;
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
export declare function allMappings(map: GenMapping): Mapping[];
//# sourceMappingURL=gen-mapping.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"gen-mapping.d.ts","sourceRoot":"","sources":["../src/gen-mapping.ts"],"names":[],"mappings":"AAgBA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAGhE,OAAO,KAAK,EACV,gBAAgB,EAChB,gBAAgB,EAChB,GAAG,EACH,OAAO,EAKR,MAAM,SAAS,CAAC;AAEjB,YAAY,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,OAAO,EAAE,CAAC;AAE5D,MAAM,MAAM,OAAO,GAAG;IACpB,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5B,CAAC;AAIF;;GAEG;AACH,qBAAa,UAAU;IACrB,QAAgB,MAAM,CAAmB;IACzC,QAAgB,QAAQ,CAAmB;IAC3C,QAAgB,eAAe,CAAoB;IACnD,QAAgB,SAAS,CAAuB;IAGhD,QAAgB,WAAW,CAAmB;IACtC,IAAI,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IAChC,UAAU,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;gBAElC,EAAE,IAAI,EAAE,UAAU,EAAE,GAAE,OAAY;CAW/C;AAoBD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,CAAC,EAAE,IAAI,EACb,UAAU,CAAC,EAAE,IAAI,EACjB,YAAY,CAAC,EAAE,IAAI,EACnB,IAAI,CAAC,EAAE,IAAI,EACX,OAAO,CAAC,EAAE,IAAI,GACb,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,MAAM,EACpB,IAAI,CAAC,EAAE,IAAI,EACX,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GACtB,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,MAAM,EACpB,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GACtB,IAAI,CAAC;AAwBR;;;GAGG;AACH,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,CAAC,EAAE,IAAI,CAAC;IACd,QAAQ,CAAC,EAAE,IAAI,CAAC;IAChB,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,OAAO,CAAC,EAAE,IAAI,CAAC;CAChB,GACA,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB,GACA,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB,GACA,IAAI,CAAC;AAcR;;;;GAIG;AACH,eAAO,MAAM,eAAe,EAAE,OAAO,UAqBpC,CAAC;AAEF;;;;GAIG;AACH,eAAO,MAAM,eAAe,EAAE,OAAO,UAEpC,CAAC;AAEF;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,GAAG,IAAI,CAS9F;AAED,wBAAgB,SAAS,CAAC,GAAG,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,UAAO,QAYvE;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,UAAU,GAAG,gBAAgB,CAwB9D;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,UAAU,GAAG,gBAAgB,CAO9D;AAED;;GAEG;AACH,wBAAgB,OAAO,CAAC,KAAK,EAAE,cAAc,GAAG,UAAU,CAYzD;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,EAAE,CA0BtD"}

View File

@@ -1,4 +1,4 @@
declare type Key = string | number | symbol;
type Key = string | number | symbol;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
@@ -30,3 +30,4 @@ export declare function pop<T extends Key>(setarr: SetArray<T>): void;
*/
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
export {};
//# sourceMappingURL=set-array.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"set-array.d.ts","sourceRoot":"","sources":["../src/set-array.ts"],"names":[],"mappings":"AAAA,KAAK,GAAG,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC;AAEpC;;;;;;;GAOG;AACH,qBAAa,QAAQ,CAAC,CAAC,SAAS,GAAG,GAAG,GAAG;IACvC,QAAgB,QAAQ,CAAgC;IAChD,KAAK,EAAE,SAAS,CAAC,EAAE,CAAC;;CAM7B;AAeD;;GAEG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,MAAM,GAAG,SAAS,CAElF;AAED;;;GAGG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,MAAM,CAStE;AAED;;GAEG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,GAAG,IAAI,CAM5D;AAED;;GAEG;AACH,wBAAgB,MAAM,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,IAAI,CAYvE"}

View File

@@ -0,0 +1,33 @@
type Key = string | number | symbol;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
export declare class SetArray<T extends Key = Key> {
private _indexes;
array: readonly T[];
constructor();
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
/**
* Pops the last added item out of the SetArray.
*/
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
/**
* Removes the key, if it exists in the set.
*/
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
export {};
//# sourceMappingURL=set-array.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"set-array.d.ts","sourceRoot":"","sources":["../src/set-array.ts"],"names":[],"mappings":"AAAA,KAAK,GAAG,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC;AAEpC;;;;;;;GAOG;AACH,qBAAa,QAAQ,CAAC,CAAC,SAAS,GAAG,GAAG,GAAG;IACvC,QAAgB,QAAQ,CAAgC;IAChD,KAAK,EAAE,SAAS,CAAC,EAAE,CAAC;;CAM7B;AAeD;;GAEG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,MAAM,GAAG,SAAS,CAElF;AAED;;;GAGG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,MAAM,CAStE;AAED;;GAEG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,GAAG,IAAI,CAM5D;AAED;;GAEG;AACH,wBAAgB,MAAM,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,IAAI,CAYvE"}

View File

@@ -0,0 +1,13 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
//# sourceMappingURL=sourcemap-segment.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"sourcemap-segment.d.ts","sourceRoot":"","sources":["../src/sourcemap-segment.ts"],"names":[],"mappings":"AAAA,KAAK,eAAe,GAAG,MAAM,CAAC;AAC9B,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,UAAU,GAAG,MAAM,CAAC;AACzB,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB,MAAM,MAAM,gBAAgB,GACxB,CAAC,eAAe,CAAC,GACjB,CAAC,eAAe,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,CAAC,GACzD,CAAC,eAAe,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;AAE1E,eAAO,MAAM,MAAM,IAAI,CAAC;AACxB,eAAO,MAAM,aAAa,IAAI,CAAC;AAC/B,eAAO,MAAM,WAAW,IAAI,CAAC;AAC7B,eAAO,MAAM,aAAa,IAAI,CAAC;AAC/B,eAAO,MAAM,WAAW,IAAI,CAAC"}

View File

@@ -0,0 +1,13 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
//# sourceMappingURL=sourcemap-segment.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"sourcemap-segment.d.ts","sourceRoot":"","sources":["../src/sourcemap-segment.ts"],"names":[],"mappings":"AAAA,KAAK,eAAe,GAAG,MAAM,CAAC;AAC9B,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,UAAU,GAAG,MAAM,CAAC;AACzB,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB,MAAM,MAAM,gBAAgB,GACxB,CAAC,eAAe,CAAC,GACjB,CAAC,eAAe,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,CAAC,GACzD,CAAC,eAAe,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;AAE1E,eAAO,MAAM,MAAM,IAAI,CAAC;AACxB,eAAO,MAAM,aAAa,IAAI,CAAC;AAC/B,eAAO,MAAM,WAAW,IAAI,CAAC;AAC7B,eAAO,MAAM,aAAa,IAAI,CAAC;AAC/B,eAAO,MAAM,WAAW,IAAI,CAAC"}

View File

@@ -1,4 +1,4 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapSegment } from './sourcemap-segment.cts';
export interface SourceMapV3 {
file?: string | null;
names: readonly string[];
@@ -18,7 +18,14 @@ export interface Pos {
line: number;
column: number;
}
export declare type Mapping = {
export interface OriginalPos extends Pos {
source: string;
}
export interface BindingExpressionRange {
start: Pos;
expression: string;
}
export type Mapping = {
generated: Pos;
source: undefined;
original: undefined;
@@ -34,3 +41,4 @@ export declare type Mapping = {
original: Pos;
name: undefined;
};
//# sourceMappingURL=types.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAE5D,MAAM,WAAW,WAAW;IAC1B,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,KAAK,EAAE,SAAS,MAAM,EAAE,CAAC;IACzB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,SAAS,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IACpC,cAAc,CAAC,EAAE,SAAS,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IAC5C,OAAO,EAAE,CAAC,CAAC;IACX,UAAU,CAAC,EAAE,SAAS,MAAM,EAAE,CAAC;CAChC;AAED,MAAM,WAAW,gBAAiB,SAAQ,WAAW;IACnD,QAAQ,EAAE,MAAM,CAAC;CAGlB;AAED,MAAM,WAAW,gBAAiB,SAAQ,WAAW;IACnD,QAAQ,EAAE,SAAS,gBAAgB,EAAE,EAAE,CAAC;CAGzC;AAED,MAAM,WAAW,GAAG;IAClB,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,WAAY,SAAQ,GAAG;IACtC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,GAAG,CAAC;IACX,UAAU,EAAE,MAAM,CAAC;CACpB;AAKD,MAAM,MAAM,OAAO,GACf;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,SAAS,CAAC;IAClB,QAAQ,EAAE,SAAS,CAAC;IACpB,IAAI,EAAE,SAAS,CAAC;CACjB,GACD;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACd,GACD;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,SAAS,CAAC;CACjB,CAAC"}

node_modules/@jridgewell/gen-mapping/types/types.d.mts generated vendored Normal file (44 lines)
View File

@@ -0,0 +1,44 @@
import type { SourceMapSegment } from './sourcemap-segment.mts';
export interface SourceMapV3 {
file?: string | null;
names: readonly string[];
sourceRoot?: string;
sources: readonly (string | null)[];
sourcesContent?: readonly (string | null)[];
version: 3;
ignoreList?: readonly number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
mappings: readonly SourceMapSegment[][];
}
export interface Pos {
line: number;
column: number;
}
export interface OriginalPos extends Pos {
source: string;
}
export interface BindingExpressionRange {
start: Pos;
expression: string;
}
export type Mapping = {
generated: Pos;
source: undefined;
original: undefined;
name: undefined;
} | {
generated: Pos;
source: string;
original: Pos;
name: string;
} | {
generated: Pos;
source: string;
original: Pos;
name: undefined;
};
//# sourceMappingURL=types.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAE5D,MAAM,WAAW,WAAW;IAC1B,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,KAAK,EAAE,SAAS,MAAM,EAAE,CAAC;IACzB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,SAAS,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IACpC,cAAc,CAAC,EAAE,SAAS,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IAC5C,OAAO,EAAE,CAAC,CAAC;IACX,UAAU,CAAC,EAAE,SAAS,MAAM,EAAE,CAAC;CAChC;AAED,MAAM,WAAW,gBAAiB,SAAQ,WAAW;IACnD,QAAQ,EAAE,MAAM,CAAC;CAGlB;AAED,MAAM,WAAW,gBAAiB,SAAQ,WAAW;IACnD,QAAQ,EAAE,SAAS,gBAAgB,EAAE,EAAE,CAAC;CAGzC;AAED,MAAM,WAAW,GAAG;IAClB,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,WAAY,SAAQ,GAAG;IACtC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,GAAG,CAAC;IACX,UAAU,EAAE,MAAM,CAAC;CACpB;AAKD,MAAM,MAAM,OAAO,GACf;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,SAAS,CAAC;IAClB,QAAQ,EAAE,SAAS,CAAC;IACpB,IAAI,EAAE,SAAS,CAAC;CACjB,GACD;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACd,GACD;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,SAAS,CAAC;CACjB,CAAC"}

View File

@@ -1,37 +0,0 @@
# @jridgewell/set-array
> Like a Set, but provides the index of the `key` in the backing array
This is designed to allow synchronizing a second array with the contents of the backing array, like
how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there
are never duplicates.
## Installation
```sh
npm install @jridgewell/set-array
```
## Usage
```js
import { SetArray, get, put, pop } from '@jridgewell/set-array';
const sa = new SetArray();
let index = put(sa, 'first');
assert.strictEqual(index, 0);
index = put(sa, 'second');
assert.strictEqual(index, 1);
assert.deepEqual(sa.array, [ 'first', 'second' ]);
index = get(sa, 'first');
assert.strictEqual(index, 0);
pop(sa);
index = get(sa, 'second');
assert.strictEqual(index, undefined);
assert.deepEqual(sa.array, [ 'first' ]);
```
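`remove` is also exported; a minimal sketch of how it behaves (continuing the example shape above):

```js
import { SetArray, get, put, remove } from '@jridgewell/set-array';

const sa = new SetArray();
put(sa, 'first');
put(sa, 'second');

// Removing a key shifts later entries down so indexes stay contiguous.
remove(sa, 'first');

assert.strictEqual(get(sa, 'first'), undefined);
assert.strictEqual(get(sa, 'second'), 0);
assert.deepEqual(sa.array, [ 'second' ]);
```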

View File

@@ -1,69 +0,0 @@
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
class SetArray {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
* with public access modifiers.
*/
function cast(set) {
return set;
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
function get(setarr, key) {
return cast(setarr)._indexes[key];
}
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
function put(setarr, key) {
// The key may or may not be present. If it is present, it's a number.
const index = get(setarr, key);
if (index !== undefined)
return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return (indexes[key] = length - 1);
}
/**
* Pops the last added item out of the SetArray.
*/
function pop(setarr) {
const { array, _indexes: indexes } = cast(setarr);
if (array.length === 0)
return;
const last = array.pop();
indexes[last] = undefined;
}
/**
* Removes the key, if it exists in the set.
*/
function remove(setarr, key) {
const index = get(setarr, key);
if (index === undefined)
return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = undefined;
array.pop();
}
export { SetArray, get, pop, put, remove };
//# sourceMappingURL=set-array.mjs.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n 
array.pop();\n}\n"],"names":[],"mappings":"AAEA;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CACF;AAOD;;;;AAIA,SAAS,IAAI,CAAgB,GAAgB;IAC3C,OAAO,GAAU,CAAC;AACpB,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;IAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AACpC,CAAC;AAED;;;;SAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;IAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO,KAAK,CAAC;IAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;AACrC,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB;IACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO;IAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;IAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;AAC5B,CAAC;AAED;;;SAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;IAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO;IAEhC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;QACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;KACf;IACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;IACzB,KAAK,CAAC,GAAG,EAAE,CAAC;AACd;;;;"}

View File

@@ -1,83 +0,0 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {}));
})(this, (function (exports) { 'use strict';
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
class SetArray {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
* with public access modifiers.
*/
function cast(set) {
return set;
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
function get(setarr, key) {
return cast(setarr)._indexes[key];
}
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
function put(setarr, key) {
// The key may or may not be present. If it is present, it's a number.
const index = get(setarr, key);
if (index !== undefined)
return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return (indexes[key] = length - 1);
}
/**
* Pops the last added item out of the SetArray.
*/
function pop(setarr) {
const { array, _indexes: indexes } = cast(setarr);
if (array.length === 0)
return;
const last = array.pop();
indexes[last] = undefined;
}
/**
* Removes the key, if it exists in the set.
*/
function remove(setarr, key) {
const index = get(setarr, key);
if (index === undefined)
return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = undefined;
array.pop();
}
exports.SetArray = SetArray;
exports.get = get;
exports.pop = pop;
exports.put = put;
exports.remove = remove;
Object.defineProperty(exports, '__esModule', { value: true });
}));
//# sourceMappingURL=set-array.umd.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n 
array.pop();\n}\n"],"names":[],"mappings":";;;;;;IAEA;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KACF;IAOD;;;;IAIA,SAAS,IAAI,CAAgB,GAAgB;QAC3C,OAAO,GAAU,CAAC;IACpB,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IACpC,CAAC;IAED;;;;aAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;QAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;IACrC,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB;QACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;QAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;IAC5B,CAAC;IAED;;;aAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;QAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO;QAEhC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;YACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;SACf;QACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;QACzB,KAAK,CAAC,GAAG,EAAE,CAAC;IACd;;;;;;;;;;;;;;"}

View File

@@ -1,65 +0,0 @@
{
"name": "@jridgewell/set-array",
"version": "1.2.1",
"description": "Like a Set, but provides the index of the `key` in the backing array",
"keywords": [],
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"repository": "https://github.com/jridgewell/set-array",
"main": "dist/set-array.umd.js",
"module": "dist/set-array.mjs",
"typings": "dist/types/set-array.d.ts",
"exports": {
".": [
{
"types": "./dist/types/set-array.d.ts",
"browser": "./dist/set-array.umd.js",
"require": "./dist/set-array.umd.js",
"import": "./dist/set-array.mjs"
},
"./dist/set-array.umd.js"
],
"./package.json": "./package.json"
},
"files": [
"dist"
],
"engines": {
"node": ">=6.0.0"
},
"scripts": {
"prebuild": "rm -rf dist",
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"test": "run-s -n test:lint test:only",
"test:debug": "mocha --inspect-brk",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:coverage": "c8 mocha",
"test:watch": "mocha --watch",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build"
},
"devDependencies": {
"@rollup/plugin-typescript": "8.3.0",
"@types/mocha": "9.1.1",
"@types/node": "17.0.29",
"@typescript-eslint/eslint-plugin": "5.10.0",
"@typescript-eslint/parser": "5.10.0",
"c8": "7.11.0",
"eslint": "8.7.0",
"eslint-config-prettier": "8.3.0",
"mocha": "9.2.0",
"npm-run-all": "4.1.5",
"prettier": "2.5.1",
"rollup": "2.66.0",
"tsx": "4.7.1",
"typescript": "4.5.5"
}
}

View File

@@ -1,4 +1,4 @@
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

node_modules/@jridgewell/source-map/README.md generated vendored Normal file (184 lines)
View File

@@ -0,0 +1,184 @@
# @jridgewell/source-map
> Packages `@jridgewell/trace-mapping` and `@jridgewell/gen-mapping` into the familiar source-map API
This isn't the full API, but it's the core functionality. This wraps
[@jridgewell/trace-mapping][trace-mapping] and [@jridgewell/gen-mapping][gen-mapping]
implementations.
## Installation
```sh
npm install @jridgewell/source-map
```
## Usage
TODO
### SourceMapConsumer
```typescript
import { SourceMapConsumer } from '@jridgewell/source-map';
const smc = new SourceMapConsumer({
version: 3,
names: ['foo'],
sources: ['input.js'],
mappings: 'AAAAA',
});
```
#### SourceMapConsumer.fromSourceMap(mapGenerator[, mapUrl])
Transforms a `SourceMapGenerator` into a `SourceMapConsumer`.
```typescript
const smg = new SourceMapGenerator();
const smc = SourceMapConsumer.fromSourceMap(smg);
smc.originalPositionFor({ line: 1, column: 0 });
```
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
```typescript
const smc = new SourceMapConsumer(map);
smc.originalPositionFor({ line: 1, column: 0 });
```
#### SourceMapConsumer.prototype.mappings
```typescript
const smc = new SourceMapConsumer(map);
smc.mappings; // AAAA
```
#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
```typescript
const smc = new SourceMapConsumer(map);
smc.allGeneratedPositionsFor({ line: 1, column: 5, source: "baz.ts" });
// [
// { line: 2, column: 8 }
// ]
```
#### SourceMapConsumer.prototype.eachMapping(callback[, context[, order]])
> This implementation currently does not support the `order` parameter.
> It can only iterate in generated order.
```typescript
const smc = new SourceMapConsumer(map);
smc.eachMapping((mapping) => {
// { source: 'baz.ts',
// generatedLine: 4,
// generatedColumn: 5,
// originalLine: 4,
// originalColumn: 5,
// name: null }
});
```
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
```typescript
const smc = new SourceMapConsumer(map);
smc.generatedPositionFor({ line: 1, column: 5, source: "baz.ts" });
// { line: 2, column: 8 }
```
#### SourceMapConsumer.prototype.hasContentsOfAllSources()
```typescript
const smc = new SourceMapConsumer(map);
smc.hasContentsOfAllSources();
// true
```
#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
```typescript
const smc = new SourceMapConsumer(map);
smc.generatedPositionFor("baz.ts");
// "export default ..."
```
#### SourceMapConsumer.prototype.version
Returns the source map's version
### SourceMapGenerator
```typescript
import { SourceMapGenerator } from '@jridgewell/source-map';
const smg = new SourceMapGenerator({
file: 'output.js',
sourceRoot: 'https://example.com/',
});
```
#### SourceMapGenerator.fromSourceMap(map)
Transforms a `SourceMapConsumer` into a `SourceMapGenerator`.
```typescript
const smc = new SourceMapConsumer();
const smg = SourceMapGenerator.fromSourceMap(smc);
```
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
> This method is not implemented yet
#### SourceMapGenerator.prototype.addMapping(mapping)
```typescript
const smg = new SourceMapGenerator();
smg.addMapping({
generated: { line: 1, column: 0 },
source: 'input.js',
original: { line: 1, column: 0 },
name: 'foo',
});
```
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
```typescript
const smg = new SourceMapGenerator();
smg.setSourceContent('input.js', 'foobar');
```
#### SourceMapGenerator.prototype.toJSON()
```typescript
const smg = new SourceMapGenerator();
smg.toJSON(); // { version: 3, names: [], sources: [], mappings: '' }
```
#### SourceMapGenerator.prototype.toString()
```typescript
const smg = new SourceMapGenerator();
smg.toJSON(); // "{version:3,names:[],sources:[],mappings:''}"
```
#### SourceMapGenerator.prototype.toDecodedMap()
```typescript
const smg = new SourceMapGenerator();
smg.toDecodedMap(); // { version: 3, names: [], sources: [], mappings: [] }
```
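Putting the two classes together, a minimal round-trip sketch (the input map literal is only an example):

```typescript
import { SourceMapConsumer, SourceMapGenerator } from '@jridgewell/source-map';

const smc = new SourceMapConsumer({
  version: 3,
  names: ['foo'],
  sources: ['input.js'],
  mappings: 'AAAAA',
});

// Rebuild a generator from the consumer, then serialize it back out.
const smg = SourceMapGenerator.fromSourceMap(smc);
const rebuilt = smg.toJSON(); // a plain { version: 3, names, sources, mappings, ... } object
```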
## Known differences with other implementations
This implementation has some differences with `source-map` and `source-map-js`.
- `SourceMapConsumer.prototype.eachMapping()`
- Does not support the `order` argument
- `SourceMapGenerator.prototype.applySourceMap()`
- Not implemented
[trace-mapping]: https://github.com/jridgewell/trace-mapping/
[gen-mapping]: https://github.com/jridgewell/gen-mapping/

node_modules/@jridgewell/source-map/dist/source-map.mjs generated vendored Normal file (101 lines)
View File

@@ -0,0 +1,101 @@
// src/source-map.ts
import {
AnyMap,
originalPositionFor,
generatedPositionFor,
allGeneratedPositionsFor,
eachMapping,
encodedMappings,
sourceContentFor
} from "@jridgewell/trace-mapping";
import {
GenMapping,
maybeAddMapping,
toDecodedMap,
toEncodedMap,
setSourceContent,
fromMap
} from "@jridgewell/gen-mapping";
var SourceMapConsumer = class _SourceMapConsumer {
constructor(map, mapUrl) {
const trace = this._map = new AnyMap(map, mapUrl);
this.file = trace.file;
this.names = trace.names;
this.sourceRoot = trace.sourceRoot;
this.sources = trace.resolvedSources;
this.sourcesContent = trace.sourcesContent;
this.version = trace.version;
}
static fromSourceMap(map, mapUrl) {
if (map.toDecodedMap) {
return new _SourceMapConsumer(map.toDecodedMap(), mapUrl);
}
return new _SourceMapConsumer(map.toJSON(), mapUrl);
}
get mappings() {
return encodedMappings(this._map);
}
originalPositionFor(needle) {
return originalPositionFor(this._map, needle);
}
generatedPositionFor(originalPosition) {
return generatedPositionFor(this._map, originalPosition);
}
allGeneratedPositionsFor(originalPosition) {
return allGeneratedPositionsFor(this._map, originalPosition);
}
hasContentsOfAllSources() {
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
return false;
}
for (const content of this.sourcesContent) {
if (content == null) {
return false;
}
}
return true;
}
sourceContentFor(source, nullOnMissing) {
const sourceContent = sourceContentFor(this._map, source);
if (sourceContent != null) {
return sourceContent;
}
if (nullOnMissing) {
return null;
}
throw new Error(`"${source}" is not in the SourceMap.`);
}
eachMapping(callback, context) {
eachMapping(this._map, context ? callback.bind(context) : callback);
}
destroy() {
}
};
var SourceMapGenerator = class _SourceMapGenerator {
constructor(opts) {
this._map = opts instanceof GenMapping ? opts : new GenMapping(opts);
}
static fromSourceMap(consumer) {
return new _SourceMapGenerator(fromMap(consumer));
}
addMapping(mapping) {
maybeAddMapping(this._map, mapping);
}
setSourceContent(source, content) {
setSourceContent(this._map, source, content);
}
toJSON() {
return toEncodedMap(this._map);
}
toString() {
return JSON.stringify(this.toJSON());
}
toDecodedMap() {
return toDecodedMap(this._map);
}
};
export {
SourceMapConsumer,
SourceMapGenerator
};
//# sourceMappingURL=source-map.mjs.map

View File

@@ -0,0 +1,6 @@
{
"version": 3,
"sources": ["../src/source-map.ts"],
"mappings": ";AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAaA,IAAM,oBAAN,MAAM,mBAAkB;AAAA,EAS7B,YACE,KACA,QACA;AACA,UAAM,QAAS,KAAK,OAAO,IAAI,OAAO,KAAK,MAAM;AAEjD,SAAK,OAAO,MAAM;AAClB,SAAK,QAAQ,MAAM;AACnB,SAAK,aAAa,MAAM;AACxB,SAAK,UAAU,MAAM;AACrB,SAAK,iBAAiB,MAAM;AAC5B,SAAK,UAAU,MAAM;AAAA,EACvB;AAAA,EAEA,OAAO,cAAc,KAAyB,QAAuC;AAGnF,QAAI,IAAI,cAAc;AACpB,aAAO,IAAI,mBAAkB,IAAI,aAAa,GAA8B,MAAM;AAAA,IACpF;AAGA,WAAO,IAAI,mBAAkB,IAAI,OAAO,GAA8B,MAAM;AAAA,EAC9E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,gBAAgB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,oBACE,QACwC;AACxC,WAAO,oBAAoB,KAAK,MAAM,MAAM;AAAA,EAC9C;AAAA,EAEA,qBACE,kBACyC;AACzC,WAAO,qBAAqB,KAAK,MAAM,gBAAgB;AAAA,EACzD;AAAA,EAEA,yBACE,kBAC2C;AAC3C,WAAO,yBAAyB,KAAK,MAAM,gBAAgB;AAAA,EAC7D;AAAA,EAEA,0BAAmC;AACjC,QAAI,CAAC,KAAK,kBAAkB,KAAK,eAAe,WAAW,KAAK,QAAQ,QAAQ;AAC9E,aAAO;AAAA,IACT;AAEA,eAAW,WAAW,KAAK,gBAAgB;AACzC,UAAI,WAAW,MAAM;AACnB,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,iBAAiB,QAAgB,eAAwC;AACvE,UAAM,gBAAgB,iBAAiB,KAAK,MAAM,MAAM;AACxD,QAAI,iBAAiB,MAAM;AACzB,aAAO;AAAA,IACT;AAEA,QAAI,eAAe;AACjB,aAAO;AAAA,IACT;AACA,UAAM,IAAI,MAAM,IAAI,MAAM,4BAA4B;AAAA,EACxD;AAAA,EAEA,YACE,UACA,SACM;AAEN,gBAAY,KAAK,MAAM,UAAU,SAAS,KAAK,OAAO,IAAI,QAAQ;AAAA,EACpE;AAAA,EAEA,UAAU;AAAA,EAEV;AACF;AAEO,IAAM,qBAAN,MAAM,oBAAmB;AAAA,EAG9B,YAAY,MAAgE;AAE1E,SAAK,OAAO,gBAAgB,aAAa,OAAO,IAAI,WAAW,IAAI;AAAA,EACrE;AAAA,EAEA,OAAO,cAAc,UAA6B;AAChD,WAAO,IAAI,oBAAmB,QAAQ,QAAQ,CAAC;AAAA,EACjD;AAAA,EAEA,WAAW,SAAoF;AAC7F,oBAAgB,KAAK,MAAM,OAAO;AAAA,EACpC;AAAA,EAEA,iBACE,QACA,SACqC;AACrC,qBAAiB,KAAK,MAAM,QAAQ,OAAO;AAAA,EAC7C;AAAA,EAEA,SAA0C;AACxC,WAAO,aAAa,KAAK,IAAI;AAAA,EAC/B;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK,UAAU,KAAK,OAAO,CAAC;AAAA,EACrC;AAAA,EAEA,eAAgD;AAC9C,WAAO,aAAa,KAAK,IAAI;AAAA,EAC/B;AACF;",
"names": []
}

View File

@@ -0,0 +1,140 @@
(function (global, factory, m) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(module, require('@jridgewell/gen-mapping'), require('@jridgewell/trace-mapping')) :
typeof define === 'function' && define.amd ? define(['module', '@jridgewell/gen-mapping', '@jridgewell/trace-mapping'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(m = { exports: {} }, global.genMapping, global.traceMapping), global.sourceMap = 'default' in m.exports ? m.exports.default : m.exports);
})(this, (function (module, require_genMapping, require_traceMapping) {
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function __require() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// umd:@jridgewell/trace-mapping
var require_trace_mapping = __commonJS({
"umd:@jridgewell/trace-mapping"(exports, module2) {
module2.exports = require_traceMapping;
}
});
// umd:@jridgewell/gen-mapping
var require_gen_mapping = __commonJS({
"umd:@jridgewell/gen-mapping"(exports, module2) {
module2.exports = require_genMapping;
}
});
// src/source-map.ts
var source_map_exports = {};
__export(source_map_exports, {
SourceMapConsumer: () => SourceMapConsumer,
SourceMapGenerator: () => SourceMapGenerator
});
module.exports = __toCommonJS(source_map_exports);
var import_trace_mapping = __toESM(require_trace_mapping());
var import_gen_mapping = __toESM(require_gen_mapping());
var SourceMapConsumer = class _SourceMapConsumer {
constructor(map, mapUrl) {
const trace = this._map = new import_trace_mapping.AnyMap(map, mapUrl);
this.file = trace.file;
this.names = trace.names;
this.sourceRoot = trace.sourceRoot;
this.sources = trace.resolvedSources;
this.sourcesContent = trace.sourcesContent;
this.version = trace.version;
}
static fromSourceMap(map, mapUrl) {
if (map.toDecodedMap) {
return new _SourceMapConsumer(map.toDecodedMap(), mapUrl);
}
return new _SourceMapConsumer(map.toJSON(), mapUrl);
}
get mappings() {
return (0, import_trace_mapping.encodedMappings)(this._map);
}
originalPositionFor(needle) {
return (0, import_trace_mapping.originalPositionFor)(this._map, needle);
}
generatedPositionFor(originalPosition) {
return (0, import_trace_mapping.generatedPositionFor)(this._map, originalPosition);
}
allGeneratedPositionsFor(originalPosition) {
return (0, import_trace_mapping.allGeneratedPositionsFor)(this._map, originalPosition);
}
hasContentsOfAllSources() {
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
return false;
}
for (const content of this.sourcesContent) {
if (content == null) {
return false;
}
}
return true;
}
sourceContentFor(source, nullOnMissing) {
const sourceContent = (0, import_trace_mapping.sourceContentFor)(this._map, source);
if (sourceContent != null) {
return sourceContent;
}
if (nullOnMissing) {
return null;
}
throw new Error(`"${source}" is not in the SourceMap.`);
}
eachMapping(callback, context) {
(0, import_trace_mapping.eachMapping)(this._map, context ? callback.bind(context) : callback);
}
destroy() {
}
};
var SourceMapGenerator = class _SourceMapGenerator {
constructor(opts) {
this._map = opts instanceof import_gen_mapping.GenMapping ? opts : new import_gen_mapping.GenMapping(opts);
}
static fromSourceMap(consumer) {
return new _SourceMapGenerator((0, import_gen_mapping.fromMap)(consumer));
}
addMapping(mapping) {
(0, import_gen_mapping.maybeAddMapping)(this._map, mapping);
}
setSourceContent(source, content) {
(0, import_gen_mapping.setSourceContent)(this._map, source, content);
}
toJSON() {
return (0, import_gen_mapping.toEncodedMap)(this._map);
}
toString() {
return JSON.stringify(this.toJSON());
}
toDecodedMap() {
return (0, import_gen_mapping.toDecodedMap)(this._map);
}
};
}));
//# sourceMappingURL=source-map.umd.js.map

View File

@@ -0,0 +1,6 @@
{
"version": 3,
"sources": ["umd:@jridgewell/trace-mapping", "umd:@jridgewell/gen-mapping", "../src/source-map.ts"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA,2CAAAA,SAAA;AAAA,IAAAA,QAAO,UAAU;AAAA;AAAA;;;ACAjB;AAAA,yCAAAC,SAAA;AAAA,IAAAA,QAAO,UAAU;AAAA;AAAA;;;ACAjB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAQO;AACP,yBAOO;AAaA,IAAM,oBAAN,MAAM,mBAAkB;AAAA,EAS7B,YACE,KACA,QACA;AACA,UAAM,QAAS,KAAK,OAAO,IAAI,4BAAO,KAAK,MAAM;AAEjD,SAAK,OAAO,MAAM;AAClB,SAAK,QAAQ,MAAM;AACnB,SAAK,aAAa,MAAM;AACxB,SAAK,UAAU,MAAM;AACrB,SAAK,iBAAiB,MAAM;AAC5B,SAAK,UAAU,MAAM;AAAA,EACvB;AAAA,EAEA,OAAO,cAAc,KAAyB,QAAuC;AAGnF,QAAI,IAAI,cAAc;AACpB,aAAO,IAAI,mBAAkB,IAAI,aAAa,GAA8B,MAAM;AAAA,IACpF;AAGA,WAAO,IAAI,mBAAkB,IAAI,OAAO,GAA8B,MAAM;AAAA,EAC9E;AAAA,EAEA,IAAI,WAAmB;AACrB,eAAO,sCAAgB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,oBACE,QACwC;AACxC,eAAO,0CAAoB,KAAK,MAAM,MAAM;AAAA,EAC9C;AAAA,EAEA,qBACE,kBACyC;AACzC,eAAO,2CAAqB,KAAK,MAAM,gBAAgB;AAAA,EACzD;AAAA,EAEA,yBACE,kBAC2C;AAC3C,eAAO,+CAAyB,KAAK,MAAM,gBAAgB;AAAA,EAC7D;AAAA,EAEA,0BAAmC;AACjC,QAAI,CAAC,KAAK,kBAAkB,KAAK,eAAe,WAAW,KAAK,QAAQ,QAAQ;AAC9E,aAAO;AAAA,IACT;AAEA,eAAW,WAAW,KAAK,gBAAgB;AACzC,UAAI,WAAW,MAAM;AACnB,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,iBAAiB,QAAgB,eAAwC;AACvE,UAAM,oBAAgB,uCAAiB,KAAK,MAAM,MAAM;AACxD,QAAI,iBAAiB,MAAM;AACzB,aAAO;AAAA,IACT;AAEA,QAAI,eAAe;AACjB,aAAO;AAAA,IACT;AACA,UAAM,IAAI,MAAM,IAAI,MAAM,4BAA4B;AAAA,EACxD;AAAA,EAEA,YACE,UACA,SACM;AAEN,0CAAY,KAAK,MAAM,UAAU,SAAS,KAAK,OAAO,IAAI,QAAQ;AAAA,EACpE;AAAA,EAEA,UAAU;AAAA,EAEV;AACF;AAEO,IAAM,qBAAN,MAAM,oBAAmB;AAAA,EAG9B,YAAY,MAAgE;AAE1E,SAAK,OAAO,gBAAgB,gCAAa,OAAO,IAAI,8BAAW,IAAI;AAAA,EACrE;AAAA,EAEA,OAAO,cAAc,UAA6B;AAChD,WAAO,IAAI,wBAAmB,4BAAQ,QAAQ,CAAC;AAAA,EACjD;AAAA,EAEA,WAAW,SAAoF;AAC7F,4CAAgB,KAAK,MAAM,OAAO;AAAA,EACpC;AAAA,EAEA,iBACE,QACA,SACqC;AACrC,6CAAiB,KAAK,MAAM,QAAQ,OAAO;AAAA,EAC7C;AAAA,EAEA,SAA0C;AACxC,eAAO,iCAAa,KAAK,IAAI;AAAA,EAC/B;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK,UAAU,KAAK,OAAO,CAAC;AAAA,EACrC;AAAA,EAEA,eAAgD;AAC9C,eAAO,iCAAa,KAAK,IAAI;AAAA,EAC/B;AACF;",
"names": ["module", "module"]
}

node_modules/@jridgewell/source-map/package.json generated vendored Normal file (72 lines)
View File

@@ -0,0 +1,72 @@
{
"name": "@jridgewell/source-map",
"version": "0.3.10",
"description": "Packages @jridgewell/trace-mapping and @jridgewell/gen-mapping into the familiar source-map API",
"keywords": [
"sourcemap",
"source",
"map"
],
"main": "dist/source-map.umd.js",
"module": "dist/source-map.mjs",
"types": "types/source-map.d.cts",
"files": [
"dist",
"src",
"types"
],
"exports": {
".": [
{
"import": {
"types": "./types/source-map.d.mts",
"default": "./dist/source-map.mjs"
},
"require": {
"types": "./types/source-map.d.cts",
"default": "./dist/source-map.umd.js"
},
"browser": {
"types": "./types/source-map.d.cts",
"default": "./dist/source-map.umd.js"
}
},
"./dist/source-map.umd.js"
],
"./package.json": "./package.json"
},
"scripts": {
"benchmark": "run-s build:code benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:code build:types",
"build:code": "node ../../esbuild.mjs source-map.ts",
"build:types": "run-s build:types:force build:types:emit build:types:mts",
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
"build:types:emit": "tsc --project tsconfig.build.json",
"build:types:mts": "node ../../mts-types.mjs",
"clean": "run-s -n clean:code clean:types",
"clean:code": "tsc --build --clean tsconfig.build.json",
"clean:types": "rimraf dist types",
"test": "run-s -n test:types test:only test:format",
"test:format": "prettier --check '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:types": "eslint '{src,test}/**/*.ts'",
"lint": "run-s -n lint:types lint:format",
"lint:format": "npm run test:format -- --write",
"lint:types": "npm run test:types -- --fix",
"prepublishOnly": "npm run-s -n build test"
},
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/source-map",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/sourcemaps.git",
"directory": "packages/source-map"
},
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.5",
"@jridgewell/trace-mapping": "^0.3.25"
}
}

node_modules/@jridgewell/source-map/src/source-map.ts generated vendored Normal file (159 lines)
View File

@@ -0,0 +1,159 @@
import {
AnyMap,
originalPositionFor,
generatedPositionFor,
allGeneratedPositionsFor,
eachMapping,
encodedMappings,
sourceContentFor,
} from '@jridgewell/trace-mapping';
import {
GenMapping,
maybeAddMapping,
toDecodedMap,
toEncodedMap,
setSourceContent,
fromMap,
} from '@jridgewell/gen-mapping';
import type {
TraceMap,
SourceMapInput,
SectionedSourceMapInput,
DecodedSourceMap,
} from '@jridgewell/trace-mapping';
export type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap };
import type { Mapping, EncodedSourceMap } from '@jridgewell/gen-mapping';
export type { Mapping, EncodedSourceMap };
export class SourceMapConsumer {
declare private _map: TraceMap;
declare file: TraceMap['file'];
declare names: TraceMap['names'];
declare sourceRoot: TraceMap['sourceRoot'];
declare sources: TraceMap['sources'];
declare sourcesContent: TraceMap['sourcesContent'];
declare version: TraceMap['version'];
constructor(
map: ConstructorParameters<typeof AnyMap>[0],
mapUrl?: ConstructorParameters<typeof AnyMap>[1],
) {
const trace = (this._map = new AnyMap(map, mapUrl));
this.file = trace.file;
this.names = trace.names;
this.sourceRoot = trace.sourceRoot;
this.sources = trace.resolvedSources;
this.sourcesContent = trace.sourcesContent;
this.version = trace.version;
}
static fromSourceMap(map: SourceMapGenerator, mapUrl?: Parameters<typeof AnyMap>[1]) {
// This is more performant if we receive
// a @jridgewell/source-map SourceMapGenerator
if (map.toDecodedMap) {
return new SourceMapConsumer(map.toDecodedMap() as SectionedSourceMapInput, mapUrl);
}
// This is a fallback for `source-map` and `source-map-js`
return new SourceMapConsumer(map.toJSON() as SectionedSourceMapInput, mapUrl);
}
get mappings(): string {
return encodedMappings(this._map);
}
originalPositionFor(
needle: Parameters<typeof originalPositionFor>[1],
): ReturnType<typeof originalPositionFor> {
return originalPositionFor(this._map, needle);
}
generatedPositionFor(
originalPosition: Parameters<typeof generatedPositionFor>[1],
): ReturnType<typeof generatedPositionFor> {
return generatedPositionFor(this._map, originalPosition);
}
allGeneratedPositionsFor(
originalPosition: Parameters<typeof generatedPositionFor>[1],
): ReturnType<typeof generatedPositionFor>[] {
return allGeneratedPositionsFor(this._map, originalPosition);
}
hasContentsOfAllSources(): boolean {
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
return false;
}
for (const content of this.sourcesContent) {
if (content == null) {
return false;
}
}
return true;
}
sourceContentFor(source: string, nullOnMissing?: boolean): string | null {
const sourceContent = sourceContentFor(this._map, source);
if (sourceContent != null) {
return sourceContent;
}
if (nullOnMissing) {
return null;
}
throw new Error(`"${source}" is not in the SourceMap.`);
}
eachMapping(
callback: Parameters<typeof eachMapping>[1],
context?: any /*, order?: number*/,
): void {
// order is ignored as @jridgewell/trace-map doesn't implement it
eachMapping(this._map, context ? callback.bind(context) : callback);
}
destroy() {
// noop.
}
}
export class SourceMapGenerator {
declare private _map: GenMapping;
constructor(opts: ConstructorParameters<typeof GenMapping>[0] | GenMapping) {
// TODO :: should this be duck-typed ?
this._map = opts instanceof GenMapping ? opts : new GenMapping(opts);
}
static fromSourceMap(consumer: SourceMapConsumer) {
return new SourceMapGenerator(fromMap(consumer));
}
addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping> {
maybeAddMapping(this._map, mapping);
}
setSourceContent(
source: Parameters<typeof setSourceContent>[1],
content: Parameters<typeof setSourceContent>[2],
): ReturnType<typeof setSourceContent> {
setSourceContent(this._map, source, content);
}
toJSON(): ReturnType<typeof toEncodedMap> {
return toEncodedMap(this._map);
}
toString(): string {
return JSON.stringify(this.toJSON());
}
toDecodedMap(): ReturnType<typeof toDecodedMap> {
return toDecodedMap(this._map);
}
}
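
The two classes above adapt @jridgewell/trace-mapping and @jridgewell/gen-mapping to the familiar source-map consumer/generator API. A rough usage sketch, assuming a tiny hand-written map; the input map literal and source text below are invented for illustration:

import { SourceMapConsumer, SourceMapGenerator } from '@jridgewell/source-map';

// One segment, 'AAAA', maps generated line 1 column 0 back to input.js 1:0.
const consumer = new SourceMapConsumer({
  version: 3,
  sources: ['input.js'],
  names: [],
  mappings: 'AAAA',
});
console.log(consumer.originalPositionFor({ line: 1, column: 0 }));
// -> { source: 'input.js', line: 1, column: 0, name: null }

// fromSourceMap takes the fast path above because consumer.toDecodedMap exists.
const generator = SourceMapGenerator.fromSourceMap(consumer);
generator.setSourceContent('input.js', 'console.log("hi");');
console.log(generator.toString()); // JSON string of the encoded map, same data as toJSON()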

View File

@@ -0,0 +1,36 @@
import { AnyMap, originalPositionFor, generatedPositionFor, eachMapping } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddMapping, toDecodedMap, toEncodedMap, setSourceContent } from '@jridgewell/gen-mapping';
import type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap } from '@jridgewell/trace-mapping';
export type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap };
import type { Mapping, EncodedSourceMap } from '@jridgewell/gen-mapping';
export type { Mapping, EncodedSourceMap };
export declare class SourceMapConsumer {
private _map;
file: TraceMap['file'];
names: TraceMap['names'];
sourceRoot: TraceMap['sourceRoot'];
sources: TraceMap['sources'];
sourcesContent: TraceMap['sourcesContent'];
version: TraceMap['version'];
constructor(map: ConstructorParameters<typeof AnyMap>[0], mapUrl?: ConstructorParameters<typeof AnyMap>[1]);
static fromSourceMap(map: SourceMapGenerator, mapUrl?: Parameters<typeof AnyMap>[1]): SourceMapConsumer;
get mappings(): string;
originalPositionFor(needle: Parameters<typeof originalPositionFor>[1]): ReturnType<typeof originalPositionFor>;
generatedPositionFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>;
allGeneratedPositionsFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>[];
hasContentsOfAllSources(): boolean;
sourceContentFor(source: string, nullOnMissing?: boolean): string | null;
eachMapping(callback: Parameters<typeof eachMapping>[1], context?: any): void;
destroy(): void;
}
export declare class SourceMapGenerator {
private _map;
constructor(opts: ConstructorParameters<typeof GenMapping>[0] | GenMapping);
static fromSourceMap(consumer: SourceMapConsumer): SourceMapGenerator;
addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping>;
setSourceContent(source: Parameters<typeof setSourceContent>[1], content: Parameters<typeof setSourceContent>[2]): ReturnType<typeof setSourceContent>;
toJSON(): ReturnType<typeof toEncodedMap>;
toString(): string;
toDecodedMap(): ReturnType<typeof toDecodedMap>;
}
//# sourceMappingURL=source-map.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"source-map.d.ts","sourceRoot":"","sources":["../src/source-map.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,MAAM,EACN,mBAAmB,EACnB,oBAAoB,EAEpB,WAAW,EAGZ,MAAM,2BAA2B,CAAC;AACnC,OAAO,EACL,UAAU,EACV,eAAe,EACf,YAAY,EACZ,YAAY,EACZ,gBAAgB,EAEjB,MAAM,yBAAyB,CAAC;AAEjC,OAAO,KAAK,EACV,QAAQ,EACR,cAAc,EACd,uBAAuB,EACvB,gBAAgB,EACjB,MAAM,2BAA2B,CAAC;AACnC,YAAY,EAAE,QAAQ,EAAE,cAAc,EAAE,uBAAuB,EAAE,gBAAgB,EAAE,CAAC;AAEpF,OAAO,KAAK,EAAE,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AACzE,YAAY,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC;AAE1C,qBAAa,iBAAiB;IAC5B,QAAgB,IAAI,CAAW;IACvB,IAAI,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;IACvB,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzB,UAAU,EAAE,QAAQ,CAAC,YAAY,CAAC,CAAC;IACnC,OAAO,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;IAC7B,cAAc,EAAE,QAAQ,CAAC,gBAAgB,CAAC,CAAC;IAC3C,OAAO,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;gBAGnC,GAAG,EAAE,qBAAqB,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC,EAC5C,MAAM,CAAC,EAAE,qBAAqB,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAYlD,MAAM,CAAC,aAAa,CAAC,GAAG,EAAE,kBAAkB,EAAE,MAAM,CAAC,EAAE,UAAU,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAWnF,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,mBAAmB,CACjB,MAAM,EAAE,UAAU,CAAC,OAAO,mBAAmB,CAAC,CAAC,CAAC,CAAC,GAChD,UAAU,CAAC,OAAO,mBAAmB,CAAC;IAIzC,oBAAoB,CAClB,gBAAgB,EAAE,UAAU,CAAC,OAAO,oBAAoB,CAAC,CAAC,CAAC,CAAC,GAC3D,UAAU,CAAC,OAAO,oBAAoB,CAAC;IAI1C,wBAAwB,CACtB,gBAAgB,EAAE,UAAU,CAAC,OAAO,oBAAoB,CAAC,CAAC,CAAC,CAAC,GAC3D,UAAU,CAAC,OAAO,oBAAoB,CAAC,EAAE;IAI5C,uBAAuB,IAAI,OAAO;IAclC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,OAAO,GAAG,MAAM,GAAG,IAAI;IAYxE,WAAW,CACT,QAAQ,EAAE,UAAU,CAAC,OAAO,WAAW,CAAC,CAAC,CAAC,CAAC,EAC3C,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI;IAKP,OAAO;CAGR;AAED,qBAAa,kBAAkB;IAC7B,QAAgB,IAAI,CAAa;gBAErB,IAAI,EAAE,qBAAqB,CAAC,OAAO,UAAU,CAAC,CAAC,CAAC,CAAC,GAAG,UAAU;IAK1E,MAAM,CAAC,aAAa,CAAC,QAAQ,EAAE,iBAAiB;IAIhD,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,eAAe,CAAC,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,OAAO,eAAe,CAAC;IAI9F,gBAAgB,CACd,MAAM,EAAE,UAAU,CAAC,OAAO,gBAAgB,CAAC,CAAC,CAAC,CAAC,EAC9C,OAAO,EAAE,UAAU,CAAC,OAAO,gBAAgB,CAAC,CAAC,CAAC,CAAC,GAC9C,UAAU,CAAC,OAAO,gBAAgB,CAAC;IAItC,MAAM,IAAI,UAAU,CAAC,OAAO,YAAY,CAAC;IAIzC,QAAQ,IAAI,MAAM;IAIlB,YAAY,IAAI,UAAU,CAAC,OAAO,YAAY,CAAC;CAGhD"}

View File

@@ -0,0 +1,36 @@
import { AnyMap, originalPositionFor, generatedPositionFor, eachMapping } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddMapping, toDecodedMap, toEncodedMap, setSourceContent } from '@jridgewell/gen-mapping';
import type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap } from '@jridgewell/trace-mapping';
export type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap };
import type { Mapping, EncodedSourceMap } from '@jridgewell/gen-mapping';
export type { Mapping, EncodedSourceMap };
export declare class SourceMapConsumer {
private _map;
file: TraceMap['file'];
names: TraceMap['names'];
sourceRoot: TraceMap['sourceRoot'];
sources: TraceMap['sources'];
sourcesContent: TraceMap['sourcesContent'];
version: TraceMap['version'];
constructor(map: ConstructorParameters<typeof AnyMap>[0], mapUrl?: ConstructorParameters<typeof AnyMap>[1]);
static fromSourceMap(map: SourceMapGenerator, mapUrl?: Parameters<typeof AnyMap>[1]): SourceMapConsumer;
get mappings(): string;
originalPositionFor(needle: Parameters<typeof originalPositionFor>[1]): ReturnType<typeof originalPositionFor>;
generatedPositionFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>;
allGeneratedPositionsFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>[];
hasContentsOfAllSources(): boolean;
sourceContentFor(source: string, nullOnMissing?: boolean): string | null;
eachMapping(callback: Parameters<typeof eachMapping>[1], context?: any): void;
destroy(): void;
}
export declare class SourceMapGenerator {
private _map;
constructor(opts: ConstructorParameters<typeof GenMapping>[0] | GenMapping);
static fromSourceMap(consumer: SourceMapConsumer): SourceMapGenerator;
addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping>;
setSourceContent(source: Parameters<typeof setSourceContent>[1], content: Parameters<typeof setSourceContent>[2]): ReturnType<typeof setSourceContent>;
toJSON(): ReturnType<typeof toEncodedMap>;
toString(): string;
toDecodedMap(): ReturnType<typeof toDecodedMap>;
}
//# sourceMappingURL=source-map.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"source-map.d.ts","sourceRoot":"","sources":["../src/source-map.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,MAAM,EACN,mBAAmB,EACnB,oBAAoB,EAEpB,WAAW,EAGZ,MAAM,2BAA2B,CAAC;AACnC,OAAO,EACL,UAAU,EACV,eAAe,EACf,YAAY,EACZ,YAAY,EACZ,gBAAgB,EAEjB,MAAM,yBAAyB,CAAC;AAEjC,OAAO,KAAK,EACV,QAAQ,EACR,cAAc,EACd,uBAAuB,EACvB,gBAAgB,EACjB,MAAM,2BAA2B,CAAC;AACnC,YAAY,EAAE,QAAQ,EAAE,cAAc,EAAE,uBAAuB,EAAE,gBAAgB,EAAE,CAAC;AAEpF,OAAO,KAAK,EAAE,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AACzE,YAAY,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC;AAE1C,qBAAa,iBAAiB;IAC5B,QAAgB,IAAI,CAAW;IACvB,IAAI,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;IACvB,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzB,UAAU,EAAE,QAAQ,CAAC,YAAY,CAAC,CAAC;IACnC,OAAO,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;IAC7B,cAAc,EAAE,QAAQ,CAAC,gBAAgB,CAAC,CAAC;IAC3C,OAAO,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;gBAGnC,GAAG,EAAE,qBAAqB,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC,EAC5C,MAAM,CAAC,EAAE,qBAAqB,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAYlD,MAAM,CAAC,aAAa,CAAC,GAAG,EAAE,kBAAkB,EAAE,MAAM,CAAC,EAAE,UAAU,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAWnF,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,mBAAmB,CACjB,MAAM,EAAE,UAAU,CAAC,OAAO,mBAAmB,CAAC,CAAC,CAAC,CAAC,GAChD,UAAU,CAAC,OAAO,mBAAmB,CAAC;IAIzC,oBAAoB,CAClB,gBAAgB,EAAE,UAAU,CAAC,OAAO,oBAAoB,CAAC,CAAC,CAAC,CAAC,GAC3D,UAAU,CAAC,OAAO,oBAAoB,CAAC;IAI1C,wBAAwB,CACtB,gBAAgB,EAAE,UAAU,CAAC,OAAO,oBAAoB,CAAC,CAAC,CAAC,CAAC,GAC3D,UAAU,CAAC,OAAO,oBAAoB,CAAC,EAAE;IAI5C,uBAAuB,IAAI,OAAO;IAclC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,OAAO,GAAG,MAAM,GAAG,IAAI;IAYxE,WAAW,CACT,QAAQ,EAAE,UAAU,CAAC,OAAO,WAAW,CAAC,CAAC,CAAC,CAAC,EAC3C,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI;IAKP,OAAO;CAGR;AAED,qBAAa,kBAAkB;IAC7B,QAAgB,IAAI,CAAa;gBAErB,IAAI,EAAE,qBAAqB,CAAC,OAAO,UAAU,CAAC,CAAC,CAAC,CAAC,GAAG,UAAU;IAK1E,MAAM,CAAC,aAAa,CAAC,QAAQ,EAAE,iBAAiB;IAIhD,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,eAAe,CAAC,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,OAAO,eAAe,CAAC;IAI9F,gBAAgB,CACd,MAAM,EAAE,UAAU,CAAC,OAAO,gBAAgB,CAAC,CAAC,CAAC,CAAC,EAC9C,OAAO,EAAE,UAAU,CAAC,OAAO,gBAAgB,CAAC,CAAC,CAAC,CAAC,GAC9C,UAAU,CAAC,OAAO,gBAAgB,CAAC;IAItC,MAAM,IAAI,UAAU,CAAC,OAAO,YAAY,CAAC;IAIzC,QAAQ,IAAI,MAAM;IAIlB,YAAY,IAAI,UAAU,CAAC,OAAO,YAAY,CAAC;CAGhD"}

View File

@@ -1,6 +1,4 @@
The MIT License
Copyright (c) 2015 Rich Harris
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -17,5 +15,5 @@ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,424 +1,423 @@
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
// src/vlq.ts
var comma = ",".charCodeAt(0);
var semicolon = ";".charCodeAt(0);
var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
var intToChar = new Uint8Array(64);
var charToInt = new Uint8Array(128);
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
function decodeInteger(reader, relative) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
return relative + value;
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -2147483648 | -value;
}
return relative + value;
}
function encodeInteger(builder, num, relative) {
let delta = num - relative;
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
do {
let clamped = delta & 0b011111;
delta >>>= 5;
if (delta > 0)
clamped |= 0b100000;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
let delta = num - relative;
delta = delta < 0 ? -delta << 1 | 1 : delta << 1;
do {
let clamped = delta & 31;
delta >>>= 5;
if (delta > 0) clamped |= 32;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
}
function hasMoreVlq(reader, max) {
if (reader.pos >= max)
return false;
return reader.peek() !== comma;
if (reader.pos >= max) return false;
return reader.peek() !== comma;
}
const bufLength = 1024 * 16;
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
class StringWriter {
constructor() {
this.pos = 0;
this.out = '';
this.buffer = new Uint8Array(bufLength);
// src/strings.ts
var bufLength = 1024 * 16;
var td = typeof TextDecoder !== "undefined" ? /* @__PURE__ */ new TextDecoder() : typeof Buffer !== "undefined" ? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
}
} : {
decode(buf) {
let out = "";
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
write(v) {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
return out;
}
};
var StringWriter = class {
constructor() {
this.pos = 0;
this.out = "";
this.buffer = new Uint8Array(bufLength);
}
write(v) {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
flush() {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
}
class StringReader {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
}
}
flush() {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
};
var StringReader = class {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
};
const EMPTY = [];
// src/scopes.ts
var EMPTY = [];
function decodeOriginalScopes(input) {
const { length } = input;
const reader = new StringReader(input);
const scopes = [];
const stack = [];
let line = 0;
for (; reader.pos < length; reader.pos++) {
line = decodeInteger(reader, line);
const column = decodeInteger(reader, 0);
if (!hasMoreVlq(reader, length)) {
const last = stack.pop();
last[2] = line;
last[3] = column;
continue;
}
const kind = decodeInteger(reader, 0);
const fields = decodeInteger(reader, 0);
const hasName = fields & 0b0001;
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
let vars = EMPTY;
if (hasMoreVlq(reader, length)) {
vars = [];
do {
const varsIndex = decodeInteger(reader, 0);
vars.push(varsIndex);
} while (hasMoreVlq(reader, length));
}
scope.vars = vars;
scopes.push(scope);
stack.push(scope);
const { length } = input;
const reader = new StringReader(input);
const scopes = [];
const stack = [];
let line = 0;
for (; reader.pos < length; reader.pos++) {
line = decodeInteger(reader, line);
const column = decodeInteger(reader, 0);
if (!hasMoreVlq(reader, length)) {
const last = stack.pop();
last[2] = line;
last[3] = column;
continue;
}
return scopes;
const kind = decodeInteger(reader, 0);
const fields = decodeInteger(reader, 0);
const hasName = fields & 1;
const scope = hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind];
let vars = EMPTY;
if (hasMoreVlq(reader, length)) {
vars = [];
do {
const varsIndex = decodeInteger(reader, 0);
vars.push(varsIndex);
} while (hasMoreVlq(reader, length));
}
scope.vars = vars;
scopes.push(scope);
stack.push(scope);
}
return scopes;
}
function encodeOriginalScopes(scopes) {
const writer = new StringWriter();
for (let i = 0; i < scopes.length;) {
i = _encodeOriginalScopes(scopes, i, writer, [0]);
}
return writer.flush();
const writer = new StringWriter();
for (let i = 0; i < scopes.length; ) {
i = _encodeOriginalScopes(scopes, i, writer, [0]);
}
return writer.flush();
}
function _encodeOriginalScopes(scopes, index, writer, state) {
const scope = scopes[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
if (index > 0)
writer.write(comma);
state[0] = encodeInteger(writer, startLine, state[0]);
encodeInteger(writer, startColumn, 0);
encodeInteger(writer, kind, 0);
const fields = scope.length === 6 ? 0b0001 : 0;
encodeInteger(writer, fields, 0);
if (scope.length === 6)
encodeInteger(writer, scope[5], 0);
for (const v of vars) {
encodeInteger(writer, v, 0);
const scope = scopes[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
if (index > 0) writer.write(comma);
state[0] = encodeInteger(writer, startLine, state[0]);
encodeInteger(writer, startColumn, 0);
encodeInteger(writer, kind, 0);
const fields = scope.length === 6 ? 1 : 0;
encodeInteger(writer, fields, 0);
if (scope.length === 6) encodeInteger(writer, scope[5], 0);
for (const v of vars) {
encodeInteger(writer, v, 0);
}
for (index++; index < scopes.length; ) {
const next = scopes[index];
const { 0: l, 1: c } = next;
if (l > endLine || l === endLine && c >= endColumn) {
break;
}
for (index++; index < scopes.length;) {
const next = scopes[index];
const { 0: l, 1: c } = next;
if (l > endLine || (l === endLine && c >= endColumn)) {
break;
}
index = _encodeOriginalScopes(scopes, index, writer, state);
}
writer.write(comma);
state[0] = encodeInteger(writer, endLine, state[0]);
encodeInteger(writer, endColumn, 0);
return index;
index = _encodeOriginalScopes(scopes, index, writer, state);
}
writer.write(comma);
state[0] = encodeInteger(writer, endLine, state[0]);
encodeInteger(writer, endColumn, 0);
return index;
}
function decodeGeneratedRanges(input) {
const { length } = input;
const reader = new StringReader(input);
const ranges = [];
const stack = [];
let genLine = 0;
let definitionSourcesIndex = 0;
let definitionScopeIndex = 0;
let callsiteSourcesIndex = 0;
let callsiteLine = 0;
let callsiteColumn = 0;
let bindingLine = 0;
let bindingColumn = 0;
do {
const semi = reader.indexOf(';');
let genColumn = 0;
for (; reader.pos < semi; reader.pos++) {
genColumn = decodeInteger(reader, genColumn);
if (!hasMoreVlq(reader, semi)) {
const last = stack.pop();
last[2] = genLine;
last[3] = genColumn;
continue;
const { length } = input;
const reader = new StringReader(input);
const ranges = [];
const stack = [];
let genLine = 0;
let definitionSourcesIndex = 0;
let definitionScopeIndex = 0;
let callsiteSourcesIndex = 0;
let callsiteLine = 0;
let callsiteColumn = 0;
let bindingLine = 0;
let bindingColumn = 0;
do {
const semi = reader.indexOf(";");
let genColumn = 0;
for (; reader.pos < semi; reader.pos++) {
genColumn = decodeInteger(reader, genColumn);
if (!hasMoreVlq(reader, semi)) {
const last = stack.pop();
last[2] = genLine;
last[3] = genColumn;
continue;
}
const fields = decodeInteger(reader, 0);
const hasDefinition = fields & 1;
const hasCallsite = fields & 2;
const hasScope = fields & 4;
let callsite = null;
let bindings = EMPTY;
let range;
if (hasDefinition) {
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
definitionScopeIndex = decodeInteger(
reader,
definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0
);
definitionSourcesIndex = defSourcesIndex;
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
} else {
range = [genLine, genColumn, 0, 0];
}
range.isScope = !!hasScope;
if (hasCallsite) {
const prevCsi = callsiteSourcesIndex;
const prevLine = callsiteLine;
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
const sameSource = prevCsi === callsiteSourcesIndex;
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
callsiteColumn = decodeInteger(
reader,
sameSource && prevLine === callsiteLine ? callsiteColumn : 0
);
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
}
range.callsite = callsite;
if (hasMoreVlq(reader, semi)) {
bindings = [];
do {
bindingLine = genLine;
bindingColumn = genColumn;
const expressionsCount = decodeInteger(reader, 0);
let expressionRanges;
if (expressionsCount < -1) {
expressionRanges = [[decodeInteger(reader, 0)]];
for (let i = -1; i > expressionsCount; i--) {
const prevBl = bindingLine;
bindingLine = decodeInteger(reader, bindingLine);
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
const expression = decodeInteger(reader, 0);
expressionRanges.push([expression, bindingLine, bindingColumn]);
}
const fields = decodeInteger(reader, 0);
const hasDefinition = fields & 0b0001;
const hasCallsite = fields & 0b0010;
const hasScope = fields & 0b0100;
let callsite = null;
let bindings = EMPTY;
let range;
if (hasDefinition) {
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
definitionSourcesIndex = defSourcesIndex;
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
}
else {
range = [genLine, genColumn, 0, 0];
}
range.isScope = !!hasScope;
if (hasCallsite) {
const prevCsi = callsiteSourcesIndex;
const prevLine = callsiteLine;
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
const sameSource = prevCsi === callsiteSourcesIndex;
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
}
range.callsite = callsite;
if (hasMoreVlq(reader, semi)) {
bindings = [];
do {
bindingLine = genLine;
bindingColumn = genColumn;
const expressionsCount = decodeInteger(reader, 0);
let expressionRanges;
if (expressionsCount < -1) {
expressionRanges = [[decodeInteger(reader, 0)]];
for (let i = -1; i > expressionsCount; i--) {
const prevBl = bindingLine;
bindingLine = decodeInteger(reader, bindingLine);
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
const expression = decodeInteger(reader, 0);
expressionRanges.push([expression, bindingLine, bindingColumn]);
}
}
else {
expressionRanges = [[expressionsCount]];
}
bindings.push(expressionRanges);
} while (hasMoreVlq(reader, semi));
}
range.bindings = bindings;
ranges.push(range);
stack.push(range);
}
genLine++;
reader.pos = semi + 1;
} while (reader.pos < length);
return ranges;
} else {
expressionRanges = [[expressionsCount]];
}
bindings.push(expressionRanges);
} while (hasMoreVlq(reader, semi));
}
range.bindings = bindings;
ranges.push(range);
stack.push(range);
}
genLine++;
reader.pos = semi + 1;
} while (reader.pos < length);
return ranges;
}
function encodeGeneratedRanges(ranges) {
if (ranges.length === 0)
return '';
const writer = new StringWriter();
for (let i = 0; i < ranges.length;) {
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
}
return writer.flush();
if (ranges.length === 0) return "";
const writer = new StringWriter();
for (let i = 0; i < ranges.length; ) {
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
}
return writer.flush();
}
function _encodeGeneratedRanges(ranges, index, writer, state) {
const range = ranges[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
if (state[0] < startLine) {
catchupLine(writer, state[0], startLine);
state[0] = startLine;
state[1] = 0;
const range = ranges[index];
const {
0: startLine,
1: startColumn,
2: endLine,
3: endColumn,
isScope,
callsite,
bindings
} = range;
if (state[0] < startLine) {
catchupLine(writer, state[0], startLine);
state[0] = startLine;
state[1] = 0;
} else if (index > 0) {
writer.write(comma);
}
state[1] = encodeInteger(writer, range[1], state[1]);
const fields = (range.length === 6 ? 1 : 0) | (callsite ? 2 : 0) | (isScope ? 4 : 0);
encodeInteger(writer, fields, 0);
if (range.length === 6) {
const { 4: sourcesIndex, 5: scopesIndex } = range;
if (sourcesIndex !== state[2]) {
state[3] = 0;
}
else if (index > 0) {
writer.write(comma);
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
state[3] = encodeInteger(writer, scopesIndex, state[3]);
}
if (callsite) {
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
if (sourcesIndex !== state[4]) {
state[5] = 0;
state[6] = 0;
} else if (callLine !== state[5]) {
state[6] = 0;
}
state[1] = encodeInteger(writer, range[1], state[1]);
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
encodeInteger(writer, fields, 0);
if (range.length === 6) {
const { 4: sourcesIndex, 5: scopesIndex } = range;
if (sourcesIndex !== state[2]) {
state[3] = 0;
}
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
state[3] = encodeInteger(writer, scopesIndex, state[3]);
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
state[5] = encodeInteger(writer, callLine, state[5]);
state[6] = encodeInteger(writer, callColumn, state[6]);
}
if (bindings) {
for (const binding of bindings) {
if (binding.length > 1) encodeInteger(writer, -binding.length, 0);
const expression = binding[0][0];
encodeInteger(writer, expression, 0);
let bindingStartLine = startLine;
let bindingStartColumn = startColumn;
for (let i = 1; i < binding.length; i++) {
const expRange = binding[i];
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
encodeInteger(writer, expRange[0], 0);
}
}
if (callsite) {
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
if (sourcesIndex !== state[4]) {
state[5] = 0;
state[6] = 0;
}
else if (callLine !== state[5]) {
state[6] = 0;
}
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
state[5] = encodeInteger(writer, callLine, state[5]);
state[6] = encodeInteger(writer, callColumn, state[6]);
}
for (index++; index < ranges.length; ) {
const next = ranges[index];
const { 0: l, 1: c } = next;
if (l > endLine || l === endLine && c >= endColumn) {
break;
}
if (bindings) {
for (const binding of bindings) {
if (binding.length > 1)
encodeInteger(writer, -binding.length, 0);
const expression = binding[0][0];
encodeInteger(writer, expression, 0);
let bindingStartLine = startLine;
let bindingStartColumn = startColumn;
for (let i = 1; i < binding.length; i++) {
const expRange = binding[i];
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
encodeInteger(writer, expRange[0], 0);
}
}
}
for (index++; index < ranges.length;) {
const next = ranges[index];
const { 0: l, 1: c } = next;
if (l > endLine || (l === endLine && c >= endColumn)) {
break;
}
index = _encodeGeneratedRanges(ranges, index, writer, state);
}
if (state[0] < endLine) {
catchupLine(writer, state[0], endLine);
state[0] = endLine;
state[1] = 0;
}
else {
writer.write(comma);
}
state[1] = encodeInteger(writer, endColumn, state[1]);
return index;
index = _encodeGeneratedRanges(ranges, index, writer, state);
}
if (state[0] < endLine) {
catchupLine(writer, state[0], endLine);
state[0] = endLine;
state[1] = 0;
} else {
writer.write(comma);
}
state[1] = encodeInteger(writer, endColumn, state[1]);
return index;
}
function catchupLine(writer, lastLine, line) {
do {
writer.write(semicolon);
} while (++lastLine < line);
do {
writer.write(semicolon);
} while (++lastLine < line);
}
// src/sourcemap-codec.ts
function decode(mappings) {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(';');
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol)
sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
}
else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
}
else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(";");
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol) sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
} else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
if (!sorted)
sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
} else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
if (!sorted) sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line) {
line.sort(sortComparator);
line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[0] - b[0];
return a[0] - b[0];
}
function encode(decoded) {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0)
writer.write(semicolon);
if (line.length === 0)
continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0)
writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1)
continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4)
continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) writer.write(semicolon);
if (line.length === 0) continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0) writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1) continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4) continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
return writer.flush();
}
return writer.flush();
}
export { decode, decodeGeneratedRanges, decodeOriginalScopes, encode, encodeGeneratedRanges, encodeOriginalScopes };
export {
decode,
decodeGeneratedRanges,
decodeOriginalScopes,
encode,
encodeGeneratedRanges,
encodeOriginalScopes
};
//# sourceMappingURL=sourcemap-codec.mjs.map
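
decode and encode above convert a source map's VLQ "mappings" string to and from arrays of per-line segments, where each segment is [generatedColumn], [generatedColumn, sourcesIndex, sourceLine, sourceColumn], or the five-element form with a trailing namesIndex. A small round-trip sketch with a made-up mappings string:

import { decode, encode } from '@jridgewell/sourcemap-codec';

// ';AACA' is one empty generated line followed by a line with a single segment.
const decoded = decode(';AACA');
console.log(decoded); // [ [], [ [ 0, 0, 1, 0 ] ] ]

// Segment fields are zero-based [genColumn, sourceIndex, sourceLine, sourceColumn];
// encode() restores the relative VLQ encoding exactly.
console.log(encode(decoded)); // ';AACA'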

File diff suppressed because one or more lines are too long

View File

@@ -1,439 +1,452 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
})(this, (function (exports) { 'use strict';
(function (global, factory, m) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(module) :
typeof define === 'function' && define.amd ? define(['module'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(m = { exports: {} }), global.sourcemapCodec = 'default' in m.exports ? m.exports.default : m.exports);
})(this, (function (module) {
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
// src/sourcemap-codec.ts
var sourcemap_codec_exports = {};
__export(sourcemap_codec_exports, {
decode: () => decode,
decodeGeneratedRanges: () => decodeGeneratedRanges,
decodeOriginalScopes: () => decodeOriginalScopes,
encode: () => encode,
encodeGeneratedRanges: () => encodeGeneratedRanges,
encodeOriginalScopes: () => encodeOriginalScopes
});
module.exports = __toCommonJS(sourcemap_codec_exports);
// src/vlq.ts
var comma = ",".charCodeAt(0);
var semicolon = ";".charCodeAt(0);
var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
var intToChar = new Uint8Array(64);
var charToInt = new Uint8Array(128);
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
function decodeInteger(reader, relative) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -2147483648 | -value;
}
return relative + value;
}
function encodeInteger(builder, num, relative) {
let delta = num - relative;
delta = delta < 0 ? -delta << 1 | 1 : delta << 1;
do {
let clamped = delta & 31;
delta >>>= 5;
if (delta > 0) clamped |= 32;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
}
function hasMoreVlq(reader, max) {
if (reader.pos >= max) return false;
return reader.peek() !== comma;
}
// src/strings.ts
var bufLength = 1024 * 16;
var td = typeof TextDecoder !== "undefined" ? /* @__PURE__ */ new TextDecoder() : typeof Buffer !== "undefined" ? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
}
} : {
decode(buf) {
let out = "";
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
function decodeInteger(reader, relative) {
let value = 0;
let shift = 0;
let integer = 0;
return out;
}
};
var StringWriter = class {
constructor() {
this.pos = 0;
this.out = "";
this.buffer = new Uint8Array(bufLength);
}
write(v) {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
}
flush() {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
};
var StringReader = class {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
};
// src/scopes.ts
var EMPTY = [];
function decodeOriginalScopes(input) {
const { length } = input;
const reader = new StringReader(input);
const scopes = [];
const stack = [];
let line = 0;
for (; reader.pos < length; reader.pos++) {
line = decodeInteger(reader, line);
const column = decodeInteger(reader, 0);
if (!hasMoreVlq(reader, length)) {
const last = stack.pop();
last[2] = line;
last[3] = column;
continue;
}
const kind = decodeInteger(reader, 0);
const fields = decodeInteger(reader, 0);
const hasName = fields & 1;
const scope = hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind];
let vars = EMPTY;
if (hasMoreVlq(reader, length)) {
vars = [];
do {
const varsIndex = decodeInteger(reader, 0);
vars.push(varsIndex);
} while (hasMoreVlq(reader, length));
}
scope.vars = vars;
scopes.push(scope);
stack.push(scope);
}
return scopes;
}
function encodeOriginalScopes(scopes) {
const writer = new StringWriter();
for (let i = 0; i < scopes.length; ) {
i = _encodeOriginalScopes(scopes, i, writer, [0]);
}
return writer.flush();
}
function _encodeOriginalScopes(scopes, index, writer, state) {
const scope = scopes[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
if (index > 0) writer.write(comma);
state[0] = encodeInteger(writer, startLine, state[0]);
encodeInteger(writer, startColumn, 0);
encodeInteger(writer, kind, 0);
const fields = scope.length === 6 ? 1 : 0;
encodeInteger(writer, fields, 0);
if (scope.length === 6) encodeInteger(writer, scope[5], 0);
for (const v of vars) {
encodeInteger(writer, v, 0);
}
for (index++; index < scopes.length; ) {
const next = scopes[index];
const { 0: l, 1: c } = next;
if (l > endLine || l === endLine && c >= endColumn) {
break;
}
index = _encodeOriginalScopes(scopes, index, writer, state);
}
writer.write(comma);
state[0] = encodeInteger(writer, endLine, state[0]);
encodeInteger(writer, endColumn, 0);
return index;
}
function decodeGeneratedRanges(input) {
const { length } = input;
const reader = new StringReader(input);
const ranges = [];
const stack = [];
let genLine = 0;
let definitionSourcesIndex = 0;
let definitionScopeIndex = 0;
let callsiteSourcesIndex = 0;
let callsiteLine = 0;
let callsiteColumn = 0;
let bindingLine = 0;
let bindingColumn = 0;
do {
const semi = reader.indexOf(";");
let genColumn = 0;
for (; reader.pos < semi; reader.pos++) {
genColumn = decodeInteger(reader, genColumn);
if (!hasMoreVlq(reader, semi)) {
const last = stack.pop();
last[2] = genLine;
last[3] = genColumn;
continue;
}
const fields = decodeInteger(reader, 0);
const hasDefinition = fields & 1;
const hasCallsite = fields & 2;
const hasScope = fields & 4;
let callsite = null;
let bindings = EMPTY;
let range;
if (hasDefinition) {
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
definitionScopeIndex = decodeInteger(
reader,
definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0
);
definitionSourcesIndex = defSourcesIndex;
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
} else {
range = [genLine, genColumn, 0, 0];
}
range.isScope = !!hasScope;
if (hasCallsite) {
const prevCsi = callsiteSourcesIndex;
const prevLine = callsiteLine;
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
const sameSource = prevCsi === callsiteSourcesIndex;
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
callsiteColumn = decodeInteger(
reader,
sameSource && prevLine === callsiteLine ? callsiteColumn : 0
);
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
}
range.callsite = callsite;
if (hasMoreVlq(reader, semi)) {
bindings = [];
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
return relative + value;
bindingLine = genLine;
bindingColumn = genColumn;
const expressionsCount = decodeInteger(reader, 0);
let expressionRanges;
if (expressionsCount < -1) {
expressionRanges = [[decodeInteger(reader, 0)]];
for (let i = -1; i > expressionsCount; i--) {
const prevBl = bindingLine;
bindingLine = decodeInteger(reader, bindingLine);
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
const expression = decodeInteger(reader, 0);
expressionRanges.push([expression, bindingLine, bindingColumn]);
}
} else {
expressionRanges = [[expressionsCount]];
}
bindings.push(expressionRanges);
} while (hasMoreVlq(reader, semi));
}
range.bindings = bindings;
ranges.push(range);
stack.push(range);
}
function encodeInteger(builder, num, relative) {
let delta = num - relative;
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
do {
let clamped = delta & 0b011111;
delta >>>= 5;
if (delta > 0)
clamped |= 0b100000;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
genLine++;
reader.pos = semi + 1;
} while (reader.pos < length);
return ranges;
}
function encodeGeneratedRanges(ranges) {
if (ranges.length === 0) return "";
const writer = new StringWriter();
for (let i = 0; i < ranges.length; ) {
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
}
return writer.flush();
}
function _encodeGeneratedRanges(ranges, index, writer, state) {
const range = ranges[index];
const {
0: startLine,
1: startColumn,
2: endLine,
3: endColumn,
isScope,
callsite,
bindings
} = range;
if (state[0] < startLine) {
catchupLine(writer, state[0], startLine);
state[0] = startLine;
state[1] = 0;
} else if (index > 0) {
writer.write(comma);
}
state[1] = encodeInteger(writer, range[1], state[1]);
const fields = (range.length === 6 ? 1 : 0) | (callsite ? 2 : 0) | (isScope ? 4 : 0);
encodeInteger(writer, fields, 0);
if (range.length === 6) {
const { 4: sourcesIndex, 5: scopesIndex } = range;
if (sourcesIndex !== state[2]) {
state[3] = 0;
}
function hasMoreVlq(reader, max) {
if (reader.pos >= max)
return false;
return reader.peek() !== comma;
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
state[3] = encodeInteger(writer, scopesIndex, state[3]);
}
if (callsite) {
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
if (sourcesIndex !== state[4]) {
state[5] = 0;
state[6] = 0;
} else if (callLine !== state[5]) {
state[6] = 0;
}
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
state[5] = encodeInteger(writer, callLine, state[5]);
state[6] = encodeInteger(writer, callColumn, state[6]);
}
if (bindings) {
for (const binding of bindings) {
if (binding.length > 1) encodeInteger(writer, -binding.length, 0);
const expression = binding[0][0];
encodeInteger(writer, expression, 0);
let bindingStartLine = startLine;
let bindingStartColumn = startColumn;
for (let i = 1; i < binding.length; i++) {
const expRange = binding[i];
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
encodeInteger(writer, expRange[0], 0);
}
}
}
for (index++; index < ranges.length; ) {
const next = ranges[index];
const { 0: l, 1: c } = next;
if (l > endLine || l === endLine && c >= endColumn) {
break;
}
index = _encodeGeneratedRanges(ranges, index, writer, state);
}
if (state[0] < endLine) {
catchupLine(writer, state[0], endLine);
state[0] = endLine;
state[1] = 0;
} else {
writer.write(comma);
}
state[1] = encodeInteger(writer, endColumn, state[1]);
return index;
}
function catchupLine(writer, lastLine, line) {
do {
writer.write(semicolon);
} while (++lastLine < line);
}
const bufLength = 1024 * 16;
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
class StringWriter {
constructor() {
this.pos = 0;
this.out = '';
this.buffer = new Uint8Array(bufLength);
}
write(v) {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
}
flush() {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
// src/sourcemap-codec.ts
function decode(mappings) {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(";");
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol) sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
} else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
} else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
class StringReader {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
if (!sorted) sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line) {
line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[0] - b[0];
}
function encode(decoded) {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) writer.write(semicolon);
if (line.length === 0) continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0) writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1) continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4) continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
const EMPTY = [];
function decodeOriginalScopes(input) {
const { length } = input;
const reader = new StringReader(input);
const scopes = [];
const stack = [];
let line = 0;
for (; reader.pos < length; reader.pos++) {
line = decodeInteger(reader, line);
const column = decodeInteger(reader, 0);
if (!hasMoreVlq(reader, length)) {
const last = stack.pop();
last[2] = line;
last[3] = column;
continue;
}
const kind = decodeInteger(reader, 0);
const fields = decodeInteger(reader, 0);
const hasName = fields & 0b0001;
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
let vars = EMPTY;
if (hasMoreVlq(reader, length)) {
vars = [];
do {
const varsIndex = decodeInteger(reader, 0);
vars.push(varsIndex);
} while (hasMoreVlq(reader, length));
}
scope.vars = vars;
scopes.push(scope);
stack.push(scope);
}
return scopes;
}
function encodeOriginalScopes(scopes) {
const writer = new StringWriter();
for (let i = 0; i < scopes.length;) {
i = _encodeOriginalScopes(scopes, i, writer, [0]);
}
return writer.flush();
}
function _encodeOriginalScopes(scopes, index, writer, state) {
const scope = scopes[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
if (index > 0)
writer.write(comma);
state[0] = encodeInteger(writer, startLine, state[0]);
encodeInteger(writer, startColumn, 0);
encodeInteger(writer, kind, 0);
const fields = scope.length === 6 ? 0b0001 : 0;
encodeInteger(writer, fields, 0);
if (scope.length === 6)
encodeInteger(writer, scope[5], 0);
for (const v of vars) {
encodeInteger(writer, v, 0);
}
for (index++; index < scopes.length;) {
const next = scopes[index];
const { 0: l, 1: c } = next;
if (l > endLine || (l === endLine && c >= endColumn)) {
break;
}
index = _encodeOriginalScopes(scopes, index, writer, state);
}
writer.write(comma);
state[0] = encodeInteger(writer, endLine, state[0]);
encodeInteger(writer, endColumn, 0);
return index;
}
function decodeGeneratedRanges(input) {
const { length } = input;
const reader = new StringReader(input);
const ranges = [];
const stack = [];
let genLine = 0;
let definitionSourcesIndex = 0;
let definitionScopeIndex = 0;
let callsiteSourcesIndex = 0;
let callsiteLine = 0;
let callsiteColumn = 0;
let bindingLine = 0;
let bindingColumn = 0;
do {
const semi = reader.indexOf(';');
let genColumn = 0;
for (; reader.pos < semi; reader.pos++) {
genColumn = decodeInteger(reader, genColumn);
if (!hasMoreVlq(reader, semi)) {
const last = stack.pop();
last[2] = genLine;
last[3] = genColumn;
continue;
}
const fields = decodeInteger(reader, 0);
const hasDefinition = fields & 0b0001;
const hasCallsite = fields & 0b0010;
const hasScope = fields & 0b0100;
let callsite = null;
let bindings = EMPTY;
let range;
if (hasDefinition) {
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
definitionSourcesIndex = defSourcesIndex;
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
}
else {
range = [genLine, genColumn, 0, 0];
}
range.isScope = !!hasScope;
if (hasCallsite) {
const prevCsi = callsiteSourcesIndex;
const prevLine = callsiteLine;
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
const sameSource = prevCsi === callsiteSourcesIndex;
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
}
range.callsite = callsite;
if (hasMoreVlq(reader, semi)) {
bindings = [];
do {
bindingLine = genLine;
bindingColumn = genColumn;
const expressionsCount = decodeInteger(reader, 0);
let expressionRanges;
if (expressionsCount < -1) {
expressionRanges = [[decodeInteger(reader, 0)]];
for (let i = -1; i > expressionsCount; i--) {
const prevBl = bindingLine;
bindingLine = decodeInteger(reader, bindingLine);
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
const expression = decodeInteger(reader, 0);
expressionRanges.push([expression, bindingLine, bindingColumn]);
}
}
else {
expressionRanges = [[expressionsCount]];
}
bindings.push(expressionRanges);
} while (hasMoreVlq(reader, semi));
}
range.bindings = bindings;
ranges.push(range);
stack.push(range);
}
genLine++;
reader.pos = semi + 1;
} while (reader.pos < length);
return ranges;
}
function encodeGeneratedRanges(ranges) {
if (ranges.length === 0)
return '';
const writer = new StringWriter();
for (let i = 0; i < ranges.length;) {
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
}
return writer.flush();
}
function _encodeGeneratedRanges(ranges, index, writer, state) {
const range = ranges[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
if (state[0] < startLine) {
catchupLine(writer, state[0], startLine);
state[0] = startLine;
state[1] = 0;
}
else if (index > 0) {
writer.write(comma);
}
state[1] = encodeInteger(writer, range[1], state[1]);
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
encodeInteger(writer, fields, 0);
if (range.length === 6) {
const { 4: sourcesIndex, 5: scopesIndex } = range;
if (sourcesIndex !== state[2]) {
state[3] = 0;
}
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
state[3] = encodeInteger(writer, scopesIndex, state[3]);
}
if (callsite) {
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
if (sourcesIndex !== state[4]) {
state[5] = 0;
state[6] = 0;
}
else if (callLine !== state[5]) {
state[6] = 0;
}
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
state[5] = encodeInteger(writer, callLine, state[5]);
state[6] = encodeInteger(writer, callColumn, state[6]);
}
if (bindings) {
for (const binding of bindings) {
if (binding.length > 1)
encodeInteger(writer, -binding.length, 0);
const expression = binding[0][0];
encodeInteger(writer, expression, 0);
let bindingStartLine = startLine;
let bindingStartColumn = startColumn;
for (let i = 1; i < binding.length; i++) {
const expRange = binding[i];
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
encodeInteger(writer, expRange[0], 0);
}
}
}
for (index++; index < ranges.length;) {
const next = ranges[index];
const { 0: l, 1: c } = next;
if (l > endLine || (l === endLine && c >= endColumn)) {
break;
}
index = _encodeGeneratedRanges(ranges, index, writer, state);
}
if (state[0] < endLine) {
catchupLine(writer, state[0], endLine);
state[0] = endLine;
state[1] = 0;
}
else {
writer.write(comma);
}
state[1] = encodeInteger(writer, endColumn, state[1]);
return index;
}
function catchupLine(writer, lastLine, line) {
do {
writer.write(semicolon);
} while (++lastLine < line);
}
function decode(mappings) {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(';');
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol)
sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
}
else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
}
else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
if (!sorted)
sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line) {
line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[0] - b[0];
}
function encode(decoded) {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0)
writer.write(semicolon);
if (line.length === 0)
continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0)
writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1)
continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4)
continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
}
return writer.flush();
}
exports.decode = decode;
exports.decodeGeneratedRanges = decodeGeneratedRanges;
exports.decodeOriginalScopes = decodeOriginalScopes;
exports.encode = encode;
exports.encodeGeneratedRanges = encodeGeneratedRanges;
exports.encodeOriginalScopes = encodeOriginalScopes;
Object.defineProperty(exports, '__esModule', { value: true });
}
return writer.flush();
}
}));
//# sourceMappingURL=sourcemap-codec.umd.js.map
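For orientation, a minimal round-trip sketch of the `decode`/`encode` pair exported by the UMD build above (the mappings string and decoded segments are illustrative values worked out from the VLQ rules in this file):

```
import { decode, encode } from '@jridgewell/sourcemap-codec';

// 'AAAA' encodes four zero deltas: generated column 0, sources index 0, source line 0, source column 0.
// 'CAAC' then adds +1 to the generated column and +1 to the source column.
const decoded = decode('AAAA,CAAC');
// decoded === [[[0, 0, 0, 0], [1, 0, 0, 1]]]

// encode is the inverse: re-encoding the segments yields the original string.
encode(decoded); // 'AAAA,CAAC'
```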

File diff suppressed because one or more lines are too long

View File

@@ -1,8 +0,0 @@
export { decodeOriginalScopes, encodeOriginalScopes, decodeGeneratedRanges, encodeGeneratedRanges, } from './scopes';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes';
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export declare type SourceMapLine = SourceMapSegment[];
export declare type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;

View File

@@ -1,6 +1,6 @@
{
"name": "@jridgewell/sourcemap-codec",
"version": "1.5.0",
"version": "1.5.4",
"description": "Encode/decode sourcemap mappings",
"keywords": [
"sourcemap",
@@ -8,68 +8,60 @@
],
"main": "dist/sourcemap-codec.umd.js",
"module": "dist/sourcemap-codec.mjs",
"types": "dist/types/sourcemap-codec.d.ts",
"types": "types/sourcemap-codec.d.cts",
"files": [
"dist"
"dist",
"src",
"types"
],
"exports": {
".": [
{
"types": "./dist/types/sourcemap-codec.d.ts",
"browser": "./dist/sourcemap-codec.umd.js",
"require": "./dist/sourcemap-codec.umd.js",
"import": "./dist/sourcemap-codec.mjs"
"import": {
"types": "./types/sourcemap-codec.d.mts",
"default": "./dist/sourcemap-codec.mjs"
},
"require": {
"types": "./types/sourcemap-codec.d.cts",
"default": "./dist/sourcemap-codec.umd.js"
},
"browser": {
"types": "./types/sourcemap-codec.d.cts",
"default": "./dist/sourcemap-codec.umd.js"
}
},
"./dist/sourcemap-codec.umd.js"
],
"./package.json": "./package.json"
},
"scripts": {
"benchmark": "run-s build:rollup benchmark:*",
"benchmark": "run-s build:code benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"prebuild": "rm -rf dist",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build",
"test": "run-s -n test:lint test:only",
"test:debug": "mocha --inspect-brk",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"build": "run-s -n build:code build:types",
"build:code": "node ../../esbuild.mjs sourcemap-codec.ts",
"build:types": "run-s build:types:force build:types:emit build:types:mts",
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
"build:types:emit": "tsc --project tsconfig.build.json",
"build:types:mts": "node ../../mts-types.mjs",
"clean": "run-s -n clean:code clean:types",
"clean:code": "tsc --build --clean tsconfig.build.json",
"clean:types": "rimraf dist types",
"test": "run-s -n test:types test:only test:format",
"test:format": "prettier --check '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:coverage": "c8 mocha",
"test:watch": "mocha --watch"
"test:types": "eslint '{src,test}/**/*.ts'",
"lint": "run-s -n lint:types lint:format",
"lint:format": "npm run test:format -- --write",
"lint:types": "npm run test:types -- --fix",
"prepublishOnly": "npm run-s -n build test"
},
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/sourcemap-codec",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/sourcemap-codec.git"
"url": "git+https://github.com/jridgewell/sourcemaps.git",
"directory": "packages/sourcemap-codec"
},
"author": "Rich Harris",
"license": "MIT",
"devDependencies": {
"@rollup/plugin-typescript": "8.3.0",
"@types/mocha": "10.0.6",
"@types/node": "17.0.15",
"@typescript-eslint/eslint-plugin": "5.10.0",
"@typescript-eslint/parser": "5.10.0",
"benchmark": "2.1.4",
"c8": "7.11.2",
"eslint": "8.7.0",
"eslint-config-prettier": "8.3.0",
"mocha": "9.2.0",
"npm-run-all": "4.1.5",
"prettier": "2.5.1",
"rollup": "2.64.0",
"source-map": "0.6.1",
"source-map-js": "1.0.2",
"sourcemap-codec": "1.4.8",
"tsx": "4.7.1",
"typescript": "4.5.4"
}
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT"
}
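The reworked `exports` map above splits resolution by condition. Roughly, consumers see it like this (paths copied from the fields in this diff; the `require` line is shown commented since a single module cannot mix both styles):

```
// "import" condition: resolves to ./dist/sourcemap-codec.mjs,
// typed by ./types/sourcemap-codec.d.mts.
import { decode } from '@jridgewell/sourcemap-codec';

// "require" and "browser" conditions: resolve to ./dist/sourcemap-codec.umd.js,
// typed by ./types/sourcemap-codec.d.cts.
// const { decode } = require('@jridgewell/sourcemap-codec');
```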

345
node_modules/@jridgewell/sourcemap-codec/src/scopes.ts generated vendored Normal file
View File

@@ -0,0 +1,345 @@
import { StringReader, StringWriter } from './strings';
import { comma, decodeInteger, encodeInteger, hasMoreVlq, semicolon } from './vlq';
const EMPTY: any[] = [];
type Line = number;
type Column = number;
type Kind = number;
type Name = number;
type Var = number;
type SourcesIndex = number;
type ScopesIndex = number;
type Mix<A, B, O> = (A & O) | (B & O);
export type OriginalScope = Mix<
[Line, Column, Line, Column, Kind],
[Line, Column, Line, Column, Kind, Name],
{ vars: Var[] }
>;
export type GeneratedRange = Mix<
[Line, Column, Line, Column],
[Line, Column, Line, Column, SourcesIndex, ScopesIndex],
{
callsite: CallSite | null;
bindings: Binding[];
isScope: boolean;
}
>;
export type CallSite = [SourcesIndex, Line, Column];
type Binding = BindingExpressionRange[];
export type BindingExpressionRange = [Name] | [Name, Line, Column];
export function decodeOriginalScopes(input: string): OriginalScope[] {
const { length } = input;
const reader = new StringReader(input);
const scopes: OriginalScope[] = [];
const stack: OriginalScope[] = [];
let line = 0;
for (; reader.pos < length; reader.pos++) {
line = decodeInteger(reader, line);
const column = decodeInteger(reader, 0);
if (!hasMoreVlq(reader, length)) {
const last = stack.pop()!;
last[2] = line;
last[3] = column;
continue;
}
const kind = decodeInteger(reader, 0);
const fields = decodeInteger(reader, 0);
const hasName = fields & 0b0001;
const scope: OriginalScope = (
hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]
) as OriginalScope;
let vars: Var[] = EMPTY;
if (hasMoreVlq(reader, length)) {
vars = [];
do {
const varsIndex = decodeInteger(reader, 0);
vars.push(varsIndex);
} while (hasMoreVlq(reader, length));
}
scope.vars = vars;
scopes.push(scope);
stack.push(scope);
}
return scopes;
}
export function encodeOriginalScopes(scopes: OriginalScope[]): string {
const writer = new StringWriter();
for (let i = 0; i < scopes.length; ) {
i = _encodeOriginalScopes(scopes, i, writer, [0]);
}
return writer.flush();
}
function _encodeOriginalScopes(
scopes: OriginalScope[],
index: number,
writer: StringWriter,
state: [
number, // GenColumn
],
): number {
const scope = scopes[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
if (index > 0) writer.write(comma);
state[0] = encodeInteger(writer, startLine, state[0]);
encodeInteger(writer, startColumn, 0);
encodeInteger(writer, kind, 0);
const fields = scope.length === 6 ? 0b0001 : 0;
encodeInteger(writer, fields, 0);
if (scope.length === 6) encodeInteger(writer, scope[5], 0);
for (const v of vars) {
encodeInteger(writer, v, 0);
}
for (index++; index < scopes.length; ) {
const next = scopes[index];
const { 0: l, 1: c } = next;
if (l > endLine || (l === endLine && c >= endColumn)) {
break;
}
index = _encodeOriginalScopes(scopes, index, writer, state);
}
writer.write(comma);
state[0] = encodeInteger(writer, endLine, state[0]);
encodeInteger(writer, endColumn, 0);
return index;
}
export function decodeGeneratedRanges(input: string): GeneratedRange[] {
const { length } = input;
const reader = new StringReader(input);
const ranges: GeneratedRange[] = [];
const stack: GeneratedRange[] = [];
let genLine = 0;
let definitionSourcesIndex = 0;
let definitionScopeIndex = 0;
let callsiteSourcesIndex = 0;
let callsiteLine = 0;
let callsiteColumn = 0;
let bindingLine = 0;
let bindingColumn = 0;
do {
const semi = reader.indexOf(';');
let genColumn = 0;
for (; reader.pos < semi; reader.pos++) {
genColumn = decodeInteger(reader, genColumn);
if (!hasMoreVlq(reader, semi)) {
const last = stack.pop()!;
last[2] = genLine;
last[3] = genColumn;
continue;
}
const fields = decodeInteger(reader, 0);
const hasDefinition = fields & 0b0001;
const hasCallsite = fields & 0b0010;
const hasScope = fields & 0b0100;
let callsite: CallSite | null = null;
let bindings: Binding[] = EMPTY;
let range: GeneratedRange;
if (hasDefinition) {
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
definitionScopeIndex = decodeInteger(
reader,
definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0,
);
definitionSourcesIndex = defSourcesIndex;
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex] as GeneratedRange;
} else {
range = [genLine, genColumn, 0, 0] as GeneratedRange;
}
range.isScope = !!hasScope;
if (hasCallsite) {
const prevCsi = callsiteSourcesIndex;
const prevLine = callsiteLine;
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
const sameSource = prevCsi === callsiteSourcesIndex;
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
callsiteColumn = decodeInteger(
reader,
sameSource && prevLine === callsiteLine ? callsiteColumn : 0,
);
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
}
range.callsite = callsite;
if (hasMoreVlq(reader, semi)) {
bindings = [];
do {
bindingLine = genLine;
bindingColumn = genColumn;
const expressionsCount = decodeInteger(reader, 0);
let expressionRanges: BindingExpressionRange[];
if (expressionsCount < -1) {
expressionRanges = [[decodeInteger(reader, 0)]];
for (let i = -1; i > expressionsCount; i--) {
const prevBl = bindingLine;
bindingLine = decodeInteger(reader, bindingLine);
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
const expression = decodeInteger(reader, 0);
expressionRanges.push([expression, bindingLine, bindingColumn]);
}
} else {
expressionRanges = [[expressionsCount]];
}
bindings.push(expressionRanges);
} while (hasMoreVlq(reader, semi));
}
range.bindings = bindings;
ranges.push(range);
stack.push(range);
}
genLine++;
reader.pos = semi + 1;
} while (reader.pos < length);
return ranges;
}
export function encodeGeneratedRanges(ranges: GeneratedRange[]): string {
if (ranges.length === 0) return '';
const writer = new StringWriter();
for (let i = 0; i < ranges.length; ) {
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
}
return writer.flush();
}
function _encodeGeneratedRanges(
ranges: GeneratedRange[],
index: number,
writer: StringWriter,
state: [
number, // GenLine
number, // GenColumn
number, // DefSourcesIndex
number, // DefScopesIndex
number, // CallSourcesIndex
number, // CallLine
number, // CallColumn
],
): number {
const range = ranges[index];
const {
0: startLine,
1: startColumn,
2: endLine,
3: endColumn,
isScope,
callsite,
bindings,
} = range;
if (state[0] < startLine) {
catchupLine(writer, state[0], startLine);
state[0] = startLine;
state[1] = 0;
} else if (index > 0) {
writer.write(comma);
}
state[1] = encodeInteger(writer, range[1], state[1]);
const fields =
(range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
encodeInteger(writer, fields, 0);
if (range.length === 6) {
const { 4: sourcesIndex, 5: scopesIndex } = range;
if (sourcesIndex !== state[2]) {
state[3] = 0;
}
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
state[3] = encodeInteger(writer, scopesIndex, state[3]);
}
if (callsite) {
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite!;
if (sourcesIndex !== state[4]) {
state[5] = 0;
state[6] = 0;
} else if (callLine !== state[5]) {
state[6] = 0;
}
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
state[5] = encodeInteger(writer, callLine, state[5]);
state[6] = encodeInteger(writer, callColumn, state[6]);
}
if (bindings) {
for (const binding of bindings) {
if (binding.length > 1) encodeInteger(writer, -binding.length, 0);
const expression = binding[0][0];
encodeInteger(writer, expression, 0);
let bindingStartLine = startLine;
let bindingStartColumn = startColumn;
for (let i = 1; i < binding.length; i++) {
const expRange = binding[i];
bindingStartLine = encodeInteger(writer, expRange[1]!, bindingStartLine);
bindingStartColumn = encodeInteger(writer, expRange[2]!, bindingStartColumn);
encodeInteger(writer, expRange[0]!, 0);
}
}
}
for (index++; index < ranges.length; ) {
const next = ranges[index];
const { 0: l, 1: c } = next;
if (l > endLine || (l === endLine && c >= endColumn)) {
break;
}
index = _encodeGeneratedRanges(ranges, index, writer, state);
}
if (state[0] < endLine) {
catchupLine(writer, state[0], endLine);
state[0] = endLine;
state[1] = 0;
} else {
writer.write(comma);
}
state[1] = encodeInteger(writer, endColumn, state[1]);
return index;
}
function catchupLine(writer: StringWriter, lastLine: number, line: number) {
do {
writer.write(semicolon);
} while (++lastLine < line);
}
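A minimal usage sketch for the scope codec above; the scope values are made up for illustration, and `vars` is attached as a property, mirroring how `decodeOriginalScopes` builds its results:

```
import { decodeOriginalScopes, encodeOriginalScopes, type OriginalScope } from './scopes';

// One top-level scope spanning 0:0 through 2:10, kind 1, with no name and no variables.
const scope = [0, 0, 2, 10, 1] as OriginalScope;
scope.vars = [];

const encoded = encodeOriginalScopes([scope]);
const [roundTripped] = decodeOriginalScopes(encoded);
// roundTripped carries the same start/end positions and kind as `scope`.
```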

View File

@@ -0,0 +1,111 @@
import { comma, decodeInteger, encodeInteger, hasMoreVlq, semicolon } from './vlq';
import { StringWriter, StringReader } from './strings';
export {
decodeOriginalScopes,
encodeOriginalScopes,
decodeGeneratedRanges,
encodeGeneratedRanges,
} from './scopes';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes';
export type SourceMapSegment =
| [number]
| [number, number, number, number]
| [number, number, number, number, number];
export type SourceMapLine = SourceMapSegment[];
export type SourceMapMappings = SourceMapLine[];
export function decode(mappings: string): SourceMapMappings {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded: SourceMapMappings = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(';');
const line: SourceMapLine = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg: SourceMapSegment;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol) sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
} else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
} else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
if (!sorted) sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line: SourceMapSegment[]) {
line.sort(sortComparator);
}
function sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {
return a[0] - b[0];
}
export function encode(decoded: SourceMapMappings): string;
export function encode(decoded: Readonly<SourceMapMappings>): string;
export function encode(decoded: Readonly<SourceMapMappings>): string {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) writer.write(semicolon);
if (line.length === 0) continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0) writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1) continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4) continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
}
return writer.flush();
}
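One detail worth noting in `decode` above: segments within a line are re-sorted by generated column when the input is out of order (the `sorted`/`sort` path). A small sketch with a hand-built, intentionally unsorted mappings string as the illustrative input:

```
import { decode } from '@jridgewell/sourcemap-codec';

// 'CAAC' starts the line at generated column 1; 'DAAD' then steps back to column 0,
// so the line is detected as unsorted and sorted before being returned.
decode('CAAC,DAAD');
// => [[[0, 0, 0, 0], [1, 0, 0, 1]]]
```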

View File

@@ -0,0 +1,65 @@
const bufLength = 1024 * 16;
// Provide a fallback for older environments.
const td =
typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf: Uint8Array): string {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf: Uint8Array): string {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
export class StringWriter {
pos = 0;
private out = '';
private buffer = new Uint8Array(bufLength);
write(v: number): void {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
}
flush(): string {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
}
export class StringReader {
pos = 0;
declare private buffer: string;
constructor(buffer: string) {
this.buffer = buffer;
}
next(): number {
return this.buffer.charCodeAt(this.pos++);
}
peek(): number {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char: string): number {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
}
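A quick sketch of how these two helpers behave (values illustrative):

```
import { StringReader, StringWriter } from './strings';

// StringWriter buffers char codes and decodes them in 16 KiB chunks on flush.
const writer = new StringWriter();
for (const ch of 'AAAA') writer.write(ch.charCodeAt(0));
writer.flush(); // 'AAAA'

// StringReader walks a string by char code; indexOf falls back to the string's length
// when the character is missing, which the VLQ readers rely on for the last line/segment.
const reader = new StringReader('AAAA;BBBB');
reader.indexOf(';'); // 4
reader.next(); // 65 (the char code of 'A'); reader.pos is now 1
```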

55
node_modules/@jridgewell/sourcemap-codec/src/vlq.ts generated vendored Normal file
View File

@@ -0,0 +1,55 @@
import type { StringReader, StringWriter } from './strings';
export const comma = ','.charCodeAt(0);
export const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
export function decodeInteger(reader: StringReader, relative: number): number {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
return relative + value;
}
export function encodeInteger(builder: StringWriter, num: number, relative: number): number {
let delta = num - relative;
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
do {
let clamped = delta & 0b011111;
delta >>>= 5;
if (delta > 0) clamped |= 0b100000;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
}
export function hasMoreVlq(reader: StringReader, max: number) {
if (reader.pos >= max) return false;
return reader.peek() !== comma;
}
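A small sketch of the relative VLQ helpers above: `encodeInteger` writes the delta from `relative` and returns the absolute value so it can be threaded through as the next base, and `decodeInteger` mirrors that (values illustrative):

```
import { StringReader, StringWriter } from './strings';
import { decodeInteger, encodeInteger } from './vlq';

const writer = new StringWriter();
let base = 0;
base = encodeInteger(writer, 5, base); // writes the delta +5
base = encodeInteger(writer, 3, base); // writes the delta -2
const vlq = writer.flush();

const reader = new StringReader(vlq);
let value = decodeInteger(reader, 0); // 5
value = decodeInteger(reader, value); // 3
```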

View File

@@ -1,12 +1,12 @@
declare type Line = number;
declare type Column = number;
declare type Kind = number;
declare type Name = number;
declare type Var = number;
declare type SourcesIndex = number;
declare type ScopesIndex = number;
declare type Mix<A, B, O> = (A & O) | (B & O);
export declare type OriginalScope = Mix<[
type Line = number;
type Column = number;
type Kind = number;
type Name = number;
type Var = number;
type SourcesIndex = number;
type ScopesIndex = number;
type Mix<A, B, O> = (A & O) | (B & O);
export type OriginalScope = Mix<[
Line,
Column,
Line,
@@ -22,7 +22,7 @@ export declare type OriginalScope = Mix<[
], {
vars: Var[];
}>;
export declare type GeneratedRange = Mix<[
export type GeneratedRange = Mix<[
Line,
Column,
Line,
@@ -39,11 +39,12 @@ export declare type GeneratedRange = Mix<[
bindings: Binding[];
isScope: boolean;
}>;
export declare type CallSite = [SourcesIndex, Line, Column];
declare type Binding = BindingExpressionRange[];
export declare type BindingExpressionRange = [Name] | [Name, Line, Column];
export type CallSite = [SourcesIndex, Line, Column];
type Binding = BindingExpressionRange[];
export type BindingExpressionRange = [Name] | [Name, Line, Column];
export declare function decodeOriginalScopes(input: string): OriginalScope[];
export declare function encodeOriginalScopes(scopes: OriginalScope[]): string;
export declare function decodeGeneratedRanges(input: string): GeneratedRange[];
export declare function encodeGeneratedRanges(ranges: GeneratedRange[]): string;
export {};
//# sourceMappingURL=scopes.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"scopes.d.ts","sourceRoot":"","sources":["../src/scopes.ts"],"names":[],"mappings":"AAKA,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,MAAM,GAAG,MAAM,CAAC;AACrB,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,GAAG,GAAG,MAAM,CAAC;AAClB,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,KAAK,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAEtC,MAAM,MAAM,aAAa,GAAG,GAAG,CAC7B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,IAAI;CAAC,EAClC;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,IAAI;CAAC,EACxC;IAAE,IAAI,EAAE,GAAG,EAAE,CAAA;CAAE,CAChB,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG,GAAG,CAC9B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;CAAC,EAC5B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,YAAY;IAAE,WAAW;CAAC,EACvD;IACE,QAAQ,EAAE,QAAQ,GAAG,IAAI,CAAC;IAC1B,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,OAAO,EAAE,OAAO,CAAC;CAClB,CACF,CAAC;AACF,MAAM,MAAM,QAAQ,GAAG,CAAC,YAAY,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AACpD,KAAK,OAAO,GAAG,sBAAsB,EAAE,CAAC;AACxC,MAAM,MAAM,sBAAsB,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AAEnE,wBAAgB,oBAAoB,CAAC,KAAK,EAAE,MAAM,GAAG,aAAa,EAAE,CAyCnE;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EAAE,aAAa,EAAE,GAAG,MAAM,CAQpE;AA2CD,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,MAAM,GAAG,cAAc,EAAE,CAoGrE;AAED,wBAAgB,qBAAqB,CAAC,MAAM,EAAE,cAAc,EAAE,GAAG,MAAM,CAUtE"}

View File

@@ -0,0 +1,50 @@
type Line = number;
type Column = number;
type Kind = number;
type Name = number;
type Var = number;
type SourcesIndex = number;
type ScopesIndex = number;
type Mix<A, B, O> = (A & O) | (B & O);
export type OriginalScope = Mix<[
Line,
Column,
Line,
Column,
Kind
], [
Line,
Column,
Line,
Column,
Kind,
Name
], {
vars: Var[];
}>;
export type GeneratedRange = Mix<[
Line,
Column,
Line,
Column
], [
Line,
Column,
Line,
Column,
SourcesIndex,
ScopesIndex
], {
callsite: CallSite | null;
bindings: Binding[];
isScope: boolean;
}>;
export type CallSite = [SourcesIndex, Line, Column];
type Binding = BindingExpressionRange[];
export type BindingExpressionRange = [Name] | [Name, Line, Column];
export declare function decodeOriginalScopes(input: string): OriginalScope[];
export declare function encodeOriginalScopes(scopes: OriginalScope[]): string;
export declare function decodeGeneratedRanges(input: string): GeneratedRange[];
export declare function encodeGeneratedRanges(ranges: GeneratedRange[]): string;
export {};
//# sourceMappingURL=scopes.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"scopes.d.ts","sourceRoot":"","sources":["../src/scopes.ts"],"names":[],"mappings":"AAKA,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,MAAM,GAAG,MAAM,CAAC;AACrB,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,GAAG,GAAG,MAAM,CAAC;AAClB,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,KAAK,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAEtC,MAAM,MAAM,aAAa,GAAG,GAAG,CAC7B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,IAAI;CAAC,EAClC;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,IAAI;CAAC,EACxC;IAAE,IAAI,EAAE,GAAG,EAAE,CAAA;CAAE,CAChB,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG,GAAG,CAC9B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;CAAC,EAC5B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,YAAY;IAAE,WAAW;CAAC,EACvD;IACE,QAAQ,EAAE,QAAQ,GAAG,IAAI,CAAC;IAC1B,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,OAAO,EAAE,OAAO,CAAC;CAClB,CACF,CAAC;AACF,MAAM,MAAM,QAAQ,GAAG,CAAC,YAAY,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AACpD,KAAK,OAAO,GAAG,sBAAsB,EAAE,CAAC;AACxC,MAAM,MAAM,sBAAsB,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AAEnE,wBAAgB,oBAAoB,CAAC,KAAK,EAAE,MAAM,GAAG,aAAa,EAAE,CAyCnE;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EAAE,aAAa,EAAE,GAAG,MAAM,CAQpE;AA2CD,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,MAAM,GAAG,cAAc,EAAE,CAoGrE;AAED,wBAAgB,qBAAqB,CAAC,MAAM,EAAE,cAAc,EAAE,GAAG,MAAM,CAUtE"}

View File

@@ -0,0 +1,9 @@
export { decodeOriginalScopes, encodeOriginalScopes, decodeGeneratedRanges, encodeGeneratedRanges, } from './scopes.cts';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes.cts';
export type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export type SourceMapLine = SourceMapSegment[];
export type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
//# sourceMappingURL=sourcemap-codec.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"sourcemap-codec.d.ts","sourceRoot":"","sources":["../src/sourcemap-codec.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,oBAAoB,EACpB,oBAAoB,EACpB,qBAAqB,EACrB,qBAAqB,GACtB,MAAM,UAAU,CAAC;AAClB,YAAY,EAAE,aAAa,EAAE,cAAc,EAAE,QAAQ,EAAE,sBAAsB,EAAE,MAAM,UAAU,CAAC;AAEhG,MAAM,MAAM,gBAAgB,GACxB,CAAC,MAAM,CAAC,GACR,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,GAChC,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;AAC7C,MAAM,MAAM,aAAa,GAAG,gBAAgB,EAAE,CAAC;AAC/C,MAAM,MAAM,iBAAiB,GAAG,aAAa,EAAE,CAAC;AAEhD,wBAAgB,MAAM,CAAC,QAAQ,EAAE,MAAM,GAAG,iBAAiB,CAiD1D;AAUD,wBAAgB,MAAM,CAAC,OAAO,EAAE,iBAAiB,GAAG,MAAM,CAAC;AAC3D,wBAAgB,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,iBAAiB,CAAC,GAAG,MAAM,CAAC"}

View File

@@ -0,0 +1,9 @@
export { decodeOriginalScopes, encodeOriginalScopes, decodeGeneratedRanges, encodeGeneratedRanges, } from './scopes.mts';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes.mts';
export type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export type SourceMapLine = SourceMapSegment[];
export type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
//# sourceMappingURL=sourcemap-codec.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"sourcemap-codec.d.ts","sourceRoot":"","sources":["../src/sourcemap-codec.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,oBAAoB,EACpB,oBAAoB,EACpB,qBAAqB,EACrB,qBAAqB,GACtB,MAAM,UAAU,CAAC;AAClB,YAAY,EAAE,aAAa,EAAE,cAAc,EAAE,QAAQ,EAAE,sBAAsB,EAAE,MAAM,UAAU,CAAC;AAEhG,MAAM,MAAM,gBAAgB,GACxB,CAAC,MAAM,CAAC,GACR,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,GAChC,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;AAC7C,MAAM,MAAM,aAAa,GAAG,gBAAgB,EAAE,CAAC;AAC/C,MAAM,MAAM,iBAAiB,GAAG,aAAa,EAAE,CAAC;AAEhD,wBAAgB,MAAM,CAAC,QAAQ,EAAE,MAAM,GAAG,iBAAiB,CAiD1D;AAUD,wBAAgB,MAAM,CAAC,OAAO,EAAE,iBAAiB,GAAG,MAAM,CAAC;AAC3D,wBAAgB,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,iBAAiB,CAAC,GAAG,MAAM,CAAC"}

View File

@@ -13,3 +13,4 @@ export declare class StringReader {
peek(): number;
indexOf(char: string): number;
}
//# sourceMappingURL=strings.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"strings.d.ts","sourceRoot":"","sources":["../src/strings.ts"],"names":[],"mappings":"AAuBA,qBAAa,YAAY;IACvB,GAAG,SAAK;IACR,OAAO,CAAC,GAAG,CAAM;IACjB,OAAO,CAAC,MAAM,CAA6B;IAE3C,KAAK,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI;IAStB,KAAK,IAAI,MAAM;CAIhB;AAED,qBAAa,YAAY;IACvB,GAAG,SAAK;IACR,QAAgB,MAAM,CAAS;gBAEnB,MAAM,EAAE,MAAM;IAI1B,IAAI,IAAI,MAAM;IAId,IAAI,IAAI,MAAM;IAId,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM;CAK9B"}

View File

@@ -0,0 +1,16 @@
export declare class StringWriter {
pos: number;
private out;
private buffer;
write(v: number): void;
flush(): string;
}
export declare class StringReader {
pos: number;
private buffer;
constructor(buffer: string);
next(): number;
peek(): number;
indexOf(char: string): number;
}
//# sourceMappingURL=strings.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"strings.d.ts","sourceRoot":"","sources":["../src/strings.ts"],"names":[],"mappings":"AAuBA,qBAAa,YAAY;IACvB,GAAG,SAAK;IACR,OAAO,CAAC,GAAG,CAAM;IACjB,OAAO,CAAC,MAAM,CAA6B;IAE3C,KAAK,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI;IAStB,KAAK,IAAI,MAAM;CAIhB;AAED,qBAAa,YAAY;IACvB,GAAG,SAAK;IACR,QAAgB,MAAM,CAAS;gBAEnB,MAAM,EAAE,MAAM;IAI1B,IAAI,IAAI,MAAM;IAId,IAAI,IAAI,MAAM;IAId,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM;CAK9B"}

View File

@@ -1,6 +1,7 @@
import type { StringReader, StringWriter } from './strings';
import type { StringReader, StringWriter } from './strings.cts';
export declare const comma: number;
export declare const semicolon: number;
export declare function decodeInteger(reader: StringReader, relative: number): number;
export declare function encodeInteger(builder: StringWriter, num: number, relative: number): number;
export declare function hasMoreVlq(reader: StringReader, max: number): boolean;
//# sourceMappingURL=vlq.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"vlq.d.ts","sourceRoot":"","sources":["../src/vlq.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAE5D,eAAO,MAAM,KAAK,QAAoB,CAAC;AACvC,eAAO,MAAM,SAAS,QAAoB,CAAC;AAY3C,wBAAgB,aAAa,CAAC,MAAM,EAAE,YAAY,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,CAoB5E;AAED,wBAAgB,aAAa,CAAC,OAAO,EAAE,YAAY,EAAE,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,CAY1F;AAED,wBAAgB,UAAU,CAAC,MAAM,EAAE,YAAY,EAAE,GAAG,EAAE,MAAM,WAG3D"}

View File

@@ -0,0 +1,7 @@
import type { StringReader, StringWriter } from './strings.mts';
export declare const comma: number;
export declare const semicolon: number;
export declare function decodeInteger(reader: StringReader, relative: number): number;
export declare function encodeInteger(builder: StringWriter, num: number, relative: number): number;
export declare function hasMoreVlq(reader: StringReader, max: number): boolean;
//# sourceMappingURL=vlq.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"vlq.d.ts","sourceRoot":"","sources":["../src/vlq.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAE5D,eAAO,MAAM,KAAK,QAAoB,CAAC;AACvC,eAAO,MAAM,SAAS,QAAoB,CAAC;AAY3C,wBAAgB,aAAa,CAAC,MAAM,EAAE,YAAY,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,CAoB5E;AAED,wBAAgB,aAAa,CAAC,OAAO,EAAE,YAAY,EAAE,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,CAY1F;AAED,wBAAgB,UAAU,CAAC,MAAM,EAAE,YAAY,EAAE,GAAG,EAAE,MAAM,WAG3D"}

View File

@@ -1,4 +1,4 @@
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -131,33 +131,45 @@ assert.deepEqual(traced, {
## Benchmarks
```
node v18.0.0
node v20.10.0
amp.js.map - 45120 segments
Memory Usage:
trace-mapping decoded 562400 bytes
trace-mapping encoded 5706544 bytes
source-map-js 10717664 bytes
source-map-0.6.1 17446384 bytes
source-map-0.8.0 9701757 bytes
trace-mapping decoded 414164 bytes
trace-mapping encoded 6274352 bytes
source-map-js 10968904 bytes
source-map-0.6.1 17587160 bytes
source-map-0.8.0 8812155 bytes
Chrome dev tools 8672912 bytes
Smallest memory usage is trace-mapping decoded
Init speed:
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
trace-mapping: decoded JSON input x 205 ops/sec ±0.19% (88 runs sampled)
trace-mapping: encoded JSON input x 405 ops/sec ±1.47% (88 runs sampled)
trace-mapping: decoded Object input x 4,645 ops/sec ±0.15% (98 runs sampled)
trace-mapping: encoded Object input x 458 ops/sec ±1.63% (91 runs sampled)
source-map-js: encoded Object input x 75.48 ops/sec ±1.64% (67 runs sampled)
source-map-0.6.1: encoded Object input x 39.37 ops/sec ±1.44% (53 runs sampled)
Chrome dev tools: encoded Object input x 150 ops/sec ±1.76% (79 runs sampled)
Fastest is trace-mapping: decoded Object input
Trace speed:
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
Trace speed (random):
trace-mapping: decoded originalPositionFor x 44,946 ops/sec ±0.16% (99 runs sampled)
trace-mapping: encoded originalPositionFor x 37,995 ops/sec ±1.81% (89 runs sampled)
source-map-js: encoded originalPositionFor x 9,230 ops/sec ±1.36% (93 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 8,057 ops/sec ±0.84% (96 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 28,198 ops/sec ±1.12% (91 runs sampled)
Chrome dev tools: encoded originalPositionFor x 46,276 ops/sec ±1.35% (95 runs sampled)
Fastest is Chrome dev tools: encoded originalPositionFor
Trace speed (ascending):
trace-mapping: decoded originalPositionFor x 204,406 ops/sec ±0.19% (97 runs sampled)
trace-mapping: encoded originalPositionFor x 196,695 ops/sec ±0.24% (99 runs sampled)
source-map-js: encoded originalPositionFor x 11,948 ops/sec ±0.94% (99 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 10,730 ops/sec ±0.36% (100 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 51,427 ops/sec ±0.21% (98 runs sampled)
Chrome dev tools: encoded originalPositionFor x 162,615 ops/sec ±0.18% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
@@ -167,28 +179,40 @@ Fastest is trace-mapping: decoded originalPositionFor
babel.min.js.map - 347793 segments
Memory Usage:
trace-mapping decoded 89832 bytes
trace-mapping encoded 35474640 bytes
source-map-js 51257176 bytes
source-map-0.6.1 63515664 bytes
source-map-0.8.0 42933752 bytes
trace-mapping decoded 18504 bytes
trace-mapping encoded 35428008 bytes
source-map-js 51676808 bytes
source-map-0.6.1 63367136 bytes
source-map-0.8.0 43158400 bytes
Chrome dev tools 50721552 bytes
Smallest memory usage is trace-mapping decoded
Init speed:
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
trace-mapping: decoded JSON input x 17.82 ops/sec ±6.35% (35 runs sampled)
trace-mapping: encoded JSON input x 31.57 ops/sec ±7.50% (43 runs sampled)
trace-mapping: decoded Object input x 867 ops/sec ±0.74% (94 runs sampled)
trace-mapping: encoded Object input x 33.83 ops/sec ±7.66% (46 runs sampled)
source-map-js: encoded Object input x 6.58 ops/sec ±3.31% (20 runs sampled)
source-map-0.6.1: encoded Object input x 4.23 ops/sec ±3.43% (15 runs sampled)
Chrome dev tools: encoded Object input x 22.14 ops/sec ±3.79% (41 runs sampled)
Fastest is trace-mapping: decoded Object input
Trace speed:
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
Trace speed (random):
trace-mapping: decoded originalPositionFor x 78,234 ops/sec ±1.48% (29 runs sampled)
trace-mapping: encoded originalPositionFor x 60,761 ops/sec ±1.35% (21 runs sampled)
source-map-js: encoded originalPositionFor x 51,448 ops/sec ±2.17% (89 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 47,221 ops/sec ±1.99% (15 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 84,002 ops/sec ±1.45% (27 runs sampled)
Chrome dev tools: encoded originalPositionFor x 106,457 ops/sec ±1.38% (37 runs sampled)
Fastest is Chrome dev tools: encoded originalPositionFor
Trace speed (ascending):
trace-mapping: decoded originalPositionFor x 930,943 ops/sec ±0.25% (99 runs sampled)
trace-mapping: encoded originalPositionFor x 843,545 ops/sec ±0.34% (97 runs sampled)
source-map-js: encoded originalPositionFor x 114,510 ops/sec ±1.37% (36 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 87,412 ops/sec ±0.72% (92 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 197,709 ops/sec ±0.89% (59 runs sampled)
Chrome dev tools: encoded originalPositionFor x 688,983 ops/sec ±0.33% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
@@ -198,28 +222,40 @@ Fastest is trace-mapping: decoded originalPositionFor
preact.js.map - 1992 segments
Memory Usage:
trace-mapping decoded 37128 bytes
trace-mapping encoded 247280 bytes
source-map-js 1143536 bytes
source-map-0.6.1 1290992 bytes
source-map-0.8.0 96544 bytes
trace-mapping decoded 33136 bytes
trace-mapping encoded 254240 bytes
source-map-js 837488 bytes
source-map-0.6.1 961928 bytes
source-map-0.8.0 54384 bytes
Chrome dev tools 709680 bytes
Smallest memory usage is trace-mapping decoded
Init speed:
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
trace-mapping: decoded JSON input x 3,709 ops/sec ±0.13% (99 runs sampled)
trace-mapping: encoded JSON input x 6,447 ops/sec ±0.22% (101 runs sampled)
trace-mapping: decoded Object input x 83,062 ops/sec ±0.23% (100 runs sampled)
trace-mapping: encoded Object input x 14,980 ops/sec ±0.28% (100 runs sampled)
source-map-js: encoded Object input x 2,544 ops/sec ±0.16% (99 runs sampled)
source-map-0.6.1: encoded Object input x 1,221 ops/sec ±0.37% (97 runs sampled)
Chrome dev tools: encoded Object input x 4,241 ops/sec ±0.39% (93 runs sampled)
Fastest is trace-mapping: decoded Object input
Trace speed:
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
Trace speed (random):
trace-mapping: decoded originalPositionFor x 91,028 ops/sec ±0.14% (94 runs sampled)
trace-mapping: encoded originalPositionFor x 84,348 ops/sec ±0.26% (98 runs sampled)
source-map-js: encoded originalPositionFor x 26,998 ops/sec ±0.23% (98 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 18,049 ops/sec ±0.26% (100 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 41,916 ops/sec ±0.28% (98 runs sampled)
Chrome dev tools: encoded originalPositionFor x 88,616 ops/sec ±0.14% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
Trace speed (ascending):
trace-mapping: decoded originalPositionFor x 319,960 ops/sec ±0.16% (100 runs sampled)
trace-mapping: encoded originalPositionFor x 302,153 ops/sec ±0.18% (100 runs sampled)
source-map-js: encoded originalPositionFor x 35,574 ops/sec ±0.19% (100 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 19,943 ops/sec ±0.12% (101 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 54,648 ops/sec ±0.20% (99 runs sampled)
Chrome dev tools: encoded originalPositionFor x 278,319 ops/sec ±0.17% (102 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
@@ -229,28 +265,83 @@ Fastest is trace-mapping: decoded originalPositionFor
react.js.map - 5726 segments
Memory Usage:
trace-mapping decoded 16176 bytes
trace-mapping encoded 681552 bytes
source-map-js 2418352 bytes
source-map-0.6.1 2443672 bytes
source-map-0.8.0 111768 bytes
trace-mapping decoded 10872 bytes
trace-mapping encoded 681512 bytes
source-map-js 2563944 bytes
source-map-0.6.1 2150864 bytes
source-map-0.8.0 88680 bytes
Chrome dev tools 1149576 bytes
Smallest memory usage is trace-mapping decoded
Init speed:
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
trace-mapping: decoded JSON input x 1,887 ops/sec ±0.28% (99 runs sampled)
trace-mapping: encoded JSON input x 4,749 ops/sec ±0.48% (97 runs sampled)
trace-mapping: decoded Object input x 74,236 ops/sec ±0.11% (99 runs sampled)
trace-mapping: encoded Object input x 5,752 ops/sec ±0.38% (100 runs sampled)
source-map-js: encoded Object input x 806 ops/sec ±0.19% (97 runs sampled)
source-map-0.6.1: encoded Object input x 418 ops/sec ±0.33% (94 runs sampled)
Chrome dev tools: encoded Object input x 1,524 ops/sec ±0.57% (92 runs sampled)
Fastest is trace-mapping: decoded Object input
Trace speed:
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
Trace speed (random):
trace-mapping: decoded originalPositionFor x 620,201 ops/sec ±0.33% (96 runs sampled)
trace-mapping: encoded originalPositionFor x 579,548 ops/sec ±0.35% (97 runs sampled)
source-map-js: encoded originalPositionFor x 230,983 ops/sec ±0.62% (54 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 158,145 ops/sec ±0.80% (46 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 343,801 ops/sec ±0.55% (96 runs sampled)
Chrome dev tools: encoded originalPositionFor x 659,649 ops/sec ±0.49% (98 runs sampled)
Fastest is Chrome dev tools: encoded originalPositionFor
Trace speed (ascending):
trace-mapping: decoded originalPositionFor x 2,368,079 ops/sec ±0.32% (98 runs sampled)
trace-mapping: encoded originalPositionFor x 2,134,039 ops/sec ±2.72% (87 runs sampled)
source-map-js: encoded originalPositionFor x 290,120 ops/sec ±2.49% (82 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 187,613 ops/sec ±0.86% (49 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 479,569 ops/sec ±0.65% (96 runs sampled)
Chrome dev tools: encoded originalPositionFor x 2,048,414 ops/sec ±0.24% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
***
vscode.map - 2141001 segments
Memory Usage:
trace-mapping decoded 5206584 bytes
trace-mapping encoded 208370336 bytes
source-map-js 278493008 bytes
source-map-0.6.1 391564048 bytes
source-map-0.8.0 257508787 bytes
Chrome dev tools 291053000 bytes
Smallest memory usage is trace-mapping decoded
Init speed:
trace-mapping: decoded JSON input x 1.63 ops/sec ±33.88% (9 runs sampled)
trace-mapping: encoded JSON input x 3.29 ops/sec ±36.13% (13 runs sampled)
trace-mapping: decoded Object input x 103 ops/sec ±0.93% (77 runs sampled)
trace-mapping: encoded Object input x 5.42 ops/sec ±28.54% (19 runs sampled)
source-map-js: encoded Object input x 1.07 ops/sec ±13.84% (7 runs sampled)
source-map-0.6.1: encoded Object input x 0.60 ops/sec ±2.43% (6 runs sampled)
Chrome dev tools: encoded Object input x 2.61 ops/sec ±22.00% (11 runs sampled)
Fastest is trace-mapping: decoded Object input
Trace speed (random):
trace-mapping: decoded originalPositionFor x 257,019 ops/sec ±0.97% (93 runs sampled)
trace-mapping: encoded originalPositionFor x 179,163 ops/sec ±0.83% (92 runs sampled)
source-map-js: encoded originalPositionFor x 73,337 ops/sec ±1.35% (87 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 38,797 ops/sec ±1.66% (88 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 107,758 ops/sec ±1.94% (45 runs sampled)
Chrome dev tools: encoded originalPositionFor x 188,550 ops/sec ±1.85% (79 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
Trace speed (ascending):
trace-mapping: decoded originalPositionFor x 447,621 ops/sec ±3.64% (94 runs sampled)
trace-mapping: encoded originalPositionFor x 323,698 ops/sec ±5.20% (88 runs sampled)
source-map-js: encoded originalPositionFor x 78,387 ops/sec ±1.69% (89 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 41,016 ops/sec ±3.01% (25 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 124,204 ops/sec ±0.90% (92 runs sampled)
Chrome dev tools: encoded originalPositionFor x 230,087 ops/sec ±2.61% (93 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
```
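For context, the timings above measure repeated `originalPositionFor` lookups against maps built from decoded and encoded inputs. A minimal sketch of the call being benchmarked (map contents illustrative):

```
import { TraceMap, originalPositionFor } from '@jridgewell/trace-mapping';

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  names: [],
  mappings: 'AAAA,CAAC',
});

// Lines are 1-based and columns are 0-based, for both generated and original positions.
originalPositionFor(tracer, { line: 1, column: 1 });
// => { source: 'input.js', line: 1, column: 1, name: null }
```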

View File

@@ -1,580 +1,504 @@
import { encode, decode } from '@jridgewell/sourcemap-codec';
import resolveUri from '@jridgewell/resolve-uri';
// src/trace-mapping.ts
import { encode, decode } from "@jridgewell/sourcemap-codec";
function resolve(input, base) {
// The base is always treated as a directory, if it's not empty.
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
if (base && !base.endsWith('/'))
base += '/';
return resolveUri(input, base);
}
// src/resolve.ts
import resolveUri from "@jridgewell/resolve-uri";
/**
* Removes everything after the last "/", but leaves the slash.
*/
// src/strip-filename.ts
function stripFilename(path) {
if (!path)
return '';
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
if (!path) return "";
const index = path.lastIndexOf("/");
return path.slice(0, index + 1);
}
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;
// src/resolve.ts
function resolver(mapUrl, sourceRoot) {
const from = stripFilename(mapUrl);
const prefix = sourceRoot ? sourceRoot + "/" : "";
return (source) => resolveUri(prefix + (source || ""), from);
}
// src/sourcemap-segment.ts
var COLUMN = 0;
var SOURCES_INDEX = 1;
var SOURCE_LINE = 2;
var SOURCE_COLUMN = 3;
var NAMES_INDEX = 4;
var REV_GENERATED_LINE = 1;
var REV_GENERATED_COLUMN = 2;
// src/sort.ts
function maybeSort(mappings, owned) {
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length)
return mappings;
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
// not, we do not want to modify the consumer's input array.
if (!owned)
mappings = mappings.slice();
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
mappings[i] = sortSegments(mappings[i], owned);
}
return mappings;
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length) return mappings;
if (!owned) mappings = mappings.slice();
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
mappings[i] = sortSegments(mappings[i], owned);
}
return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
for (let i = start; i < mappings.length; i++) {
if (!isSorted(mappings[i]))
return i;
}
return mappings.length;
for (let i = start; i < mappings.length; i++) {
if (!isSorted(mappings[i])) return i;
}
return mappings.length;
}
function isSorted(line) {
for (let j = 1; j < line.length; j++) {
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
return false;
}
for (let j = 1; j < line.length; j++) {
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
return false;
}
return true;
}
return true;
}
function sortSegments(line, owned) {
if (!owned)
line = line.slice();
return line.sort(sortComparator);
if (!owned) line = line.slice();
return line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[COLUMN] - b[COLUMN];
return a[COLUMN] - b[COLUMN];
}
let found = false;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
// src/binary-search.ts
var found = false;
function binarySearch(haystack, needle, low, high) {
while (low <= high) {
const mid = low + ((high - low) >> 1);
const cmp = haystack[mid][COLUMN] - needle;
if (cmp === 0) {
found = true;
return mid;
}
if (cmp < 0) {
low = mid + 1;
}
else {
high = mid - 1;
}
while (low <= high) {
const mid = low + (high - low >> 1);
const cmp = haystack[mid][COLUMN] - needle;
if (cmp === 0) {
found = true;
return mid;
}
found = false;
return low - 1;
if (cmp < 0) {
low = mid + 1;
} else {
high = mid - 1;
}
}
found = false;
return low - 1;
}
function upperBound(haystack, needle, index) {
for (let i = index + 1; i < haystack.length; index = i++) {
if (haystack[i][COLUMN] !== needle)
break;
}
return index;
for (let i = index + 1; i < haystack.length; index = i++) {
if (haystack[i][COLUMN] !== needle) break;
}
return index;
}
function lowerBound(haystack, needle, index) {
for (let i = index - 1; i >= 0; index = i--) {
if (haystack[i][COLUMN] !== needle)
break;
}
return index;
for (let i = index - 1; i >= 0; index = i--) {
if (haystack[i][COLUMN] !== needle) break;
}
return index;
}
function memoizedState() {
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1,
};
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1
};
}
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
function memoizedBinarySearch(haystack, needle, state, key) {
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
return lastIndex;
}
if (needle >= lastNeedle) {
// lastIndex may be -1 if the previous needle was not found.
low = lastIndex === -1 ? 0 : lastIndex;
}
else {
high = lastIndex;
}
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
return lastIndex;
}
state.lastKey = key;
state.lastNeedle = needle;
return (state.lastIndex = binarySearch(haystack, needle, low, high));
if (needle >= lastNeedle) {
low = lastIndex === -1 ? 0 : lastIndex;
} else {
high = lastIndex;
}
}
state.lastKey = key;
state.lastNeedle = needle;
return state.lastIndex = binarySearch(haystack, needle, low, high);
}
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
// src/by-source.ts
function buildBySources(decoded, memos) {
const sources = memos.map(buildNullArray);
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
if (seg.length === 1)
continue;
const sourceIndex = seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
const originalSource = sources[sourceIndex];
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
const memo = memos[sourceIndex];
// The binary search either found a match, or it found the left-index just before where the
// segment should go. Either way, we want to insert after that. And there may be multiple
// generated segments associated with an original location, so we may need to move several
// indexes before we find where we need to insert.
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
memo.lastIndex = ++index;
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
}
const sources = memos.map(buildNullArray);
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
if (seg.length === 1) continue;
const sourceIndex2 = seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
const originalSource = sources[sourceIndex2];
const originalLine = originalSource[sourceLine] || (originalSource[sourceLine] = []);
const memo = memos[sourceIndex2];
let index = upperBound(
originalLine,
sourceColumn,
memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine)
);
memo.lastIndex = ++index;
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
}
return sources;
}
return sources;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
return { __proto__: null };
return { __proto__: null };
}
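The comment above describes the core trick: integer-like keys on an object enumerate in ascending numeric order, so a null-prototype object can stand in for a sparse array. A tiny sketch of that behavior (illustrative, not part of the package):

```
// Keys are inserted out of order, but integer-like keys enumerate in ascending
// numeric order, which is what buildBySources relies on when iterating lines.
const line: Record<number, string> = Object.create(null);
line[10] = 'b';
line[2] = 'a';
Object.keys(line); // ['2', '10']
```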
const AnyMap = function (map, mapUrl) {
const parsed = parse(map);
if (!('sections' in parsed)) {
return new TraceMap(parsed, mapUrl);
}
const mappings = [];
const sources = [];
const sourcesContent = [];
const names = [];
const ignoreList = [];
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
const joined = {
version: 3,
file: parsed.file,
names,
sources,
sourcesContent,
mappings,
ignoreList,
};
return presortedDecodedMap(joined);
};
// src/types.ts
function parse(map) {
return typeof map === 'string' ? JSON.parse(map) : map;
return typeof map === "string" ? JSON.parse(map) : map;
}
// src/flatten-map.ts
var FlattenMap = function(map, mapUrl) {
const parsed = parse(map);
if (!("sections" in parsed)) {
return new TraceMap(parsed, mapUrl);
}
const mappings = [];
const sources = [];
const sourcesContent = [];
const names = [];
const ignoreList = [];
recurse(
parsed,
mapUrl,
mappings,
sources,
sourcesContent,
names,
ignoreList,
0,
0,
Infinity,
Infinity
);
const joined = {
version: 3,
file: parsed.file,
names,
sources,
sourcesContent,
mappings,
ignoreList
};
return presortedDecodedMap(joined);
};
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
const { sections } = input;
for (let i = 0; i < sections.length; i++) {
const { map, offset } = sections[i];
let sl = stopLine;
let sc = stopColumn;
if (i + 1 < sections.length) {
const nextOffset = sections[i + 1].offset;
sl = Math.min(stopLine, lineOffset + nextOffset.line);
if (sl === stopLine) {
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
} else if (sl < stopLine) {
sc = columnOffset + nextOffset.column;
}
}
addSection(
map,
mapUrl,
mappings,
sources,
sourcesContent,
names,
ignoreList,
lineOffset + offset.line,
columnOffset + offset.column,
sl,
sc
);
}
}
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
const parsed = parse(input);
if ("sections" in parsed) return recurse(...arguments);
const map = new TraceMap(parsed, mapUrl);
const sourcesOffset = sources.length;
const namesOffset = names.length;
const decoded = decodedMappings(map);
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
append(sources, resolvedSources);
append(names, map.names);
if (contents) append(sourcesContent, contents);
else for (let i = 0; i < resolvedSources.length; i++) sourcesContent.push(null);
if (ignores) for (let i = 0; i < ignores.length; i++) ignoreList.push(ignores[i] + sourcesOffset);
for (let i = 0; i < decoded.length; i++) {
const lineI = lineOffset + i;
if (lineI > stopLine) return;
const out = getLine(mappings, lineI);
const cOffset = i === 0 ? columnOffset : 0;
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const column = cOffset + seg[COLUMN];
if (lineI === stopLine && column >= stopColumn) return;
if (seg.length === 1) {
out.push([column]);
continue;
}
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
out.push(
seg.length === 4 ? [column, sourcesIndex, sourceLine, sourceColumn] : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]
);
}
}
}
function append(arr, other) {
for (let i = 0; i < other.length; i++) arr.push(other[i]);
}
function getLine(arr, index) {
for (let i = arr.length; i <= index; i++) arr[i] = [];
return arr[index];
}
// src/trace-mapping.ts
var LINE_GTR_ZERO = "`line` must be greater than 0 (lines start at line 1)";
var COL_GTR_EQ_ZERO = "`column` must be greater than or equal to 0 (columns start at column 0)";
var LEAST_UPPER_BOUND = -1;
var GREATEST_LOWER_BOUND = 1;
var TraceMap = class {
constructor(map, mapUrl) {
const isString = typeof map === "string";
if (!isString && map._decodedMemo) return map;
const parsed = parse(map);
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
this.version = version;
this.file = file;
this.names = names || [];
this.sourceRoot = sourceRoot;
this.sources = sources;
this.sourcesContent = sourcesContent;
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || void 0;
const resolve = resolver(mapUrl, sourceRoot);
this.resolvedSources = sources.map(resolve);
const { mappings } = parsed;
if (typeof mappings === "string") {
this._encoded = mappings;
this._decoded = void 0;
} else if (Array.isArray(mappings)) {
this._encoded = void 0;
this._decoded = maybeSort(mappings, isString);
} else if (parsed.sections) {
throw new Error(`TraceMap passed sectioned source map, please use FlattenMap export instead`);
} else {
throw new Error(`invalid source map: ${JSON.stringify(parsed)}`);
}
this._decodedMemo = memoizedState();
this._bySources = void 0;
this._bySourceMemos = void 0;
}
};
/**
 * Typescript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers.
 */
function cast(map) {
  return map;
}
/**
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
*/
function encodedMappings(map) {
var _a, _b;
return (_b = (_a = cast(map))._encoded) != null ? _b : _a._encoded = encode(cast(map)._decoded);
}
/**
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
*/
function decodedMappings(map) {
var _a;
return (_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded));
}
/**
* A low-level API to find the segment associated with a generated line/column (think, from a
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
*/
function traceSegment(map, line, column) {
const decoded = decodedMappings(map);
if (line >= decoded.length) return null;
const segments = decoded[line];
const index = traceSegmentInternal(
segments,
cast(map)._decodedMemo,
line,
column,
GREATEST_LOWER_BOUND
);
return index === -1 ? null : segments[index];
}
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
function originalPositionFor(map, needle) {
let { line, column, bias } = needle;
line--;
if (line < 0) throw new Error(LINE_GTR_ZERO);
if (column < 0) throw new Error(COL_GTR_EQ_ZERO);
const decoded = decodedMappings(map);
if (line >= decoded.length) return OMapping(null, null, null, null);
const segments = decoded[line];
const index = traceSegmentInternal(
segments,
cast(map)._decodedMemo,
line,
column,
bias || GREATEST_LOWER_BOUND
);
if (index === -1) return OMapping(null, null, null, null);
const segment = segments[index];
if (segment.length === 1) return OMapping(null, null, null, null);
const { names, resolvedSources } = map;
return OMapping(
resolvedSources[segment[SOURCES_INDEX]],
segment[SOURCE_LINE] + 1,
segment[SOURCE_COLUMN],
segment.length === 5 ? names[segment[NAMES_INDEX]] : null
);
}
/**
* Finds the generated line/column position of the provided source/line/column source position.
*/
function generatedPositionFor(map, needle) {
const { source, line, column, bias } = needle;
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
}
/**
* Finds all generated line/column positions of the provided source/line/column source position.
*/
function allGeneratedPositionsFor(map, needle) {
const { source, line, column, bias } = needle;
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
}
/**
* Iterates each mapping in generated position order.
*/
function eachMapping(map, cb) {
const decoded = decodedMappings(map);
const { names, resolvedSources } = map;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generatedLine = i + 1;
const generatedColumn = seg[0];
let source = null;
let originalLine = null;
let originalColumn = null;
let name = null;
if (seg.length !== 1) {
source = resolvedSources[seg[1]];
originalLine = seg[2] + 1;
originalColumn = seg[3];
}
if (seg.length === 5) name = names[seg[4]];
cb({
generatedLine,
generatedColumn,
source,
originalLine,
originalColumn,
name
});
}
}
}
function sourceIndex(map, source) {
const { sources, resolvedSources } = map;
let index = sources.indexOf(source);
if (index === -1) index = resolvedSources.indexOf(source);
return index;
}
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
*/
function sourceContentFor(map, source) {
const { sourcesContent } = map;
if (sourcesContent == null) return null;
const index = sourceIndex(map, source);
return index === -1 ? null : sourcesContent[index];
}
/**
* Determines if the source is marked to ignore by the source map.
*/
function isIgnored(map, source) {
const { ignoreList } = map;
if (ignoreList == null) return false;
const index = sourceIndex(map, source);
return index === -1 ? false : ignoreList.includes(index);
}
/**
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
* maps.
*/
function presortedDecodedMap(map, mapUrl) {
const tracer = new TraceMap(clone(map, []), mapUrl);
cast(tracer)._decoded = map.mappings;
return tracer;
}
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function decodedMap(map) {
return clone(map, decodedMappings(map));
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function encodedMap(map) {
return clone(map, encodedMappings(map));
}
function clone(map, mappings) {
return {
version: map.version,
file: map.file,
names: map.names,
sourceRoot: map.sourceRoot,
sources: map.sources,
sourcesContent: map.sourcesContent,
mappings,
ignoreList: map.ignoreList || map.x_google_ignoreList
};
}
function OMapping(source, line, column, name) {
return { source, line, column, name };
}
function GMapping(line, column) {
return { line, column };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
let index = memoizedBinarySearch(segments, column, memo, line);
if (found) {
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
} else if (bias === LEAST_UPPER_BOUND) index++;
if (index === -1 || index === segments.length) return -1;
return index;
}
function sliceGeneratedPositions(segments, memo, line, column, bias) {
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
if (!found && bias === LEAST_UPPER_BOUND) min++;
if (min === -1 || min === segments.length) return [];
const matchedColumn = found ? column : segments[min][COLUMN];
if (!found) min = lowerBound(segments, matchedColumn, min);
const max = upperBound(segments, matchedColumn, min);
const result = [];
for (; min <= max; min++) {
const segment = segments[min];
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
}
return result;
}
function generatedPosition(map, source, line, column, bias, all) {
var _a;
line--;
if (line < 0) throw new Error(LINE_GTR_ZERO);
if (column < 0) throw new Error(COL_GTR_EQ_ZERO);
const { sources, resolvedSources } = map;
let sourceIndex2 = sources.indexOf(source);
if (sourceIndex2 === -1) sourceIndex2 = resolvedSources.indexOf(source);
if (sourceIndex2 === -1) return all ? [] : GMapping(null, null);
const generated = (_a = cast(map))._bySources || (_a._bySources = buildBySources(
decodedMappings(map),
cast(map)._bySourceMemos = sources.map(memoizedState)
));
const segments = generated[sourceIndex2][line];
if (segments == null) return all ? [] : GMapping(null, null);
const memo = cast(map)._bySourceMemos[sourceIndex2];
if (all) return sliceGeneratedPositions(segments, memo, line, column, bias);
const index = traceSegmentInternal(segments, memo, line, column, bias);
if (index === -1) return GMapping(null, null);
const segment = segments[index];
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}
export {
FlattenMap as AnyMap,
FlattenMap,
GREATEST_LOWER_BOUND,
LEAST_UPPER_BOUND,
TraceMap,
allGeneratedPositionsFor,
decodedMap,
decodedMappings,
eachMapping,
encodedMap,
encodedMappings,
generatedPositionFor,
isIgnored,
originalPositionFor,
presortedDecodedMap,
sourceContentFor,
traceSegment
};
//# sourceMappingURL=trace-mapping.mjs.map
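
The bundle above is the whole public tracing API in one file. A minimal usage sketch follows; the map literal, file name, and expected results are invented for illustration, and only the imported names come from the package:

```ts
import { TraceMap, originalPositionFor, generatedPositionFor } from '@jridgewell/trace-mapping';

// A tiny hand-written map: generated line 1, column 0 maps to input.js 1:0
// with the name "foo" ("AAAAA" encodes the single segment [0, 0, 0, 0, 0]).
const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  names: ['foo'],
  mappings: 'AAAAA',
});

// Line is 1-based, column is 0-based (matching the legacy `source-map` API).
console.log(originalPositionFor(tracer, { line: 1, column: 0 }));
// => { source: 'input.js', line: 1, column: 0, name: 'foo' }

console.log(generatedPositionFor(tracer, { source: 'input.js', line: 1, column: 0 }));
// => { line: 1, column: 0 }
```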

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@@ -1,8 +0,0 @@
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
type AnyMap = {
new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
(map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};

View File

@@ -1 +0,0 @@
export default function resolve(input: string, base: string | undefined): string;

View File

@@ -1,6 +1,6 @@
{
"name": "@jridgewell/trace-mapping",
"version": "0.3.25",
"version": "0.3.29",
"description": "Trace the original position through a source map",
"keywords": [
"source",
@@ -8,68 +8,62 @@
],
"main": "dist/trace-mapping.umd.js",
"module": "dist/trace-mapping.mjs",
"types": "dist/types/trace-mapping.d.ts",
"types": "types/trace-mapping.d.cts",
"files": [
"dist"
"dist",
"src",
"types"
],
"exports": {
".": [
{
"types": "./dist/types/trace-mapping.d.ts",
"browser": "./dist/trace-mapping.umd.js",
"require": "./dist/trace-mapping.umd.js",
"import": "./dist/trace-mapping.mjs"
"import": {
"types": "./types/trace-mapping.d.mts",
"default": "./dist/trace-mapping.mjs"
},
"require": {
"types": "./types/trace-mapping.d.cts",
"default": "./dist/trace-mapping.umd.js"
},
"browser": {
"types": "./types/trace-mapping.d.cts",
"default": "./dist/trace-mapping.umd.js"
}
},
"./dist/trace-mapping.umd.js"
],
"./package.json": "./package.json"
},
"author": "Justin Ridgewell <justin@ridgewell.name>",
"scripts": {
"benchmark": "run-s build:code benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:code build:types",
"build:code": "node ../../esbuild.mjs trace-mapping.ts",
"build:types": "run-s build:types:force build:types:emit build:types:mts",
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
"build:types:emit": "tsc --project tsconfig.build.json",
"build:types:mts": "node ../../mts-types.mjs",
"clean": "run-s -n clean:code clean:types",
"clean:code": "tsc --build --clean tsconfig.build.json",
"clean:types": "rimraf dist types",
"test": "run-s -n test:types test:only test:format",
"test:format": "prettier --check '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:types": "eslint '{src,test}/**/*.ts'",
"lint": "run-s -n lint:types lint:format",
"lint:format": "npm run test:format -- --write",
"lint:types": "npm run test:types -- --fix",
"prepublishOnly": "npm run-s -n build test"
},
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/trace-mapping",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/trace-mapping.git"
"url": "git+https://github.com/jridgewell/sourcemaps.git",
"directory": "packages/trace-mapping"
},
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"scripts": {
"benchmark": "run-s build:rollup benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.mjs",
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.mjs",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"prebuild": "rm -rf dist",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build",
"test": "run-s -n test:lint test:only",
"test:debug": "mocha --inspect-brk",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "c8 mocha",
"test:watch": "mocha --watch"
},
"devDependencies": {
"@rollup/plugin-typescript": "11.1.6",
"@types/mocha": "10.0.6",
"@types/node": "20.11.20",
"@typescript-eslint/eslint-plugin": "6.18.1",
"@typescript-eslint/parser": "6.18.1",
"benchmark": "2.1.4",
"c8": "9.0.0",
"esbuild": "0.19.11",
"eslint": "8.56.0",
"eslint-config-prettier": "9.1.0",
"eslint-plugin-no-only-tests": "3.1.0",
"mocha": "10.3.0",
"npm-run-all": "4.1.5",
"prettier": "3.1.1",
"rollup": "4.9.4",
"tsx": "4.7.0",
"typescript": "5.3.3"
},
"dependencies": {
"@jridgewell/resolve-uri": "^3.1.0",
"@jridgewell/sourcemap-codec": "^1.4.14"

View File

@@ -0,0 +1,115 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
import { COLUMN } from './sourcemap-segment';
export type MemoState = {
lastKey: number;
lastNeedle: number;
lastIndex: number;
};
export let found = false;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
export function binarySearch(
haystack: SourceMapSegment[] | ReverseSegment[],
needle: number,
low: number,
high: number,
): number {
while (low <= high) {
const mid = low + ((high - low) >> 1);
const cmp = haystack[mid][COLUMN] - needle;
if (cmp === 0) {
found = true;
return mid;
}
if (cmp < 0) {
low = mid + 1;
} else {
high = mid - 1;
}
}
found = false;
return low - 1;
}
export function upperBound(
haystack: SourceMapSegment[] | ReverseSegment[],
needle: number,
index: number,
): number {
for (let i = index + 1; i < haystack.length; index = i++) {
if (haystack[i][COLUMN] !== needle) break;
}
return index;
}
export function lowerBound(
haystack: SourceMapSegment[] | ReverseSegment[],
needle: number,
index: number,
): number {
for (let i = index - 1; i >= 0; index = i--) {
if (haystack[i][COLUMN] !== needle) break;
}
return index;
}
export function memoizedState(): MemoState {
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1,
};
}
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
export function memoizedBinarySearch(
haystack: SourceMapSegment[] | ReverseSegment[],
needle: number,
state: MemoState,
key: number,
): number {
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
return lastIndex;
}
if (needle >= lastNeedle) {
// lastIndex may be -1 if the previous needle was not found.
low = lastIndex === -1 ? 0 : lastIndex;
} else {
high = lastIndex;
}
}
state.lastKey = key;
state.lastNeedle = needle;
return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
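
To make the search contract above concrete, a small sketch against invented one-tuple segments (`COLUMN` is index 0, as defined in sourcemap-segment.ts):

```ts
import { binarySearch, found, memoizedBinarySearch, memoizedState } from './binary-search';

// Three segments at generated columns 10, 20 and 30.
const segments: [number][] = [[10], [20], [30]];

// Exact match: the matching index is returned and `found` is set to true.
let index = binarySearch(segments, 20, 0, segments.length - 1);
console.log(index, found); // 1 true

// No match: the left index (column 20) is returned and `found` is false.
index = binarySearch(segments, 25, 0, segments.length - 1);
console.log(index, found); // 1 false

// The memoized variant remembers the last needle per key (line), so a second,
// monotonically increasing query on the same line resumes from the last index.
const memo = memoizedState();
memoizedBinarySearch(segments, 12, memo, 0); // full search over [0, 2]
memoizedBinarySearch(segments, 25, memo, 0); // low starts at memo.lastIndex
console.log(memo.lastIndex); // 1
```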

View File

@@ -0,0 +1,65 @@
import { COLUMN, SOURCES_INDEX, SOURCE_LINE, SOURCE_COLUMN } from './sourcemap-segment';
import { memoizedBinarySearch, upperBound } from './binary-search';
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
export type Source = {
__proto__: null;
[line: number]: Exclude<ReverseSegment, [number]>[];
};
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
export default function buildBySources(
decoded: readonly SourceMapSegment[][],
memos: MemoState[],
): Source[] {
const sources: Source[] = memos.map(buildNullArray);
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
if (seg.length === 1) continue;
const sourceIndex = seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
const originalSource = sources[sourceIndex];
const originalLine = (originalSource[sourceLine] ||= []);
const memo = memos[sourceIndex];
// The binary search either found a match, or it found the left-index just before where the
// segment should go. Either way, we want to insert after that. And there may be multiple
      // generated segments associated with an original location, so we may need to move several
// indexes before we find where we need to insert.
let index = upperBound(
originalLine,
sourceColumn,
memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine),
);
memo.lastIndex = ++index;
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
}
}
return sources;
}
function insert<T>(array: T[], index: number, value: T) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray<T extends { __proto__: null }>(): T {
return { __proto__: null } as T;
}
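
The ascending-key iteration that the comment above relies on is plain JavaScript behavior and can be checked in isolation; a standalone sketch (not part of the package, which uses a `{ __proto__: null }` literal for the same effect):

```ts
// Integer-like keys are enumerated in ascending numeric order no matter the
// order they were assigned, so a sparse null-prototype object behaves like an
// ordered, sparse array without allocating contiguous memory.
const lines: Record<string, string> = Object.create(null);
lines[30] = 'third';
lines[2] = 'first';
lines[11] = 'second';

for (const line in lines) {
  console.log(line, lines[line]);
}
// Logs "2 first", then "11 second", then "30 third".
```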

View File

@@ -0,0 +1,192 @@
import { TraceMap, presortedDecodedMap, decodedMappings } from './trace-mapping';
import {
COLUMN,
SOURCES_INDEX,
SOURCE_LINE,
SOURCE_COLUMN,
NAMES_INDEX,
} from './sourcemap-segment';
import { parse } from './types';
import type {
DecodedSourceMap,
DecodedSourceMapXInput,
EncodedSourceMapXInput,
SectionedSourceMapXInput,
SectionedSourceMapInput,
SectionXInput,
Ro,
} from './types';
import type { SourceMapSegment } from './sourcemap-segment';
type FlattenMap = {
new (map: Ro<SectionedSourceMapInput>, mapUrl?: string | null): TraceMap;
(map: Ro<SectionedSourceMapInput>, mapUrl?: string | null): TraceMap;
};
export const FlattenMap: FlattenMap = function (map, mapUrl) {
const parsed = parse(map as SectionedSourceMapInput);
if (!('sections' in parsed)) {
return new TraceMap(parsed as DecodedSourceMapXInput | EncodedSourceMapXInput, mapUrl);
}
const mappings: SourceMapSegment[][] = [];
const sources: string[] = [];
const sourcesContent: (string | null)[] = [];
const names: string[] = [];
const ignoreList: number[] = [];
recurse(
parsed,
mapUrl,
mappings,
sources,
sourcesContent,
names,
ignoreList,
0,
0,
Infinity,
Infinity,
);
const joined: DecodedSourceMap = {
version: 3,
file: parsed.file,
names,
sources,
sourcesContent,
mappings,
ignoreList,
};
return presortedDecodedMap(joined);
} as FlattenMap;
function recurse(
input: SectionedSourceMapXInput,
mapUrl: string | null | undefined,
mappings: SourceMapSegment[][],
sources: string[],
sourcesContent: (string | null)[],
names: string[],
ignoreList: number[],
lineOffset: number,
columnOffset: number,
stopLine: number,
stopColumn: number,
) {
const { sections } = input;
for (let i = 0; i < sections.length; i++) {
const { map, offset } = sections[i];
let sl = stopLine;
let sc = stopColumn;
if (i + 1 < sections.length) {
const nextOffset = sections[i + 1].offset;
sl = Math.min(stopLine, lineOffset + nextOffset.line);
if (sl === stopLine) {
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
} else if (sl < stopLine) {
sc = columnOffset + nextOffset.column;
}
}
addSection(
map,
mapUrl,
mappings,
sources,
sourcesContent,
names,
ignoreList,
lineOffset + offset.line,
columnOffset + offset.column,
sl,
sc,
);
}
}
function addSection(
input: SectionXInput['map'],
mapUrl: string | null | undefined,
mappings: SourceMapSegment[][],
sources: string[],
sourcesContent: (string | null)[],
names: string[],
ignoreList: number[],
lineOffset: number,
columnOffset: number,
stopLine: number,
stopColumn: number,
) {
const parsed = parse(input);
if ('sections' in parsed) return recurse(...(arguments as unknown as Parameters<typeof recurse>));
const map = new TraceMap(parsed, mapUrl);
const sourcesOffset = sources.length;
const namesOffset = names.length;
const decoded = decodedMappings(map);
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
append(sources, resolvedSources);
append(names, map.names);
if (contents) append(sourcesContent, contents);
else for (let i = 0; i < resolvedSources.length; i++) sourcesContent.push(null);
if (ignores) for (let i = 0; i < ignores.length; i++) ignoreList.push(ignores[i] + sourcesOffset);
for (let i = 0; i < decoded.length; i++) {
const lineI = lineOffset + i;
// We can only add so many lines before we step into the range that the next section's map
// controls. When we get to the last line, then we'll start checking the segments to see if
// they've crossed into the column range. But it may not have any columns that overstep, so we
// still need to check that we don't overstep lines, too.
if (lineI > stopLine) return;
// The out line may already exist in mappings (if we're continuing the line started by a
// previous section). Or, we may have jumped ahead several lines to start this section.
const out = getLine(mappings, lineI);
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
// map can be multiple lines), it doesn't.
const cOffset = i === 0 ? columnOffset : 0;
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const column = cOffset + seg[COLUMN];
// If this segment steps into the column range that the next section's map controls, we need
// to stop early.
if (lineI === stopLine && column >= stopColumn) return;
if (seg.length === 1) {
out.push([column]);
continue;
}
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
out.push(
seg.length === 4
? [column, sourcesIndex, sourceLine, sourceColumn]
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]],
);
}
}
}
function append<T>(arr: T[], other: T[]) {
for (let i = 0; i < other.length; i++) arr.push(other[i]);
}
function getLine<T>(arr: T[][], index: number): T[] {
for (let i = arr.length; i <= index; i++) arr[i] = [];
return arr[index];
}
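
A hedged sketch of what the flattening above does with a sectioned map: two invented sections whose offsets push the second bundle down by two generated lines (the import path is assumed to be the package entry, which re-exports FlattenMap):

```ts
import { FlattenMap, originalPositionFor, encodedMap } from '@jridgewell/trace-mapping';

// Two concatenated bundles; the second one starts at generated line 3, column 0.
// "AAAA" encodes a single segment [0, 0, 0, 0] on the first generated line.
const flattened = FlattenMap({
  version: 3,
  sections: [
    {
      offset: { line: 0, column: 0 },
      map: { version: 3, sources: ['a.js'], names: [], mappings: 'AAAA' },
    },
    {
      offset: { line: 2, column: 0 },
      map: { version: 3, sources: ['b.js'], names: [], mappings: 'AAAA' },
    },
  ],
});

// The result is an ordinary TraceMap whose sources and mappings have been
// merged and shifted by each section's offset.
console.log(encodedMap(flattened).sources); // [ 'a.js', 'b.js' ]
console.log(originalPositionFor(flattened, { line: 3, column: 0 }));
// => { source: 'b.js', line: 1, column: 0, name: null }
```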

16
node_modules/@jridgewell/trace-mapping/src/resolve.ts generated vendored Normal file
View File

@@ -0,0 +1,16 @@
import resolveUri from '@jridgewell/resolve-uri';
import stripFilename from './strip-filename';
type Resolve = (source: string | null) => string;
export default function resolver(
mapUrl: string | null | undefined,
sourceRoot: string | undefined,
): Resolve {
const from = stripFilename(mapUrl);
// The sourceRoot is always treated as a directory, if it's not empty.
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
const prefix = sourceRoot ? sourceRoot + '/' : '';
return (source) => resolveUri(prefix + (source || ''), from);
}
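
A small sketch of how the resolver above combines the map URL and `sourceRoot`; the URLs are invented, and the outputs shown assume ordinary URL resolution by @jridgewell/resolve-uri:

```ts
import resolver from './resolve';

// The map lives at https://example.com/assets/app.js.map and declares
// sourceRoot "src"; stripFilename() keeps "https://example.com/assets/".
const resolve = resolver('https://example.com/assets/app.js.map', 'src');
console.log(resolve('index.ts'));
// => 'https://example.com/assets/src/index.ts'

// Without a sourceRoot, sources resolve against the map's directory alone.
const bare = resolver('https://example.com/assets/app.js.map', undefined);
console.log(bare('index.ts'));
// => 'https://example.com/assets/index.ts'
```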

45
node_modules/@jridgewell/trace-mapping/src/sort.ts generated vendored Normal file
View File

@@ -0,0 +1,45 @@
import { COLUMN } from './sourcemap-segment';
import type { SourceMapSegment } from './sourcemap-segment';
export default function maybeSort(
mappings: SourceMapSegment[][],
owned: boolean,
): SourceMapSegment[][] {
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length) return mappings;
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
// not, we do not want to modify the consumer's input array.
if (!owned) mappings = mappings.slice();
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
mappings[i] = sortSegments(mappings[i], owned);
}
return mappings;
}
function nextUnsortedSegmentLine(mappings: SourceMapSegment[][], start: number): number {
for (let i = start; i < mappings.length; i++) {
if (!isSorted(mappings[i])) return i;
}
return mappings.length;
}
function isSorted(line: SourceMapSegment[]): boolean {
for (let j = 1; j < line.length; j++) {
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
return false;
}
}
return true;
}
function sortSegments(line: SourceMapSegment[], owned: boolean): SourceMapSegment[] {
if (!owned) line = line.slice();
return line.sort(sortComparator);
}
function sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {
return a[COLUMN] - b[COLUMN];
}
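
A sketch of the ownership rule above: a caller-supplied mappings array is copied before sorting, while an owned (freshly JSON-parsed) one may be sorted in place (sample segments invented):

```ts
import maybeSort from './sort';
import type { SourceMapSegment } from './sourcemap-segment';

// One generated line whose segments are out of order (column 4 before column 0).
const lines: SourceMapSegment[][] = [[[4], [0]]];

// owned = false: the consumer's array is left untouched.
const sortedCopy = maybeSort(lines, false);
console.log(sortedCopy[0]); // [ [ 0 ], [ 4 ] ]
console.log(lines[0]);      // [ [ 4 ], [ 0 ] ]  (original preserved)

// owned = true (e.g. the result of JSON.parse): sorted in place.
const sortedInPlace = maybeSort(lines, true);
console.log(sortedInPlace === lines); // true
console.log(lines[0]);                // [ [ 0 ], [ 4 ] ]
```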

View File

@@ -0,0 +1,23 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
type GeneratedLine = number;
export type SourceMapSegment =
| [GeneratedColumn]
| [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]
| [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
export const COLUMN = 0;
export const SOURCES_INDEX = 1;
export const SOURCE_LINE = 2;
export const SOURCE_COLUMN = 3;
export const NAMES_INDEX = 4;
export const REV_GENERATED_LINE = 1;
export const REV_GENERATED_COLUMN = 2;
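
For reference, a segment is just a positional tuple and the constants above name its slots; a tiny invented example:

```ts
import {
  COLUMN,
  SOURCES_INDEX,
  SOURCE_LINE,
  SOURCE_COLUMN,
  NAMES_INDEX,
} from './sourcemap-segment';

// The five-field form of SourceMapSegment:
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
const seg = [12, 0, 3, 7, 1];

console.log(seg[COLUMN]);        // 12 - generated column
console.log(seg[SOURCES_INDEX]); //  0 - index into sources
console.log(seg[SOURCE_LINE]);   //  3 - 0-based original line
console.log(seg[SOURCE_COLUMN]); //  7 - original column
console.log(seg[NAMES_INDEX]);   //  1 - index into names
```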

View File

@@ -0,0 +1,8 @@
/**
* Removes everything after the last "/", but leaves the slash.
*/
export default function stripFilename(path: string | undefined | null): string {
if (!path) return '';
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
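
For instance (paths invented):

```ts
import stripFilename from './strip-filename';

console.log(stripFilename('https://example.com/assets/app.js.map')); // 'https://example.com/assets/'
console.log(stripFilename('app.js.map')); // '' (no "/", so everything is dropped)
console.log(stripFilename(undefined));    // ''
```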

View File

@@ -0,0 +1,504 @@
import { encode, decode } from '@jridgewell/sourcemap-codec';
import resolver from './resolve';
import maybeSort from './sort';
import buildBySources from './by-source';
import {
memoizedState,
memoizedBinarySearch,
upperBound,
lowerBound,
found as bsFound,
} from './binary-search';
import {
COLUMN,
SOURCES_INDEX,
SOURCE_LINE,
SOURCE_COLUMN,
NAMES_INDEX,
REV_GENERATED_LINE,
REV_GENERATED_COLUMN,
} from './sourcemap-segment';
import { parse } from './types';
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
import type {
SourceMapV3,
DecodedSourceMap,
EncodedSourceMap,
InvalidOriginalMapping,
OriginalMapping,
InvalidGeneratedMapping,
GeneratedMapping,
SourceMapInput,
Needle,
SourceNeedle,
SourceMap,
EachMapping,
Bias,
XInput,
SectionedSourceMap,
Ro,
} from './types';
import type { Source } from './by-source';
import type { MemoState } from './binary-search';
export type { SourceMapSegment } from './sourcemap-segment';
export type {
SourceMap,
DecodedSourceMap,
EncodedSourceMap,
Section,
SectionedSourceMap,
SourceMapV3,
Bias,
EachMapping,
GeneratedMapping,
InvalidGeneratedMapping,
InvalidOriginalMapping,
Needle,
OriginalMapping,
OriginalMapping as Mapping,
SectionedSourceMapInput,
SourceMapInput,
SourceNeedle,
XInput,
EncodedSourceMapXInput,
DecodedSourceMapXInput,
SectionedSourceMapXInput,
SectionXInput,
} from './types';
interface PublicMap {
_encoded: TraceMap['_encoded'];
_decoded: TraceMap['_decoded'];
_decodedMemo: TraceMap['_decodedMemo'];
_bySources: TraceMap['_bySources'];
_bySourceMemos: TraceMap['_bySourceMemos'];
}
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
export const LEAST_UPPER_BOUND = -1;
export const GREATEST_LOWER_BOUND = 1;
export { FlattenMap, FlattenMap as AnyMap } from './flatten-map';
export class TraceMap implements SourceMap {
declare version: SourceMapV3['version'];
declare file: SourceMapV3['file'];
declare names: SourceMapV3['names'];
declare sourceRoot: SourceMapV3['sourceRoot'];
declare sources: SourceMapV3['sources'];
declare sourcesContent: SourceMapV3['sourcesContent'];
declare ignoreList: SourceMapV3['ignoreList'];
declare resolvedSources: string[];
declare private _encoded: string | undefined;
declare private _decoded: SourceMapSegment[][] | undefined;
declare private _decodedMemo: MemoState;
declare private _bySources: Source[] | undefined;
declare private _bySourceMemos: MemoState[] | undefined;
constructor(map: Ro<SourceMapInput>, mapUrl?: string | null) {
const isString = typeof map === 'string';
if (!isString && (map as unknown as { _decodedMemo: any })._decodedMemo) return map as TraceMap;
const parsed = parse(map as Exclude<SourceMapInput, TraceMap>);
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
this.version = version;
this.file = file;
this.names = names || [];
this.sourceRoot = sourceRoot;
this.sources = sources;
this.sourcesContent = sourcesContent;
this.ignoreList = parsed.ignoreList || (parsed as XInput).x_google_ignoreList || undefined;
const resolve = resolver(mapUrl, sourceRoot);
this.resolvedSources = sources.map(resolve);
const { mappings } = parsed;
if (typeof mappings === 'string') {
this._encoded = mappings;
this._decoded = undefined;
} else if (Array.isArray(mappings)) {
this._encoded = undefined;
this._decoded = maybeSort(mappings, isString);
} else if ((parsed as unknown as SectionedSourceMap).sections) {
throw new Error(`TraceMap passed sectioned source map, please use FlattenMap export instead`);
} else {
throw new Error(`invalid source map: ${JSON.stringify(parsed)}`);
}
this._decodedMemo = memoizedState();
this._bySources = undefined;
this._bySourceMemos = undefined;
}
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map: unknown): PublicMap {
return map as any;
}
/**
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
*/
export function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'] {
return (cast(map)._encoded ??= encode(cast(map)._decoded!));
}
/**
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
*/
export function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']> {
return (cast(map)._decoded ||= decode(cast(map)._encoded!));
}
/**
* A low-level API to find the segment associated with a generated line/column (think, from a
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
*/
export function traceSegment(
map: TraceMap,
line: number,
column: number,
): Readonly<SourceMapSegment> | null {
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length) return null;
const segments = decoded[line];
const index = traceSegmentInternal(
segments,
cast(map)._decodedMemo,
line,
column,
GREATEST_LOWER_BOUND,
);
return index === -1 ? null : segments[index];
}
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
export function originalPositionFor(
map: TraceMap,
needle: Needle,
): OriginalMapping | InvalidOriginalMapping {
let { line, column, bias } = needle;
line--;
if (line < 0) throw new Error(LINE_GTR_ZERO);
if (column < 0) throw new Error(COL_GTR_EQ_ZERO);
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length) return OMapping(null, null, null, null);
const segments = decoded[line];
const index = traceSegmentInternal(
segments,
cast(map)._decodedMemo,
line,
column,
bias || GREATEST_LOWER_BOUND,
);
if (index === -1) return OMapping(null, null, null, null);
const segment = segments[index];
if (segment.length === 1) return OMapping(null, null, null, null);
const { names, resolvedSources } = map;
return OMapping(
resolvedSources[segment[SOURCES_INDEX]],
segment[SOURCE_LINE] + 1,
segment[SOURCE_COLUMN],
segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
);
}
/**
* Finds the generated line/column position of the provided source/line/column source position.
*/
export function generatedPositionFor(
map: TraceMap,
needle: SourceNeedle,
): GeneratedMapping | InvalidGeneratedMapping {
const { source, line, column, bias } = needle;
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
}
/**
* Finds all generated line/column positions of the provided source/line/column source position.
*/
export function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[] {
const { source, line, column, bias } = needle;
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
}
/**
* Iterates each mapping in generated position order.
*/
export function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void {
const decoded = decodedMappings(map);
const { names, resolvedSources } = map;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generatedLine = i + 1;
const generatedColumn = seg[0];
let source = null;
let originalLine = null;
let originalColumn = null;
let name = null;
if (seg.length !== 1) {
source = resolvedSources[seg[1]];
originalLine = seg[2] + 1;
originalColumn = seg[3];
}
if (seg.length === 5) name = names[seg[4]];
cb({
generatedLine,
generatedColumn,
source,
originalLine,
originalColumn,
name,
} as EachMapping);
}
}
}
function sourceIndex(map: TraceMap, source: string): number {
const { sources, resolvedSources } = map;
let index = sources.indexOf(source);
if (index === -1) index = resolvedSources.indexOf(source);
return index;
}
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
*/
export function sourceContentFor(map: TraceMap, source: string): string | null {
const { sourcesContent } = map;
if (sourcesContent == null) return null;
const index = sourceIndex(map, source);
return index === -1 ? null : sourcesContent[index];
}
/**
* Determines if the source is marked to ignore by the source map.
*/
export function isIgnored(map: TraceMap, source: string): boolean {
const { ignoreList } = map;
if (ignoreList == null) return false;
const index = sourceIndex(map, source);
return index === -1 ? false : ignoreList.includes(index);
}
/**
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
* maps.
*/
export function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap {
const tracer = new TraceMap(clone(map, []), mapUrl);
cast(tracer)._decoded = map.mappings;
return tracer;
}
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export function decodedMap(
map: TraceMap,
): Omit<DecodedSourceMap, 'mappings'> & { mappings: readonly SourceMapSegment[][] } {
return clone(map, decodedMappings(map));
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export function encodedMap(map: TraceMap): EncodedSourceMap {
return clone(map, encodedMappings(map));
}
function clone<T extends string | readonly SourceMapSegment[][]>(
map: TraceMap | DecodedSourceMap,
mappings: T,
): T extends string ? EncodedSourceMap : DecodedSourceMap {
return {
version: map.version,
file: map.file,
names: map.names,
sourceRoot: map.sourceRoot,
sources: map.sources,
sourcesContent: map.sourcesContent,
mappings,
ignoreList: map.ignoreList || (map as XInput).x_google_ignoreList,
} as any;
}
function OMapping(source: null, line: null, column: null, name: null): InvalidOriginalMapping;
function OMapping(
source: string,
line: number,
column: number,
name: string | null,
): OriginalMapping;
function OMapping(
source: string | null,
line: number | null,
column: number | null,
name: string | null,
): OriginalMapping | InvalidOriginalMapping {
return { source, line, column, name } as any;
}
function GMapping(line: null, column: null): InvalidGeneratedMapping;
function GMapping(line: number, column: number): GeneratedMapping;
function GMapping(
line: number | null,
column: number | null,
): GeneratedMapping | InvalidGeneratedMapping {
return { line, column } as any;
}
function traceSegmentInternal(
segments: SourceMapSegment[],
memo: MemoState,
line: number,
column: number,
bias: Bias,
): number;
function traceSegmentInternal(
segments: ReverseSegment[],
memo: MemoState,
line: number,
column: number,
bias: Bias,
): number;
function traceSegmentInternal(
segments: SourceMapSegment[] | ReverseSegment[],
memo: MemoState,
line: number,
column: number,
bias: Bias,
): number {
let index = memoizedBinarySearch(segments, column, memo, line);
if (bsFound) {
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
} else if (bias === LEAST_UPPER_BOUND) index++;
if (index === -1 || index === segments.length) return -1;
return index;
}
function sliceGeneratedPositions(
segments: ReverseSegment[],
memo: MemoState,
line: number,
column: number,
bias: Bias,
): GeneratedMapping[] {
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
  // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
// match LEAST_UPPER_BOUND.
if (!bsFound && bias === LEAST_UPPER_BOUND) min++;
if (min === -1 || min === segments.length) return [];
// We may have found the segment that started at an earlier column. If this is the case, then we
// need to slice all generated segments that match _that_ column, because all such segments span
// to our desired column.
const matchedColumn = bsFound ? column : segments[min][COLUMN];
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
if (!bsFound) min = lowerBound(segments, matchedColumn, min);
const max = upperBound(segments, matchedColumn, min);
const result = [];
for (; min <= max; min++) {
const segment = segments[min];
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
}
return result;
}
function generatedPosition(
map: TraceMap,
source: string,
line: number,
column: number,
bias: Bias,
all: false,
): GeneratedMapping | InvalidGeneratedMapping;
function generatedPosition(
map: TraceMap,
source: string,
line: number,
column: number,
bias: Bias,
all: true,
): GeneratedMapping[];
function generatedPosition(
map: TraceMap,
source: string,
line: number,
column: number,
bias: Bias,
all: boolean,
): GeneratedMapping | InvalidGeneratedMapping | GeneratedMapping[] {
line--;
if (line < 0) throw new Error(LINE_GTR_ZERO);
if (column < 0) throw new Error(COL_GTR_EQ_ZERO);
const { sources, resolvedSources } = map;
let sourceIndex = sources.indexOf(source);
if (sourceIndex === -1) sourceIndex = resolvedSources.indexOf(source);
if (sourceIndex === -1) return all ? [] : GMapping(null, null);
const generated = (cast(map)._bySources ||= buildBySources(
decodedMappings(map),
(cast(map)._bySourceMemos = sources.map(memoizedState)),
));
const segments = generated[sourceIndex][line];
if (segments == null) return all ? [] : GMapping(null, null);
const memo = cast(map)._bySourceMemos![sourceIndex];
if (all) return sliceGeneratedPositions(segments, memo, line, column, bias);
const index = traceSegmentInternal(segments, memo, line, column, bias);
if (index === -1) return GMapping(null, null);
const segment = segments[index];
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}
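
A hedged sketch of the reverse lookups above, using a hand-written decoded map (all segment values invented; the same names are re-exported from the package entry):

```ts
import {
  presortedDecodedMap,
  allGeneratedPositionsFor,
  eachMapping,
} from './trace-mapping';

// One generated line: columns 0 and 5 both map back to input.js 1:0,
// and column 9 maps to input.js 1:4 (decoded segments, not VLQ).
const tracer = presortedDecodedMap({
  version: 3,
  file: 'out.js',
  names: [],
  sources: ['input.js'],
  sourcesContent: [null],
  mappings: [[[0, 0, 0, 0], [5, 0, 0, 0], [9, 0, 0, 4]]],
});

// Every generated position that maps back to input.js 1:0.
console.log(allGeneratedPositionsFor(tracer, { source: 'input.js', line: 1, column: 0 }));
// => [ { line: 1, column: 0 }, { line: 1, column: 5 } ]

// eachMapping walks the mappings in generated order.
eachMapping(tracer, (m) => console.log(m.generatedColumn, '->', m.originalColumn));
// 0 -> 0, 5 -> 0, 9 -> 4
```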

114
node_modules/@jridgewell/trace-mapping/src/types.ts generated vendored Normal file
View File

@@ -0,0 +1,114 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
export interface SourceMapV3 {
file?: string | null;
names: string[];
sourceRoot?: string;
sources: (string | null)[];
sourcesContent?: (string | null)[];
version: 3;
ignoreList?: number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
mappings: SourceMapSegment[][];
}
export interface Section {
offset: { line: number; column: number };
map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
file?: string | null;
sections: Section[];
version: 3;
}
export type OriginalMapping = {
source: string | null;
line: number;
column: number;
name: string | null;
};
export type InvalidOriginalMapping = {
source: null;
line: null;
column: null;
name: null;
};
export type GeneratedMapping = {
line: number;
column: number;
};
export type InvalidGeneratedMapping = {
line: null;
column: null;
};
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
export type XInput = { x_google_ignoreList?: SourceMapV3['ignoreList'] };
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
sections: SectionXInput[];
};
export type SectionXInput = Omit<Section, 'map'> & {
map: SectionedSourceMapInput;
};
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
export type Needle = { line: number; column: number; bias?: Bias };
export type SourceNeedle = { source: string; line: number; column: number; bias?: Bias };
export type EachMapping =
| {
generatedLine: number;
generatedColumn: number;
source: null;
originalLine: null;
originalColumn: null;
name: null;
}
| {
generatedLine: number;
generatedColumn: number;
source: string | null;
originalLine: number;
originalColumn: number;
name: string | null;
};
export abstract class SourceMap {
declare version: SourceMapV3['version'];
declare file: SourceMapV3['file'];
declare names: SourceMapV3['names'];
declare sourceRoot: SourceMapV3['sourceRoot'];
declare sources: SourceMapV3['sources'];
declare sourcesContent: SourceMapV3['sourcesContent'];
declare resolvedSources: SourceMapV3['sources'];
declare ignoreList: SourceMapV3['ignoreList'];
}
export type Ro<T> =
T extends Array<infer V>
? V[] | Readonly<V[]> | RoArray<V> | Readonly<RoArray<V>>
: T extends object
? T | Readonly<T> | RoObject<T> | Readonly<RoObject<T>>
: T;
type RoArray<T> = Ro<T>[];
type RoObject<T> = { [K in keyof T]: T[K] | Ro<T[K]> };
export function parse<T>(map: T): Exclude<T, string> {
return typeof map === 'string' ? JSON.parse(map) : (map as Exclude<T, string>);
}
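
For reference, a small sketch of the input shapes these types describe — the object literal below is a made-up example, not a map from this repository:

import { TraceMap } from '@jridgewell/trace-mapping';
import type { EncodedSourceMap, SourceMapInput } from '@jridgewell/trace-mapping';

// A hypothetical encoded map satisfying SourceMapV3 + EncodedSourceMap.
const encoded: EncodedSourceMap = {
  version: 3,
  file: 'out.js',
  sources: ['input.js'],
  sourcesContent: ['console.log(1);'],
  names: [],
  mappings: 'AAAA',
  ignoreList: [],
};

// SourceMapInput accepts a JSON string, an encoded or decoded object, or an existing TraceMap.
const fromObject: SourceMapInput = encoded;
const fromString: SourceMapInput = JSON.stringify(encoded);
const fromTracer: SourceMapInput = new TraceMap(encoded);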

View File

@@ -1,4 +1,4 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment.cts';
export type MemoState = {
lastKey: number;
lastNeedle: number;
@@ -30,3 +30,4 @@ export declare function memoizedState(): MemoState;
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
//# sourceMappingURL=binary-search.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"binary-search.d.ts","sourceRoot":"","sources":["../src/binary-search.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAG5E,MAAM,MAAM,SAAS,GAAG;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,eAAO,IAAI,KAAK,SAAQ,CAAC;AAEzB;;;;;;;;;;;;;;;GAeG;AACH,wBAAgB,YAAY,CAC1B,QAAQ,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,EAC/C,MAAM,EAAE,MAAM,EACd,GAAG,EAAE,MAAM,EACX,IAAI,EAAE,MAAM,GACX,MAAM,CAmBR;AAED,wBAAgB,UAAU,CACxB,QAAQ,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,EAC/C,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,GACZ,MAAM,CAKR;AAED,wBAAgB,UAAU,CACxB,QAAQ,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,EAC/C,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,GACZ,MAAM,CAKR;AAED,wBAAgB,aAAa,IAAI,SAAS,CAMzC;AAED;;;GAGG;AACH,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,EAC/C,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,SAAS,EAChB,GAAG,EAAE,MAAM,GACV,MAAM,CAsBR"}

View File

@@ -0,0 +1,33 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment.mts';
export type MemoState = {
lastKey: number;
lastNeedle: number;
lastIndex: number;
};
export declare let found: boolean;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
//# sourceMappingURL=binary-search.d.ts.map
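
The memoization described in the comment above can be illustrated with a small, self-contained sketch — this is not the library's implementation, just the idea of reusing the last answer when needles arrive in increasing order:

type Memo = { lastNeedle: number; lastIndex: number };

// Plain binary search: returns the index of a match, or the index just before
// where the needle would be inserted (-1 if it precedes every element).
function search(haystack: number[], needle: number, low: number, high: number): number {
  while (low <= high) {
    const mid = (low + high) >> 1;
    if (haystack[mid] === needle) return mid;
    if (haystack[mid] < needle) low = mid + 1;
    else high = mid - 1;
  }
  return high;
}

function memoizedSearch(haystack: number[], needle: number, memo: Memo): number {
  // If the needle is >= the previous one, every element at or below the previous
  // answer is already known to be <= the needle, so the search can start there.
  const low = needle >= memo.lastNeedle ? Math.max(memo.lastIndex, 0) : 0;
  const index = search(haystack, needle, low, haystack.length - 1);
  memo.lastNeedle = needle;
  memo.lastIndex = index;
  return index;
}

const memo: Memo = { lastNeedle: -1, lastIndex: -1 };
memoizedSearch([0, 8, 16, 24], 9, memo);  // 1
memoizedSearch([0, 8, 16, 24], 20, memo); // 2, starting the scan from index 1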

View File

@@ -0,0 +1 @@
{"version":3,"file":"binary-search.d.ts","sourceRoot":"","sources":["../src/binary-search.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAG5E,MAAM,MAAM,SAAS,GAAG;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,eAAO,IAAI,KAAK,SAAQ,CAAC;AAEzB;;;;;;;;;;;;;;;GAeG;AACH,wBAAgB,YAAY,CAC1B,QAAQ,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,EAC/C,MAAM,EAAE,MAAM,EACd,GAAG,EAAE,MAAM,EACX,IAAI,EAAE,MAAM,GACX,MAAM,CAmBR;AAED,wBAAgB,UAAU,CACxB,QAAQ,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,EAC/C,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,GACZ,MAAM,CAKR;AAED,wBAAgB,UAAU,CACxB,QAAQ,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,EAC/C,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,GACZ,MAAM,CAKR;AAED,wBAAgB,aAAa,IAAI,SAAS,CAMzC;AAED;;;GAGG;AACH,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,EAC/C,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,SAAS,EAChB,GAAG,EAAE,MAAM,GACV,MAAM,CAsBR"}

View File

@@ -0,0 +1,8 @@
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment.cts';
import type { MemoState } from './binary-search.cts';
export type Source = {
__proto__: null;
[line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
//# sourceMappingURL=by-source.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"by-source.d.ts","sourceRoot":"","sources":["../src/by-source.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAC5E,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAEjD,MAAM,MAAM,MAAM,GAAG;IACnB,SAAS,EAAE,IAAI,CAAC;IAChB,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,cAAc,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;CACrD,CAAC;AAIF,MAAM,CAAC,OAAO,UAAU,cAAc,CACpC,OAAO,EAAE,SAAS,gBAAgB,EAAE,EAAE,EACtC,KAAK,EAAE,SAAS,EAAE,GACjB,MAAM,EAAE,CAgCV"}

View File

@@ -1,7 +1,8 @@
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment.mts';
import type { MemoState } from './binary-search.mts';
export type Source = {
__proto__: null;
[line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
//# sourceMappingURL=by-source.d.ts.map
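
To make the Source shape above concrete, here is an illustrative sketch (not the library's code) of building such a reverse index from decoded mappings, so that original-to-generated queries become a per-line binary search rather than a scan of every segment:

// Decoded segments are [genColumn] or [genColumn, sourceIndex, origLine, origColumn(, nameIndex)].
type DecodedSegment =
  | [number]
  | [number, number, number, number]
  | [number, number, number, number, number];
type Reverse = [origColumn: number, genLine: number, genColumn: number];

function buildIndex(decoded: DecodedSegment[][], sourceCount: number): Record<number, Reverse[]>[] {
  const bySource: Record<number, Reverse[]>[] = Array.from({ length: sourceCount }, () => ({}));
  for (let genLine = 0; genLine < decoded.length; genLine++) {
    for (const seg of decoded[genLine]) {
      if (seg.length === 1) continue; // segment has no original position
      const [genColumn, sourceIndex, origLine, origColumn] = seg;
      (bySource[sourceIndex][origLine] ||= []).push([origColumn, genLine, genColumn]);
    }
  }
  // The real index also keeps each line's entries sorted by original column
  // (inserted via binary search); that step is omitted here for brevity.
  return bySource;
}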

View File

@@ -0,0 +1 @@
{"version":3,"file":"by-source.d.ts","sourceRoot":"","sources":["../src/by-source.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAC5E,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAEjD,MAAM,MAAM,MAAM,GAAG;IACnB,SAAS,EAAE,IAAI,CAAC;IAChB,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,cAAc,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;CACrD,CAAC;AAIF,MAAM,CAAC,OAAO,UAAU,cAAc,CACpC,OAAO,EAAE,SAAS,gBAAgB,EAAE,EAAE,EACtC,KAAK,EAAE,SAAS,EAAE,GACjB,MAAM,EAAE,CAgCV"}

View File

@@ -0,0 +1,9 @@
import { TraceMap } from './trace-mapping.cts';
import type { SectionedSourceMapInput, Ro } from './types.cts';
type FlattenMap = {
new (map: Ro<SectionedSourceMapInput>, mapUrl?: string | null): TraceMap;
(map: Ro<SectionedSourceMapInput>, mapUrl?: string | null): TraceMap;
};
export declare const FlattenMap: FlattenMap;
export {};
//# sourceMappingURL=flatten-map.d.ts.map
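
A hedged usage sketch for the declaration above — the constructor/call signatures come from this file, but the root-level re-export of FlattenMap is an assumption, and the sectioned map below is invented for illustration:

// Assumed import path; FlattenMap may only be exposed internally in some versions.
import { FlattenMap, originalPositionFor } from '@jridgewell/trace-mapping';

const flattened = FlattenMap({
  version: 3,
  sections: [
    { offset: { line: 0, column: 0 },
      map: { version: 3, sources: ['a.js'], names: [], mappings: 'AAAA' } },
    { offset: { line: 10, column: 0 },
      map: { version: 3, sources: ['b.js'], names: [], mappings: 'AAAA' } },
  ],
});

// The result is a regular TraceMap, so the usual query helpers apply;
// generated line 11 falls in the second section and maps back into b.js.
originalPositionFor(flattened, { line: 11, column: 0 });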

View File

@@ -0,0 +1 @@
{"version":3,"file":"flatten-map.d.ts","sourceRoot":"","sources":["../src/flatten-map.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAwC,MAAM,iBAAiB,CAAC;AAUjF,OAAO,KAAK,EAKV,uBAAuB,EAEvB,EAAE,EACH,MAAM,SAAS,CAAC;AAGjB,KAAK,UAAU,GAAG;IAChB,KAAK,GAAG,EAAE,EAAE,CAAC,uBAAuB,CAAC,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,QAAQ,CAAC;IACzE,CAAC,GAAG,EAAE,EAAE,CAAC,uBAAuB,CAAC,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,QAAQ,CAAC;CACtE,CAAC;AAEF,eAAO,MAAM,UAAU,EAAE,UAsCV,CAAC"}

Some files were not shown because too many files have changed in this diff.