full site update

This commit is contained in:
2025-07-24 18:46:24 +02:00
parent bfe2b90d8d
commit 37a6e0ab31
6912 changed files with 540482 additions and 361712 deletions

View File

@@ -0,0 +1,115 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
import { COLUMN } from './sourcemap-segment';
// Memoization state for `memoizedBinarySearch`: remembers the last search so
// monotonically increasing queries can start from the previous result.
export type MemoState = {
  lastKey: number; // key (a line number) of the previous search; -1 if none
  lastNeedle: number; // column needle of the previous search; -1 if none
  lastIndex: number; // index returned by the previous search; -1 if not found
};
// Set by every search: true when the needle matched a segment exactly.
export let found = false;

/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export function binarySearch(
  haystack: SourceMapSegment[] | ReverseSegment[],
  needle: number,
  low: number,
  high: number,
): number {
  while (low <= high) {
    // Overflow-safe midpoint (also the conventional form in other languages).
    const mid = low + ((high - low) >> 1);
    const current = haystack[mid][COLUMN];
    if (current === needle) {
      found = true;
      return mid;
    }
    if (current < needle) {
      low = mid + 1;
    } else {
      high = mid - 1;
    }
  }
  // No exact match: report the left-index (may be low - 1 === -1).
  found = false;
  return low - 1;
}
/**
 * Returns the index of the last segment (at or after `index`) whose COLUMN
 * still equals `needle`. `index` must already point at a matching segment.
 */
export function upperBound(
  haystack: SourceMapSegment[] | ReverseSegment[],
  needle: number,
  index: number,
): number {
  while (index + 1 < haystack.length && haystack[index + 1][COLUMN] === needle) {
    index++;
  }
  return index;
}
/**
 * Returns the index of the first segment (at or before `index`) whose COLUMN
 * still equals `needle`. `index` must already point at a matching segment.
 */
export function lowerBound(
  haystack: SourceMapSegment[] | ReverseSegment[],
  needle: number,
  index: number,
): number {
  while (index > 0 && haystack[index - 1][COLUMN] === needle) {
    index--;
  }
  return index;
}
/**
 * Creates a fresh memo record for `memoizedBinarySearch`; every field starts
 * at -1, meaning "no previous search".
 */
export function memoizedState(): MemoState {
  const state: MemoState = { lastKey: -1, lastNeedle: -1, lastIndex: -1 };
  return state;
}
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 *
 * `key` identifies the array being searched (a line number); the memo is only
 * consulted when searching the same key again. Note: clobbers the module-level
 * `found` flag, like `binarySearch` itself.
 */
export function memoizedBinarySearch(
  haystack: SourceMapSegment[] | ReverseSegment[],
  needle: number,
  state: MemoState,
  key: number,
): number {
  const { lastKey, lastNeedle, lastIndex } = state;
  let low = 0;
  let high = haystack.length - 1;
  if (key === lastKey) {
    if (needle === lastNeedle) {
      // Exact repeat of the previous query: recompute `found` (every search
      // overwrites it) and reuse the cached index.
      found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
      return lastIndex;
    }
    if (needle >= lastNeedle) {
      // lastIndex may be -1 if the previous needle was not found.
      low = lastIndex === -1 ? 0 : lastIndex;
    } else {
      // Needle moved backwards: only the range before the last hit can match.
      high = lastIndex;
    }
  }
  state.lastKey = key;
  state.lastNeedle = needle;
  return (state.lastIndex = binarySearch(haystack, needle, low, high));
}

View File

@@ -0,0 +1,65 @@
import { COLUMN, SOURCES_INDEX, SOURCE_LINE, SOURCE_COLUMN } from './sourcemap-segment';
import { memoizedBinarySearch, upperBound } from './binary-search';
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
// A sparse, null-prototype "array" keyed by original source line number; each
// line holds the reverse segments (source column -> generated position) that
// map back into that line.
export type Source = {
  __proto__: null;
  [line: number]: Exclude<ReverseSegment, [number]>[];
};
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
export default function buildBySources(
  decoded: readonly SourceMapSegment[][],
  memos: MemoState[],
): Source[] {
  // One Source per original file; `memos` has one entry per source, so its
  // length doubles as the source count.
  const sources: Source[] = memos.map(buildNullArray);
  for (let i = 0; i < decoded.length; i++) {
    const line = decoded[i];
    for (let j = 0; j < line.length; j++) {
      const seg = line[j];
      // Length-1 segments carry no original location; nothing to index.
      if (seg.length === 1) continue;
      const sourceIndex = seg[SOURCES_INDEX];
      const sourceLine = seg[SOURCE_LINE];
      const sourceColumn = seg[SOURCE_COLUMN];
      const originalSource = sources[sourceIndex];
      const originalLine = (originalSource[sourceLine] ||= []);
      const memo = memos[sourceIndex];
      // The binary search either found a match, or it found the left-index just before where the
      // segment should go. Either way, we want to insert after that. And there may be multiple
      // generated segments associated with an original location, so there may need to move several
      // indexes before we find where we need to insert.
      let index = upperBound(
        originalLine,
        sourceColumn,
        memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine),
      );
      // Remember the insertion point so the next (typically nearby) segment's
      // search starts from here.
      memo.lastIndex = ++index;
      insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
    }
  }
  return sources;
}
// Inserts `value` into `array` at `index`, shifting later elements right.
function insert<T>(array: T[], index: number, value: T) {
  array.splice(index, 0, value);
}
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray<T extends { __proto__: null }>(): T {
  // Equivalent to the `{ __proto__: null }` literal: an empty object whose
  // prototype chain is null.
  return Object.create(null) as T;
}

View File

@@ -0,0 +1,192 @@
import { TraceMap, presortedDecodedMap, decodedMappings } from './trace-mapping';
import {
COLUMN,
SOURCES_INDEX,
SOURCE_LINE,
SOURCE_COLUMN,
NAMES_INDEX,
} from './sourcemap-segment';
import { parse } from './types';
import type {
DecodedSourceMap,
DecodedSourceMapXInput,
EncodedSourceMapXInput,
SectionedSourceMapXInput,
SectionedSourceMapInput,
SectionXInput,
Ro,
} from './types';
import type { SourceMapSegment } from './sourcemap-segment';
// FlattenMap is callable both with and without `new`; either way it produces a
// TraceMap with any nested sections flattened into a single map.
type FlattenMap = {
  new (map: Ro<SectionedSourceMapInput>, mapUrl?: string | null): TraceMap;
  (map: Ro<SectionedSourceMapInput>, mapUrl?: string | null): TraceMap;
};
/**
 * Builds a TraceMap from any source map input. Plain (non-sectioned) maps are
 * handed straight to TraceMap; sectioned ("index") maps are flattened by
 * recursing through their sections into one combined decoded map.
 */
export const FlattenMap: FlattenMap = function (map, mapUrl) {
  const parsed = parse(map as SectionedSourceMapInput);
  if (!('sections' in parsed)) {
    return new TraceMap(parsed as DecodedSourceMapXInput | EncodedSourceMapXInput, mapUrl);
  }
  // Accumulators shared (mutated) by every recursive section visit.
  const mappings: SourceMapSegment[][] = [];
  const sources: string[] = [];
  const sourcesContent: (string | null)[] = [];
  const names: string[] = [];
  const ignoreList: number[] = [];
  recurse(
    parsed,
    mapUrl,
    mappings,
    sources,
    sourcesContent,
    names,
    ignoreList,
    0,
    0,
    Infinity,
    Infinity,
  );
  const joined: DecodedSourceMap = {
    version: 3,
    file: parsed.file,
    names,
    sources,
    sourcesContent,
    mappings,
    ignoreList,
  };
  // Sections are visited in generated order, so `mappings` is already sorted.
  return presortedDecodedMap(joined);
} as FlattenMap;
/**
 * Walks one level of a sectioned map. For each section it computes the
 * line/column window that section owns (bounded by the next section's offset
 * and by the caller's own stopLine/stopColumn window), then delegates the
 * copying to addSection.
 */
function recurse(
  input: SectionedSourceMapXInput,
  mapUrl: string | null | undefined,
  mappings: SourceMapSegment[][],
  sources: string[],
  sourcesContent: (string | null)[],
  names: string[],
  ignoreList: number[],
  lineOffset: number,
  columnOffset: number,
  stopLine: number,
  stopColumn: number,
) {
  const { sections } = input;
  for (let i = 0; i < sections.length; i++) {
    const { map, offset } = sections[i];
    // Default: this section may extend to the caller's window.
    let sl = stopLine;
    let sc = stopColumn;
    if (i + 1 < sections.length) {
      const nextOffset = sections[i + 1].offset;
      // Clamp to where the next sibling section begins.
      sl = Math.min(stopLine, lineOffset + nextOffset.line);
      if (sl === stopLine) {
        // Same stop line as the caller's: the tighter column wins.
        sc = Math.min(stopColumn, columnOffset + nextOffset.column);
      } else if (sl < stopLine) {
        // Sibling starts before the caller's stop line: its column is the limit.
        sc = columnOffset + nextOffset.column;
      }
    }
    addSection(
      map,
      mapUrl,
      mappings,
      sources,
      sourcesContent,
      names,
      ignoreList,
      lineOffset + offset.line,
      columnOffset + offset.column,
      sl,
      sc,
    );
  }
}
/**
 * Copies one section's mappings into the shared accumulators, offsetting
 * generated positions by lineOffset/columnOffset and source/name indices by
 * the number of sources/names already collected. Stops at stopLine/stopColumn,
 * where the next section takes over. Nested sectioned maps recurse.
 */
function addSection(
  input: SectionXInput['map'],
  mapUrl: string | null | undefined,
  mappings: SourceMapSegment[][],
  sources: string[],
  sourcesContent: (string | null)[],
  names: string[],
  ignoreList: number[],
  lineOffset: number,
  columnOffset: number,
  stopLine: number,
  stopColumn: number,
) {
  const parsed = parse(input);
  // Nested sectioned map: forward this call's arguments verbatim to recurse
  // (same parameter list, minus the already-applied `parse`).
  if ('sections' in parsed) return recurse(...(arguments as unknown as Parameters<typeof recurse>));
  const map = new TraceMap(parsed, mapUrl);
  const sourcesOffset = sources.length;
  const namesOffset = names.length;
  const decoded = decodedMappings(map);
  const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
  append(sources, resolvedSources);
  append(names, map.names);
  // Keep sourcesContent aligned with sources, padding with nulls when missing.
  if (contents) append(sourcesContent, contents);
  else for (let i = 0; i < resolvedSources.length; i++) sourcesContent.push(null);
  // Ignore-list entries are indices into this section's sources; rebase them.
  if (ignores) for (let i = 0; i < ignores.length; i++) ignoreList.push(ignores[i] + sourcesOffset);
  for (let i = 0; i < decoded.length; i++) {
    const lineI = lineOffset + i;
    // We can only add so many lines before we step into the range that the next section's map
    // controls. When we get to the last line, then we'll start checking the segments to see if
    // they've crossed into the column range. But it may not have any columns that overstep, so we
    // still need to check that we don't overstep lines, too.
    if (lineI > stopLine) return;
    // The out line may already exist in mappings (if we're continuing the line started by a
    // previous section). Or, we may have jumped ahead several lines to start this section.
    const out = getLine(mappings, lineI);
    // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
    // map can be multiple lines), it doesn't.
    const cOffset = i === 0 ? columnOffset : 0;
    const line = decoded[i];
    for (let j = 0; j < line.length; j++) {
      const seg = line[j];
      const column = cOffset + seg[COLUMN];
      // If this segment steps into the column range that the next section's map controls, we need
      // to stop early.
      if (lineI === stopLine && column >= stopColumn) return;
      if (seg.length === 1) {
        out.push([column]);
        continue;
      }
      const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
      const sourceLine = seg[SOURCE_LINE];
      const sourceColumn = seg[SOURCE_COLUMN];
      out.push(
        seg.length === 4
          ? [column, sourcesIndex, sourceLine, sourceColumn]
          : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]],
      );
    }
  }
}
// Appends every element of `other` onto `arr` in place.
function append<T>(arr: T[], other: T[]) {
  for (const value of other) arr.push(value);
}
// Returns arr[index], growing `arr` with empty lines as needed so the index
// always exists.
function getLine<T>(arr: T[][], index: number): T[] {
  while (arr.length <= index) arr.push([]);
  return arr[index];
}

16
node_modules/@jridgewell/trace-mapping/src/resolve.ts generated vendored Normal file
View File

@@ -0,0 +1,16 @@
import resolveUri from '@jridgewell/resolve-uri';
import stripFilename from './strip-filename';
// Resolves a (possibly null) source path to a full URL/path string.
type Resolve = (source: string | null) => string;
/**
 * Builds a resolve function that resolves each `sources` entry relative to the
 * sourcemap's own URL (directory part only) and the map's sourceRoot.
 */
export default function resolver(
  mapUrl: string | null | undefined,
  sourceRoot: string | undefined,
): Resolve {
  // Base directory: the map URL with its filename stripped.
  const from = stripFilename(mapUrl);
  // The sourceRoot is always treated as a directory, if it's not empty.
  // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
  // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
  const prefix = sourceRoot ? sourceRoot + '/' : '';
  // Null/empty sources resolve to just the prefix against the base directory.
  return (source) => resolveUri(prefix + (source || ''), from);
}

45
node_modules/@jridgewell/trace-mapping/src/sort.ts generated vendored Normal file
View File

@@ -0,0 +1,45 @@
import { COLUMN } from './sourcemap-segment';
import type { SourceMapSegment } from './sourcemap-segment';
/**
 * Ensures every line of `mappings` is sorted by generated column, sorting only
 * the lines that need it. Returns the input untouched when already sorted.
 */
export default function maybeSort(
  mappings: SourceMapSegment[][],
  owned: boolean,
): SourceMapSegment[][] {
  let i = nextUnsortedSegmentLine(mappings, 0);
  if (i === mappings.length) return mappings;
  // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it.
  // If not, we do not want to modify the consumer's input array.
  if (!owned) mappings = mappings.slice();
  while (i < mappings.length) {
    mappings[i] = sortSegments(mappings[i], owned);
    i = nextUnsortedSegmentLine(mappings, i + 1);
  }
  return mappings;
}
// Returns the index of the first unsorted line at or after `start`, or
// mappings.length when every remaining line is sorted.
function nextUnsortedSegmentLine(mappings: SourceMapSegment[][], start: number): number {
  let i = start;
  while (i < mappings.length && isSorted(mappings[i])) i++;
  return i;
}
// True when no segment's column is smaller than its predecessor's.
function isSorted(line: SourceMapSegment[]): boolean {
  return !line.some((seg, j) => j > 0 && seg[COLUMN] < line[j - 1][COLUMN]);
}
// Sorts a line by generated column; copies first when we don't own the input.
function sortSegments(line: SourceMapSegment[], owned: boolean): SourceMapSegment[] {
  const target = owned ? line : line.slice();
  return target.sort(sortComparator);
}

// Ascending order on the generated column field.
function sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {
  return a[COLUMN] - b[COLUMN];
}

View File

@@ -0,0 +1,23 @@
// Semantic aliases for the numeric fields of the segment tuples below.
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
type GeneratedLine = number;
// A decoded mapping segment: 1 field (generated column only), 4 fields (adds
// source index/line/column), or 5 fields (adds a names index).
export type SourceMapSegment =
  | [GeneratedColumn]
  | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]
  | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
// A segment keyed by original position: [sourceColumn, generatedLine, generatedColumn].
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
// Tuple indices into SourceMapSegment.
export const COLUMN = 0;
export const SOURCES_INDEX = 1;
export const SOURCE_LINE = 2;
export const SOURCE_COLUMN = 3;
export const NAMES_INDEX = 4;
// Tuple indices into ReverseSegment (index 0 is COLUMN there too).
export const REV_GENERATED_LINE = 1;
export const REV_GENERATED_COLUMN = 2;

View File

@@ -0,0 +1,8 @@
/**
 * Removes everything after the last "/", but leaves the slash. Empty, null,
 * and undefined inputs (and paths with no "/") yield ''.
 */
export default function stripFilename(path: string | undefined | null): string {
  if (!path) return '';
  return path.slice(0, path.lastIndexOf('/') + 1);
}

View File

@@ -0,0 +1,504 @@
import { encode, decode } from '@jridgewell/sourcemap-codec';
import resolver from './resolve';
import maybeSort from './sort';
import buildBySources from './by-source';
import {
memoizedState,
memoizedBinarySearch,
upperBound,
lowerBound,
found as bsFound,
} from './binary-search';
import {
COLUMN,
SOURCES_INDEX,
SOURCE_LINE,
SOURCE_COLUMN,
NAMES_INDEX,
REV_GENERATED_LINE,
REV_GENERATED_COLUMN,
} from './sourcemap-segment';
import { parse } from './types';
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
import type {
SourceMapV3,
DecodedSourceMap,
EncodedSourceMap,
InvalidOriginalMapping,
OriginalMapping,
InvalidGeneratedMapping,
GeneratedMapping,
SourceMapInput,
Needle,
SourceNeedle,
SourceMap,
EachMapping,
Bias,
XInput,
SectionedSourceMap,
Ro,
} from './types';
import type { Source } from './by-source';
import type { MemoState } from './binary-search';
export type { SourceMapSegment } from './sourcemap-segment';
export type {
SourceMap,
DecodedSourceMap,
EncodedSourceMap,
Section,
SectionedSourceMap,
SourceMapV3,
Bias,
EachMapping,
GeneratedMapping,
InvalidGeneratedMapping,
InvalidOriginalMapping,
Needle,
OriginalMapping,
OriginalMapping as Mapping,
SectionedSourceMapInput,
SourceMapInput,
SourceNeedle,
XInput,
EncodedSourceMapXInput,
DecodedSourceMapXInput,
SectionedSourceMapXInput,
SectionXInput,
} from './types';
// Friend-access view of TraceMap's private cache fields; see `cast()` below.
interface PublicMap {
  _encoded: TraceMap['_encoded'];
  _decoded: TraceMap['_decoded'];
  _decodedMemo: TraceMap['_decodedMemo'];
  _bySources: TraceMap['_bySources'];
  _bySourceMemos: TraceMap['_bySourceMemos'];
}
// Error messages for the 1-based-line / 0-based-column public API contract.
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
// Bias values for position queries, mirroring the `source-map` consumer API.
export const LEAST_UPPER_BOUND = -1;
export const GREATEST_LOWER_BOUND = 1;
export { FlattenMap, FlattenMap as AnyMap } from './flatten-map';
/**
 * Wraps a parsed source map for efficient position queries. Mappings are kept
 * in whichever form (encoded string / decoded arrays) was supplied and
 * converted lazily; per-query memo state lives on the instance.
 */
export class TraceMap implements SourceMap {
  declare version: SourceMapV3['version'];
  declare file: SourceMapV3['file'];
  declare names: SourceMapV3['names'];
  declare sourceRoot: SourceMapV3['sourceRoot'];
  declare sources: SourceMapV3['sources'];
  declare sourcesContent: SourceMapV3['sourcesContent'];
  declare ignoreList: SourceMapV3['ignoreList'];
  declare resolvedSources: string[];
  // Lazy caches: exactly one of _encoded/_decoded starts populated.
  declare private _encoded: string | undefined;
  declare private _decoded: SourceMapSegment[][] | undefined;
  declare private _decodedMemo: MemoState;
  declare private _bySources: Source[] | undefined;
  declare private _bySourceMemos: MemoState[] | undefined;
  constructor(map: Ro<SourceMapInput>, mapUrl?: string | null) {
    const isString = typeof map === 'string';
    // An existing TraceMap (detected via its private memo field) is reused as-is.
    if (!isString && (map as unknown as { _decodedMemo: any })._decodedMemo) return map as TraceMap;
    const parsed = parse(map as Exclude<SourceMapInput, TraceMap>);
    const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
    this.version = version;
    this.file = file;
    this.names = names || [];
    this.sourceRoot = sourceRoot;
    this.sources = sources;
    this.sourcesContent = sourcesContent;
    // Accept both the standard `ignoreList` and the legacy x_google_ignoreList.
    this.ignoreList = parsed.ignoreList || (parsed as XInput).x_google_ignoreList || undefined;
    const resolve = resolver(mapUrl, sourceRoot);
    this.resolvedSources = sources.map(resolve);
    const { mappings } = parsed;
    if (typeof mappings === 'string') {
      // Encoded VLQ string: decode lazily on first use.
      this._encoded = mappings;
      this._decoded = undefined;
    } else if (Array.isArray(mappings)) {
      this._encoded = undefined;
      // Only sort if needed; JSON-parsed input (isString) may be mutated freely.
      this._decoded = maybeSort(mappings, isString);
    } else if ((parsed as unknown as SectionedSourceMap).sections) {
      throw new Error(`TraceMap passed sectioned source map, please use FlattenMap export instead`);
    } else {
      throw new Error(`invalid source map: ${JSON.stringify(parsed)}`);
    }
    this._decodedMemo = memoizedState();
    this._bySources = undefined;
    this._bySourceMemos = undefined;
  }
}
/**
 * Typescript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers. Purely type-level; no runtime cost.
 */
function cast(map: unknown): PublicMap {
  return map as any;
}
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 * Encoded lazily from the decoded form on first call, then cached on the map.
 */
export function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'] {
  return (cast(map)._encoded ??= encode(cast(map)._decoded!));
}
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 * Decoded lazily from the encoded form on first call, then cached on the map.
 */
export function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']> {
  return (cast(map)._decoded ||= decode(cast(map)._encoded!));
}
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 *
 * Returns null when the line has no mappings or no segment matches under
 * GREATEST_LOWER_BOUND.
 */
export function traceSegment(
  map: TraceMap,
  line: number,
  column: number,
): Readonly<SourceMapSegment> | null {
  const decoded = decodedMappings(map);
  // It's common for parent source maps to have pointers to lines that have no
  // mapping (like a "//# sourceMappingURL=") at the end of the child file.
  if (line >= decoded.length) return null;
  const segments = decoded[line];
  const index = traceSegmentInternal(
    segments,
    cast(map)._decodedMemo,
    line,
    column,
    GREATEST_LOWER_BOUND,
  );
  return index === -1 ? null : segments[index];
}
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 *
 * @throws when line < 1 or column < 0.
 */
export function originalPositionFor(
  map: TraceMap,
  needle: Needle,
): OriginalMapping | InvalidOriginalMapping {
  let { line, column, bias } = needle;
  // Convert to the 0-based lines used internally.
  line--;
  if (line < 0) throw new Error(LINE_GTR_ZERO);
  if (column < 0) throw new Error(COL_GTR_EQ_ZERO);
  const decoded = decodedMappings(map);
  // It's common for parent source maps to have pointers to lines that have no
  // mapping (like a "//# sourceMappingURL=") at the end of the child file.
  if (line >= decoded.length) return OMapping(null, null, null, null);
  const segments = decoded[line];
  const index = traceSegmentInternal(
    segments,
    cast(map)._decodedMemo,
    line,
    column,
    bias || GREATEST_LOWER_BOUND,
  );
  // No segment matched under the requested bias.
  if (index === -1) return OMapping(null, null, null, null);
  const segment = segments[index];
  // Length-1 segments carry no original location.
  if (segment.length === 1) return OMapping(null, null, null, null);
  const { names, resolvedSources } = map;
  return OMapping(
    resolvedSources[segment[SOURCES_INDEX]],
    segment[SOURCE_LINE] + 1,
    segment[SOURCE_COLUMN],
    segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
  );
}
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 * Defaults to GREATEST_LOWER_BOUND when no bias is given.
 */
export function generatedPositionFor(
  map: TraceMap,
  needle: SourceNeedle,
): GeneratedMapping | InvalidGeneratedMapping {
  const bias = needle.bias || GREATEST_LOWER_BOUND;
  return generatedPosition(map, needle.source, needle.line, needle.column, bias, false);
}
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
export function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[] {
  // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
  const bias = needle.bias || LEAST_UPPER_BOUND;
  return generatedPosition(map, needle.source, needle.line, needle.column, bias, true);
}
/**
* Iterates each mapping in generated position order.
*/
export function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void {
const decoded = decodedMappings(map);
const { names, resolvedSources } = map;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generatedLine = i + 1;
const generatedColumn = seg[0];
let source = null;
let originalLine = null;
let originalColumn = null;
let name = null;
if (seg.length !== 1) {
source = resolvedSources[seg[1]];
originalLine = seg[2] + 1;
originalColumn = seg[3];
}
if (seg.length === 5) name = names[seg[4]];
cb({
generatedLine,
generatedColumn,
source,
originalLine,
originalColumn,
name,
} as EachMapping);
}
}
}
// Looks `source` up first among the raw `sources`, then among the resolved
// URLs; returns -1 when neither matches.
function sourceIndex(map: TraceMap, source: string): number {
  const idx = map.sources.indexOf(source);
  return idx !== -1 ? idx : map.resolvedSources.indexOf(source);
}
/**
 * Retrieves the source content for a particular source, if its found. Returns null if not.
 * Accepts either the raw source path or its resolved URL.
 */
export function sourceContentFor(map: TraceMap, source: string): string | null {
  const content = map.sourcesContent;
  if (content == null) return null;
  const index = sourceIndex(map, source);
  if (index === -1) return null;
  return content[index];
}
/**
 * Determines if the source is marked to ignore by the source map.
 * Unknown sources and maps without an ignoreList report false.
 */
export function isIgnored(map: TraceMap, source: string): boolean {
  const list = map.ignoreList;
  if (list == null) return false;
  const index = sourceIndex(map, source);
  return index !== -1 && list.includes(index);
}
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 *
 * The caller guarantees `map.mappings` is already sorted by generated position.
 */
export function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap {
  // Construct with an empty mappings array, then splice the presorted mappings
  // in directly so the constructor's maybeSort pass never sees them.
  const tracer = new TraceMap(clone(map, []), mapUrl);
  cast(tracer)._decoded = map.mappings;
  return tracer;
}
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify. The mappings array is shared, not copied.
 */
export function decodedMap(
  map: TraceMap,
): Omit<DecodedSourceMap, 'mappings'> & { mappings: readonly SourceMapSegment[][] } {
  return clone(map, decodedMappings(map));
}
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export function encodedMap(map: TraceMap): EncodedSourceMap {
  return clone(map, encodedMappings(map));
}
// Copies a map's metadata fields into a fresh plain object, substituting the
// given mappings (encoded string or decoded arrays) and normalizing the legacy
// x_google_ignoreList field into ignoreList.
function clone<T extends string | readonly SourceMapSegment[][]>(
  map: TraceMap | DecodedSourceMap,
  mappings: T,
): T extends string ? EncodedSourceMap : DecodedSourceMap {
  const { version, file, names, sourceRoot, sources, sourcesContent } = map;
  return {
    version,
    file,
    names,
    sourceRoot,
    sources,
    sourcesContent,
    mappings,
    ignoreList: map.ignoreList || (map as XInput).x_google_ignoreList,
  } as any;
}
// Builds an OriginalMapping result object; the overloads encode that all
// fields are null together (no match) or populated together (match).
function OMapping(source: null, line: null, column: null, name: null): InvalidOriginalMapping;
function OMapping(
  source: string,
  line: number,
  column: number,
  name: string | null,
): OriginalMapping;
function OMapping(
  source: string | null,
  line: number | null,
  column: number | null,
  name: string | null,
): OriginalMapping | InvalidOriginalMapping {
  return { source, line, column, name } as any;
}
// Builds a GeneratedMapping result object; both fields are null together (no
// match) or numbers together (match).
function GMapping(line: null, column: null): InvalidGeneratedMapping;
function GMapping(line: number, column: number): GeneratedMapping;
function GMapping(
  line: number | null,
  column: number | null,
): GeneratedMapping | InvalidGeneratedMapping {
  return { line, column } as any;
}
function traceSegmentInternal(
  segments: SourceMapSegment[],
  memo: MemoState,
  line: number,
  column: number,
  bias: Bias,
): number;
function traceSegmentInternal(
  segments: ReverseSegment[],
  memo: MemoState,
  line: number,
  column: number,
  bias: Bias,
): number;
/**
 * Shared search over one line's segments. Returns the index of the segment
 * matching `column` under `bias`, or -1 when nothing matches. Note: leaves the
 * module-level `found` flag (imported as bsFound) set by the search, which
 * sliceGeneratedPositions relies on.
 */
function traceSegmentInternal(
  segments: SourceMapSegment[] | ReverseSegment[],
  memo: MemoState,
  line: number,
  column: number,
  bias: Bias,
): number {
  let index = memoizedBinarySearch(segments, column, memo, line);
  if (bsFound) {
    // Exact column match: several segments may share the column, so move to
    // the first (lower bound) or last (upper bound) per the requested bias.
    index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
  } else if (bias === LEAST_UPPER_BOUND) index++;
  // Fell off either end of the array: no segment satisfies the bias.
  if (index === -1 || index === segments.length) return -1;
  return index;
}
/**
 * Collects every generated position mapped from the matched source column
 * (used by allGeneratedPositionsFor). Reads the bsFound flag set by the search
 * inside traceSegmentInternal.
 */
function sliceGeneratedPositions(
  segments: ReverseSegment[],
  memo: MemoState,
  line: number,
  column: number,
  bias: Bias,
): GeneratedMapping[] {
  let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
  // We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
  // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
  // still need to call `lowerBound()` to find the first segment, which is slower than just looking
  // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
  // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
  // match LEAST_UPPER_BOUND.
  if (!bsFound && bias === LEAST_UPPER_BOUND) min++;
  if (min === -1 || min === segments.length) return [];
  // We may have found the segment that started at an earlier column. If this is the case, then we
  // need to slice all generated segments that match _that_ column, because all such segments span
  // to our desired column.
  const matchedColumn = bsFound ? column : segments[min][COLUMN];
  // The binary search is not guaranteed to find the lower bound when a match wasn't found.
  if (!bsFound) min = lowerBound(segments, matchedColumn, min);
  const max = upperBound(segments, matchedColumn, min);
  const result = [];
  // Convert each reverse segment in [min, max] to a 1-based-line mapping.
  for (; min <= max; min++) {
    const segment = segments[min];
    result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
  }
  return result;
}
function generatedPosition(
  map: TraceMap,
  source: string,
  line: number,
  column: number,
  bias: Bias,
  all: false,
): GeneratedMapping | InvalidGeneratedMapping;
function generatedPosition(
  map: TraceMap,
  source: string,
  line: number,
  column: number,
  bias: Bias,
  all: true,
): GeneratedMapping[];
/**
 * Shared implementation for generatedPositionFor/allGeneratedPositionsFor.
 * `line` is 1-based on entry and converted to 0-based internally.
 *
 * @throws when line < 1 or column < 0.
 */
function generatedPosition(
  map: TraceMap,
  source: string,
  line: number,
  column: number,
  bias: Bias,
  all: boolean,
): GeneratedMapping | InvalidGeneratedMapping | GeneratedMapping[] {
  line--;
  if (line < 0) throw new Error(LINE_GTR_ZERO);
  if (column < 0) throw new Error(COL_GTR_EQ_ZERO);
  const { sources, resolvedSources } = map;
  // Accept either the raw source path or its resolved URL.
  let sourceIndex = sources.indexOf(source);
  if (sourceIndex === -1) sourceIndex = resolvedSources.indexOf(source);
  if (sourceIndex === -1) return all ? [] : GMapping(null, null);
  // Lazily build (and cache) the source-ordered reverse index, along with one
  // memo state per source for the searches below.
  const generated = (cast(map)._bySources ||= buildBySources(
    decodedMappings(map),
    (cast(map)._bySourceMemos = sources.map(memoizedState)),
  ));
  const segments = generated[sourceIndex][line];
  // Sparse index: this original line maps to nothing.
  if (segments == null) return all ? [] : GMapping(null, null);
  const memo = cast(map)._bySourceMemos![sourceIndex];
  if (all) return sliceGeneratedPositions(segments, memo, line, column, bias);
  const index = traceSegmentInternal(segments, memo, line, column, bias);
  if (index === -1) return GMapping(null, null);
  const segment = segments[index];
  return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}

114
node_modules/@jridgewell/trace-mapping/src/types.ts generated vendored Normal file
View File

@@ -0,0 +1,114 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
// The standard Source Map v3 fields shared by encoded and decoded maps.
export interface SourceMapV3 {
  file?: string | null;
  names: string[];
  sourceRoot?: string;
  sources: (string | null)[];
  sourcesContent?: (string | null)[];
  version: 3;
  ignoreList?: number[];
}
// Map whose mappings are the VLQ-encoded string form.
export interface EncodedSourceMap extends SourceMapV3 {
  mappings: string;
}
// Map whose mappings are already decoded into segment arrays.
export interface DecodedSourceMap extends SourceMapV3 {
  mappings: SourceMapSegment[][];
}
// One entry of a sectioned ("index") map: a child map plus the generated
// line/column at which it starts.
export interface Section {
  offset: { line: number; column: number };
  map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
  file?: string | null;
  sections: Section[];
  version: 3;
}
// Result of originalPositionFor: 1-based line, 0-based column.
export type OriginalMapping = {
  source: string | null;
  line: number;
  column: number;
  name: string | null;
};
// All-null variant returned when no original position matches.
export type InvalidOriginalMapping = {
  source: null;
  line: null;
  column: null;
  name: null;
};
// Result of generatedPositionFor: 1-based line, 0-based column.
export type GeneratedMapping = {
  line: number;
  column: number;
};
export type InvalidGeneratedMapping = {
  line: null;
  column: null;
};
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
// Legacy spelling of ignoreList accepted on input maps.
export type XInput = { x_google_ignoreList?: SourceMapV3['ignoreList'] };
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
  sections: SectionXInput[];
};
export type SectionXInput = Omit<Section, 'map'> & {
  map: SectionedSourceMapInput;
};
// Anything TraceMap/FlattenMap accept: JSON string, parsed map, or TraceMap.
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
// Query needles: 1-based line, 0-based column.
export type Needle = { line: number; column: number; bias?: Bias };
export type SourceNeedle = { source: string; line: number; column: number; bias?: Bias };
// Callback payload for eachMapping: either an unmapped segment (all original
// fields null) or a fully mapped one.
export type EachMapping =
  | {
      generatedLine: number;
      generatedColumn: number;
      source: null;
      originalLine: null;
      originalColumn: null;
      name: null;
    }
  | {
      generatedLine: number;
      generatedColumn: number;
      source: string | null;
      originalLine: number;
      originalColumn: number;
      name: string | null;
    };
// Field contract implemented by TraceMap (declare-only; no runtime members).
export abstract class SourceMap {
  declare version: SourceMapV3['version'];
  declare file: SourceMapV3['file'];
  declare names: SourceMapV3['names'];
  declare sourceRoot: SourceMapV3['sourceRoot'];
  declare sources: SourceMapV3['sources'];
  declare sourcesContent: SourceMapV3['sourcesContent'];
  declare resolvedSources: SourceMapV3['sources'];
  declare ignoreList: SourceMapV3['ignoreList'];
}
// Recursive readonly: accepts mutable or readonly forms of T at any depth.
export type Ro<T> =
  T extends Array<infer V>
    ? V[] | Readonly<V[]> | RoArray<V> | Readonly<RoArray<V>>
    : T extends object
      ? T | Readonly<T> | RoObject<T> | Readonly<RoObject<T>>
      : T;
type RoArray<T> = Ro<T>[];
type RoObject<T> = { [K in keyof T]: T[K] | Ro<T[K]> };
// Parses JSON-string input into an object; non-string input passes through.
export function parse<T>(map: T): Exclude<T, string> {
  if (typeof map === 'string') return JSON.parse(map);
  return map as Exclude<T, string>;
}