import { encode, decode } from '@jridgewell/sourcemap-codec';
import resolveUri from '@jridgewell/resolve-uri';

function resolve(input, base) {
    // The base is always treated as a directory, if it's not empty.
    // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
    // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
    if (base && !base.endsWith('/'))
        base += '/';
    return resolveUri(input, base);
}
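// Example (illustrative): because the base is forced to end with a slash, it is always resolved as
// a directory. Both resolve('a.js', 'https://example.com/dir') and
// resolve('a.js', 'https://example.com/dir/') yield 'https://example.com/dir/a.js'.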

/**
 * Removes everything after the last "/", but leaves the slash.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const index = path.lastIndexOf('/');
    return path.slice(0, index + 1);
}
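// Example (illustrative): stripFilename('https://example.com/dir/file.js.map') returns
// 'https://example.com/dir/', while stripFilename('file.js.map') returns '' because there is no
// slash to keep.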

const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;
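// The constants above index into the two segment shapes used below. A decoded generated segment is
// one of:
//   [generatedColumn]
//   [generatedColumn, sourcesIndex, sourceLine, sourceColumn]
//   [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
// The REV_* constants index into the reversed segments built by buildBySources, which are laid out
// as [sourceColumn, generatedLine, generatedColumn].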

function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
    for (let i = start; i < mappings.length; i++) {
        if (!isSorted(mappings[i]))
            return i;
    }
    return mappings.length;
}
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        if (line[j][COLUMN] < line[j - 1][COLUMN]) {
            return false;
        }
    }
    return true;
}
function sortSegments(line, owned) {
    if (!owned)
        line = line.slice();
    return line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[COLUMN] - b[COLUMN];
}

let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    return low - 1;
}
function upperBound(haystack, needle, index) {
    for (let i = index + 1; i < haystack.length; i++, index++) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
function lowerBound(haystack, needle, index) {
    for (let i = index - 1; i >= 0; i--, index--) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
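// Example (illustrative): given segments whose columns are [1, 5, 5, 5, 9] and a needle of 5, the
// binary search may land on any of the three 5s; upperBound walks forward to the last one (index 3)
// and lowerBound walks backward to the first one (index 1).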
function memoizedState() {
    return {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
}
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    if (key === lastKey) {
        if (needle === lastNeedle) {
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
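// Example (illustrative): tracing columns 10, then 20, then 30 on the same line hits the fast path
// above -- each needle is >= the previous one for the same key, so the search restarts from
// lastIndex rather than 0. A smaller needle only shrinks `high`, so the result is still correct.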

// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
    const sources = memos.map(buildNullArray);
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            if (seg.length === 1)
                continue;
            const sourceIndex = seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            const originalSource = sources[sourceIndex];
            const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
            const memo = memos[sourceIndex];
            // The binary search either found a match, or it found the left-index just before where the
            // segment should go. Either way, we want to insert after that. And there may be multiple
            // generated segments associated with an original location, so we may need to move forward
            // several indexes before we find where we need to insert.
            const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
            insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
        }
    }
    return sources;
}
function insert(array, index, value) {
    for (let i = array.length; i > index; i--) {
        array[i] = array[i - 1];
    }
    array[index] = value;
}
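// Example (illustrative): insert([1, 3], 1, 2) shifts the 3 up one index and writes 2 at index 1,
// leaving [1, 2, 3] -- the same effect as array.splice(index, 0, value) without the extra
// machinery.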
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
    return { __proto__: null };
}
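// Example (illustrative): keys set out of numeric order still iterate in ascending order:
//   const arr = buildNullArray();
//   arr[10] = 'b';
//   arr[2] = 'a';
//   for (const key in arr) { /* visits '2', then '10' */ }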

const AnyMap = function (map, mapUrl) {
    const parsed = typeof map === 'string' ? JSON.parse(map) : map;
    if (!('sections' in parsed))
        return new TraceMap(parsed, mapUrl);
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    const { sections } = parsed;
    let i = 0;
    for (; i < sections.length - 1; i++) {
        const no = sections[i + 1].offset;
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
    }
    if (sections.length > 0) {
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
    }
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
    };
    return presortedDecodedMap(joined);
};
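// Example (illustrative, hypothetical map): AnyMap flattens a sectioned "index map" into a single
// TraceMap. A minimal input could look like:
//   AnyMap({
//     version: 3,
//     sections: [
//       { offset: { line: 0, column: 0 }, map: { version: 3, sources: ['a.js'], names: [], mappings: 'AAAA' } },
//       { offset: { line: 10, column: 0 }, map: { version: 3, sources: ['b.js'], names: [], mappings: 'AAAA' } },
//     ],
//   });
// Each section's mappings are shifted by its offset, its sources/names are appended to the joined
// arrays, and the flattened result is handed to presortedDecodedMap.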
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
    const map = AnyMap(section.map, mapUrl);
    const { line: lineOffset, column: columnOffset } = section.offset;
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources } = map;
    append(sources, resolvedSources);
    append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
    append(names, map.names);
    // If this section jumps forwards several lines, we need to add lines to the output mappings to catch up.
    for (let i = mappings.length; i <= lineOffset; i++)
        mappings.push([]);
    // We can only add so many lines before we step into the range that the next section's map
    // controls. When we get to the last line, then we'll start checking the segments to see if
    // they've crossed into the column range.
    const stopI = stopLine - lineOffset;
    const len = Math.min(decoded.length, stopI + 1);
    for (let i = 0; i < len; i++) {
        const line = decoded[i];
        // On the 0th loop, the line will already exist due to a previous section, or to the line
        // catch-up loop above.
        const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (i === stopI && column >= stopColumn)
                break;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            if (seg.length === 4) {
                out.push([column, sourcesIndex, sourceLine, sourceColumn]);
                continue;
            }
            out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
function append(arr, other) {
    for (let i = 0; i < other.length; i++)
        arr.push(other[i]);
}
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then the joined
// sourcemap would desynchronize the sources and their contents.
function fillSourcesContent(len) {
    const sourcesContent = [];
    for (let i = 0; i < len; i++)
        sourcesContent[i] = null;
    return sourcesContent;
}
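// Example (illustrative): fillSourcesContent(2) returns [null, null], keeping sourcesContent
// index-aligned with sources for sections that omit it.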

const INVALID_ORIGINAL_MAPPING = Object.freeze({
    source: null,
    line: null,
    column: null,
    name: null,
});
const INVALID_GENERATED_MAPPING = Object.freeze({
    line: null,
    column: null,
});
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
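// Example (illustrative): with segments at columns 10 and 20 on a line, tracing column 15 with
// GREATEST_LOWER_BOUND (the default bias) resolves to the column-10 segment, while
// LEAST_UPPER_BOUND resolves to the column-20 segment.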
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
let encodedMappings;
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
let decodedMappings;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
let traceSegment;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
let originalPositionFor;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 * Line is 1-based and column is 0-based, matching `originalPositionFor`.
 */
let generatedPositionFor;
/**
 * Iterates each mapping in generated position order.
 */
let eachMapping;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
let presortedDecodedMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let decodedMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let encodedMap;
class TraceMap {
    constructor(map, mapUrl) {
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
        const isString = typeof map === 'string';
        if (!isString && map.constructor === TraceMap)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        if (sourceRoot || mapUrl) {
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
        }
        else {
            this.resolvedSources = sources.map((s) => s || '');
        }
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
    }
}
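/**
 * Usage sketch (illustrative; the map literal below is a hypothetical example, not taken from this
 * repository):
 *
 * ```js
 * const tracer = new TraceMap({
 *   version: 3,
 *   sources: ['input.js'],
 *   names: ['foo'],
 *   mappings: 'AAAAA',
 * });
 * const pos = originalPositionFor(tracer, { line: 1, column: 0 });
 * // => { source: 'input.js', line: 1, column: 0, name: 'foo' }
 * ```
 */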
(() => {
    encodedMappings = (map) => {
        var _a;
        return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = encode(map._decoded)));
    };
    decodedMappings = (map) => {
        return (map._decoded || (map._decoded = decode(map._encoded)));
    };
    traceSegment = (map, line, column) => {
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return null;
        return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    };
    originalPositionFor = (map, { line, column, bias }) => {
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return INVALID_ORIGINAL_MAPPING;
        const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return INVALID_ORIGINAL_MAPPING;
        if (segment.length == 1)
            return INVALID_ORIGINAL_MAPPING;
        const { names, resolvedSources } = map;
        return {
            source: resolvedSources[segment[SOURCES_INDEX]],
            line: segment[SOURCE_LINE] + 1,
            column: segment[SOURCE_COLUMN],
            name: segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
        };
    };
    generatedPositionFor = (map, { source, line, column, bias }) => {
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return INVALID_GENERATED_MAPPING;
        const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
        const memos = map._bySourceMemos;
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return INVALID_GENERATED_MAPPING;
        const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return INVALID_GENERATED_MAPPING;
        return {
            line: segment[REV_GENERATED_LINE] + 1,
            column: segment[REV_GENERATED_COLUMN],
        };
    };
    eachMapping = (map, cb) => {
        const decoded = decodedMappings(map);
        const { names, resolvedSources } = map;
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generatedLine = i + 1;
                const generatedColumn = seg[0];
                let source = null;
                let originalLine = null;
                let originalColumn = null;
                let name = null;
                if (seg.length !== 1) {
                    source = resolvedSources[seg[1]];
                    originalLine = seg[2] + 1;
                    originalColumn = seg[3];
                }
                if (seg.length === 5)
                    name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    };
    presortedDecodedMap = (map, mapUrl) => {
        const clone = Object.assign({}, map);
        clone.mappings = [];
        const tracer = new TraceMap(clone, mapUrl);
        tracer._decoded = map.mappings;
        return tracer;
    };
    decodedMap = (map) => {
        return {
            version: 3,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings: decodedMappings(map),
        };
    };
    encodedMap = (map) => {
        return {
            version: 3,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings: encodedMappings(map),
        };
    };
})();
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++;
    if (index === -1 || index === segments.length)
        return null;
    return segments[index];
}
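// Example (illustrative): traceSegment(map, 0, 13) looks up generated line 0 (0-based), column 13,
// and returns the raw decoded segment for the closest mapping at or before that column (e.g.
// shaped like [13, 0, 2, 4]), or null when the line has no applicable mapping; originalPositionFor
// wraps the same lookup with 1-based lines and named fields.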

export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, traceSegment };
//# sourceMappingURL=trace-mapping.mjs.map