触控环操纵下,球可以移动了,或者说背景可以反向移动了
This commit is contained in:
parent
41fa1390ae
commit
272fb9e060
0
chatWithDSLocal.md
Normal file
0
chatWithDSLocal.md
Normal file
6
dist/config.js
vendored
Normal file
6
dist/config.js
vendored
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
// src/config.ts
|
||||||
|
export const GameConfig = {
|
||||||
|
aspectRatio: 2 / 1,
|
||||||
|
backgroundColor: '#f2e4a9',
|
||||||
|
borderColor: '#23a5a5'
|
||||||
|
};
|
35
dist/core/ControlBall.js
vendored
Normal file
35
dist/core/ControlBall.js
vendored
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
import { Vector2 } from './Vector2.js';
|
||||||
|
export class ControlBall {
|
||||||
|
constructor(canvas, gameWidth, gameHeight, baseSpeed) {
|
||||||
|
this.gameWidth = gameWidth;
|
||||||
|
this.gameHeight = gameHeight;
|
||||||
|
this.baseSpeed = baseSpeed;
|
||||||
|
this.velocity = new Vector2();
|
||||||
|
this.canvas = canvas;
|
||||||
|
this.ctx = canvas.getContext('2d');
|
||||||
|
this.radius = Math.min(gameWidth, gameHeight) * 0.05;
|
||||||
|
this.maxSpeed = baseSpeed * 1.2;
|
||||||
|
this.position = new Vector2(gameWidth / 2, gameHeight / 2);
|
||||||
|
}
|
||||||
|
update(touchData) {
|
||||||
|
// 计算速度
|
||||||
|
const speed = this.baseSpeed * touchData.speedFactor;
|
||||||
|
this.velocity = touchData.normalizedPosition.scale(speed);
|
||||||
|
// 更新位置
|
||||||
|
this.position = this.position.add(this.velocity);
|
||||||
|
// 边界检测
|
||||||
|
this.position.x = Math.max(this.radius, Math.min(this.gameWidth - this.radius, this.position.x));
|
||||||
|
this.position.y = Math.max(this.radius, Math.min(this.gameHeight - this.radius, this.position.y));
|
||||||
|
}
|
||||||
|
draw() {
|
||||||
|
this.ctx.beginPath();
|
||||||
|
this.ctx.arc(this.position.x, this.position.y, this.radius, 0, Math.PI * 2);
|
||||||
|
this.ctx.fillStyle = '#ff6b6b';
|
||||||
|
this.ctx.fill();
|
||||||
|
this.ctx.closePath();
|
||||||
|
}
|
||||||
|
reset() {
|
||||||
|
this.position = new Vector2(this.gameWidth / 2, this.gameHeight / 2);
|
||||||
|
this.velocity = new Vector2();
|
||||||
|
}
|
||||||
|
}
|
241
dist/core/GameBox.js
vendored
Normal file
241
dist/core/GameBox.js
vendored
Normal file
@ -0,0 +1,241 @@
|
|||||||
|
// src/core/GameBox.ts
|
||||||
|
export class GameBox {
|
||||||
|
destroy() {
|
||||||
|
// 移除事件监听器
|
||||||
|
window.removeEventListener('resize', () => this.updateSize());
|
||||||
|
window.removeEventListener('orientationchange', () => {
|
||||||
|
setTimeout(() => this.updateSize(), 100);
|
||||||
|
});
|
||||||
|
// 移除 DOM 元素
|
||||||
|
if (this.element && this.element.parentNode) {
|
||||||
|
this.element.parentNode.removeChild(this.element);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//
|
||||||
|
constructor(config) {
|
||||||
|
this.lastBackgroundPosition = { x: 50, y: 50 }; // Track background position
|
||||||
|
this.isTouching = false;
|
||||||
|
this.currentDirection = { x: 0, y: 0 };
|
||||||
|
this.moveInterval = null;
|
||||||
|
this.aspectRatio = config.aspectRatio;
|
||||||
|
this.backgroundColor = config.backgroundColor;
|
||||||
|
this.borderColor = config.borderColor;
|
||||||
|
this.container = config.element;
|
||||||
|
this.element = this.createBoxElement();
|
||||||
|
this.ballElement = this.createBall();
|
||||||
|
this.touchRingElement = this.createTouchRing();
|
||||||
|
this.container.style.display = 'flex';
|
||||||
|
this.container.style.justifyContent = 'center';
|
||||||
|
this.container.style.alignItems = 'center';
|
||||||
|
this.container.style.width = '100vw';
|
||||||
|
this.container.style.height = '100vh';
|
||||||
|
this.container.style.overflow = 'hidden';
|
||||||
|
this.bindEvents();
|
||||||
|
this.updateSize();
|
||||||
|
}
|
||||||
|
// 删除 createContainer 方法
|
||||||
|
createBoxElement() {
|
||||||
|
const element = document.createElement('div');
|
||||||
|
element.className = 'game-box';
|
||||||
|
element.style.boxSizing = 'border-box';
|
||||||
|
element.style.backgroundSize = 'auto 100%';
|
||||||
|
element.style.backgroundRepeat = 'no-repeat';
|
||||||
|
element.style.backgroundPosition = 'center center';
|
||||||
|
element.style.display = 'flex';
|
||||||
|
element.style.justifyContent = 'center';
|
||||||
|
element.style.alignItems = 'center';
|
||||||
|
this.applyBoxStyle(element);
|
||||||
|
this.container.appendChild(element);
|
||||||
|
return element;
|
||||||
|
}
|
||||||
|
applyBoxStyle(element) {
|
||||||
|
// Create grid background with random colors
|
||||||
|
const gridSize = 200; // Size of each grid square (doubled from previous)
|
||||||
|
const gridColor = 'rgba(0, 0, 0, 0.1)';
|
||||||
|
// Create canvas for random grid colors
|
||||||
|
const canvas = document.createElement('canvas');
|
||||||
|
const ctx = canvas.getContext('2d');
|
||||||
|
canvas.width = 2000;
|
||||||
|
canvas.height = 2000;
|
||||||
|
// Create gradient grid pattern
|
||||||
|
const baseColor = 200; // Base gray level
|
||||||
|
const colorStep = 40; // Increased color step for larger grids
|
||||||
|
for (let x = 0; x < canvas.width; x += gridSize) {
|
||||||
|
for (let y = 0; y < canvas.height; y += gridSize) {
|
||||||
|
// Calculate gradient based on grid position
|
||||||
|
const xCycle = Math.floor((x / gridSize) % 5);
|
||||||
|
const yCycle = Math.floor((y / gridSize) % 5);
|
||||||
|
const colorValue = baseColor - (xCycle + yCycle) * colorStep;
|
||||||
|
// Ensure color stays within valid range
|
||||||
|
const finalColor = Math.max(0, Math.min(255, colorValue));
|
||||||
|
ctx.fillStyle = `rgba(${finalColor},${finalColor},${finalColor},1)`;
|
||||||
|
ctx.fillRect(x, y, gridSize, gridSize);
|
||||||
|
// Add subtle border between cells
|
||||||
|
ctx.strokeStyle = 'rgba(0,0,0,0.1)';
|
||||||
|
ctx.strokeRect(x, y, gridSize, gridSize);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Add grid lines and coordinates
|
||||||
|
const gridImage = canvas.toDataURL();
|
||||||
|
element.style.background = `
|
||||||
|
url('${gridImage}'),
|
||||||
|
linear-gradient(to right, ${gridColor} 1px, transparent 1px),
|
||||||
|
linear-gradient(to bottom, ${gridColor} 1px, transparent 1px),
|
||||||
|
${this.backgroundColor}
|
||||||
|
`;
|
||||||
|
element.style.backgroundSize = `${gridSize}px ${gridSize}px`;
|
||||||
|
element.style.border = `1px solid ${this.borderColor}`;
|
||||||
|
// Add coordinate labels
|
||||||
|
this.addGridCoordinates(element);
|
||||||
|
}
|
||||||
|
addGridCoordinates(element) {
|
||||||
|
const gridSize = 200;
|
||||||
|
const width = 2000; // Use canvas width for consistent coordinates
|
||||||
|
const height = 2000; // Use canvas height for consistent coordinates
|
||||||
|
// X-axis labels
|
||||||
|
for (let x = 0; x < width; x += gridSize) {
|
||||||
|
const label = document.createElement('div');
|
||||||
|
label.style.position = 'absolute';
|
||||||
|
label.style.left = `${x}px`;
|
||||||
|
label.style.bottom = '-20px';
|
||||||
|
label.style.color = this.borderColor;
|
||||||
|
label.style.fontSize = '12px';
|
||||||
|
label.innerText = `${x / gridSize}`;
|
||||||
|
element.appendChild(label);
|
||||||
|
}
|
||||||
|
// Y-axis labels
|
||||||
|
for (let y = 0; y < height; y += gridSize) {
|
||||||
|
const label = document.createElement('div');
|
||||||
|
label.style.position = 'absolute';
|
||||||
|
label.style.top = `${y}px`;
|
||||||
|
label.style.left = '-20px';
|
||||||
|
label.style.color = this.borderColor;
|
||||||
|
label.style.fontSize = '12px';
|
||||||
|
label.innerText = `${y / gridSize}`;
|
||||||
|
element.appendChild(label);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
createBall() {
|
||||||
|
const ball = document.createElement('div');
|
||||||
|
ball.style.position = 'absolute';
|
||||||
|
ball.style.width = '40px';
|
||||||
|
ball.style.height = '40px';
|
||||||
|
ball.style.borderRadius = '50%';
|
||||||
|
ball.style.backgroundColor = 'red';
|
||||||
|
ball.style.transform = 'translate(-50%, -50%)';
|
||||||
|
ball.style.left = '50%';
|
||||||
|
ball.style.top = '50%';
|
||||||
|
this.element.appendChild(ball);
|
||||||
|
return ball;
|
||||||
|
}
|
||||||
|
createTouchRing() {
|
||||||
|
const ring = document.createElement('div');
|
||||||
|
ring.style.position = 'absolute';
|
||||||
|
ring.style.width = '100px';
|
||||||
|
ring.style.height = '100px';
|
||||||
|
ring.style.borderRadius = '50%';
|
||||||
|
ring.style.border = '2px solid rgba(255,255,255,0.5)';
|
||||||
|
ring.style.left = '20%';
|
||||||
|
ring.style.bottom = '30%';
|
||||||
|
ring.style.background = 'rgba(245, 222, 179, 0.5)'; // wheat color with 50% opacity
|
||||||
|
ring.style.transform = 'translate(-50%, 50%)';
|
||||||
|
ring.style.pointerEvents = 'auto';
|
||||||
|
this.element.appendChild(ring);
|
||||||
|
return ring;
|
||||||
|
}
|
||||||
|
bindEvents() {
|
||||||
|
window.addEventListener('resize', () => this.updateSize());
|
||||||
|
window.addEventListener('orientationchange', () => {
|
||||||
|
setTimeout(() => this.updateSize(), 100);
|
||||||
|
});
|
||||||
|
// Add touch event listeners
|
||||||
|
this.touchRingElement.addEventListener('touchstart', this.handleTouchStart.bind(this));
|
||||||
|
this.touchRingElement.addEventListener('touchmove', this.handleTouchMove.bind(this));
|
||||||
|
this.touchRingElement.addEventListener('touchend', this.handleTouchEnd.bind(this));
|
||||||
|
}
|
||||||
|
handleTouchStart(event) {
|
||||||
|
event.preventDefault();
|
||||||
|
this.isTouching = true;
|
||||||
|
this.updateBallPosition(event.touches[0]);
|
||||||
|
// Start continuous movement
|
||||||
|
this.moveInterval = window.setInterval(() => {
|
||||||
|
if (this.isTouching) {
|
||||||
|
this.lastBackgroundPosition.x += this.currentDirection.x;
|
||||||
|
this.lastBackgroundPosition.y += this.currentDirection.y;
|
||||||
|
this.element.style.backgroundPosition = `${this.lastBackgroundPosition.x}% ${this.lastBackgroundPosition.y}%`;
|
||||||
|
}
|
||||||
|
}, 16); // ~60fps
|
||||||
|
}
|
||||||
|
handleTouchMove(event) {
|
||||||
|
event.preventDefault();
|
||||||
|
this.updateBallPosition(event.touches[0]);
|
||||||
|
}
|
||||||
|
handleTouchEnd() {
|
||||||
|
this.isTouching = false;
|
||||||
|
if (this.moveInterval) {
|
||||||
|
window.clearInterval(this.moveInterval);
|
||||||
|
this.moveInterval = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
updateBallPosition(touch) {
|
||||||
|
const rect = this.touchRingElement.getBoundingClientRect();
|
||||||
|
const ringCenterX = rect.left + rect.width / 2;
|
||||||
|
const ringCenterY = rect.top + rect.height / 2;
|
||||||
|
const ringRadius = rect.width / 2;
|
||||||
|
// Get touch position relative to ring center
|
||||||
|
const touchX = touch.clientX - ringCenterX;
|
||||||
|
const touchY = touch.clientY - ringCenterY;
|
||||||
|
// Calculate distance from center
|
||||||
|
const distance = Math.sqrt(touchX * touchX + touchY * touchY);
|
||||||
|
const normalizedDistance = Math.min(distance / ringRadius, 1);
|
||||||
|
// Calculate speed factor
|
||||||
|
let speedFactor = 0;
|
||||||
|
if (normalizedDistance > 0.5 && normalizedDistance <= 1) {
|
||||||
|
// Map distance from 0.5-1 to 0.8-1.2 in 5% increments
|
||||||
|
const steps = (normalizedDistance - 0.5) / 0.1;
|
||||||
|
speedFactor = 0.8 + Math.floor(steps) * 0.05;
|
||||||
|
}
|
||||||
|
// Calculate direction vector
|
||||||
|
const directionX = touchX / distance;
|
||||||
|
const directionY = touchY / distance;
|
||||||
|
// Normalize direction vector to maintain equal speed in both axes
|
||||||
|
const magnitude = Math.sqrt(directionX * directionX + directionY * directionY);
|
||||||
|
const normalizedX = directionX / magnitude;
|
||||||
|
const normalizedY = directionY / magnitude;
|
||||||
|
// Update current direction for continuous movement
|
||||||
|
this.currentDirection = {
|
||||||
|
x: -normalizedX * speedFactor * 0.5, // Keep horizontal speed unchanged
|
||||||
|
y: -normalizedY * speedFactor * 1.0 // Double vertical speed
|
||||||
|
};
|
||||||
|
// Update background position based on last position
|
||||||
|
this.lastBackgroundPosition.x += this.currentDirection.x;
|
||||||
|
this.lastBackgroundPosition.y += this.currentDirection.y;
|
||||||
|
this.element.style.backgroundPosition = `${this.lastBackgroundPosition.x}% ${this.lastBackgroundPosition.y}%`;
|
||||||
|
}
|
||||||
|
updateSize() {
|
||||||
|
const visualWidth = window.innerWidth;
|
||||||
|
const visualHeight = window.innerHeight;
|
||||||
|
const windowRatio = visualWidth / visualHeight;
|
||||||
|
let boxWidth, boxHeight;
|
||||||
|
if (windowRatio > this.aspectRatio) {
|
||||||
|
// Window is wider than aspect ratio - fit to height
|
||||||
|
boxHeight = visualHeight;
|
||||||
|
boxWidth = boxHeight * this.aspectRatio;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Window is taller than aspect ratio - fit to width
|
||||||
|
boxWidth = visualWidth;
|
||||||
|
boxHeight = boxWidth / this.aspectRatio;
|
||||||
|
}
|
||||||
|
// Set box dimensions
|
||||||
|
this.element.style.width = `${boxWidth}px`;
|
||||||
|
this.element.style.height = `${boxHeight}px`;
|
||||||
|
this.element.style.margin = 'auto'; // Use flexbox centering
|
||||||
|
// Update touch ring position
|
||||||
|
// this.touchRingElement.style.left = '20px';
|
||||||
|
// this.touchRingElement.style.bottom = '20px';
|
||||||
|
}
|
||||||
|
setBackground(imageUrl) {
|
||||||
|
this.element.style.backgroundImage = `url('${imageUrl}')`;
|
||||||
|
}
|
||||||
|
}
|
21
dist/core/GameInput.js
vendored
Normal file
21
dist/core/GameInput.js
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
import { Vector2 } from './Vector2.js';
|
||||||
|
import { Direction } from './types/Direction.js';
|
||||||
|
export class GameInput {
|
||||||
|
constructor(touchRing) {
|
||||||
|
this.touchRing = touchRing;
|
||||||
|
this.currentInput = {
|
||||||
|
direction: Direction.None,
|
||||||
|
speedFactor: 1,
|
||||||
|
normalizedPosition: new Vector2(0, 0)
|
||||||
|
};
|
||||||
|
this.initEvents();
|
||||||
|
}
|
||||||
|
initEvents() {
|
||||||
|
this.touchRing.addEventListener('touchChange', (event) => {
|
||||||
|
this.currentInput = event.detail;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
getCurrentInput() {
|
||||||
|
return this.currentInput;
|
||||||
|
}
|
||||||
|
}
|
12
dist/core/TouchBox.js
vendored
Normal file
12
dist/core/TouchBox.js
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
export var Direction;
|
||||||
|
(function (Direction) {
|
||||||
|
Direction["Up"] = "up";
|
||||||
|
Direction["Down"] = "down";
|
||||||
|
Direction["Left"] = "left";
|
||||||
|
Direction["Right"] = "right";
|
||||||
|
Direction["UpLeft"] = "up-left";
|
||||||
|
Direction["UpRight"] = "up-right";
|
||||||
|
Direction["DownLeft"] = "down-left";
|
||||||
|
Direction["DownRight"] = "down-right";
|
||||||
|
Direction["None"] = "none";
|
||||||
|
})(Direction || (Direction = {}));
|
94
dist/core/TouchRing.js
vendored
Normal file
94
dist/core/TouchRing.js
vendored
Normal file
@ -0,0 +1,94 @@
|
|||||||
|
import { Vector2 } from './Vector2';
|
||||||
|
import { Direction } from './types/TouchData';
|
||||||
|
export class TouchRing {
|
||||||
|
constructor(parent, size) {
|
||||||
|
this.parent = parent;
|
||||||
|
this.size = size;
|
||||||
|
this.isTouching = false;
|
||||||
|
this.element = document.createElement('div');
|
||||||
|
this.radius = size / 2;
|
||||||
|
this.center = new Vector2(this.radius, this.radius);
|
||||||
|
this.initElement();
|
||||||
|
this.initEvents();
|
||||||
|
}
|
||||||
|
initElement() {
|
||||||
|
this.element.style.width = `${this.size}px`;
|
||||||
|
this.element.style.height = `${this.size}px`;
|
||||||
|
this.element.style.borderRadius = '50%';
|
||||||
|
this.element.style.backgroundColor = '#5ab5da';
|
||||||
|
this.element.style.opacity = '0.7';
|
||||||
|
this.element.style.position = 'absolute';
|
||||||
|
this.element.style.left = '5%';
|
||||||
|
this.element.style.bottom = '5%';
|
||||||
|
this.element.style.touchAction = 'none';
|
||||||
|
this.parent.appendChild(this.element);
|
||||||
|
}
|
||||||
|
initEvents() {
|
||||||
|
this.element.addEventListener('touchstart', this.handleTouchStart.bind(this));
|
||||||
|
this.element.addEventListener('touchmove', this.handleTouchMove.bind(this));
|
||||||
|
this.element.addEventListener('touchend', this.handleTouchEnd.bind(this));
|
||||||
|
}
|
||||||
|
handleTouchStart(event) {
|
||||||
|
event.preventDefault();
|
||||||
|
this.isTouching = true;
|
||||||
|
}
|
||||||
|
handleTouchMove(event) {
|
||||||
|
if (!this.isTouching)
|
||||||
|
return;
|
||||||
|
event.preventDefault();
|
||||||
|
const touch = event.touches[0];
|
||||||
|
const rect = this.element.getBoundingClientRect();
|
||||||
|
const touchPos = new Vector2(touch.clientX - rect.left, touch.clientY - rect.top);
|
||||||
|
const touchData = this.calculateTouchData(touchPos);
|
||||||
|
this.dispatchTouchEvent(touchData);
|
||||||
|
}
|
||||||
|
handleTouchEnd() {
|
||||||
|
this.isTouching = false;
|
||||||
|
this.dispatchTouchEvent({
|
||||||
|
direction: Direction.None,
|
||||||
|
speedFactor: 1,
|
||||||
|
normalizedPosition: new Vector2()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
calculateTouchData(touchPos) {
|
||||||
|
const offset = touchPos.subtract(this.center);
|
||||||
|
const distance = offset.length();
|
||||||
|
const clampedDistance = Math.min(distance, this.radius);
|
||||||
|
// 计算速度因子
|
||||||
|
const speedFactor = 0.8 + (clampedDistance / this.radius) * 0.4;
|
||||||
|
// 计算方向
|
||||||
|
const angle = Math.atan2(offset.y, offset.x);
|
||||||
|
const direction = this.getDirectionFromAngle(angle);
|
||||||
|
// 归一化位置
|
||||||
|
const normalizedPosition = offset.scale(1 / this.radius);
|
||||||
|
return {
|
||||||
|
direction,
|
||||||
|
speedFactor,
|
||||||
|
normalizedPosition
|
||||||
|
};
|
||||||
|
}
|
||||||
|
getDirectionFromAngle(angle) {
|
||||||
|
const pi = Math.PI;
|
||||||
|
if (angle >= -pi / 8 && angle < pi / 8)
|
||||||
|
return Direction.Right;
|
||||||
|
if (angle >= pi / 8 && angle < 3 * pi / 8)
|
||||||
|
return Direction.UpRight;
|
||||||
|
if (angle >= 3 * pi / 8 && angle < 5 * pi / 8)
|
||||||
|
return Direction.Up;
|
||||||
|
if (angle >= 5 * pi / 8 && angle < 7 * pi / 8)
|
||||||
|
return Direction.UpLeft;
|
||||||
|
if (angle >= 7 * pi / 8 || angle < -7 * pi / 8)
|
||||||
|
return Direction.Left;
|
||||||
|
if (angle >= -7 * pi / 8 && angle < -5 * pi / 8)
|
||||||
|
return Direction.DownLeft;
|
||||||
|
if (angle >= -5 * pi / 8 && angle < -3 * pi / 8)
|
||||||
|
return Direction.Down;
|
||||||
|
if (angle >= -3 * pi / 8 && angle < -pi / 8)
|
||||||
|
return Direction.DownRight;
|
||||||
|
return Direction.None;
|
||||||
|
}
|
||||||
|
dispatchTouchEvent(data) {
|
||||||
|
const event = new CustomEvent('touchChange', { detail: data });
|
||||||
|
this.element.dispatchEvent(event);
|
||||||
|
}
|
||||||
|
}
|
40
dist/core/Vector2.js
vendored
Normal file
40
dist/core/Vector2.js
vendored
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
export class Vector2 {
|
||||||
|
constructor(x = 0, y = 0) {
|
||||||
|
this.x = x;
|
||||||
|
this.y = y;
|
||||||
|
}
|
||||||
|
// 向量加法
|
||||||
|
add(v) {
|
||||||
|
return new Vector2(this.x + v.x, this.y + v.y);
|
||||||
|
}
|
||||||
|
// 向量减法
|
||||||
|
subtract(v) {
|
||||||
|
return new Vector2(this.x - v.x, this.y - v.y);
|
||||||
|
}
|
||||||
|
// 向量缩放
|
||||||
|
scale(scalar) {
|
||||||
|
return new Vector2(this.x * scalar, this.y * scalar);
|
||||||
|
}
|
||||||
|
// 向量归一化
|
||||||
|
normalize() {
|
||||||
|
const len = this.length();
|
||||||
|
return len > 0 ? this.scale(1 / len) : new Vector2();
|
||||||
|
}
|
||||||
|
// 向量长度
|
||||||
|
length() {
|
||||||
|
return Math.sqrt(this.x * this.x + this.y * this.y);
|
||||||
|
}
|
||||||
|
// 计算两点间距离
|
||||||
|
distanceTo(v) {
|
||||||
|
return this.subtract(v).length();
|
||||||
|
}
|
||||||
|
// 克隆向量
|
||||||
|
clone() {
|
||||||
|
return new Vector2(this.x, this.y);
|
||||||
|
}
|
||||||
|
// 限制向量长度
|
||||||
|
clampLength(maxLength) {
|
||||||
|
const len = this.length();
|
||||||
|
return len > maxLength ? this.normalize().scale(maxLength) : this.clone();
|
||||||
|
}
|
||||||
|
}
|
12
dist/core/types/Direction.js
vendored
Normal file
12
dist/core/types/Direction.js
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
export var Direction;
|
||||||
|
(function (Direction) {
|
||||||
|
Direction["None"] = "none";
|
||||||
|
Direction["Up"] = "up";
|
||||||
|
Direction["Down"] = "down";
|
||||||
|
Direction["Left"] = "left";
|
||||||
|
Direction["Right"] = "right";
|
||||||
|
Direction["UpLeft"] = "up-left";
|
||||||
|
Direction["UpRight"] = "up-right";
|
||||||
|
Direction["DownLeft"] = "down-left";
|
||||||
|
Direction["DownRight"] = "down-right";
|
||||||
|
})(Direction || (Direction = {}));
|
12
dist/core/types/TouchData.js
vendored
Normal file
12
dist/core/types/TouchData.js
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
export var Direction;
|
||||||
|
(function (Direction) {
|
||||||
|
Direction["Up"] = "up";
|
||||||
|
Direction["Down"] = "down";
|
||||||
|
Direction["Left"] = "left";
|
||||||
|
Direction["Right"] = "right";
|
||||||
|
Direction["UpLeft"] = "up-left";
|
||||||
|
Direction["UpRight"] = "up-right";
|
||||||
|
Direction["DownLeft"] = "down-left";
|
||||||
|
Direction["DownRight"] = "down-right";
|
||||||
|
Direction["None"] = "none";
|
||||||
|
})(Direction || (Direction = {}));
|
12
dist/index.js
vendored
Normal file
12
dist/index.js
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
import { GameBox } from './core/GameBox.js';
|
||||||
|
const gameContainer = document.getElementById('gameContainer') || document.body;
|
||||||
|
const gameBox = new GameBox({
|
||||||
|
aspectRatio: 16 / 9,
|
||||||
|
backgroundColor: '#ffffff',
|
||||||
|
borderColor: '#000000',
|
||||||
|
element: gameContainer
|
||||||
|
});
|
||||||
|
// 处理窗口关闭
|
||||||
|
window.addEventListener('beforeunload', () => {
|
||||||
|
gameBox.destroy();
|
||||||
|
});
|
185
dist/matrix-demo.js
vendored
185
dist/matrix-demo.js
vendored
@ -1,185 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
class Matrix3 {
|
|
||||||
constructor() {
|
|
||||||
this.elements = [
|
|
||||||
1, 0, 0,
|
|
||||||
0, 1, 0,
|
|
||||||
0, 0, 1
|
|
||||||
];
|
|
||||||
}
|
|
||||||
multiply(m) {
|
|
||||||
const a = this.elements;
|
|
||||||
const b = m.elements;
|
|
||||||
return new Matrix3().set([
|
|
||||||
a[0] * b[0] + a[1] * b[3] + a[2] * b[6],
|
|
||||||
a[0] * b[1] + a[1] * b[4] + a[2] * b[7],
|
|
||||||
a[0] * b[2] + a[1] * b[5] + a[2] * b[8],
|
|
||||||
a[3] * b[0] + a[4] * b[3] + a[5] * b[6],
|
|
||||||
a[3] * b[1] + a[4] * b[4] + a[5] * b[7],
|
|
||||||
a[3] * b[2] + a[4] * b[5] + a[5] * b[8],
|
|
||||||
a[6] * b[0] + a[7] * b[3] + a[8] * b[6],
|
|
||||||
a[6] * b[1] + a[7] * b[4] + a[8] * b[7],
|
|
||||||
a[6] * b[2] + a[7] * b[5] + a[8] * b[8],
|
|
||||||
]);
|
|
||||||
}
|
|
||||||
translate(tx, ty) {
|
|
||||||
return this.multiply(new Matrix3().set([
|
|
||||||
1, 0, tx,
|
|
||||||
0, 1, ty,
|
|
||||||
0, 0, 1
|
|
||||||
]));
|
|
||||||
}
|
|
||||||
scale(sx, sy) {
|
|
||||||
return this.multiply(new Matrix3().set([
|
|
||||||
sx, 0, 0,
|
|
||||||
0, sy, 0,
|
|
||||||
0, 0, 1
|
|
||||||
]));
|
|
||||||
}
|
|
||||||
rotate(angle) {
|
|
||||||
const c = Math.cos(angle);
|
|
||||||
const s = Math.sin(angle);
|
|
||||||
return this.multiply(new Matrix3().set([
|
|
||||||
c, -s, 0,
|
|
||||||
s, c, 0,
|
|
||||||
0, 0, 1
|
|
||||||
]));
|
|
||||||
}
|
|
||||||
set(values) {
|
|
||||||
this.elements = [...values];
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
transform(ctx) {
|
|
||||||
const [a, b, c, d, e, f] = [
|
|
||||||
this.elements[0],
|
|
||||||
this.elements[3],
|
|
||||||
this.elements[1],
|
|
||||||
this.elements[4],
|
|
||||||
this.elements[2],
|
|
||||||
this.elements[5]
|
|
||||||
];
|
|
||||||
//console.log('Applying transform:', { a, b, c, d, e, f });
|
|
||||||
ctx.transform(a, b, c, d, e, f);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
class MapDemo {
|
|
||||||
constructor() {
|
|
||||||
this.matrix = new Matrix3();
|
|
||||||
this.canvas = document.createElement('canvas');
|
|
||||||
this.ctx = this.canvas.getContext('2d');
|
|
||||||
this.initCanvas();
|
|
||||||
this.initControls();
|
|
||||||
this.draw();
|
|
||||||
}
|
|
||||||
initCanvas() {
|
|
||||||
this.canvas.width = 800;
|
|
||||||
this.canvas.height = 600;
|
|
||||||
this.canvas.style.border = '1px solid black';
|
|
||||||
document.body.appendChild(this.canvas);
|
|
||||||
const dpr = window.devicePixelRatio || 1;
|
|
||||||
this.canvas.width *= dpr;
|
|
||||||
this.canvas.height *= dpr;
|
|
||||||
this.ctx.scale(dpr, dpr);
|
|
||||||
}
|
|
||||||
initControls() {
|
|
||||||
document.addEventListener('keydown', (e) => {
|
|
||||||
//console.log('Key pressed:', e.key);
|
|
||||||
const step = 20;
|
|
||||||
const scaleFactor = 0.1;
|
|
||||||
const rotateAngle = Math.PI / 18;
|
|
||||||
//console.log('Before transform - Matrix:', this.matrix);
|
|
||||||
switch (e.key.toLowerCase()) {
|
|
||||||
case 'w':
|
|
||||||
case 'arrowup':
|
|
||||||
this.matrix = this.matrix.translate(0, -step);
|
|
||||||
break;
|
|
||||||
case 's':
|
|
||||||
case 'arrowdown':
|
|
||||||
this.matrix = this.matrix.translate(0, step);
|
|
||||||
break;
|
|
||||||
case 'a':
|
|
||||||
case 'arrowleft':
|
|
||||||
this.matrix = this.matrix.translate(-step, 0);
|
|
||||||
break;
|
|
||||||
case 'd':
|
|
||||||
case 'arrowright':
|
|
||||||
this.matrix = this.matrix.translate(step, 0);
|
|
||||||
break;
|
|
||||||
case 'z':
|
|
||||||
this.matrix = this.matrix.scale(1 + scaleFactor, 1 + scaleFactor);
|
|
||||||
break;
|
|
||||||
case 'x':
|
|
||||||
this.matrix = this.matrix.scale(1 - scaleFactor, 1 - scaleFactor);
|
|
||||||
break;
|
|
||||||
case 'q':
|
|
||||||
this.matrix = this.matrix.rotate(-rotateAngle);
|
|
||||||
break;
|
|
||||||
case 'e':
|
|
||||||
this.matrix = this.matrix.rotate(rotateAngle);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
//console.log('After transform - Matrix:', this.matrix);
|
|
||||||
e.preventDefault();
|
|
||||||
this.draw();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
drawGrid() {
|
|
||||||
const ctx = this.ctx;
|
|
||||||
ctx.strokeStyle = '#ddd';
|
|
||||||
ctx.lineWidth = 1;
|
|
||||||
// 绘制网格
|
|
||||||
for (let x = -500; x <= 500; x += 50) {
|
|
||||||
ctx.beginPath();
|
|
||||||
ctx.moveTo(x, -500);
|
|
||||||
ctx.lineTo(x, 500);
|
|
||||||
ctx.stroke();
|
|
||||||
ctx.beginPath();
|
|
||||||
ctx.moveTo(-500, x);
|
|
||||||
ctx.lineTo(500, x);
|
|
||||||
ctx.stroke();
|
|
||||||
}
|
|
||||||
// 绘制坐标轴
|
|
||||||
ctx.strokeStyle = 'black';
|
|
||||||
ctx.lineWidth = 2;
|
|
||||||
ctx.beginPath();
|
|
||||||
ctx.moveTo(-500, 0);
|
|
||||||
ctx.lineTo(500, 0);
|
|
||||||
ctx.moveTo(0, -500);
|
|
||||||
ctx.lineTo(0, 500);
|
|
||||||
ctx.stroke();
|
|
||||||
// 绘制坐标标签
|
|
||||||
ctx.fillStyle = 'black';
|
|
||||||
ctx.font = '12px Arial';
|
|
||||||
for (let x = -500; x <= 500; x += 100) {
|
|
||||||
ctx.fillText(x.toString(), x + 5, 15);
|
|
||||||
ctx.fillText(x.toString(), 5, -x + 5);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
draw() {
|
|
||||||
const ctx = this.ctx;
|
|
||||||
// 保存当前状态并重置变换
|
|
||||||
ctx.save();
|
|
||||||
ctx.setTransform(1, 0, 0, 1, 0, 0);
|
|
||||||
ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
|
|
||||||
ctx.restore();
|
|
||||||
// 应用当前矩阵变换
|
|
||||||
ctx.save();
|
|
||||||
this.matrix.transform(ctx);
|
|
||||||
// 绘制内容
|
|
||||||
this.drawGrid();
|
|
||||||
ctx.restore();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// 初始化代码
|
|
||||||
document.addEventListener('DOMContentLoaded', () => {
|
|
||||||
new MapDemo();
|
|
||||||
const info = document.createElement('div');
|
|
||||||
info.innerHTML = `
|
|
||||||
<h3>操作说明:</h3>
|
|
||||||
<p>平移: W/A/S/D 或 方向键</p>
|
|
||||||
<p>缩放: Z/X</p>
|
|
||||||
<p>旋转: Q/E</p>
|
|
||||||
`;
|
|
||||||
document.body.appendChild(info);
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=matrix-demo.js.map
|
|
1
dist/matrix-demo.js.map
vendored
1
dist/matrix-demo.js.map
vendored
File diff suppressed because one or more lines are too long
8
dist/utils/ScreenUtiles.js
vendored
Normal file
8
dist/utils/ScreenUtiles.js
vendored
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
export class ScreenAdapter {
|
||||||
|
static get viewportRatio() {
|
||||||
|
return window.innerWidth / window.innerHeight;
|
||||||
|
}
|
||||||
|
static isLandscape() {
|
||||||
|
return window.matchMedia("(orientation: landscape)").matches;
|
||||||
|
}
|
||||||
|
}
|
6
err.txt
Normal file
6
err.txt
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
|
||||||
|
> mobile-game@1.0.0 build
|
||||||
|
> tsc
|
||||||
|
|
||||||
|
src/core/GameBox.ts(35,29): error TS2339: Property 'element' does not exist on type '{ aspectRatio: number; backgroundColor: string; borderColor: string; }'.
|
||||||
|
src/index.ts(8,3): error TS2353: Object literal may only specify known properties, and 'element' does not exist in type '{ aspectRatio: number; backgroundColor: string; borderColor: string; }'.
|
31
index.html
31
index.html
@ -1,19 +1,32 @@
|
|||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html>
|
<html>
|
||||||
<head>
|
<head>
|
||||||
<title>Matrix Map Demo (TS)</title>
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0,
|
||||||
|
maximum-scale=1.0, user-scalable=no, viewport-fit=cover">
|
||||||
|
<title>Mobile Game</title>
|
||||||
<style>
|
<style>
|
||||||
body {
|
html, body {
|
||||||
display: flex;
|
margin: 0;
|
||||||
flex-direction: column;
|
padding: 0;
|
||||||
align-items: center;
|
overflow: hidden;
|
||||||
font-family: Arial, sans-serif;
|
width: 100vw;
|
||||||
|
height: 100vh;
|
||||||
|
position: fixed;
|
||||||
|
top: 0;
|
||||||
|
left: 0;
|
||||||
|
}
|
||||||
|
#gameContainer {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
position: absolute;
|
||||||
|
top: 0;
|
||||||
|
left: 0;
|
||||||
}
|
}
|
||||||
canvas { margin: 20px; }
|
|
||||||
div { margin: 10px; }
|
|
||||||
</style>
|
</style>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<script src="../dist/matrix-demo.js"></script>
|
<div id="gameContainer"></div>
|
||||||
|
<script type="module" src="/dist/index.js"></script>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
@ -1,11 +0,0 @@
|
|||||||
{
|
|
||||||
"test": "echo \"Error: no test specified\" && exit 1",
|
|
||||||
"build": "tsc",
|
|
||||||
"dev": "concurrently \"tsc -w\" \"live-server\""
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^22.12.0",
|
|
||||||
"typescript": "^5.7.3",
|
|
||||||
"concurrently": "^8.2.2",
|
|
||||||
"live-server": "^1.2.2"
|
|
||||||
}
|
|
@ -1,14 +0,0 @@
|
|||||||
"compilerOptions": {
|
|
||||||
"target": "ES6",
|
|
||||||
"module": "ES6",
|
|
||||||
"outDir": "./dist",
|
|
||||||
"rootDir": "./src",
|
|
||||||
"strict": true,
|
|
||||||
"esModuleInterop": true,
|
|
||||||
"skipLibCheck": true,
|
|
||||||
"forceConsistentCasingInFileNames": true
|
|
||||||
},
|
|
||||||
"include": [
|
|
||||||
"src/**/*"
|
|
||||||
]
|
|
||||||
}
|
|
1
node_modules/.bin/browserslist
generated
vendored
Symbolic link
1
node_modules/.bin/browserslist
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../browserslist/cli.js
|
1
node_modules/.bin/envinfo
generated
vendored
Symbolic link
1
node_modules/.bin/envinfo
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../envinfo/dist/cli.js
|
1
node_modules/.bin/flat
generated
vendored
Symbolic link
1
node_modules/.bin/flat
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../flat/cli.js
|
1
node_modules/.bin/import-local-fixture
generated
vendored
Symbolic link
1
node_modules/.bin/import-local-fixture
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../import-local/fixtures/cli.js
|
1
node_modules/.bin/is-docker
generated
vendored
Symbolic link
1
node_modules/.bin/is-docker
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../is-docker/cli.js
|
1
node_modules/.bin/node-which
generated
vendored
Symbolic link
1
node_modules/.bin/node-which
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../which/bin/node-which
|
1
node_modules/.bin/rc
generated
vendored
Symbolic link
1
node_modules/.bin/rc
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../rc/cli.js
|
1
node_modules/.bin/resolve
generated
vendored
Symbolic link
1
node_modules/.bin/resolve
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../resolve/bin/resolve
|
1
node_modules/.bin/serve
generated
vendored
Symbolic link
1
node_modules/.bin/serve
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../serve/build/main.js
|
1
node_modules/.bin/terser
generated
vendored
Symbolic link
1
node_modules/.bin/terser
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../terser/bin/terser
|
1
node_modules/.bin/update-browserslist-db
generated
vendored
Symbolic link
1
node_modules/.bin/update-browserslist-db
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../update-browserslist-db/cli.js
|
1
node_modules/.bin/webpack
generated
vendored
Symbolic link
1
node_modules/.bin/webpack
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../webpack/bin/webpack.js
|
1
node_modules/.bin/webpack-cli
generated
vendored
Symbolic link
1
node_modules/.bin/webpack-cli
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../webpack-cli/bin/cli.js
|
2261
node_modules/.package-lock.json
generated
vendored
2261
node_modules/.package-lock.json
generated
vendored
File diff suppressed because it is too large
Load Diff
21
node_modules/@discoveryjs/json-ext/LICENSE
generated
vendored
Normal file
21
node_modules/@discoveryjs/json-ext/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2020-2024 Roman Dvornov <rdvornov@gmail.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
290
node_modules/@discoveryjs/json-ext/README.md
generated
vendored
Normal file
290
node_modules/@discoveryjs/json-ext/README.md
generated
vendored
Normal file
@ -0,0 +1,290 @@
|
|||||||
|
# json-ext
|
||||||
|
|
||||||
|
[](https://www.npmjs.com/package/@discoveryjs/json-ext)
|
||||||
|
[](https://github.com/discoveryjs/json-ext/actions/workflows/ci.yml)
|
||||||
|
[](https://coveralls.io/github/discoveryjs/json-ext)
|
||||||
|
[](https://www.npmjs.com/package/@discoveryjs/json-ext)
|
||||||
|
|
||||||
|
A set of utilities designed to extend JSON's capabilities, especially for handling large JSON data (over 100MB) efficiently:
|
||||||
|
|
||||||
|
- [parseChunked()](#parsechunked) – Parses JSON incrementally; similar to [`JSON.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse), but processing JSON data in chunks.
|
||||||
|
- [stringifyChunked()](#stringifychunked) – Converts JavaScript objects to JSON incrementally; similar to [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns a generator that yields JSON strings in parts.
|
||||||
|
- [stringifyInfo()](#stringifyinfo) – Estimates the size of the `JSON.stringify()` result and identifies circular references without generating the JSON.
|
||||||
|
- [parseFromWebStream()](#parsefromwebstream) – A helper function to parse JSON chunks directly from a Web Stream.
|
||||||
|
- [createStringifyWebStream()](#createstringifywebstream) – A helper function to generate JSON data as a Web Stream.
|
||||||
|
|
||||||
|
### Key Features
|
||||||
|
|
||||||
|
- Optimized to handle large JSON data with minimal resource usage (see [benchmarks](./benchmarks/README.md))
|
||||||
|
- Works seamlessly with browsers, Node.js, Deno, and Bun
|
||||||
|
- Supports both Node.js and Web streams
|
||||||
|
- Available in both ESM and CommonJS
|
||||||
|
- TypeScript typings included
|
||||||
|
- No external dependencies
|
||||||
|
- Compact size: 9.4Kb (minified), 3.8Kb (min+gzip)
|
||||||
|
|
||||||
|
### Why json-ext?
|
||||||
|
|
||||||
|
- **Handles large JSON files**: Overcomes the limitations of V8 for strings larger than ~500MB, enabling the processing of huge JSON data.
|
||||||
|
- **Prevents main thread blocking**: Distributes parsing and stringifying over time, ensuring the main thread remains responsive during heavy JSON operations.
|
||||||
|
- **Reduces memory usage**: Traditional `JSON.parse()` and `JSON.stringify()` require loading entire data into memory, leading to high memory consumption and increased garbage collection pressure. `parseChunked()` and `stringifyChunked()` process data incrementally, optimizing memory usage.
|
||||||
|
- **Size estimation**: `stringifyInfo()` allows estimating the size of resulting JSON before generating it, enabling better decision-making for JSON generation strategies.
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install @discoveryjs/json-ext
|
||||||
|
```
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
### parseChunked()
|
||||||
|
|
||||||
|
Functions like [`JSON.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse), iterating over chunks to reconstruct the result object, and returns a [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise).
|
||||||
|
|
||||||
|
> Note: `reviver` parameter is not supported yet.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
function parseChunked(input: Iterable<Chunk> | AsyncIterable<Chunk>): Promise<any>;
|
||||||
|
function parseChunked(input: () => (Iterable<Chunk> | AsyncIterable<Chunk>)): Promise<any>;
|
||||||
|
|
||||||
|
type Chunk = string | Buffer | Uint8Array;
|
||||||
|
```
|
||||||
|
|
||||||
|
[Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#parse-chunked)
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { parseChunked } from '@discoveryjs/json-ext';
|
||||||
|
|
||||||
|
const data = await parseChunked(chunkEmitter);
|
||||||
|
```
|
||||||
|
|
||||||
|
Parameter `chunkEmitter` can be an iterable or async iterable that iterates over chunks, or a function returning such a value. A chunk can be a `string`, `Uint8Array`, or Node.js `Buffer`.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
- Generator:
|
||||||
|
```js
|
||||||
|
parseChunked(function*() {
|
||||||
|
yield '{ "hello":';
|
||||||
|
yield Buffer.from(' "wor'); // Node.js only
|
||||||
|
yield new TextEncoder().encode('ld" }'); // returns Uint8Array
|
||||||
|
});
|
||||||
|
```
|
||||||
|
- Async generator:
|
||||||
|
```js
|
||||||
|
parseChunked(async function*() {
|
||||||
|
for await (const chunk of someAsyncSource) {
|
||||||
|
yield chunk;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
```
|
||||||
|
- Array:
|
||||||
|
```js
|
||||||
|
parseChunked(['{ "hello":', ' "world"}'])
|
||||||
|
```
|
||||||
|
- Function returning iterable:
|
||||||
|
```js
|
||||||
|
parseChunked(() => ['{ "hello":', ' "world"}'])
|
||||||
|
```
|
||||||
|
- Node.js [`Readable`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) stream:
|
||||||
|
```js
|
||||||
|
import fs from 'node:fs';
|
||||||
|
|
||||||
|
parseChunked(fs.createReadStream('path/to/file.json'))
|
||||||
|
```
|
||||||
|
- Web stream (e.g., using [fetch()](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)):
|
||||||
|
> Note: Iterability for Web streams was added later in the Web platform, not all environments support it. Consider using `parseFromWebStream()` for broader compatibility.
|
||||||
|
```js
|
||||||
|
const response = await fetch('https://example.com/data.json');
|
||||||
|
const data = await parseChunked(response.body); // body is ReadableStream
|
||||||
|
```
|
||||||
|
|
||||||
|
### stringifyChunked()
|
||||||
|
|
||||||
|
Functions like [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns a generator yielding strings instead of a single string.
|
||||||
|
|
||||||
|
> Note: Returns `"null"` when `JSON.stringify()` returns `undefined` (since a chunk cannot be `undefined`).
|
||||||
|
|
||||||
|
```ts
|
||||||
|
function stringifyChunked(value: any, replacer?: Replacer, space?: Space): Generator<string, void, unknown>;
|
||||||
|
function stringifyChunked(value: any, options: StringifyOptions): Generator<string, void, unknown>;
|
||||||
|
|
||||||
|
type Replacer =
|
||||||
|
| ((this: any, key: string, value: any) => any)
|
||||||
|
| (string | number)[]
|
||||||
|
| null;
|
||||||
|
type Space = string | number | null;
|
||||||
|
type StringifyOptions = {
|
||||||
|
replacer?: Replacer;
|
||||||
|
space?: Space;
|
||||||
|
highWaterMark?: number;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
[Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#stream-stringifying)
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
- Getting an array of chunks:
|
||||||
|
```js
|
||||||
|
const chunks = [...stringifyChunked(data)];
|
||||||
|
```
|
||||||
|
- Iterating over chunks:
|
||||||
|
```js
|
||||||
|
for (const chunk of stringifyChunked(data)) {
|
||||||
|
console.log(chunk);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
- Specifying the minimum size of a chunk with `highWaterMark` option:
|
||||||
|
```js
|
||||||
|
const data = [1, "hello world", 42];
|
||||||
|
|
||||||
|
console.log([...stringifyChunked(data)]); // default 16kB
|
||||||
|
// ['[1,"hello world",42]']
|
||||||
|
|
||||||
|
console.log([...stringifyChunked(data, { highWaterMark: 16 })]);
|
||||||
|
// ['[1,"hello world"', ',42]']
|
||||||
|
|
||||||
|
console.log([...stringifyChunked(data, { highWaterMark: 1 })]);
|
||||||
|
// ['[1', ',"hello world"', ',42', ']']
|
||||||
|
```
|
||||||
|
- Streaming into a stream with a `Promise` (modern Node.js):
|
||||||
|
```js
|
||||||
|
import { pipeline } from 'node:stream/promises';
|
||||||
|
import fs from 'node:fs';
|
||||||
|
|
||||||
|
await pipeline(
|
||||||
|
stringifyChunked(data),
|
||||||
|
fs.createWriteStream('path/to/file.json')
|
||||||
|
);
|
||||||
|
```
|
||||||
|
- Wrapping into a `Promise` streaming into a stream (legacy Node.js):
|
||||||
|
```js
|
||||||
|
import { Readable } from 'node:stream';
|
||||||
|
|
||||||
|
new Promise((resolve, reject) => {
|
||||||
|
Readable.from(stringifyChunked(data))
|
||||||
|
.on('error', reject)
|
||||||
|
.pipe(stream)
|
||||||
|
.on('error', reject)
|
||||||
|
.on('finish', resolve);
|
||||||
|
});
|
||||||
|
```
|
||||||
|
- Writing into a file synchronously:
|
||||||
|
> Note: Slower than `JSON.stringify()` but uses much less heap space and has no limitation on string length
|
||||||
|
```js
|
||||||
|
import fs from 'node:fs';
|
||||||
|
|
||||||
|
const fd = fs.openSync('output.json', 'w');
|
||||||
|
|
||||||
|
for (const chunk of stringifyChunked(data)) {
|
||||||
|
fs.writeFileSync(fd, chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
fs.closeSync(fd);
|
||||||
|
```
|
||||||
|
- Using with fetch (JSON streaming):
|
||||||
|
> Note: This feature has limited support in browsers, see [Streaming requests with the fetch API](https://developer.chrome.com/docs/capabilities/web-apis/fetch-streaming-requests)
|
||||||
|
|
||||||
|
> Note: `ReadableStream.from()` has limited [support in browsers](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/from_static), use [`createStringifyWebStream()`](#createstringifywebstream) instead.
|
||||||
|
```js
|
||||||
|
fetch('http://example.com', {
|
||||||
|
method: 'POST',
|
||||||
|
duplex: 'half',
|
||||||
|
body: ReadableStream.from(stringifyChunked(data))
|
||||||
|
});
|
||||||
|
```
|
||||||
|
- Wrapping into `ReadableStream`:
|
||||||
|
> Note: Use `ReadableStream.from()` or [`createStringifyWebStream()`](#createstringifywebstream) when no extra logic is needed
|
||||||
|
```js
|
||||||
|
new ReadableStream({
|
||||||
|
start() {
|
||||||
|
this.generator = stringifyChunked(data);
|
||||||
|
},
|
||||||
|
pull(controller) {
|
||||||
|
const { value, done } = this.generator.next();
|
||||||
|
|
||||||
|
if (done) {
|
||||||
|
controller.close();
|
||||||
|
} else {
|
||||||
|
controller.enqueue(value);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
cancel() {
|
||||||
|
this.generator = null;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### stringifyInfo()
|
||||||
|
|
||||||
|
```ts
|
||||||
|
export function stringifyInfo(value: any, replacer?: Replacer, space?: Space): StringifyInfoResult;
|
||||||
|
export function stringifyInfo(value: any, options?: StringifyInfoOptions): StringifyInfoResult;
|
||||||
|
|
||||||
|
type StringifyInfoOptions = {
|
||||||
|
replacer?: Replacer;
|
||||||
|
space?: Space;
|
||||||
|
continueOnCircular?: boolean;
|
||||||
|
}
|
||||||
|
type StringifyInfoResult = {
|
||||||
|
bytes: number; // size of JSON in bytes
|
||||||
|
spaceBytes: number; // size of white spaces in bytes (when space option used)
|
||||||
|
circular: object[]; // list of circular references
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Functions like [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns an object with the expected overall size of the stringify operation and a list of circular references.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { stringifyInfo } from '@discoveryjs/json-ext';
|
||||||
|
|
||||||
|
console.log(stringifyInfo({ test: true }, null, 4));
|
||||||
|
// {
|
||||||
|
// bytes: 20, // Buffer.byteLength('{\n "test": true\n}')
|
||||||
|
// spaceBytes: 7,
|
||||||
|
// circular: []
|
||||||
|
// }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
##### continueOnCircular
|
||||||
|
|
||||||
|
Type: `Boolean`
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
Determines whether to continue collecting info for a value when a circular reference is found. Setting this option to `true` allows finding all circular references.
|
||||||
|
|
||||||
|
### parseFromWebStream()
|
||||||
|
|
||||||
|
A helper function to consume JSON from a Web Stream. You can use `parseChunked(stream)` instead, but `@@asyncIterator` on `ReadableStream` has limited support in browsers (see [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) compatibility table).
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { parseFromWebStream } from '@discoveryjs/json-ext';
|
||||||
|
|
||||||
|
const data = await parseFromWebStream(readableStream);
|
||||||
|
// equivalent to (when ReadableStream[@@asyncIterator] is supported):
|
||||||
|
// await parseChunked(readableStream);
|
||||||
|
```
|
||||||
|
|
||||||
|
### createStringifyWebStream()
|
||||||
|
|
||||||
|
A helper function to convert `stringifyChunked()` into a `ReadableStream` (Web Stream). You can use `ReadableStream.from()` instead, but this method has limited support in browsers (see [ReadableStream.from()](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/from_static) compatibility table).
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createStringifyWebStream } from '@discoveryjs/json-ext';
|
||||||
|
|
||||||
|
createStringifyWebStream({ test: true });
|
||||||
|
// equivalent to (when ReadableStream.from() is supported):
|
||||||
|
// ReadableStream.from(stringifyChunked({ test: true }))
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
14
node_modules/@discoveryjs/json-ext/cjs/index.cjs
generated
vendored
Normal file
14
node_modules/@discoveryjs/json-ext/cjs/index.cjs
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
const parseChunked = require('./parse-chunked.cjs');
|
||||||
|
const stringifyChunked = require('./stringify-chunked.cjs');
|
||||||
|
const stringifyInfo = require('./stringify-info.cjs');
|
||||||
|
const webStreams = require('./web-streams.cjs');
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
exports.parseChunked = parseChunked.parseChunked;
|
||||||
|
exports.stringifyChunked = stringifyChunked.stringifyChunked;
|
||||||
|
exports.stringifyInfo = stringifyInfo.stringifyInfo;
|
||||||
|
exports.createStringifyWebStream = webStreams.createStringifyWebStream;
|
||||||
|
exports.parseFromWebStream = webStreams.parseFromWebStream;
|
355
node_modules/@discoveryjs/json-ext/cjs/parse-chunked.cjs
generated
vendored
Normal file
355
node_modules/@discoveryjs/json-ext/cjs/parse-chunked.cjs
generated
vendored
Normal file
@ -0,0 +1,355 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
const utils = require('./utils.cjs');
|
||||||
|
|
||||||
|
const STACK_OBJECT = 1;
|
||||||
|
const STACK_ARRAY = 2;
|
||||||
|
const decoder = new TextDecoder();
|
||||||
|
|
||||||
|
function adjustPosition(error, parser) {
|
||||||
|
if (error.name === 'SyntaxError' && parser.jsonParseOffset) {
|
||||||
|
error.message = error.message.replace(/at position (\d+)/, (_, pos) =>
|
||||||
|
'at position ' + (Number(pos) + parser.jsonParseOffset)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
|
||||||
|
function append(array, elements) {
|
||||||
|
// Note: Avoid to use array.push(...elements) since it may lead to
|
||||||
|
// "RangeError: Maximum call stack size exceeded" for a long arrays
|
||||||
|
const initialLength = array.length;
|
||||||
|
array.length += elements.length;
|
||||||
|
|
||||||
|
for (let i = 0; i < elements.length; i++) {
|
||||||
|
array[initialLength + i] = elements[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function parseChunked(chunkEmitter) {
|
||||||
|
const iterable = typeof chunkEmitter === 'function'
|
||||||
|
? chunkEmitter()
|
||||||
|
: chunkEmitter;
|
||||||
|
|
||||||
|
if (utils.isIterable(iterable)) {
|
||||||
|
let parser = new ChunkParser();
|
||||||
|
|
||||||
|
try {
|
||||||
|
for await (const chunk of iterable) {
|
||||||
|
if (typeof chunk !== 'string' && !ArrayBuffer.isView(chunk)) {
|
||||||
|
throw new TypeError('Invalid chunk: Expected string, TypedArray or Buffer');
|
||||||
|
}
|
||||||
|
|
||||||
|
parser.push(chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
return parser.finish();
|
||||||
|
} catch (e) {
|
||||||
|
throw adjustPosition(e, parser);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new TypeError(
|
||||||
|
'Invalid chunk emitter: Expected an Iterable, AsyncIterable, generator, ' +
|
||||||
|
'async generator, or a function returning an Iterable or AsyncIterable'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
class ChunkParser {
|
||||||
|
constructor() {
|
||||||
|
this.value = undefined;
|
||||||
|
this.valueStack = null;
|
||||||
|
|
||||||
|
this.stack = new Array(100);
|
||||||
|
this.lastFlushDepth = 0;
|
||||||
|
this.flushDepth = 0;
|
||||||
|
this.stateString = false;
|
||||||
|
this.stateStringEscape = false;
|
||||||
|
this.pendingByteSeq = null;
|
||||||
|
this.pendingChunk = null;
|
||||||
|
this.chunkOffset = 0;
|
||||||
|
this.jsonParseOffset = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
parseAndAppend(fragment, wrap) {
|
||||||
|
// Append new entries or elements
|
||||||
|
if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
|
||||||
|
if (wrap) {
|
||||||
|
this.jsonParseOffset--;
|
||||||
|
fragment = '{' + fragment + '}';
|
||||||
|
}
|
||||||
|
|
||||||
|
Object.assign(this.valueStack.value, JSON.parse(fragment));
|
||||||
|
} else {
|
||||||
|
if (wrap) {
|
||||||
|
this.jsonParseOffset--;
|
||||||
|
fragment = '[' + fragment + ']';
|
||||||
|
}
|
||||||
|
|
||||||
|
append(this.valueStack.value, JSON.parse(fragment));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
prepareAddition(fragment) {
|
||||||
|
const { value } = this.valueStack;
|
||||||
|
const expectComma = Array.isArray(value)
|
||||||
|
? value.length !== 0
|
||||||
|
: Object.keys(value).length !== 0;
|
||||||
|
|
||||||
|
if (expectComma) {
|
||||||
|
// Skip a comma at the beginning of fragment, otherwise it would
|
||||||
|
// fail to parse
|
||||||
|
if (fragment[0] === ',') {
|
||||||
|
this.jsonParseOffset++;
|
||||||
|
return fragment.slice(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// When value (an object or array) is not empty and a fragment
|
||||||
|
// doesn't start with a comma, a single valid fragment starting
|
||||||
|
// is a closing bracket. If it's not, a prefix is adding to fail
|
||||||
|
// parsing. Otherwise, the sequence of chunks can be successfully
|
||||||
|
// parsed, although it should not, e.g. ["[{}", "{}]"]
|
||||||
|
if (fragment[0] !== '}' && fragment[0] !== ']') {
|
||||||
|
this.jsonParseOffset -= 3;
|
||||||
|
return '[[]' + fragment;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return fragment;
|
||||||
|
}
|
||||||
|
|
||||||
|
flush(chunk, start, end) {
|
||||||
|
let fragment = chunk.slice(start, end);
|
||||||
|
|
||||||
|
// Save position correction an error in JSON.parse() if any
|
||||||
|
this.jsonParseOffset = this.chunkOffset + start;
|
||||||
|
|
||||||
|
// Prepend pending chunk if any
|
||||||
|
if (this.pendingChunk !== null) {
|
||||||
|
fragment = this.pendingChunk + fragment;
|
||||||
|
this.jsonParseOffset -= this.pendingChunk.length;
|
||||||
|
this.pendingChunk = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.flushDepth === this.lastFlushDepth) {
|
||||||
|
// Depth didn't changed, so it's a root value or entry/element set
|
||||||
|
if (this.flushDepth > 0) {
|
||||||
|
this.parseAndAppend(this.prepareAddition(fragment), true);
|
||||||
|
} else {
|
||||||
|
// That's an entire value on a top level
|
||||||
|
this.value = JSON.parse(fragment);
|
||||||
|
this.valueStack = {
|
||||||
|
value: this.value,
|
||||||
|
prev: null
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else if (this.flushDepth > this.lastFlushDepth) {
|
||||||
|
// Add missed closing brackets/parentheses
|
||||||
|
for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
|
||||||
|
fragment += this.stack[i] === STACK_OBJECT ? '}' : ']';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.lastFlushDepth === 0) {
|
||||||
|
// That's a root value
|
||||||
|
this.value = JSON.parse(fragment);
|
||||||
|
this.valueStack = {
|
||||||
|
value: this.value,
|
||||||
|
prev: null
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
this.parseAndAppend(this.prepareAddition(fragment), true);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Move down to the depths to the last object/array, which is current now
|
||||||
|
for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
|
||||||
|
let value = this.valueStack.value;
|
||||||
|
|
||||||
|
if (this.stack[i - 1] === STACK_OBJECT) {
|
||||||
|
// find last entry
|
||||||
|
let key;
|
||||||
|
// eslint-disable-next-line curly
|
||||||
|
for (key in value);
|
||||||
|
value = value[key];
|
||||||
|
} else {
|
||||||
|
// last element
|
||||||
|
value = value[value.length - 1];
|
||||||
|
}
|
||||||
|
|
||||||
|
this.valueStack = {
|
||||||
|
value,
|
||||||
|
prev: this.valueStack
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else /* this.flushDepth < this.lastFlushDepth */ {
|
||||||
|
fragment = this.prepareAddition(fragment);
|
||||||
|
|
||||||
|
// Add missed opening brackets/parentheses
|
||||||
|
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
|
||||||
|
this.jsonParseOffset--;
|
||||||
|
fragment = (this.stack[i] === STACK_OBJECT ? '{' : '[') + fragment;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.parseAndAppend(fragment, false);
|
||||||
|
|
||||||
|
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
|
||||||
|
this.valueStack = this.valueStack.prev;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.lastFlushDepth = this.flushDepth;
|
||||||
|
}
|
||||||
|
|
||||||
|
push(chunk) {
|
||||||
|
if (typeof chunk !== 'string') {
|
||||||
|
// Suppose chunk is Buffer or Uint8Array
|
||||||
|
|
||||||
|
// Prepend uncompleted byte sequence if any
|
||||||
|
if (this.pendingByteSeq !== null) {
|
||||||
|
const origRawChunk = chunk;
|
||||||
|
chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
|
||||||
|
chunk.set(this.pendingByteSeq);
|
||||||
|
chunk.set(origRawChunk, this.pendingByteSeq.length);
|
||||||
|
this.pendingByteSeq = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// In case Buffer/Uint8Array, an input is encoded in UTF8
|
||||||
|
// Seek for parts of uncompleted UTF8 symbol on the ending
|
||||||
|
// This makes sense only if we expect more chunks and last char is not multi-bytes
|
||||||
|
if (chunk[chunk.length - 1] > 127) {
|
||||||
|
for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
|
||||||
|
const byte = chunk[chunk.length - 1 - seqLength];
|
||||||
|
|
||||||
|
// 10xxxxxx - 2nd, 3rd or 4th byte
|
||||||
|
// 110xxxxx – first byte of 2-byte sequence
|
||||||
|
// 1110xxxx - first byte of 3-byte sequence
|
||||||
|
// 11110xxx - first byte of 4-byte sequence
|
||||||
|
if (byte >> 6 === 3) {
|
||||||
|
seqLength++;
|
||||||
|
|
||||||
|
// If the sequence is really incomplete, then preserve it
|
||||||
|
// for the future chunk and cut off it from the current chunk
|
||||||
|
if ((seqLength !== 4 && byte >> 3 === 0b11110) ||
|
||||||
|
(seqLength !== 3 && byte >> 4 === 0b1110) ||
|
||||||
|
(seqLength !== 2 && byte >> 5 === 0b110)) {
|
||||||
|
this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
|
||||||
|
chunk = chunk.slice(0, -seqLength);
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert chunk to a string, since single decode per chunk
|
||||||
|
// is much effective than decode multiple small substrings
|
||||||
|
chunk = decoder.decode(chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
const chunkLength = chunk.length;
|
||||||
|
let lastFlushPoint = 0;
|
||||||
|
let flushPoint = 0;
|
||||||
|
|
||||||
|
// Main scan loop
|
||||||
|
scan: for (let i = 0; i < chunkLength; i++) {
|
||||||
|
if (this.stateString) {
|
||||||
|
for (; i < chunkLength; i++) {
|
||||||
|
if (this.stateStringEscape) {
|
||||||
|
this.stateStringEscape = false;
|
||||||
|
} else {
|
||||||
|
switch (chunk.charCodeAt(i)) {
|
||||||
|
case 0x22: /* " */
|
||||||
|
this.stateString = false;
|
||||||
|
continue scan;
|
||||||
|
|
||||||
|
case 0x5C: /* \ */
|
||||||
|
this.stateStringEscape = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
switch (chunk.charCodeAt(i)) {
|
||||||
|
case 0x22: /* " */
|
||||||
|
this.stateString = true;
|
||||||
|
this.stateStringEscape = false;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x2C: /* , */
|
||||||
|
flushPoint = i;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x7B: /* { */
|
||||||
|
// Open an object
|
||||||
|
flushPoint = i + 1;
|
||||||
|
this.stack[this.flushDepth++] = STACK_OBJECT;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x5B: /* [ */
|
||||||
|
// Open an array
|
||||||
|
flushPoint = i + 1;
|
||||||
|
this.stack[this.flushDepth++] = STACK_ARRAY;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x5D: /* ] */
|
||||||
|
case 0x7D: /* } */
|
||||||
|
// Close an object or array
|
||||||
|
flushPoint = i + 1;
|
||||||
|
this.flushDepth--;
|
||||||
|
|
||||||
|
if (this.flushDepth < this.lastFlushDepth) {
|
||||||
|
this.flush(chunk, lastFlushPoint, flushPoint);
|
||||||
|
lastFlushPoint = flushPoint;
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x09: /* \t */
|
||||||
|
case 0x0A: /* \n */
|
||||||
|
case 0x0D: /* \r */
|
||||||
|
case 0x20: /* space */
|
||||||
|
// Move points forward when they points on current position and it's a whitespace
|
||||||
|
if (lastFlushPoint === i) {
|
||||||
|
lastFlushPoint++;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (flushPoint === i) {
|
||||||
|
flushPoint++;
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (flushPoint > lastFlushPoint) {
|
||||||
|
this.flush(chunk, lastFlushPoint, flushPoint);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Produce pendingChunk if something left
|
||||||
|
if (flushPoint < chunkLength) {
|
||||||
|
if (this.pendingChunk !== null) {
|
||||||
|
// When there is already a pending chunk then no flush happened,
|
||||||
|
// appending entire chunk to pending one
|
||||||
|
this.pendingChunk += chunk;
|
||||||
|
} else {
|
||||||
|
// Create a pending chunk, it will start with non-whitespace since
|
||||||
|
// flushPoint was moved forward away from whitespaces on scan
|
||||||
|
this.pendingChunk = chunk.slice(flushPoint, chunkLength);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.chunkOffset += chunkLength;
|
||||||
|
}
|
||||||
|
|
||||||
|
finish() {
|
||||||
|
if (this.pendingChunk !== null) {
|
||||||
|
this.flush('', 0, 0);
|
||||||
|
this.pendingChunk = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.parseChunked = parseChunked;
|
175
node_modules/@discoveryjs/json-ext/cjs/stringify-chunked.cjs
generated
vendored
Normal file
175
node_modules/@discoveryjs/json-ext/cjs/stringify-chunked.cjs
generated
vendored
Normal file
@ -0,0 +1,175 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
const utils = require('./utils.cjs');
|
||||||
|
|
||||||
|
function encodeString(value) {
|
||||||
|
if (/[^\x20\x21\x23-\x5B\x5D-\uD799]/.test(value)) { // [^\x20-\uD799]|[\x22\x5c]
|
||||||
|
return JSON.stringify(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
return '"' + value + '"';
|
||||||
|
}
|
||||||
|
|
||||||
|
function* stringifyChunked(value, ...args) {
|
||||||
|
const { replacer, getKeys, space, ...options } = utils.normalizeStringifyOptions(...args);
|
||||||
|
const highWaterMark = Number(options.highWaterMark) || 0x4000; // 16kb by default
|
||||||
|
|
||||||
|
const keyStrings = new Map();
|
||||||
|
const stack = [];
|
||||||
|
const rootValue = { '': value };
|
||||||
|
let prevState = null;
|
||||||
|
let state = () => printEntry('', value);
|
||||||
|
let stateValue = rootValue;
|
||||||
|
let stateEmpty = true;
|
||||||
|
let stateKeys = [''];
|
||||||
|
let stateIndex = 0;
|
||||||
|
let buffer = '';
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
state();
|
||||||
|
|
||||||
|
if (buffer.length >= highWaterMark || prevState === null) {
|
||||||
|
// flush buffer
|
||||||
|
yield buffer;
|
||||||
|
buffer = '';
|
||||||
|
|
||||||
|
if (prevState === null) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function printObject() {
|
||||||
|
if (stateIndex === 0) {
|
||||||
|
stateKeys = getKeys(stateValue);
|
||||||
|
buffer += '{';
|
||||||
|
}
|
||||||
|
|
||||||
|
// when no keys left
|
||||||
|
if (stateIndex === stateKeys.length) {
|
||||||
|
buffer += space && !stateEmpty
|
||||||
|
? `\n${space.repeat(stack.length - 1)}}`
|
||||||
|
: '}';
|
||||||
|
|
||||||
|
popState();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const key = stateKeys[stateIndex++];
|
||||||
|
printEntry(key, stateValue[key]);
|
||||||
|
}
|
||||||
|
|
||||||
|
function printArray() {
|
||||||
|
if (stateIndex === 0) {
|
||||||
|
buffer += '[';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (stateIndex === stateValue.length) {
|
||||||
|
buffer += space && !stateEmpty
|
||||||
|
? `\n${space.repeat(stack.length - 1)}]`
|
||||||
|
: ']';
|
||||||
|
|
||||||
|
popState();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
printEntry(stateIndex, stateValue[stateIndex++]);
|
||||||
|
}
|
||||||
|
|
||||||
|
function printEntryPrelude(key) {
|
||||||
|
if (stateEmpty) {
|
||||||
|
stateEmpty = false;
|
||||||
|
} else {
|
||||||
|
buffer += ',';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (space && prevState !== null) {
|
||||||
|
buffer += `\n${space.repeat(stack.length)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (state === printObject) {
|
||||||
|
let keyString = keyStrings.get(key);
|
||||||
|
|
||||||
|
if (keyString === undefined) {
|
||||||
|
keyStrings.set(key, keyString = encodeString(key) + (space ? ': ' : ':'));
|
||||||
|
}
|
||||||
|
|
||||||
|
buffer += keyString;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function printEntry(key, value) {
|
||||||
|
value = utils.replaceValue(stateValue, key, value, replacer);
|
||||||
|
|
||||||
|
if (value === null || typeof value !== 'object') {
|
||||||
|
// primitive
|
||||||
|
if (state !== printObject || value !== undefined) {
|
||||||
|
printEntryPrelude(key);
|
||||||
|
pushPrimitive(value);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// If the visited set does not change after adding a value, then it is already in the set
|
||||||
|
if (stack.includes(value)) {
|
||||||
|
throw new TypeError('Converting circular structure to JSON');
|
||||||
|
}
|
||||||
|
|
||||||
|
printEntryPrelude(key);
|
||||||
|
stack.push(value);
|
||||||
|
|
||||||
|
pushState();
|
||||||
|
state = Array.isArray(value) ? printArray : printObject;
|
||||||
|
stateValue = value;
|
||||||
|
stateEmpty = true;
|
||||||
|
stateIndex = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function pushPrimitive(value) {
|
||||||
|
switch (typeof value) {
|
||||||
|
case 'string':
|
||||||
|
buffer += encodeString(value);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'number':
|
||||||
|
buffer += Number.isFinite(value) ? String(value) : 'null';
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'boolean':
|
||||||
|
buffer += value ? 'true' : 'false';
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'undefined':
|
||||||
|
case 'object': // typeof null === 'object'
|
||||||
|
buffer += 'null';
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
throw new TypeError(`Do not know how to serialize a ${value.constructor?.name || typeof value}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function pushState() {
|
||||||
|
prevState = {
|
||||||
|
keys: stateKeys,
|
||||||
|
index: stateIndex,
|
||||||
|
prev: prevState
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function popState() {
|
||||||
|
stack.pop();
|
||||||
|
const value = stack.length > 0 ? stack[stack.length - 1] : rootValue;
|
||||||
|
|
||||||
|
// restore state
|
||||||
|
state = Array.isArray(value) ? printArray : printObject;
|
||||||
|
stateValue = value;
|
||||||
|
stateEmpty = false;
|
||||||
|
stateKeys = prevState.keys;
|
||||||
|
stateIndex = prevState.index;
|
||||||
|
|
||||||
|
// pop state
|
||||||
|
prevState = prevState.prev;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.stringifyChunked = stringifyChunked;
|
250
node_modules/@discoveryjs/json-ext/cjs/stringify-info.cjs
generated
vendored
Normal file
250
node_modules/@discoveryjs/json-ext/cjs/stringify-info.cjs
generated
vendored
Normal file
@ -0,0 +1,250 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
const utils = require('./utils.cjs');
|
||||||
|
|
||||||
|
const hasOwn = typeof Object.hasOwn === 'function'
|
||||||
|
? Object.hasOwn
|
||||||
|
: (object, key) => Object.hasOwnProperty.call(object, key);
|
||||||
|
|
||||||
|
// https://tc39.es/ecma262/#table-json-single-character-escapes
|
||||||
|
const escapableCharCodeSubstitution = { // JSON Single Character Escape Sequences
|
||||||
|
0x08: '\\b',
|
||||||
|
0x09: '\\t',
|
||||||
|
0x0a: '\\n',
|
||||||
|
0x0c: '\\f',
|
||||||
|
0x0d: '\\r',
|
||||||
|
0x22: '\\\"',
|
||||||
|
0x5c: '\\\\'
|
||||||
|
};
|
||||||
|
|
||||||
|
const charLength2048 = Uint8Array.from({ length: 2048 }, (_, code) => {
|
||||||
|
if (hasOwn(escapableCharCodeSubstitution, code)) {
|
||||||
|
return 2; // \X
|
||||||
|
}
|
||||||
|
|
||||||
|
if (code < 0x20) {
|
||||||
|
return 6; // \uXXXX
|
||||||
|
}
|
||||||
|
|
||||||
|
return code < 128 ? 1 : 2; // UTF8 bytes
|
||||||
|
});
|
||||||
|
|
||||||
|
function isLeadingSurrogate(code) {
|
||||||
|
return code >= 0xD800 && code <= 0xDBFF;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isTrailingSurrogate(code) {
|
||||||
|
return code >= 0xDC00 && code <= 0xDFFF;
|
||||||
|
}
|
||||||
|
|
||||||
|
function stringLength(str) {
|
||||||
|
// Fast path to compute length when a string contains only characters encoded as single bytes
|
||||||
|
if (!/[^\x20\x21\x23-\x5B\x5D-\x7F]/.test(str)) {
|
||||||
|
return str.length + 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
let len = 0;
|
||||||
|
let prevLeadingSurrogate = false;
|
||||||
|
|
||||||
|
for (let i = 0; i < str.length; i++) {
|
||||||
|
const code = str.charCodeAt(i);
|
||||||
|
|
||||||
|
if (code < 2048) {
|
||||||
|
len += charLength2048[code];
|
||||||
|
} else if (isLeadingSurrogate(code)) {
|
||||||
|
len += 6; // \uXXXX since no pair with trailing surrogate yet
|
||||||
|
prevLeadingSurrogate = true;
|
||||||
|
continue;
|
||||||
|
} else if (isTrailingSurrogate(code)) {
|
||||||
|
len = prevLeadingSurrogate
|
||||||
|
? len - 2 // surrogate pair (4 bytes), since we calculate prev leading surrogate as 6 bytes, substruct 2 bytes
|
||||||
|
: len + 6; // \uXXXX
|
||||||
|
} else {
|
||||||
|
len += 3; // code >= 2048 is 3 bytes length for UTF8
|
||||||
|
}
|
||||||
|
|
||||||
|
prevLeadingSurrogate = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return len + 2; // +2 for quotes
|
||||||
|
}
|
||||||
|
|
||||||
|
// avoid producing a string from a number
|
||||||
|
function intLength(num) {
|
||||||
|
let len = 0;
|
||||||
|
|
||||||
|
if (num < 0) {
|
||||||
|
len = 1;
|
||||||
|
num = -num;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (num >= 1e9) {
|
||||||
|
len += 9;
|
||||||
|
num = (num - num % 1e9) / 1e9;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (num >= 1e4) {
|
||||||
|
if (num >= 1e6) {
|
||||||
|
return len + (num >= 1e8
|
||||||
|
? 9
|
||||||
|
: num >= 1e7 ? 8 : 7
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return len + (num >= 1e5 ? 6 : 5);
|
||||||
|
}
|
||||||
|
|
||||||
|
return len + (num >= 1e2
|
||||||
|
? num >= 1e3 ? 4 : 3
|
||||||
|
: num >= 10 ? 2 : 1
|
||||||
|
);
|
||||||
|
}
|
||||||
|
function primitiveLength(value) {
|
||||||
|
switch (typeof value) {
|
||||||
|
case 'string':
|
||||||
|
return stringLength(value);
|
||||||
|
|
||||||
|
case 'number':
|
||||||
|
return Number.isFinite(value)
|
||||||
|
? Number.isInteger(value)
|
||||||
|
? intLength(value)
|
||||||
|
: String(value).length
|
||||||
|
: 4 /* null */;
|
||||||
|
|
||||||
|
case 'boolean':
|
||||||
|
return value ? 4 /* true */ : 5 /* false */;
|
||||||
|
|
||||||
|
case 'undefined':
|
||||||
|
case 'object':
|
||||||
|
return 4; /* null */
|
||||||
|
|
||||||
|
default:
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function stringifyInfo(value, ...args) {
|
||||||
|
const { replacer, getKeys, ...options } = utils.normalizeStringifyOptions(...args);
|
||||||
|
const continueOnCircular = Boolean(options.continueOnCircular);
|
||||||
|
const space = options.space?.length || 0;
|
||||||
|
|
||||||
|
const keysLength = new Map();
|
||||||
|
const visited = new Map();
|
||||||
|
const circular = new Set();
|
||||||
|
const stack = [];
|
||||||
|
const root = { '': value };
|
||||||
|
let stop = false;
|
||||||
|
let bytes = 0;
|
||||||
|
let spaceBytes = 0;
|
||||||
|
let objects = 0;
|
||||||
|
|
||||||
|
walk(root, '', value);
|
||||||
|
|
||||||
|
// when value is undefined or replaced for undefined
|
||||||
|
if (bytes === 0) {
|
||||||
|
bytes += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
bytes: isNaN(bytes) ? Infinity : bytes + spaceBytes,
|
||||||
|
spaceBytes: space > 0 && isNaN(bytes) ? Infinity : spaceBytes,
|
||||||
|
circular: [...circular]
|
||||||
|
};
|
||||||
|
|
||||||
|
function walk(holder, key, value) {
|
||||||
|
if (stop) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
value = utils.replaceValue(holder, key, value, replacer);
|
||||||
|
|
||||||
|
if (value === null || typeof value !== 'object') {
|
||||||
|
// primitive
|
||||||
|
if (value !== undefined || Array.isArray(holder)) {
|
||||||
|
bytes += primitiveLength(value);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// check for circular references
|
||||||
|
if (stack.includes(value)) {
|
||||||
|
circular.add(value);
|
||||||
|
bytes += 4; // treat as null
|
||||||
|
|
||||||
|
if (!continueOnCircular) {
|
||||||
|
stop = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Using 'visited' allows avoiding hang-ups in cases of highly interconnected object graphs;
|
||||||
|
// for example, a list of git commits with references to parents can lead to N^2 complexity for traversal,
|
||||||
|
// and N when 'visited' is used
|
||||||
|
if (visited.has(value)) {
|
||||||
|
bytes += visited.get(value);
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
objects++;
|
||||||
|
|
||||||
|
const prevObjects = objects;
|
||||||
|
const valueBytes = bytes;
|
||||||
|
let valueLength = 0;
|
||||||
|
|
||||||
|
stack.push(value);
|
||||||
|
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
// array
|
||||||
|
valueLength = value.length;
|
||||||
|
|
||||||
|
for (let i = 0; i < valueLength; i++) {
|
||||||
|
walk(value, i, value[i]);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// object
|
||||||
|
let prevLength = bytes;
|
||||||
|
|
||||||
|
for (const key of getKeys(value)) {
|
||||||
|
walk(value, key, value[key]);
|
||||||
|
|
||||||
|
if (prevLength !== bytes) {
|
||||||
|
let keyLen = keysLength.get(key);
|
||||||
|
|
||||||
|
if (keyLen === undefined) {
|
||||||
|
keysLength.set(key, keyLen = stringLength(key) + 1); // "key":
|
||||||
|
}
|
||||||
|
|
||||||
|
// value is printed
|
||||||
|
bytes += keyLen;
|
||||||
|
valueLength++;
|
||||||
|
prevLength = bytes;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bytes += valueLength === 0
|
||||||
|
? 2 // {} or []
|
||||||
|
: 1 + valueLength; // {} or [] + commas
|
||||||
|
|
||||||
|
if (space > 0 && valueLength > 0) {
|
||||||
|
spaceBytes +=
|
||||||
|
// a space between ":" and a value for each object entry
|
||||||
|
(Array.isArray(value) ? 0 : valueLength) +
|
||||||
|
// the formula results from folding the following components:
|
||||||
|
// - for each key-value or element: ident + newline
|
||||||
|
// (1 + stack.length * space) * valueLength
|
||||||
|
// - ident (one space less) before "}" or "]" + newline
|
||||||
|
// (stack.length - 1) * space + 1
|
||||||
|
(1 + stack.length * space) * (valueLength + 1) - space;
|
||||||
|
}
|
||||||
|
|
||||||
|
stack.pop();
|
||||||
|
|
||||||
|
// add to 'visited' only objects that contain nested objects
|
||||||
|
if (prevObjects !== objects) {
|
||||||
|
visited.set(value, bytes - valueBytes);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.stringifyInfo = stringifyInfo;
|
108
node_modules/@discoveryjs/json-ext/cjs/utils.cjs
generated
vendored
Normal file
108
node_modules/@discoveryjs/json-ext/cjs/utils.cjs
generated
vendored
Normal file
@ -0,0 +1,108 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
function isIterable(value) {
|
||||||
|
return (
|
||||||
|
typeof value === 'object' &&
|
||||||
|
value !== null &&
|
||||||
|
(
|
||||||
|
typeof value[Symbol.iterator] === 'function' ||
|
||||||
|
typeof value[Symbol.asyncIterator] === 'function'
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function replaceValue(holder, key, value, replacer) {
|
||||||
|
if (value && typeof value.toJSON === 'function') {
|
||||||
|
value = value.toJSON();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (replacer !== null) {
|
||||||
|
value = replacer.call(holder, String(key), value);
|
||||||
|
}
|
||||||
|
|
||||||
|
switch (typeof value) {
|
||||||
|
case 'function':
|
||||||
|
case 'symbol':
|
||||||
|
value = undefined;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'object':
|
||||||
|
if (value !== null) {
|
||||||
|
const cls = value.constructor;
|
||||||
|
if (cls === String || cls === Number || cls === Boolean) {
|
||||||
|
value = value.valueOf();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeReplacer(replacer) {
|
||||||
|
if (typeof replacer === 'function') {
|
||||||
|
return replacer;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(replacer)) {
|
||||||
|
const allowlist = new Set(replacer
|
||||||
|
.map(item => {
|
||||||
|
const cls = item && item.constructor;
|
||||||
|
return cls === String || cls === Number ? String(item) : null;
|
||||||
|
})
|
||||||
|
.filter(item => typeof item === 'string')
|
||||||
|
);
|
||||||
|
|
||||||
|
return [...allowlist];
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeSpace(space) {
|
||||||
|
if (typeof space === 'number') {
|
||||||
|
if (!Number.isFinite(space) || space < 1) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return ' '.repeat(Math.min(space, 10));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof space === 'string') {
|
||||||
|
return space.slice(0, 10) || false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeStringifyOptions(optionsOrReplacer, space) {
|
||||||
|
if (optionsOrReplacer === null || Array.isArray(optionsOrReplacer) || typeof optionsOrReplacer !== 'object') {
|
||||||
|
optionsOrReplacer = {
|
||||||
|
replacer: optionsOrReplacer,
|
||||||
|
space
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
let replacer = normalizeReplacer(optionsOrReplacer.replacer);
|
||||||
|
let getKeys = Object.keys;
|
||||||
|
|
||||||
|
if (Array.isArray(replacer)) {
|
||||||
|
const allowlist = replacer;
|
||||||
|
|
||||||
|
getKeys = () => allowlist;
|
||||||
|
replacer = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
...optionsOrReplacer,
|
||||||
|
replacer,
|
||||||
|
getKeys,
|
||||||
|
space: normalizeSpace(optionsOrReplacer.space)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.isIterable = isIterable;
|
||||||
|
exports.normalizeReplacer = normalizeReplacer;
|
||||||
|
exports.normalizeSpace = normalizeSpace;
|
||||||
|
exports.normalizeStringifyOptions = normalizeStringifyOptions;
|
||||||
|
exports.replaceValue = replaceValue;
|
60
node_modules/@discoveryjs/json-ext/cjs/web-streams.cjs
generated
vendored
Normal file
60
node_modules/@discoveryjs/json-ext/cjs/web-streams.cjs
generated
vendored
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
const parseChunked = require('./parse-chunked.cjs');
|
||||||
|
const stringifyChunked = require('./stringify-chunked.cjs');
|
||||||
|
const utils = require('./utils.cjs');
|
||||||
|
|
||||||
|
/* eslint-env browser */
|
||||||
|
|
||||||
|
function parseFromWebStream(stream) {
|
||||||
|
// 2024/6/17: currently, an @@asyncIterator on a ReadableStream is not widely supported,
|
||||||
|
// therefore use a fallback using a reader
|
||||||
|
// https://caniuse.com/mdn-api_readablestream_--asynciterator
|
||||||
|
return parseChunked.parseChunked(utils.isIterable(stream) ? stream : async function*() {
|
||||||
|
const reader = stream.getReader();
|
||||||
|
|
||||||
|
try {
|
||||||
|
while (true) {
|
||||||
|
const { value, done } = await reader.read();
|
||||||
|
|
||||||
|
if (done) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
yield value;
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
reader.releaseLock();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function createStringifyWebStream(value, replacer, space) {
|
||||||
|
// 2024/6/17: the ReadableStream.from() static method is supported
|
||||||
|
// in Node.js 20.6+ and Firefox only
|
||||||
|
if (typeof ReadableStream.from === 'function') {
|
||||||
|
return ReadableStream.from(stringifyChunked.stringifyChunked(value, replacer, space));
|
||||||
|
}
|
||||||
|
|
||||||
|
// emulate ReadableStream.from()
|
||||||
|
return new ReadableStream({
|
||||||
|
start() {
|
||||||
|
this.generator = stringifyChunked.stringifyChunked(value, replacer, space);
|
||||||
|
},
|
||||||
|
pull(controller) {
|
||||||
|
const { value, done } = this.generator.next();
|
||||||
|
|
||||||
|
if (done) {
|
||||||
|
controller.close();
|
||||||
|
} else {
|
||||||
|
controller.enqueue(value);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
cancel() {
|
||||||
|
this.generator = null;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.createStringifyWebStream = createStringifyWebStream;
|
||||||
|
exports.parseFromWebStream = parseFromWebStream;
|
705
node_modules/@discoveryjs/json-ext/dist/json-ext.js
generated
vendored
Normal file
705
node_modules/@discoveryjs/json-ext/dist/json-ext.js
generated
vendored
Normal file
@ -0,0 +1,705 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||||
|
typeof define === 'function' && define.amd ? define(factory) :
|
||||||
|
(global.jsonExt = factory());
|
||||||
|
}(typeof globalThis != 'undefined' ? globalThis : typeof window != 'undefined' ? window : typeof global != 'undefined' ? global : typeof self != 'undefined' ? self : this, (function () {
|
||||||
|
var exports = (() => {
|
||||||
|
var __defProp = Object.defineProperty;
|
||||||
|
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||||
|
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||||
|
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||||
|
var __export = (target, all) => {
|
||||||
|
for (var name in all)
|
||||||
|
__defProp(target, name, { get: all[name], enumerable: true });
|
||||||
|
};
|
||||||
|
var __copyProps = (to, from, except, desc) => {
|
||||||
|
if (from && typeof from === "object" || typeof from === "function") {
|
||||||
|
for (let key of __getOwnPropNames(from))
|
||||||
|
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||||
|
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||||
|
}
|
||||||
|
return to;
|
||||||
|
};
|
||||||
|
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||||
|
|
||||||
|
// src/index.js
|
||||||
|
var src_exports = {};
|
||||||
|
__export(src_exports, {
|
||||||
|
createStringifyWebStream: () => createStringifyWebStream,
|
||||||
|
parseChunked: () => parseChunked,
|
||||||
|
parseFromWebStream: () => parseFromWebStream,
|
||||||
|
stringifyChunked: () => stringifyChunked,
|
||||||
|
stringifyInfo: () => stringifyInfo
|
||||||
|
});
|
||||||
|
|
||||||
|
// src/utils.js
|
||||||
|
function isIterable(value) {
|
||||||
|
return typeof value === "object" && value !== null && (typeof value[Symbol.iterator] === "function" || typeof value[Symbol.asyncIterator] === "function");
|
||||||
|
}
|
||||||
|
function replaceValue(holder, key, value, replacer) {
|
||||||
|
if (value && typeof value.toJSON === "function") {
|
||||||
|
value = value.toJSON();
|
||||||
|
}
|
||||||
|
if (replacer !== null) {
|
||||||
|
value = replacer.call(holder, String(key), value);
|
||||||
|
}
|
||||||
|
switch (typeof value) {
|
||||||
|
case "function":
|
||||||
|
case "symbol":
|
||||||
|
value = void 0;
|
||||||
|
break;
|
||||||
|
case "object":
|
||||||
|
if (value !== null) {
|
||||||
|
const cls = value.constructor;
|
||||||
|
if (cls === String || cls === Number || cls === Boolean) {
|
||||||
|
value = value.valueOf();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
function normalizeReplacer(replacer) {
|
||||||
|
if (typeof replacer === "function") {
|
||||||
|
return replacer;
|
||||||
|
}
|
||||||
|
if (Array.isArray(replacer)) {
|
||||||
|
const allowlist = new Set(
|
||||||
|
replacer.map((item) => {
|
||||||
|
const cls = item && item.constructor;
|
||||||
|
return cls === String || cls === Number ? String(item) : null;
|
||||||
|
}).filter((item) => typeof item === "string")
|
||||||
|
);
|
||||||
|
return [...allowlist];
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
function normalizeSpace(space) {
|
||||||
|
if (typeof space === "number") {
|
||||||
|
if (!Number.isFinite(space) || space < 1) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return " ".repeat(Math.min(space, 10));
|
||||||
|
}
|
||||||
|
if (typeof space === "string") {
|
||||||
|
return space.slice(0, 10) || false;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
function normalizeStringifyOptions(optionsOrReplacer, space) {
|
||||||
|
if (optionsOrReplacer === null || Array.isArray(optionsOrReplacer) || typeof optionsOrReplacer !== "object") {
|
||||||
|
optionsOrReplacer = {
|
||||||
|
replacer: optionsOrReplacer,
|
||||||
|
space
|
||||||
|
};
|
||||||
|
}
|
||||||
|
let replacer = normalizeReplacer(optionsOrReplacer.replacer);
|
||||||
|
let getKeys = Object.keys;
|
||||||
|
if (Array.isArray(replacer)) {
|
||||||
|
const allowlist = replacer;
|
||||||
|
getKeys = () => allowlist;
|
||||||
|
replacer = null;
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
...optionsOrReplacer,
|
||||||
|
replacer,
|
||||||
|
getKeys,
|
||||||
|
space: normalizeSpace(optionsOrReplacer.space)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// src/parse-chunked.js
|
||||||
|
var STACK_OBJECT = 1;
|
||||||
|
var STACK_ARRAY = 2;
|
||||||
|
var decoder = new TextDecoder();
|
||||||
|
function adjustPosition(error, parser) {
|
||||||
|
if (error.name === "SyntaxError" && parser.jsonParseOffset) {
|
||||||
|
error.message = error.message.replace(
|
||||||
|
/at position (\d+)/,
|
||||||
|
(_, pos) => "at position " + (Number(pos) + parser.jsonParseOffset)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
function append(array, elements) {
|
||||||
|
const initialLength = array.length;
|
||||||
|
array.length += elements.length;
|
||||||
|
for (let i = 0; i < elements.length; i++) {
|
||||||
|
array[initialLength + i] = elements[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async function parseChunked(chunkEmitter) {
|
||||||
|
const iterable = typeof chunkEmitter === "function" ? chunkEmitter() : chunkEmitter;
|
||||||
|
if (isIterable(iterable)) {
|
||||||
|
let parser = new ChunkParser();
|
||||||
|
try {
|
||||||
|
for await (const chunk of iterable) {
|
||||||
|
if (typeof chunk !== "string" && !ArrayBuffer.isView(chunk)) {
|
||||||
|
throw new TypeError("Invalid chunk: Expected string, TypedArray or Buffer");
|
||||||
|
}
|
||||||
|
parser.push(chunk);
|
||||||
|
}
|
||||||
|
return parser.finish();
|
||||||
|
} catch (e) {
|
||||||
|
throw adjustPosition(e, parser);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
throw new TypeError(
|
||||||
|
"Invalid chunk emitter: Expected an Iterable, AsyncIterable, generator, async generator, or a function returning an Iterable or AsyncIterable"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
var ChunkParser = class {
|
||||||
|
constructor() {
|
||||||
|
this.value = void 0;
|
||||||
|
this.valueStack = null;
|
||||||
|
this.stack = new Array(100);
|
||||||
|
this.lastFlushDepth = 0;
|
||||||
|
this.flushDepth = 0;
|
||||||
|
this.stateString = false;
|
||||||
|
this.stateStringEscape = false;
|
||||||
|
this.pendingByteSeq = null;
|
||||||
|
this.pendingChunk = null;
|
||||||
|
this.chunkOffset = 0;
|
||||||
|
this.jsonParseOffset = 0;
|
||||||
|
}
|
||||||
|
parseAndAppend(fragment, wrap) {
|
||||||
|
if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
|
||||||
|
if (wrap) {
|
||||||
|
this.jsonParseOffset--;
|
||||||
|
fragment = "{" + fragment + "}";
|
||||||
|
}
|
||||||
|
Object.assign(this.valueStack.value, JSON.parse(fragment));
|
||||||
|
} else {
|
||||||
|
if (wrap) {
|
||||||
|
this.jsonParseOffset--;
|
||||||
|
fragment = "[" + fragment + "]";
|
||||||
|
}
|
||||||
|
append(this.valueStack.value, JSON.parse(fragment));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
prepareAddition(fragment) {
|
||||||
|
const { value } = this.valueStack;
|
||||||
|
const expectComma = Array.isArray(value) ? value.length !== 0 : Object.keys(value).length !== 0;
|
||||||
|
if (expectComma) {
|
||||||
|
if (fragment[0] === ",") {
|
||||||
|
this.jsonParseOffset++;
|
||||||
|
return fragment.slice(1);
|
||||||
|
}
|
||||||
|
if (fragment[0] !== "}" && fragment[0] !== "]") {
|
||||||
|
this.jsonParseOffset -= 3;
|
||||||
|
return "[[]" + fragment;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return fragment;
|
||||||
|
}
|
||||||
|
flush(chunk, start, end) {
|
||||||
|
let fragment = chunk.slice(start, end);
|
||||||
|
this.jsonParseOffset = this.chunkOffset + start;
|
||||||
|
if (this.pendingChunk !== null) {
|
||||||
|
fragment = this.pendingChunk + fragment;
|
||||||
|
this.jsonParseOffset -= this.pendingChunk.length;
|
||||||
|
this.pendingChunk = null;
|
||||||
|
}
|
||||||
|
if (this.flushDepth === this.lastFlushDepth) {
|
||||||
|
if (this.flushDepth > 0) {
|
||||||
|
this.parseAndAppend(this.prepareAddition(fragment), true);
|
||||||
|
} else {
|
||||||
|
this.value = JSON.parse(fragment);
|
||||||
|
this.valueStack = {
|
||||||
|
value: this.value,
|
||||||
|
prev: null
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else if (this.flushDepth > this.lastFlushDepth) {
|
||||||
|
for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
|
||||||
|
fragment += this.stack[i] === STACK_OBJECT ? "}" : "]";
|
||||||
|
}
|
||||||
|
if (this.lastFlushDepth === 0) {
|
||||||
|
this.value = JSON.parse(fragment);
|
||||||
|
this.valueStack = {
|
||||||
|
value: this.value,
|
||||||
|
prev: null
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
this.parseAndAppend(this.prepareAddition(fragment), true);
|
||||||
|
}
|
||||||
|
for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
|
||||||
|
let value = this.valueStack.value;
|
||||||
|
if (this.stack[i - 1] === STACK_OBJECT) {
|
||||||
|
let key;
|
||||||
|
for (key in value) ;
|
||||||
|
value = value[key];
|
||||||
|
} else {
|
||||||
|
value = value[value.length - 1];
|
||||||
|
}
|
||||||
|
this.valueStack = {
|
||||||
|
value,
|
||||||
|
prev: this.valueStack
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fragment = this.prepareAddition(fragment);
|
||||||
|
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
|
||||||
|
this.jsonParseOffset--;
|
||||||
|
fragment = (this.stack[i] === STACK_OBJECT ? "{" : "[") + fragment;
|
||||||
|
}
|
||||||
|
this.parseAndAppend(fragment, false);
|
||||||
|
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
|
||||||
|
this.valueStack = this.valueStack.prev;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.lastFlushDepth = this.flushDepth;
|
||||||
|
}
|
||||||
|
push(chunk) {
|
||||||
|
if (typeof chunk !== "string") {
|
||||||
|
if (this.pendingByteSeq !== null) {
|
||||||
|
const origRawChunk = chunk;
|
||||||
|
chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
|
||||||
|
chunk.set(this.pendingByteSeq);
|
||||||
|
chunk.set(origRawChunk, this.pendingByteSeq.length);
|
||||||
|
this.pendingByteSeq = null;
|
||||||
|
}
|
||||||
|
if (chunk[chunk.length - 1] > 127) {
|
||||||
|
for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
|
||||||
|
const byte = chunk[chunk.length - 1 - seqLength];
|
||||||
|
if (byte >> 6 === 3) {
|
||||||
|
seqLength++;
|
||||||
|
if (seqLength !== 4 && byte >> 3 === 30 || seqLength !== 3 && byte >> 4 === 14 || seqLength !== 2 && byte >> 5 === 6) {
|
||||||
|
this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
|
||||||
|
chunk = chunk.slice(0, -seqLength);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
chunk = decoder.decode(chunk);
|
||||||
|
}
|
||||||
|
const chunkLength = chunk.length;
|
||||||
|
let lastFlushPoint = 0;
|
||||||
|
let flushPoint = 0;
|
||||||
|
scan: for (let i = 0; i < chunkLength; i++) {
|
||||||
|
if (this.stateString) {
|
||||||
|
for (; i < chunkLength; i++) {
|
||||||
|
if (this.stateStringEscape) {
|
||||||
|
this.stateStringEscape = false;
|
||||||
|
} else {
|
||||||
|
switch (chunk.charCodeAt(i)) {
|
||||||
|
case 34:
|
||||||
|
this.stateString = false;
|
||||||
|
continue scan;
|
||||||
|
case 92:
|
||||||
|
this.stateStringEscape = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
switch (chunk.charCodeAt(i)) {
|
||||||
|
case 34:
|
||||||
|
this.stateString = true;
|
||||||
|
this.stateStringEscape = false;
|
||||||
|
break;
|
||||||
|
case 44:
|
||||||
|
flushPoint = i;
|
||||||
|
break;
|
||||||
|
case 123:
|
||||||
|
flushPoint = i + 1;
|
||||||
|
this.stack[this.flushDepth++] = STACK_OBJECT;
|
||||||
|
break;
|
||||||
|
case 91:
|
||||||
|
flushPoint = i + 1;
|
||||||
|
this.stack[this.flushDepth++] = STACK_ARRAY;
|
||||||
|
break;
|
||||||
|
case 93:
|
||||||
|
/* ] */
|
||||||
|
case 125:
|
||||||
|
flushPoint = i + 1;
|
||||||
|
this.flushDepth--;
|
||||||
|
if (this.flushDepth < this.lastFlushDepth) {
|
||||||
|
this.flush(chunk, lastFlushPoint, flushPoint);
|
||||||
|
lastFlushPoint = flushPoint;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 9:
|
||||||
|
/* \t */
|
||||||
|
case 10:
|
||||||
|
/* \n */
|
||||||
|
case 13:
|
||||||
|
/* \r */
|
||||||
|
case 32:
|
||||||
|
if (lastFlushPoint === i) {
|
||||||
|
lastFlushPoint++;
|
||||||
|
}
|
||||||
|
if (flushPoint === i) {
|
||||||
|
flushPoint++;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (flushPoint > lastFlushPoint) {
|
||||||
|
this.flush(chunk, lastFlushPoint, flushPoint);
|
||||||
|
}
|
||||||
|
if (flushPoint < chunkLength) {
|
||||||
|
if (this.pendingChunk !== null) {
|
||||||
|
this.pendingChunk += chunk;
|
||||||
|
} else {
|
||||||
|
this.pendingChunk = chunk.slice(flushPoint, chunkLength);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.chunkOffset += chunkLength;
|
||||||
|
}
|
||||||
|
finish() {
|
||||||
|
if (this.pendingChunk !== null) {
|
||||||
|
this.flush("", 0, 0);
|
||||||
|
this.pendingChunk = null;
|
||||||
|
}
|
||||||
|
return this.value;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// src/stringify-chunked.js
|
||||||
|
function encodeString(value) {
|
||||||
|
if (/[^\x20\x21\x23-\x5B\x5D-\uD799]/.test(value)) {
|
||||||
|
return JSON.stringify(value);
|
||||||
|
}
|
||||||
|
return '"' + value + '"';
|
||||||
|
}
|
||||||
|
function* stringifyChunked(value, ...args) {
|
||||||
|
const { replacer, getKeys, space, ...options } = normalizeStringifyOptions(...args);
|
||||||
|
const highWaterMark = Number(options.highWaterMark) || 16384;
|
||||||
|
const keyStrings = /* @__PURE__ */ new Map();
|
||||||
|
const stack = [];
|
||||||
|
const rootValue = { "": value };
|
||||||
|
let prevState = null;
|
||||||
|
let state = () => printEntry("", value);
|
||||||
|
let stateValue = rootValue;
|
||||||
|
let stateEmpty = true;
|
||||||
|
let stateKeys = [""];
|
||||||
|
let stateIndex = 0;
|
||||||
|
let buffer = "";
|
||||||
|
while (true) {
|
||||||
|
state();
|
||||||
|
if (buffer.length >= highWaterMark || prevState === null) {
|
||||||
|
yield buffer;
|
||||||
|
buffer = "";
|
||||||
|
if (prevState === null) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function printObject() {
|
||||||
|
if (stateIndex === 0) {
|
||||||
|
stateKeys = getKeys(stateValue);
|
||||||
|
buffer += "{";
|
||||||
|
}
|
||||||
|
if (stateIndex === stateKeys.length) {
|
||||||
|
buffer += space && !stateEmpty ? `
|
||||||
|
${space.repeat(stack.length - 1)}}` : "}";
|
||||||
|
popState();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const key = stateKeys[stateIndex++];
|
||||||
|
printEntry(key, stateValue[key]);
|
||||||
|
}
|
||||||
|
function printArray() {
|
||||||
|
if (stateIndex === 0) {
|
||||||
|
buffer += "[";
|
||||||
|
}
|
||||||
|
if (stateIndex === stateValue.length) {
|
||||||
|
buffer += space && !stateEmpty ? `
|
||||||
|
${space.repeat(stack.length - 1)}]` : "]";
|
||||||
|
popState();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
printEntry(stateIndex, stateValue[stateIndex++]);
|
||||||
|
}
|
||||||
|
function printEntryPrelude(key) {
|
||||||
|
if (stateEmpty) {
|
||||||
|
stateEmpty = false;
|
||||||
|
} else {
|
||||||
|
buffer += ",";
|
||||||
|
}
|
||||||
|
if (space && prevState !== null) {
|
||||||
|
buffer += `
|
||||||
|
${space.repeat(stack.length)}`;
|
||||||
|
}
|
||||||
|
if (state === printObject) {
|
||||||
|
let keyString = keyStrings.get(key);
|
||||||
|
if (keyString === void 0) {
|
||||||
|
keyStrings.set(key, keyString = encodeString(key) + (space ? ": " : ":"));
|
||||||
|
}
|
||||||
|
buffer += keyString;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function printEntry(key, value2) {
|
||||||
|
value2 = replaceValue(stateValue, key, value2, replacer);
|
||||||
|
if (value2 === null || typeof value2 !== "object") {
|
||||||
|
if (state !== printObject || value2 !== void 0) {
|
||||||
|
printEntryPrelude(key);
|
||||||
|
pushPrimitive(value2);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (stack.includes(value2)) {
|
||||||
|
throw new TypeError("Converting circular structure to JSON");
|
||||||
|
}
|
||||||
|
printEntryPrelude(key);
|
||||||
|
stack.push(value2);
|
||||||
|
pushState();
|
||||||
|
state = Array.isArray(value2) ? printArray : printObject;
|
||||||
|
stateValue = value2;
|
||||||
|
stateEmpty = true;
|
||||||
|
stateIndex = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function pushPrimitive(value2) {
|
||||||
|
switch (typeof value2) {
|
||||||
|
case "string":
|
||||||
|
buffer += encodeString(value2);
|
||||||
|
break;
|
||||||
|
case "number":
|
||||||
|
buffer += Number.isFinite(value2) ? String(value2) : "null";
|
||||||
|
break;
|
||||||
|
case "boolean":
|
||||||
|
buffer += value2 ? "true" : "false";
|
||||||
|
break;
|
||||||
|
case "undefined":
|
||||||
|
case "object":
|
||||||
|
buffer += "null";
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
throw new TypeError(`Do not know how to serialize a ${value2.constructor?.name || typeof value2}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function pushState() {
|
||||||
|
prevState = {
|
||||||
|
keys: stateKeys,
|
||||||
|
index: stateIndex,
|
||||||
|
prev: prevState
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function popState() {
|
||||||
|
stack.pop();
|
||||||
|
const value2 = stack.length > 0 ? stack[stack.length - 1] : rootValue;
|
||||||
|
state = Array.isArray(value2) ? printArray : printObject;
|
||||||
|
stateValue = value2;
|
||||||
|
stateEmpty = false;
|
||||||
|
stateKeys = prevState.keys;
|
||||||
|
stateIndex = prevState.index;
|
||||||
|
prevState = prevState.prev;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// src/stringify-info.js
|
||||||
|
var hasOwn = typeof Object.hasOwn === "function" ? Object.hasOwn : (object, key) => Object.hasOwnProperty.call(object, key);
|
||||||
|
var escapableCharCodeSubstitution = {
|
||||||
|
// JSON Single Character Escape Sequences
|
||||||
|
8: "\\b",
|
||||||
|
9: "\\t",
|
||||||
|
10: "\\n",
|
||||||
|
12: "\\f",
|
||||||
|
13: "\\r",
|
||||||
|
34: '\\"',
|
||||||
|
92: "\\\\"
|
||||||
|
};
|
||||||
|
var charLength2048 = Uint8Array.from({ length: 2048 }, (_, code) => {
|
||||||
|
if (hasOwn(escapableCharCodeSubstitution, code)) {
|
||||||
|
return 2;
|
||||||
|
}
|
||||||
|
if (code < 32) {
|
||||||
|
return 6;
|
||||||
|
}
|
||||||
|
return code < 128 ? 1 : 2;
|
||||||
|
});
|
||||||
|
function isLeadingSurrogate(code) {
|
||||||
|
return code >= 55296 && code <= 56319;
|
||||||
|
}
|
||||||
|
function isTrailingSurrogate(code) {
|
||||||
|
return code >= 56320 && code <= 57343;
|
||||||
|
}
|
||||||
|
function stringLength(str) {
|
||||||
|
if (!/[^\x20\x21\x23-\x5B\x5D-\x7F]/.test(str)) {
|
||||||
|
return str.length + 2;
|
||||||
|
}
|
||||||
|
let len = 0;
|
||||||
|
let prevLeadingSurrogate = false;
|
||||||
|
for (let i = 0; i < str.length; i++) {
|
||||||
|
const code = str.charCodeAt(i);
|
||||||
|
if (code < 2048) {
|
||||||
|
len += charLength2048[code];
|
||||||
|
} else if (isLeadingSurrogate(code)) {
|
||||||
|
len += 6;
|
||||||
|
prevLeadingSurrogate = true;
|
||||||
|
continue;
|
||||||
|
} else if (isTrailingSurrogate(code)) {
|
||||||
|
len = prevLeadingSurrogate ? len - 2 : len + 6;
|
||||||
|
} else {
|
||||||
|
len += 3;
|
||||||
|
}
|
||||||
|
prevLeadingSurrogate = false;
|
||||||
|
}
|
||||||
|
return len + 2;
|
||||||
|
}
|
||||||
|
function intLength(num) {
|
||||||
|
let len = 0;
|
||||||
|
if (num < 0) {
|
||||||
|
len = 1;
|
||||||
|
num = -num;
|
||||||
|
}
|
||||||
|
if (num >= 1e9) {
|
||||||
|
len += 9;
|
||||||
|
num = (num - num % 1e9) / 1e9;
|
||||||
|
}
|
||||||
|
if (num >= 1e4) {
|
||||||
|
if (num >= 1e6) {
|
||||||
|
return len + (num >= 1e8 ? 9 : num >= 1e7 ? 8 : 7);
|
||||||
|
}
|
||||||
|
return len + (num >= 1e5 ? 6 : 5);
|
||||||
|
}
|
||||||
|
return len + (num >= 100 ? num >= 1e3 ? 4 : 3 : num >= 10 ? 2 : 1);
|
||||||
|
}
|
||||||
|
function primitiveLength(value) {
|
||||||
|
switch (typeof value) {
|
||||||
|
case "string":
|
||||||
|
return stringLength(value);
|
||||||
|
case "number":
|
||||||
|
return Number.isFinite(value) ? Number.isInteger(value) ? intLength(value) : String(value).length : 4;
|
||||||
|
case "boolean":
|
||||||
|
return value ? 4 : 5;
|
||||||
|
case "undefined":
|
||||||
|
case "object":
|
||||||
|
return 4;
|
||||||
|
/* null */
|
||||||
|
default:
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function stringifyInfo(value, ...args) {
|
||||||
|
const { replacer, getKeys, ...options } = normalizeStringifyOptions(...args);
|
||||||
|
const continueOnCircular = Boolean(options.continueOnCircular);
|
||||||
|
const space = options.space?.length || 0;
|
||||||
|
const keysLength = /* @__PURE__ */ new Map();
|
||||||
|
const visited = /* @__PURE__ */ new Map();
|
||||||
|
const circular = /* @__PURE__ */ new Set();
|
||||||
|
const stack = [];
|
||||||
|
const root = { "": value };
|
||||||
|
let stop = false;
|
||||||
|
let bytes = 0;
|
||||||
|
let spaceBytes = 0;
|
||||||
|
let objects = 0;
|
||||||
|
walk(root, "", value);
|
||||||
|
if (bytes === 0) {
|
||||||
|
bytes += 9;
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
bytes: isNaN(bytes) ? Infinity : bytes + spaceBytes,
|
||||||
|
spaceBytes: space > 0 && isNaN(bytes) ? Infinity : spaceBytes,
|
||||||
|
circular: [...circular]
|
||||||
|
};
|
||||||
|
function walk(holder, key, value2) {
|
||||||
|
if (stop) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
value2 = replaceValue(holder, key, value2, replacer);
|
||||||
|
if (value2 === null || typeof value2 !== "object") {
|
||||||
|
if (value2 !== void 0 || Array.isArray(holder)) {
|
||||||
|
bytes += primitiveLength(value2);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (stack.includes(value2)) {
|
||||||
|
circular.add(value2);
|
||||||
|
bytes += 4;
|
||||||
|
if (!continueOnCircular) {
|
||||||
|
stop = true;
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (visited.has(value2)) {
|
||||||
|
bytes += visited.get(value2);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
objects++;
|
||||||
|
const prevObjects = objects;
|
||||||
|
const valueBytes = bytes;
|
||||||
|
let valueLength = 0;
|
||||||
|
stack.push(value2);
|
||||||
|
if (Array.isArray(value2)) {
|
||||||
|
valueLength = value2.length;
|
||||||
|
for (let i = 0; i < valueLength; i++) {
|
||||||
|
walk(value2, i, value2[i]);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let prevLength = bytes;
|
||||||
|
for (const key2 of getKeys(value2)) {
|
||||||
|
walk(value2, key2, value2[key2]);
|
||||||
|
if (prevLength !== bytes) {
|
||||||
|
let keyLen = keysLength.get(key2);
|
||||||
|
if (keyLen === void 0) {
|
||||||
|
keysLength.set(key2, keyLen = stringLength(key2) + 1);
|
||||||
|
}
|
||||||
|
bytes += keyLen;
|
||||||
|
valueLength++;
|
||||||
|
prevLength = bytes;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
bytes += valueLength === 0 ? 2 : 1 + valueLength;
|
||||||
|
if (space > 0 && valueLength > 0) {
|
||||||
|
spaceBytes += // a space between ":" and a value for each object entry
|
||||||
|
(Array.isArray(value2) ? 0 : valueLength) + // the formula results from folding the following components:
|
||||||
|
// - for each key-value or element: ident + newline
|
||||||
|
// (1 + stack.length * space) * valueLength
|
||||||
|
// - ident (one space less) before "}" or "]" + newline
|
||||||
|
// (stack.length - 1) * space + 1
|
||||||
|
(1 + stack.length * space) * (valueLength + 1) - space;
|
||||||
|
}
|
||||||
|
stack.pop();
|
||||||
|
if (prevObjects !== objects) {
|
||||||
|
visited.set(value2, bytes - valueBytes);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// src/web-streams.js
|
||||||
|
function parseFromWebStream(stream) {
|
||||||
|
return parseChunked(isIterable(stream) ? stream : async function* () {
|
||||||
|
const reader = stream.getReader();
|
||||||
|
try {
|
||||||
|
while (true) {
|
||||||
|
const { value, done } = await reader.read();
|
||||||
|
if (done) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
yield value;
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
reader.releaseLock();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function createStringifyWebStream(value, replacer, space) {
|
||||||
|
if (typeof ReadableStream.from === "function") {
|
||||||
|
return ReadableStream.from(stringifyChunked(value, replacer, space));
|
||||||
|
}
|
||||||
|
return new ReadableStream({
|
||||||
|
start() {
|
||||||
|
this.generator = stringifyChunked(value, replacer, space);
|
||||||
|
},
|
||||||
|
pull(controller) {
|
||||||
|
const { value: value2, done } = this.generator.next();
|
||||||
|
if (done) {
|
||||||
|
controller.close();
|
||||||
|
} else {
|
||||||
|
controller.enqueue(value2);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
cancel() {
|
||||||
|
this.generator = null;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return __toCommonJS(src_exports);
|
||||||
|
})();
|
||||||
|
|
||||||
|
return exports;
|
||||||
|
})));
|
13
node_modules/@discoveryjs/json-ext/dist/json-ext.min.js
generated
vendored
Normal file
13
node_modules/@discoveryjs/json-ext/dist/json-ext.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
node_modules/@discoveryjs/json-ext/dist/json-ext.min.js.map
generated
vendored
Normal file
7
node_modules/@discoveryjs/json-ext/dist/json-ext.min.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/@discoveryjs/json-ext/dist/package.json
generated
vendored
Normal file
3
node_modules/@discoveryjs/json-ext/dist/package.json
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"type": "commonjs"
|
||||||
|
}
|
37
node_modules/@discoveryjs/json-ext/index.d.ts
generated
vendored
Normal file
37
node_modules/@discoveryjs/json-ext/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
declare module '@discoveryjs/json-ext' {
|
||||||
|
type Chunk = string | Uint8Array | Buffer;
|
||||||
|
type Replacer =
|
||||||
|
| ((this: any, key: string, value: any) => any)
|
||||||
|
| (string | number)[]
|
||||||
|
| null;
|
||||||
|
type Space = string | number | null;
|
||||||
|
type StringifyOptions = {
|
||||||
|
replacer?: Replacer;
|
||||||
|
space?: Space;
|
||||||
|
highWaterMark?: number;
|
||||||
|
};
|
||||||
|
type StringifyInfoOptions = {
|
||||||
|
replacer?: Replacer;
|
||||||
|
space?: Space;
|
||||||
|
continueOnCircular?: boolean;
|
||||||
|
}
|
||||||
|
type StringifyInfoResult = {
|
||||||
|
bytes: number;
|
||||||
|
spaceBytes: number;
|
||||||
|
circular: object[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export function parseChunked(input: Iterable<Chunk> | AsyncIterable<Chunk>): Promise<any>;
|
||||||
|
export function parseChunked(input: () => (Iterable<Chunk> | AsyncIterable<Chunk>)): Promise<any>;
|
||||||
|
|
||||||
|
export function stringifyChunked(value: any, replacer?: Replacer, space?: Space): Generator<string>;
|
||||||
|
export function stringifyChunked(value: any, options: StringifyOptions): Generator<string>;
|
||||||
|
|
||||||
|
export function stringifyInfo(value: any, replacer?: Replacer, space?: Space): StringifyInfoResult;
|
||||||
|
export function stringifyInfo(value: any, options?: StringifyInfoOptions): StringifyInfoResult;
|
||||||
|
|
||||||
|
// Web streams
|
||||||
|
export function parseFromWebStream(stream: ReadableStream<Chunk>): Promise<any>;
|
||||||
|
export function createStringifyWebStream(value: any, replacer?: Replacer, space?: Space): ReadableStream<string>;
|
||||||
|
export function createStringifyWebStream(value: any, options: StringifyOptions): ReadableStream<string>;
|
||||||
|
}
|
68
node_modules/@discoveryjs/json-ext/package.json
generated
vendored
Normal file
68
node_modules/@discoveryjs/json-ext/package.json
generated
vendored
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
{
|
||||||
|
"name": "@discoveryjs/json-ext",
|
||||||
|
"version": "0.6.3",
|
||||||
|
"description": "A set of utilities that extend the use of JSON",
|
||||||
|
"keywords": [
|
||||||
|
"json",
|
||||||
|
"utils",
|
||||||
|
"stream",
|
||||||
|
"async",
|
||||||
|
"promise",
|
||||||
|
"stringify",
|
||||||
|
"info"
|
||||||
|
],
|
||||||
|
"author": "Roman Dvornov <rdvornov@gmail.com> (https://github.com/lahmatiy)",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/discoveryjs/json-ext.git"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14.17.0"
|
||||||
|
},
|
||||||
|
"type": "module",
|
||||||
|
"main": "./cjs/index.cjs",
|
||||||
|
"module": "./src/index.js",
|
||||||
|
"types": "./index.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"types": "./index.d.ts",
|
||||||
|
"require": "./cjs/index.cjs",
|
||||||
|
"import": "./src/index.js"
|
||||||
|
},
|
||||||
|
"./dist/*": "./dist/*",
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "npm run test:src",
|
||||||
|
"lint": "eslint src",
|
||||||
|
"lint-and-test": "npm run lint && npm test",
|
||||||
|
"bundle": "node scripts/bundle.js",
|
||||||
|
"transpile": "node scripts/transpile.cjs",
|
||||||
|
"test:all": "npm run test:src && npm run test:cjs && npm run test:dist && npm run test:e2e",
|
||||||
|
"test:src": "mocha --reporter progress src/*.test.js",
|
||||||
|
"test:cjs": "mocha --reporter progress cjs/*.test.cjs",
|
||||||
|
"test:e2e": "mocha --reporter progress test-e2e",
|
||||||
|
"test:dist": "mocha --reporter progress dist/test",
|
||||||
|
"test:deno": "node scripts/deno-adapt-test.js && mocha --reporter progress deno-tests/*.test.js",
|
||||||
|
"bundle-and-test": "npm run bundle && npm run test:dist",
|
||||||
|
"coverage": "c8 --reporter=lcovonly npm test",
|
||||||
|
"prepublishOnly": "npm run lint && npm run bundle && npm run transpile && npm run test:all"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"c8": "^7.10.0",
|
||||||
|
"chalk": "^4.1.0",
|
||||||
|
"esbuild": "^0.24.0",
|
||||||
|
"eslint": "^8.57.0",
|
||||||
|
"mocha": "^9.2.2",
|
||||||
|
"rollup": "^2.79.2"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"cjs",
|
||||||
|
"!cjs/*{.test,-cases}.cjs",
|
||||||
|
"dist",
|
||||||
|
"src",
|
||||||
|
"!src/*{.test,-cases}.js",
|
||||||
|
"index.d.ts"
|
||||||
|
]
|
||||||
|
}
|
4
node_modules/@discoveryjs/json-ext/src/index.js
generated
vendored
Normal file
4
node_modules/@discoveryjs/json-ext/src/index.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
export { parseChunked } from './parse-chunked.js';
|
||||||
|
export { stringifyChunked } from './stringify-chunked.js';
|
||||||
|
export { stringifyInfo } from './stringify-info.js';
|
||||||
|
export { createStringifyWebStream, parseFromWebStream } from './web-streams.js';
|
352
node_modules/@discoveryjs/json-ext/src/parse-chunked.js
generated
vendored
Normal file
352
node_modules/@discoveryjs/json-ext/src/parse-chunked.js
generated
vendored
Normal file
@ -0,0 +1,352 @@
|
|||||||
|
import { isIterable } from './utils.js';
|
||||||
|
|
||||||
|
const STACK_OBJECT = 1;
|
||||||
|
const STACK_ARRAY = 2;
|
||||||
|
const decoder = new TextDecoder();
|
||||||
|
|
||||||
|
function adjustPosition(error, parser) {
|
||||||
|
if (error.name === 'SyntaxError' && parser.jsonParseOffset) {
|
||||||
|
error.message = error.message.replace(/at position (\d+)/, (_, pos) =>
|
||||||
|
'at position ' + (Number(pos) + parser.jsonParseOffset)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
|
||||||
|
function append(array, elements) {
|
||||||
|
// Note: Avoid to use array.push(...elements) since it may lead to
|
||||||
|
// "RangeError: Maximum call stack size exceeded" for a long arrays
|
||||||
|
const initialLength = array.length;
|
||||||
|
array.length += elements.length;
|
||||||
|
|
||||||
|
for (let i = 0; i < elements.length; i++) {
|
||||||
|
array[initialLength + i] = elements[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function parseChunked(chunkEmitter) {
|
||||||
|
const iterable = typeof chunkEmitter === 'function'
|
||||||
|
? chunkEmitter()
|
||||||
|
: chunkEmitter;
|
||||||
|
|
||||||
|
if (isIterable(iterable)) {
|
||||||
|
let parser = new ChunkParser();
|
||||||
|
|
||||||
|
try {
|
||||||
|
for await (const chunk of iterable) {
|
||||||
|
if (typeof chunk !== 'string' && !ArrayBuffer.isView(chunk)) {
|
||||||
|
throw new TypeError('Invalid chunk: Expected string, TypedArray or Buffer');
|
||||||
|
}
|
||||||
|
|
||||||
|
parser.push(chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
return parser.finish();
|
||||||
|
} catch (e) {
|
||||||
|
throw adjustPosition(e, parser);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new TypeError(
|
||||||
|
'Invalid chunk emitter: Expected an Iterable, AsyncIterable, generator, ' +
|
||||||
|
'async generator, or a function returning an Iterable or AsyncIterable'
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
class ChunkParser {
|
||||||
|
constructor() {
|
||||||
|
this.value = undefined;
|
||||||
|
this.valueStack = null;
|
||||||
|
|
||||||
|
this.stack = new Array(100);
|
||||||
|
this.lastFlushDepth = 0;
|
||||||
|
this.flushDepth = 0;
|
||||||
|
this.stateString = false;
|
||||||
|
this.stateStringEscape = false;
|
||||||
|
this.pendingByteSeq = null;
|
||||||
|
this.pendingChunk = null;
|
||||||
|
this.chunkOffset = 0;
|
||||||
|
this.jsonParseOffset = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
parseAndAppend(fragment, wrap) {
|
||||||
|
// Append new entries or elements
|
||||||
|
if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
|
||||||
|
if (wrap) {
|
||||||
|
this.jsonParseOffset--;
|
||||||
|
fragment = '{' + fragment + '}';
|
||||||
|
}
|
||||||
|
|
||||||
|
Object.assign(this.valueStack.value, JSON.parse(fragment));
|
||||||
|
} else {
|
||||||
|
if (wrap) {
|
||||||
|
this.jsonParseOffset--;
|
||||||
|
fragment = '[' + fragment + ']';
|
||||||
|
}
|
||||||
|
|
||||||
|
append(this.valueStack.value, JSON.parse(fragment));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
prepareAddition(fragment) {
|
||||||
|
const { value } = this.valueStack;
|
||||||
|
const expectComma = Array.isArray(value)
|
||||||
|
? value.length !== 0
|
||||||
|
: Object.keys(value).length !== 0;
|
||||||
|
|
||||||
|
if (expectComma) {
|
||||||
|
// Skip a comma at the beginning of fragment, otherwise it would
|
||||||
|
// fail to parse
|
||||||
|
if (fragment[0] === ',') {
|
||||||
|
this.jsonParseOffset++;
|
||||||
|
return fragment.slice(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// When value (an object or array) is not empty and a fragment
|
||||||
|
// doesn't start with a comma, a single valid fragment starting
|
||||||
|
// is a closing bracket. If it's not, a prefix is adding to fail
|
||||||
|
// parsing. Otherwise, the sequence of chunks can be successfully
|
||||||
|
// parsed, although it should not, e.g. ["[{}", "{}]"]
|
||||||
|
if (fragment[0] !== '}' && fragment[0] !== ']') {
|
||||||
|
this.jsonParseOffset -= 3;
|
||||||
|
return '[[]' + fragment;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return fragment;
|
||||||
|
}
|
||||||
|
|
||||||
|
flush(chunk, start, end) {
|
||||||
|
let fragment = chunk.slice(start, end);
|
||||||
|
|
||||||
|
// Save position correction an error in JSON.parse() if any
|
||||||
|
this.jsonParseOffset = this.chunkOffset + start;
|
||||||
|
|
||||||
|
// Prepend pending chunk if any
|
||||||
|
if (this.pendingChunk !== null) {
|
||||||
|
fragment = this.pendingChunk + fragment;
|
||||||
|
this.jsonParseOffset -= this.pendingChunk.length;
|
||||||
|
this.pendingChunk = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.flushDepth === this.lastFlushDepth) {
|
||||||
|
// Depth didn't changed, so it's a root value or entry/element set
|
||||||
|
if (this.flushDepth > 0) {
|
||||||
|
this.parseAndAppend(this.prepareAddition(fragment), true);
|
||||||
|
} else {
|
||||||
|
// That's an entire value on a top level
|
||||||
|
this.value = JSON.parse(fragment);
|
||||||
|
this.valueStack = {
|
||||||
|
value: this.value,
|
||||||
|
prev: null
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else if (this.flushDepth > this.lastFlushDepth) {
|
||||||
|
// Add missed closing brackets/parentheses
|
||||||
|
for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
|
||||||
|
fragment += this.stack[i] === STACK_OBJECT ? '}' : ']';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.lastFlushDepth === 0) {
|
||||||
|
// That's a root value
|
||||||
|
this.value = JSON.parse(fragment);
|
||||||
|
this.valueStack = {
|
||||||
|
value: this.value,
|
||||||
|
prev: null
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
this.parseAndAppend(this.prepareAddition(fragment), true);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Move down to the depths to the last object/array, which is current now
|
||||||
|
for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
|
||||||
|
let value = this.valueStack.value;
|
||||||
|
|
||||||
|
if (this.stack[i - 1] === STACK_OBJECT) {
|
||||||
|
// find last entry
|
||||||
|
let key;
|
||||||
|
// eslint-disable-next-line curly
|
||||||
|
for (key in value);
|
||||||
|
value = value[key];
|
||||||
|
} else {
|
||||||
|
// last element
|
||||||
|
value = value[value.length - 1];
|
||||||
|
}
|
||||||
|
|
||||||
|
this.valueStack = {
|
||||||
|
value,
|
||||||
|
prev: this.valueStack
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else /* this.flushDepth < this.lastFlushDepth */ {
|
||||||
|
fragment = this.prepareAddition(fragment);
|
||||||
|
|
||||||
|
// Add missed opening brackets/parentheses
|
||||||
|
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
|
||||||
|
this.jsonParseOffset--;
|
||||||
|
fragment = (this.stack[i] === STACK_OBJECT ? '{' : '[') + fragment;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.parseAndAppend(fragment, false);
|
||||||
|
|
||||||
|
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
|
||||||
|
this.valueStack = this.valueStack.prev;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.lastFlushDepth = this.flushDepth;
|
||||||
|
}
|
||||||
|
|
||||||
|
push(chunk) {
|
||||||
|
if (typeof chunk !== 'string') {
|
||||||
|
// Suppose chunk is Buffer or Uint8Array
|
||||||
|
|
||||||
|
// Prepend uncompleted byte sequence if any
|
||||||
|
if (this.pendingByteSeq !== null) {
|
||||||
|
const origRawChunk = chunk;
|
||||||
|
chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
|
||||||
|
chunk.set(this.pendingByteSeq);
|
||||||
|
chunk.set(origRawChunk, this.pendingByteSeq.length);
|
||||||
|
this.pendingByteSeq = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// In case Buffer/Uint8Array, an input is encoded in UTF8
|
||||||
|
// Seek for parts of uncompleted UTF8 symbol on the ending
|
||||||
|
// This makes sense only if we expect more chunks and last char is not multi-bytes
|
||||||
|
if (chunk[chunk.length - 1] > 127) {
|
||||||
|
for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
|
||||||
|
const byte = chunk[chunk.length - 1 - seqLength];
|
||||||
|
|
||||||
|
// 10xxxxxx - 2nd, 3rd or 4th byte
|
||||||
|
// 110xxxxx – first byte of 2-byte sequence
|
||||||
|
// 1110xxxx - first byte of 3-byte sequence
|
||||||
|
// 11110xxx - first byte of 4-byte sequence
|
||||||
|
if (byte >> 6 === 3) {
|
||||||
|
seqLength++;
|
||||||
|
|
||||||
|
// If the sequence is really incomplete, then preserve it
|
||||||
|
// for the future chunk and cut off it from the current chunk
|
||||||
|
if ((seqLength !== 4 && byte >> 3 === 0b11110) ||
|
||||||
|
(seqLength !== 3 && byte >> 4 === 0b1110) ||
|
||||||
|
(seqLength !== 2 && byte >> 5 === 0b110)) {
|
||||||
|
this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
|
||||||
|
chunk = chunk.slice(0, -seqLength);
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert chunk to a string, since single decode per chunk
|
||||||
|
// is much effective than decode multiple small substrings
|
||||||
|
chunk = decoder.decode(chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
const chunkLength = chunk.length;
|
||||||
|
let lastFlushPoint = 0;
|
||||||
|
let flushPoint = 0;
|
||||||
|
|
||||||
|
// Main scan loop
|
||||||
|
scan: for (let i = 0; i < chunkLength; i++) {
|
||||||
|
if (this.stateString) {
|
||||||
|
for (; i < chunkLength; i++) {
|
||||||
|
if (this.stateStringEscape) {
|
||||||
|
this.stateStringEscape = false;
|
||||||
|
} else {
|
||||||
|
switch (chunk.charCodeAt(i)) {
|
||||||
|
case 0x22: /* " */
|
||||||
|
this.stateString = false;
|
||||||
|
continue scan;
|
||||||
|
|
||||||
|
case 0x5C: /* \ */
|
||||||
|
this.stateStringEscape = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
switch (chunk.charCodeAt(i)) {
|
||||||
|
case 0x22: /* " */
|
||||||
|
this.stateString = true;
|
||||||
|
this.stateStringEscape = false;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x2C: /* , */
|
||||||
|
flushPoint = i;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x7B: /* { */
|
||||||
|
// Open an object
|
||||||
|
flushPoint = i + 1;
|
||||||
|
this.stack[this.flushDepth++] = STACK_OBJECT;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x5B: /* [ */
|
||||||
|
// Open an array
|
||||||
|
flushPoint = i + 1;
|
||||||
|
this.stack[this.flushDepth++] = STACK_ARRAY;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x5D: /* ] */
|
||||||
|
case 0x7D: /* } */
|
||||||
|
// Close an object or array
|
||||||
|
flushPoint = i + 1;
|
||||||
|
this.flushDepth--;
|
||||||
|
|
||||||
|
if (this.flushDepth < this.lastFlushDepth) {
|
||||||
|
this.flush(chunk, lastFlushPoint, flushPoint);
|
||||||
|
lastFlushPoint = flushPoint;
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 0x09: /* \t */
|
||||||
|
case 0x0A: /* \n */
|
||||||
|
case 0x0D: /* \r */
|
||||||
|
case 0x20: /* space */
|
||||||
|
// Move points forward when they points on current position and it's a whitespace
|
||||||
|
if (lastFlushPoint === i) {
|
||||||
|
lastFlushPoint++;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (flushPoint === i) {
|
||||||
|
flushPoint++;
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (flushPoint > lastFlushPoint) {
|
||||||
|
this.flush(chunk, lastFlushPoint, flushPoint);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Produce pendingChunk if something left
|
||||||
|
if (flushPoint < chunkLength) {
|
||||||
|
if (this.pendingChunk !== null) {
|
||||||
|
// When there is already a pending chunk then no flush happened,
|
||||||
|
// appending entire chunk to pending one
|
||||||
|
this.pendingChunk += chunk;
|
||||||
|
} else {
|
||||||
|
// Create a pending chunk, it will start with non-whitespace since
|
||||||
|
// flushPoint was moved forward away from whitespaces on scan
|
||||||
|
this.pendingChunk = chunk.slice(flushPoint, chunkLength);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.chunkOffset += chunkLength;
|
||||||
|
}
|
||||||
|
|
||||||
|
finish() {
|
||||||
|
if (this.pendingChunk !== null) {
|
||||||
|
this.flush('', 0, 0);
|
||||||
|
this.pendingChunk = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.value;
|
||||||
|
}
|
||||||
|
};
|
171
node_modules/@discoveryjs/json-ext/src/stringify-chunked.js
generated
vendored
Normal file
171
node_modules/@discoveryjs/json-ext/src/stringify-chunked.js
generated
vendored
Normal file
@ -0,0 +1,171 @@
|
|||||||
|
import { normalizeStringifyOptions, replaceValue } from './utils.js';
|
||||||
|
|
||||||
|
function encodeString(value) {
|
||||||
|
if (/[^\x20\x21\x23-\x5B\x5D-\uD799]/.test(value)) { // [^\x20-\uD799]|[\x22\x5c]
|
||||||
|
return JSON.stringify(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
return '"' + value + '"';
|
||||||
|
}
|
||||||
|
|
||||||
|
export function* stringifyChunked(value, ...args) {
|
||||||
|
const { replacer, getKeys, space, ...options } = normalizeStringifyOptions(...args);
|
||||||
|
const highWaterMark = Number(options.highWaterMark) || 0x4000; // 16kb by default
|
||||||
|
|
||||||
|
const keyStrings = new Map();
|
||||||
|
const stack = [];
|
||||||
|
const rootValue = { '': value };
|
||||||
|
let prevState = null;
|
||||||
|
let state = () => printEntry('', value);
|
||||||
|
let stateValue = rootValue;
|
||||||
|
let stateEmpty = true;
|
||||||
|
let stateKeys = [''];
|
||||||
|
let stateIndex = 0;
|
||||||
|
let buffer = '';
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
state();
|
||||||
|
|
||||||
|
if (buffer.length >= highWaterMark || prevState === null) {
|
||||||
|
// flush buffer
|
||||||
|
yield buffer;
|
||||||
|
buffer = '';
|
||||||
|
|
||||||
|
if (prevState === null) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function printObject() {
|
||||||
|
if (stateIndex === 0) {
|
||||||
|
stateKeys = getKeys(stateValue);
|
||||||
|
buffer += '{';
|
||||||
|
}
|
||||||
|
|
||||||
|
// when no keys left
|
||||||
|
if (stateIndex === stateKeys.length) {
|
||||||
|
buffer += space && !stateEmpty
|
||||||
|
? `\n${space.repeat(stack.length - 1)}}`
|
||||||
|
: '}';
|
||||||
|
|
||||||
|
popState();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const key = stateKeys[stateIndex++];
|
||||||
|
printEntry(key, stateValue[key]);
|
||||||
|
}
|
||||||
|
|
||||||
|
function printArray() {
|
||||||
|
if (stateIndex === 0) {
|
||||||
|
buffer += '[';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (stateIndex === stateValue.length) {
|
||||||
|
buffer += space && !stateEmpty
|
||||||
|
? `\n${space.repeat(stack.length - 1)}]`
|
||||||
|
: ']';
|
||||||
|
|
||||||
|
popState();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
printEntry(stateIndex, stateValue[stateIndex++]);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Emits everything that precedes a value: a comma separator (unless this is
// the first entry of its container), a newline plus indentation when `space`
// is set, and — for object entries — the encoded key followed by ":".
// Encoded keys are memoized in `keyStrings` since the same keys tend to
// repeat across objects.
function printEntryPrelude(key) {
    if (stateEmpty) {
        stateEmpty = false;
    } else {
        buffer += ',';
    }

    // prevState === null means we are at the root, where no indent is needed
    if (space && prevState !== null) {
        buffer += `\n${space.repeat(stack.length)}`;
    }

    if (state === printObject) {
        let keyString = keyStrings.get(key);

        if (keyString === undefined) {
            keyStrings.set(key, keyString = encodeString(key) + (space ? ': ' : ':'));
        }

        buffer += keyString;
    }
}
|
||||||
|
|
||||||
|
// Prints a single entry (object property or array element). Primitives are
// emitted immediately; objects/arrays save the current position and switch
// the state machine into the nested container.
function printEntry(key, value) {
    // apply toJSON()/replacer/boxed-primitive normalization, like JSON.stringify()
    value = replaceValue(stateValue, key, value, replacer);

    if (value === null || typeof value !== 'object') {
        // primitive
        // undefined object properties are skipped entirely; array elements
        // and the root still print (as "null" via pushPrimitive)
        if (state !== printObject || value !== undefined) {
            printEntryPrelude(key);
            pushPrimitive(value);
        }
    } else {
        // a value already on the stack of containers being printed means a cycle
        if (stack.includes(value)) {
            throw new TypeError('Converting circular structure to JSON');
        }

        printEntryPrelude(key);
        stack.push(value);

        // descend into the nested container
        pushState();
        state = Array.isArray(value) ? printArray : printObject;
        stateValue = value;
        stateEmpty = true;
        stateIndex = 0;
    }
}
|
||||||
|
|
||||||
|
// Appends the JSON representation of a primitive value to the output buffer.
// Mirrors JSON.stringify(): non-finite numbers, undefined and null all
// serialize as "null"; bigints and other unsupported types throw.
function pushPrimitive(value) {
    const type = typeof value;

    if (type === 'string') {
        buffer += encodeString(value);
    } else if (type === 'number') {
        buffer += Number.isFinite(value) ? String(value) : 'null';
    } else if (type === 'boolean') {
        buffer += value ? 'true' : 'false';
    } else if (type === 'undefined' || type === 'object') { // typeof null === 'object'
        buffer += 'null';
    } else {
        throw new TypeError(`Do not know how to serialize a ${value.constructor?.name || typeof value}`);
    }
}
|
||||||
|
|
||||||
|
// Saves the position within the current container onto the prevState linked
// list so popState() can restore it once a nested value is fully printed.
function pushState() {
    const saved = {
        keys: stateKeys,
        index: stateIndex,
        prev: prevState
    };

    prevState = saved;
}
|
||||||
|
|
||||||
|
// Restores the printing position of the enclosing container after a nested
// value has been fully printed.
function popState() {
    stack.pop();
    // the enclosing container; rootValue is the artificial holder of the root
    const value = stack.length > 0 ? stack[stack.length - 1] : rootValue;

    // restore state
    state = Array.isArray(value) ? printArray : printObject;
    stateValue = value;
    stateEmpty = false; // the container already printed at least one entry
    stateKeys = prevState.keys;
    stateIndex = prevState.index;

    // pop state
    prevState = prevState.prev;
}
|
||||||
|
};
|
247
node_modules/@discoveryjs/json-ext/src/stringify-info.js
generated
vendored
Normal file
247
node_modules/@discoveryjs/json-ext/src/stringify-info.js
generated
vendored
Normal file
@ -0,0 +1,247 @@
|
|||||||
|
import { normalizeStringifyOptions, replaceValue } from './utils.js';
|
||||||
|
|
||||||
|
// Object.hasOwn() ponyfill for runtimes prior to ES2022
const hasOwn = typeof Object.hasOwn === 'function'
    ? Object.hasOwn
    : (object, key) => Object.prototype.hasOwnProperty.call(object, key);
|
||||||
|
|
||||||
|
// https://tc39.es/ecma262/#table-json-single-character-escapes
// Char codes that JSON encodes with a dedicated two-character escape
const escapableCharCodeSubstitution = { // JSON Single Character Escape Sequences
    0x08: '\\b',
    0x09: '\\t',
    0x0a: '\\n',
    0x0c: '\\f',
    0x0d: '\\r',
    0x22: '\\\"',
    0x5c: '\\\\'
};
|
||||||
|
|
||||||
|
// Precomputed encoded length (UTF-8 bytes in the JSON output) for every
// char code below 2048
const charLength2048 = Uint8Array.from({ length: 2048 }, (_, code) =>
    hasOwn(escapableCharCodeSubstitution, code)
        ? 2 // \X
        : code < 0x20
            ? 6 // \uXXXX
            : code < 128 ? 1 : 2 // UTF8 bytes
);
|
||||||
|
|
||||||
|
// High (leading) surrogate range U+D800..U+DBFF
function isLeadingSurrogate(code) {
    return 0xD800 <= code && code <= 0xDBFF;
}
|
||||||
|
|
||||||
|
// Low (trailing) surrogate range U+DC00..U+DFFF
function isTrailingSurrogate(code) {
    return 0xDC00 <= code && code <= 0xDFFF;
}
|
||||||
|
|
||||||
|
// Returns the number of UTF-8 bytes the JSON-encoded string will occupy,
// including the surrounding quotes.
function stringLength(str) {
    // Fast path to compute length when a string contains only characters encoded as single bytes
    if (!/[^\x20\x21\x23-\x5B\x5D-\x7F]/.test(str)) {
        return str.length + 2;
    }

    let len = 0;
    let prevLeadingSurrogate = false;

    for (let i = 0; i < str.length; i++) {
        const code = str.charCodeAt(i);

        if (code < 2048) {
            len += charLength2048[code];
        } else if (isLeadingSurrogate(code)) {
            len += 6; // \uXXXX since no pair with trailing surrogate yet
            prevLeadingSurrogate = true;
            continue;
        } else if (isTrailingSurrogate(code)) {
            len = prevLeadingSurrogate
                ? len - 2 // surrogate pair is 4 UTF-8 bytes; the leading surrogate was already counted as 6, so subtract 2
                : len + 6; // lone trailing surrogate escapes as \uXXXX
        } else {
            len += 3; // code >= 2048 is 3 bytes length for UTF8
        }

        prevLeadingSurrogate = false;
    }

    return len + 2; // +2 for quotes
}
|
||||||
|
|
||||||
|
// avoid producing a string from a number
// Returns the number of characters in the decimal representation of an
// integer, including the leading "-" for negative values.
function intLength(num) {
    let len = 0;

    if (num < 0) {
        len = 1; // leading minus sign
        num = -num;
    }

    // Peel off 9 digits at a time. Looping (instead of a single reduction)
    // keeps the result correct for integers with 19+ digits, e.g. 1e18,
    // which String() still renders in plain decimal notation.
    while (num >= 1e9) {
        len += 9;
        num = (num - num % 1e9) / 1e9;
    }

    // count the remaining (at most 9) digits with a branch ladder
    if (num >= 1e4) {
        if (num >= 1e6) {
            return len + (num >= 1e8
                ? 9
                : num >= 1e7 ? 8 : 7
            );
        }

        return len + (num >= 1e5 ? 6 : 5);
    }

    return len + (num >= 1e2
        ? num >= 1e3 ? 4 : 3
        : num >= 10 ? 2 : 1
    );
}
|
||||||
|
|
||||||
|
// Returns the number of characters the JSON encoding of a primitive value
// occupies. Functions, symbols and other non-serializable values contribute
// nothing (0).
function primitiveLength(value) {
    const type = typeof value;

    if (type === 'string') {
        return stringLength(value);
    }

    if (type === 'number') {
        if (!Number.isFinite(value)) {
            return 4; // printed as null
        }

        return Number.isInteger(value)
            ? intLength(value)
            : String(value).length;
    }

    if (type === 'boolean') {
        return value ? 4 /* true */ : 5 /* false */;
    }

    if (type === 'undefined' || type === 'object') {
        return 4; /* null */
    }

    return 0;
}
|
||||||
|
|
||||||
|
// Estimates the size of JSON.stringify() output for `value` without
// producing the string. Returns { bytes, spaceBytes, circular }: total byte
// count (Infinity when not computable), the portion attributable to
// indentation, and the list of circular references found.
export function stringifyInfo(value, ...args) {
    const { replacer, getKeys, ...options } = normalizeStringifyOptions(...args);
    const continueOnCircular = Boolean(options.continueOnCircular);
    const space = options.space?.length || 0;

    const keysLength = new Map();   // memoized encoded length per object key
    const visited = new Map();      // object -> byte contribution (cycle-free reuse)
    const circular = new Set();
    const stack = [];               // containers currently being walked
    const root = { '': value };
    let stop = false;
    let bytes = 0;
    let spaceBytes = 0;
    let objects = 0;

    walk(root, '', value);

    // when value is undefined or replaced for undefined
    if (bytes === 0) {
        bytes += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?
    }

    return {
        bytes: isNaN(bytes) ? Infinity : bytes + spaceBytes,
        spaceBytes: space > 0 && isNaN(bytes) ? Infinity : spaceBytes,
        circular: [...circular]
    };

    // Recursively accumulates the encoded size of a single holder/key/value
    // triple into `bytes`/`spaceBytes`.
    function walk(holder, key, value) {
        if (stop) {
            return;
        }

        value = replaceValue(holder, key, value, replacer);

        if (value === null || typeof value !== 'object') {
            // primitive
            // undefined object properties are skipped; array elements print as null
            if (value !== undefined || Array.isArray(holder)) {
                bytes += primitiveLength(value);
            }
        } else {
            // check for circular references
            if (stack.includes(value)) {
                circular.add(value);
                bytes += 4; // treat as null

                if (!continueOnCircular) {
                    stop = true;
                }

                return;
            }

            // Using 'visited' allows avoiding hang-ups in cases of highly interconnected object graphs;
            // for example, a list of git commits with references to parents can lead to N^2 complexity for traversal,
            // and N when 'visited' is used
            if (visited.has(value)) {
                bytes += visited.get(value);

                return;
            }

            objects++;

            const prevObjects = objects;
            const valueBytes = bytes;
            let valueLength = 0; // number of entries actually printed

            stack.push(value);

            if (Array.isArray(value)) {
                // array
                valueLength = value.length;

                for (let i = 0; i < valueLength; i++) {
                    walk(value, i, value[i]);
                }
            } else {
                // object
                let prevLength = bytes;

                for (const key of getKeys(value)) {
                    walk(value, key, value[key]);

                    // bytes unchanged means the value was skipped (undefined),
                    // so its key must not be counted either
                    if (prevLength !== bytes) {
                        let keyLen = keysLength.get(key);

                        if (keyLen === undefined) {
                            keysLength.set(key, keyLen = stringLength(key) + 1); // "key":
                        }

                        // value is printed
                        bytes += keyLen;
                        valueLength++;
                        prevLength = bytes;
                    }
                }
            }

            bytes += valueLength === 0
                ? 2 // {} or []
                : 1 + valueLength; // {} or [] + commas

            if (space > 0 && valueLength > 0) {
                spaceBytes +=
                    // a space between ":" and a value for each object entry
                    (Array.isArray(value) ? 0 : valueLength) +
                    // the formula results from folding the following components:
                    // - for each key-value or element: ident + newline
                    //   (1 + stack.length * space) * valueLength
                    // - ident (one space less) before "}" or "]" + newline
                    //   (stack.length - 1) * space + 1
                    (1 + stack.length * space) * (valueLength + 1) - space;
            }

            stack.pop();

            // add to 'visited' only objects that contain nested objects
            if (prevObjects !== objects) {
                visited.set(value, bytes - valueBytes);
            }
        }
    }
};
|
100
node_modules/@discoveryjs/json-ext/src/utils.js
generated
vendored
Normal file
100
node_modules/@discoveryjs/json-ext/src/utils.js
generated
vendored
Normal file
@ -0,0 +1,100 @@
|
|||||||
|
// True when a value can be consumed with for-of / for-await-of, i.e. it
// implements the sync or async iteration protocol. Strings intentionally
// return false (typeof 'string' is not 'object').
export function isIterable(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }

    return typeof value[Symbol.iterator] === 'function' ||
        typeof value[Symbol.asyncIterator] === 'function';
}
|
||||||
|
|
||||||
|
// Applies the JSON.stringify() value transformation pipeline to a single
// holder/key/value triple: toJSON(), then the replacer callback, then
// normalization of non-serializable values and boxed primitives.
export function replaceValue(holder, key, value, replacer) {
    if (value && typeof value.toJSON === 'function') {
        value = value.toJSON();
    }

    if (replacer !== null) {
        value = replacer.call(holder, String(key), value);
    }

    const type = typeof value;

    if (type === 'function' || type === 'symbol') {
        // functions and symbols are not representable in JSON
        value = undefined;
    } else if (type === 'object' && value !== null) {
        const cls = value.constructor;

        // unwrap boxed primitives (new String/Number/Boolean)
        if (cls === String || cls === Number || cls === Boolean) {
            value = value.valueOf();
        }
    }

    return value;
}
|
||||||
|
|
||||||
|
// Normalizes a JSON.stringify() replacer: functions pass through, arrays are
// reduced to a deduplicated list of string property names, anything else
// becomes null (no replacer).
export function normalizeReplacer(replacer) {
    if (typeof replacer === 'function') {
        return replacer;
    }

    if (!Array.isArray(replacer)) {
        return null;
    }

    const allowlist = new Set();

    for (const item of replacer) {
        const cls = item && item.constructor;

        // per the spec only strings and numbers (incl. boxed) are valid entries
        if (cls === String || cls === Number) {
            allowlist.add(String(item));
        }
    }

    return [...allowlist];
}
|
||||||
|
|
||||||
|
// Normalizes a JSON.stringify() space argument into an indentation string,
// or false when no indentation should be applied. Mirrors the spec: numbers
// are clamped to at most 10 spaces, strings are truncated to 10 characters.
export function normalizeSpace(space) {
    switch (typeof space) {
        case 'number':
            return Number.isFinite(space) && space >= 1
                ? ' '.repeat(Math.min(space, 10))
                : false;

        case 'string':
            return space.slice(0, 10) || false;

        default:
            return false;
    }
}
|
||||||
|
|
||||||
|
// Normalizes the (optionsOrReplacer, space) arguments accepted by the
// stringify functions into a single options object carrying a normalized
// replacer, a getKeys function and a normalized space string.
export function normalizeStringifyOptions(optionsOrReplacer, space) {
    // support the JSON.stringify(value, replacer, space) calling convention
    if (optionsOrReplacer === null || Array.isArray(optionsOrReplacer) || typeof optionsOrReplacer !== 'object') {
        optionsOrReplacer = {
            replacer: optionsOrReplacer,
            space
        };
    }

    let replacer = normalizeReplacer(optionsOrReplacer.replacer);
    let getKeys = Object.keys;

    // an array replacer acts as a property allowlist rather than a callback
    if (Array.isArray(replacer)) {
        const allowlist = replacer;

        getKeys = () => allowlist;
        replacer = null;
    }

    return {
        ...optionsOrReplacer,
        replacer,
        getKeys,
        space: normalizeSpace(optionsOrReplacer.space)
    };
}
|
54
node_modules/@discoveryjs/json-ext/src/web-streams.js
generated
vendored
Normal file
54
node_modules/@discoveryjs/json-ext/src/web-streams.js
generated
vendored
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
/* eslint-env browser */
|
||||||
|
import { parseChunked } from './parse-chunked.js';
|
||||||
|
import { stringifyChunked } from './stringify-chunked.js';
|
||||||
|
import { isIterable } from './utils.js';
|
||||||
|
|
||||||
|
// Parses JSON from a web ReadableStream (or any sync/async iterable of
// chunks) by delegating to parseChunked().
export function parseFromWebStream(stream) {
    // 2024/6/17: currently, an @@asyncIterator on a ReadableStream is not widely supported,
    // therefore use a fallback using a reader
    // https://caniuse.com/mdn-api_readablestream_--asynciterator
    return parseChunked(isIterable(stream) ? stream : async function*() {
        const reader = stream.getReader();

        try {
            while (true) {
                const { value, done } = await reader.read();

                if (done) {
                    break;
                }

                yield value;
            }
        } finally {
            // always release the lock so the stream can be used elsewhere
            reader.releaseLock();
        }
    });
}
|
||||||
|
|
||||||
|
// Produces a web ReadableStream of JSON chunks for `value`, accepting the
// same (replacer, space) arguments as JSON.stringify().
export function createStringifyWebStream(value, replacer, space) {
    // 2024/6/17: the ReadableStream.from() static method is supported
    // in Node.js 20.6+ and Firefox only
    if (typeof ReadableStream.from === 'function') {
        return ReadableStream.from(stringifyChunked(value, replacer, space));
    }

    // emulate ReadableStream.from()
    return new ReadableStream({
        start() {
            // `this` is the underlying source object, shared across
            // start/pull/cancel callbacks
            this.generator = stringifyChunked(value, replacer, space);
        },
        pull(controller) {
            const { value, done } = this.generator.next();

            if (done) {
                controller.close();
            } else {
                controller.enqueue(value);
            }
        },
        cancel() {
            // drop the generator so it can be garbage-collected
            this.generator = null;
        }
    });
};
|
19
node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
227
node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
Normal file
227
node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
Normal file
@ -0,0 +1,227 @@
|
|||||||
|
# @jridgewell/gen-mapping
|
||||||
|
|
||||||
|
> Generate source maps
|
||||||
|
|
||||||
|
`gen-mapping` allows you to generate a source map during transpilation or minification.
|
||||||
|
With a source map, you're able to trace the original location in the source file, either in Chrome's
|
||||||
|
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].
|
||||||
|
|
||||||
|
You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This
|
||||||
|
provides the same `addMapping` and `setSourceContent` API.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/gen-mapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
const map = new GenMapping({
|
||||||
|
file: 'output.js',
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
});
|
||||||
|
|
||||||
|
setSourceContent(map, 'input.js', `function foo() {}`);
|
||||||
|
|
||||||
|
addMapping(map, {
|
||||||
|
// Lines start at line 1, columns at column 0.
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
addMapping(map, {
|
||||||
|
generated: { line: 1, column: 9 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 9 },
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(toDecodedMap(map), {
|
||||||
|
version: 3,
|
||||||
|
file: 'output.js',
|
||||||
|
names: ['foo'],
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['function foo() {}'],
|
||||||
|
mappings: [
|
||||||
|
[ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(toEncodedMap(map), {
|
||||||
|
version: 3,
|
||||||
|
file: 'output.js',
|
||||||
|
names: ['foo'],
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['function foo() {}'],
|
||||||
|
mappings: 'AAAA,SAASA',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Smaller Sourcemaps
|
||||||
|
|
||||||
|
Not everything needs to be added to a sourcemap, and needless markings can cause significantly
|
||||||
|
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
|
||||||
|
intelligently determine if this marking adds useful information. If not, the marking will be
|
||||||
|
skipped.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { maybeAddMapping } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
const map = new GenMapping();
|
||||||
|
|
||||||
|
// Adding a sourceless marking at the beginning of a line isn't useful.
|
||||||
|
maybeAddMapping(map, {
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Adding a new source marking is useful.
|
||||||
|
maybeAddMapping(map, {
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
// But adding another marking pointing to the exact same original location isn't, even if the
|
||||||
|
// generated column changed.
|
||||||
|
maybeAddMapping(map, {
|
||||||
|
generated: { line: 1, column: 9 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(toEncodedMap(map), {
|
||||||
|
version: 3,
|
||||||
|
names: [],
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: [null],
|
||||||
|
mappings: 'AAAA',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 5852872 bytes
|
||||||
|
gen-mapping: addMapping 7716042 bytes
|
||||||
|
source-map-js 6143250 bytes
|
||||||
|
source-map-0.6.1 6124102 bytes
|
||||||
|
source-map-0.8.0 6121173 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled)
|
||||||
|
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled)
|
||||||
|
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
|
||||||
|
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled)
|
||||||
|
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 37578063 bytes
|
||||||
|
gen-mapping: addMapping 37212897 bytes
|
||||||
|
source-map-js 47638527 bytes
|
||||||
|
source-map-0.6.1 47690503 bytes
|
||||||
|
source-map-0.8.0 47470188 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addMapping
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
|
||||||
|
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
|
||||||
|
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
|
||||||
|
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
|
||||||
|
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 416247 bytes
|
||||||
|
gen-mapping: addMapping 419824 bytes
|
||||||
|
source-map-js 1024619 bytes
|
||||||
|
source-map-0.6.1 1146004 bytes
|
||||||
|
source-map-0.8.0 1113250 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
|
||||||
|
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
|
||||||
|
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
|
||||||
|
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
|
||||||
|
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 975096 bytes
|
||||||
|
gen-mapping: addMapping 1102981 bytes
|
||||||
|
source-map-js 2918836 bytes
|
||||||
|
source-map-0.6.1 2885435 bytes
|
||||||
|
source-map-0.8.0 2874336 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
|
||||||
|
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
|
||||||
|
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
|
||||||
|
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
```
|
||||||
|
|
||||||
|
[source-map]: https://www.npmjs.com/package/source-map
|
||||||
|
[trace-mapping]: https://github.com/jridgewell/trace-mapping
|
230
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
Normal file
230
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
Normal file
@ -0,0 +1,230 @@
|
|||||||
|
import { SetArray, put, remove } from '@jridgewell/set-array';
|
||||||
|
import { encode } from '@jridgewell/sourcemap-codec';
|
||||||
|
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
// Offsets of the fields within a decoded sourcemap segment tuple:
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;

// Sentinel meaning "segment has no associated name"
const NO_NAME = -1;
|
||||||
|
/**
 * Provides the state to generate a sourcemap.
 */
class GenMapping {
    constructor({ file, sourceRoot } = {}) {
        this._names = new SetArray();      // deduplicated mapping names
        this._sources = new SetArray();    // deduplicated source paths
        this._sourcesContent = [];         // content per source, parallel to _sources
        this._mappings = [];               // decoded mappings: array (per line) of segment arrays
        this.file = file;
        this.sourceRoot = sourceRoot;
        this._ignoreList = new SetArray(); // indices of sources marked as ignored
    }
}
|
||||||
|
/**
 * Typescript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers.
 */
function cast(map) {
    // identity at runtime; the cast only matters in the TypeScript source
    return map;
}
|
||||||
|
// Adds a segment unconditionally (skipable = false); see maybeAddSegment
// for the variant that drops redundant segments.
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
    return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
}
|
||||||
|
// Adds a mapping object unconditionally (skipable = false); see
// maybeAddMapping for the variant that drops redundant mappings.
function addMapping(map, mapping) {
    return addMappingInternal(false, map, mapping);
}
|
||||||
|
/**
 * Same as `addSegment`, but will only add the segment if it generates useful information in the
 * resulting map. This only works correctly if segments are added **in order**, meaning you should
 * not add a segment with a lower generated line/column than one that came before.
 */
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
    return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
};
|
||||||
|
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
const maybeAddMapping = (map, mapping) => {
    return addMappingInternal(true, map, mapping);
};
|
||||||
|
/**
 * Adds/removes the content of the source file to the source map.
 */
function setSourceContent(map, source, content) {
    const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
    // put() interns the source and returns its index
    const index = put(sources, source);
    sourcesContent[index] = content;
}
|
||||||
|
// Marks (or unmarks, with ignore = false) a source as ignored, registering
// the source first when necessary; a newly registered source gets null content.
function setIgnore(map, source, ignore = true) {
    const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
    const index = put(sources, source);
    // index === length means put() just appended a brand-new source
    if (index === sourcesContent.length)
        sourcesContent[index] = null;
    if (ignore)
        put(ignoreList, index);
    else
        remove(ignoreList, index);
}
|
||||||
|
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function toDecodedMap(map) {
    const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
    // trailing lines without any segments carry no information
    removeEmptyFinalLines(mappings);
    return {
        version: 3,
        file: map.file || undefined,
        names: names.array,
        sourceRoot: map.sourceRoot || undefined,
        sources: sources.array,
        sourcesContent,
        mappings,
        ignoreList: ignoreList.array,
    };
}
|
||||||
|
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function toEncodedMap(map) {
    const decoded = toDecodedMap(map);

    // same map, but with the mappings VLQ-encoded into a string
    return {
        ...decoded,
        mappings: encode(decoded.mappings)
    };
}
|
||||||
|
/**
 * Constructs a new GenMapping, using the already present mappings of the input.
 */
function fromMap(input) {
    const map = new TraceMap(input);
    const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
    // copy names/sources as-is; missing sourcesContent defaults to nulls
    putAll(cast(gen)._names, map.names);
    putAll(cast(gen)._sources, map.sources);
    cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
    cast(gen)._mappings = decodedMappings(map);
    if (map.ignoreList)
        putAll(cast(gen)._ignoreList, map.ignoreList);
    return gen;
}
|
||||||
|
/**
|
||||||
|
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||||
|
* passed to the `source-map` library.
|
||||||
|
*/
|
||||||
|
function allMappings(map) {
|
||||||
|
const out = [];
|
||||||
|
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
|
||||||
|
for (let i = 0; i < mappings.length; i++) {
|
||||||
|
const line = mappings[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||||
|
let source = undefined;
|
||||||
|
let original = undefined;
|
||||||
|
let name = undefined;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = sources.array[seg[SOURCES_INDEX]];
|
||||||
|
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names.array[seg[NAMES_INDEX]];
|
||||||
|
}
|
||||||
|
out.push({ generated, source, original, name });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
// This split declaration is only so that terser can elminiate the static initialization block.
|
||||||
|
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||||
|
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
|
||||||
|
const line = getLine(mappings, genLine);
|
||||||
|
const index = getColumnIndex(line, genColumn);
|
||||||
|
if (!source) {
|
||||||
|
if (skipable && skipSourceless(line, index))
|
||||||
|
return;
|
||||||
|
return insert(line, index, [genColumn]);
|
||||||
|
}
|
||||||
|
const sourcesIndex = put(sources, source);
|
||||||
|
const namesIndex = name ? put(names, name) : NO_NAME;
|
||||||
|
if (sourcesIndex === sourcesContent.length)
|
||||||
|
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
|
||||||
|
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return insert(line, index, name
|
||||||
|
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
||||||
|
}
|
||||||
|
function getLine(mappings, index) {
|
||||||
|
for (let i = mappings.length; i <= index; i++) {
|
||||||
|
mappings[i] = [];
|
||||||
|
}
|
||||||
|
return mappings[index];
|
||||||
|
}
|
||||||
|
function getColumnIndex(line, genColumn) {
|
||||||
|
let index = line.length;
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
const current = line[i];
|
||||||
|
if (genColumn >= current[COLUMN])
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
function removeEmptyFinalLines(mappings) {
|
||||||
|
const { length } = mappings;
|
||||||
|
let len = length;
|
||||||
|
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||||
|
if (mappings[i].length > 0)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (len < length)
|
||||||
|
mappings.length = len;
|
||||||
|
}
|
||||||
|
function putAll(setarr, array) {
|
||||||
|
for (let i = 0; i < array.length; i++)
|
||||||
|
put(setarr, array[i]);
|
||||||
|
}
|
||||||
|
function skipSourceless(line, index) {
|
||||||
|
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||||
|
// doesn't generate any useful information.
|
||||||
|
if (index === 0)
|
||||||
|
return true;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||||
|
// genrate any new information. Else, this segment will end the source/named segment and point to
|
||||||
|
// a sourceless position, which is useful.
|
||||||
|
return prev.length === 1;
|
||||||
|
}
|
||||||
|
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||||
|
// A source/named segment at the start of a line gives position at that genColumn
|
||||||
|
if (index === 0)
|
||||||
|
return false;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is sourceless, then we're transitioning to a source.
|
||||||
|
if (prev.length === 1)
|
||||||
|
return false;
|
||||||
|
// If the previous segment maps to the exact same source position, then this segment doesn't
|
||||||
|
// provide any new position information.
|
||||||
|
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
||||||
|
sourceLine === prev[SOURCE_LINE] &&
|
||||||
|
sourceColumn === prev[SOURCE_COLUMN] &&
|
||||||
|
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
||||||
|
}
|
||||||
|
function addMappingInternal(skipable, map, mapping) {
|
||||||
|
const { generated, source, original, name, content } = mapping;
|
||||||
|
if (!source) {
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
|
||||||
|
}
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
|
||||||
|
}
|
||||||
|
|
||||||
|
export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setIgnore, setSourceContent, toDecodedMap, toEncodedMap };
|
||||||
|
//# sourceMappingURL=gen-mapping.mjs.map
|
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
246
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
246
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,246 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping));
|
||||||
|
})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict';
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
|
||||||
|
const NO_NAME = -1;
|
||||||
|
/**
|
||||||
|
* Provides the state to generate a sourcemap.
|
||||||
|
*/
|
||||||
|
class GenMapping {
|
||||||
|
constructor({ file, sourceRoot } = {}) {
|
||||||
|
this._names = new setArray.SetArray();
|
||||||
|
this._sources = new setArray.SetArray();
|
||||||
|
this._sourcesContent = [];
|
||||||
|
this._mappings = [];
|
||||||
|
this.file = file;
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this._ignoreList = new setArray.SetArray();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||||
|
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
}
|
||||||
|
function addMapping(map, mapping) {
|
||||||
|
return addMappingInternal(false, map, mapping);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||||
|
* not add a segment with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||||
|
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||||
|
* not add a mapping with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
const maybeAddMapping = (map, mapping) => {
|
||||||
|
return addMappingInternal(true, map, mapping);
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Adds/removes the content of the source file to the source map.
|
||||||
|
*/
|
||||||
|
function setSourceContent(map, source, content) {
|
||||||
|
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
|
||||||
|
const index = setArray.put(sources, source);
|
||||||
|
sourcesContent[index] = content;
|
||||||
|
}
|
||||||
|
function setIgnore(map, source, ignore = true) {
|
||||||
|
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
|
||||||
|
const index = setArray.put(sources, source);
|
||||||
|
if (index === sourcesContent.length)
|
||||||
|
sourcesContent[index] = null;
|
||||||
|
if (ignore)
|
||||||
|
setArray.put(ignoreList, index);
|
||||||
|
else
|
||||||
|
setArray.remove(ignoreList, index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function toDecodedMap(map) {
|
||||||
|
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
|
||||||
|
removeEmptyFinalLines(mappings);
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: map.file || undefined,
|
||||||
|
names: names.array,
|
||||||
|
sourceRoot: map.sourceRoot || undefined,
|
||||||
|
sources: sources.array,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: ignoreList.array,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function toEncodedMap(map) {
|
||||||
|
const decoded = toDecodedMap(map);
|
||||||
|
return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) });
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||||
|
*/
|
||||||
|
function fromMap(input) {
|
||||||
|
const map = new traceMapping.TraceMap(input);
|
||||||
|
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||||
|
putAll(cast(gen)._names, map.names);
|
||||||
|
putAll(cast(gen)._sources, map.sources);
|
||||||
|
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||||
|
cast(gen)._mappings = traceMapping.decodedMappings(map);
|
||||||
|
if (map.ignoreList)
|
||||||
|
putAll(cast(gen)._ignoreList, map.ignoreList);
|
||||||
|
return gen;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||||
|
* passed to the `source-map` library.
|
||||||
|
*/
|
||||||
|
function allMappings(map) {
|
||||||
|
const out = [];
|
||||||
|
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
|
||||||
|
for (let i = 0; i < mappings.length; i++) {
|
||||||
|
const line = mappings[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||||
|
let source = undefined;
|
||||||
|
let original = undefined;
|
||||||
|
let name = undefined;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = sources.array[seg[SOURCES_INDEX]];
|
||||||
|
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names.array[seg[NAMES_INDEX]];
|
||||||
|
}
|
||||||
|
out.push({ generated, source, original, name });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
// This split declaration is only so that terser can elminiate the static initialization block.
|
||||||
|
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||||
|
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
|
||||||
|
const line = getLine(mappings, genLine);
|
||||||
|
const index = getColumnIndex(line, genColumn);
|
||||||
|
if (!source) {
|
||||||
|
if (skipable && skipSourceless(line, index))
|
||||||
|
return;
|
||||||
|
return insert(line, index, [genColumn]);
|
||||||
|
}
|
||||||
|
const sourcesIndex = setArray.put(sources, source);
|
||||||
|
const namesIndex = name ? setArray.put(names, name) : NO_NAME;
|
||||||
|
if (sourcesIndex === sourcesContent.length)
|
||||||
|
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
|
||||||
|
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return insert(line, index, name
|
||||||
|
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
||||||
|
}
|
||||||
|
function getLine(mappings, index) {
|
||||||
|
for (let i = mappings.length; i <= index; i++) {
|
||||||
|
mappings[i] = [];
|
||||||
|
}
|
||||||
|
return mappings[index];
|
||||||
|
}
|
||||||
|
function getColumnIndex(line, genColumn) {
|
||||||
|
let index = line.length;
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
const current = line[i];
|
||||||
|
if (genColumn >= current[COLUMN])
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
function removeEmptyFinalLines(mappings) {
|
||||||
|
const { length } = mappings;
|
||||||
|
let len = length;
|
||||||
|
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||||
|
if (mappings[i].length > 0)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (len < length)
|
||||||
|
mappings.length = len;
|
||||||
|
}
|
||||||
|
function putAll(setarr, array) {
|
||||||
|
for (let i = 0; i < array.length; i++)
|
||||||
|
setArray.put(setarr, array[i]);
|
||||||
|
}
|
||||||
|
function skipSourceless(line, index) {
|
||||||
|
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||||
|
// doesn't generate any useful information.
|
||||||
|
if (index === 0)
|
||||||
|
return true;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||||
|
// genrate any new information. Else, this segment will end the source/named segment and point to
|
||||||
|
// a sourceless position, which is useful.
|
||||||
|
return prev.length === 1;
|
||||||
|
}
|
||||||
|
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||||
|
// A source/named segment at the start of a line gives position at that genColumn
|
||||||
|
if (index === 0)
|
||||||
|
return false;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is sourceless, then we're transitioning to a source.
|
||||||
|
if (prev.length === 1)
|
||||||
|
return false;
|
||||||
|
// If the previous segment maps to the exact same source position, then this segment doesn't
|
||||||
|
// provide any new position information.
|
||||||
|
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
||||||
|
sourceLine === prev[SOURCE_LINE] &&
|
||||||
|
sourceColumn === prev[SOURCE_COLUMN] &&
|
||||||
|
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
||||||
|
}
|
||||||
|
function addMappingInternal(skipable, map, mapping) {
|
||||||
|
const { generated, source, original, name, content } = mapping;
|
||||||
|
if (!source) {
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
|
||||||
|
}
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.GenMapping = GenMapping;
|
||||||
|
exports.addMapping = addMapping;
|
||||||
|
exports.addSegment = addSegment;
|
||||||
|
exports.allMappings = allMappings;
|
||||||
|
exports.fromMap = fromMap;
|
||||||
|
exports.maybeAddMapping = maybeAddMapping;
|
||||||
|
exports.maybeAddSegment = maybeAddSegment;
|
||||||
|
exports.setIgnore = setIgnore;
|
||||||
|
exports.setSourceContent = setSourceContent;
|
||||||
|
exports.toDecodedMap = toDecodedMap;
|
||||||
|
exports.toEncodedMap = toEncodedMap;
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=gen-mapping.umd.js.map
|
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
88
node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
Normal file
88
node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
||||||
|
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
|
||||||
|
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
|
||||||
|
export declare type Options = {
|
||||||
|
file?: string | null;
|
||||||
|
sourceRoot?: string | null;
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Provides the state to generate a sourcemap.
|
||||||
|
*/
|
||||||
|
export declare class GenMapping {
|
||||||
|
private _names;
|
||||||
|
private _sources;
|
||||||
|
private _sourcesContent;
|
||||||
|
private _mappings;
|
||||||
|
private _ignoreList;
|
||||||
|
file: string | null | undefined;
|
||||||
|
sourceRoot: string | null | undefined;
|
||||||
|
constructor({ file, sourceRoot }?: Options);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A low-level API to associate a generated position with an original source position. Line and
|
||||||
|
* column here are 0-based, unlike `addMapping`.
|
||||||
|
*/
|
||||||
|
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
|
||||||
|
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
|
||||||
|
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
|
||||||
|
/**
|
||||||
|
* A high-level API to associate a generated position with an original source position. Line is
|
||||||
|
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
|
||||||
|
*/
|
||||||
|
export declare function addMapping(map: GenMapping, mapping: {
|
||||||
|
generated: Pos;
|
||||||
|
source?: null;
|
||||||
|
original?: null;
|
||||||
|
name?: null;
|
||||||
|
content?: null;
|
||||||
|
}): void;
|
||||||
|
export declare function addMapping(map: GenMapping, mapping: {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name?: null;
|
||||||
|
content?: string | null;
|
||||||
|
}): void;
|
||||||
|
export declare function addMapping(map: GenMapping, mapping: {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name: string;
|
||||||
|
content?: string | null;
|
||||||
|
}): void;
|
||||||
|
/**
|
||||||
|
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||||
|
* not add a segment with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
export declare const maybeAddSegment: typeof addSegment;
|
||||||
|
/**
|
||||||
|
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||||
|
* not add a mapping with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
export declare const maybeAddMapping: typeof addMapping;
|
||||||
|
/**
|
||||||
|
* Adds/removes the content of the source file to the source map.
|
||||||
|
*/
|
||||||
|
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
|
||||||
|
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
|
||||||
|
/**
|
||||||
|
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||||
|
*/
|
||||||
|
export declare function fromMap(input: SourceMapInput): GenMapping;
|
||||||
|
/**
|
||||||
|
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||||
|
* passed to the `source-map` library.
|
||||||
|
*/
|
||||||
|
export declare function allMappings(map: GenMapping): Mapping[];
|
12
node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
12
node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
declare type GeneratedColumn = number;
|
||||||
|
declare type SourcesIndex = number;
|
||||||
|
declare type SourceLine = number;
|
||||||
|
declare type SourceColumn = number;
|
||||||
|
declare type NamesIndex = number;
|
||||||
|
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
|
||||||
|
export declare const COLUMN = 0;
|
||||||
|
export declare const SOURCES_INDEX = 1;
|
||||||
|
export declare const SOURCE_LINE = 2;
|
||||||
|
export declare const SOURCE_COLUMN = 3;
|
||||||
|
export declare const NAMES_INDEX = 4;
|
||||||
|
export {};
|
36
node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
Normal file
36
node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
export interface SourceMapV3 {
|
||||||
|
file?: string | null;
|
||||||
|
names: readonly string[];
|
||||||
|
sourceRoot?: string;
|
||||||
|
sources: readonly (string | null)[];
|
||||||
|
sourcesContent?: readonly (string | null)[];
|
||||||
|
version: 3;
|
||||||
|
ignoreList?: readonly number[];
|
||||||
|
}
|
||||||
|
export interface EncodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: string;
|
||||||
|
}
|
||||||
|
export interface DecodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: readonly SourceMapSegment[][];
|
||||||
|
}
|
||||||
|
export interface Pos {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
}
|
||||||
|
export declare type Mapping = {
|
||||||
|
generated: Pos;
|
||||||
|
source: undefined;
|
||||||
|
original: undefined;
|
||||||
|
name: undefined;
|
||||||
|
} | {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name: string;
|
||||||
|
} | {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name: undefined;
|
||||||
|
};
|
19
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
257
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
257
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
@ -0,0 +1,257 @@
|
|||||||
|
# @jridgewell/trace-mapping
|
||||||
|
|
||||||
|
> Trace the original position through a source map
|
||||||
|
|
||||||
|
`trace-mapping` allows you to take the line and column of an output file and trace it to the
|
||||||
|
original location in the source file through a source map.
|
||||||
|
|
||||||
|
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
|
||||||
|
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/trace-mapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
TraceMap,
|
||||||
|
originalPositionFor,
|
||||||
|
generatedPositionFor,
|
||||||
|
sourceContentFor,
|
||||||
|
isIgnored,
|
||||||
|
} from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
const tracer = new TraceMap({
|
||||||
|
version: 3,
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['content of input.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'KAyCIA',
|
||||||
|
ignoreList: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
// Lines start at line 1, columns at column 0.
|
||||||
|
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = sourceContentFor(tracer, traced.source);
|
||||||
|
assert.strictEqual(content, 'content for input.js');
|
||||||
|
|
||||||
|
const generated = generatedPositionFor(tracer, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
});
|
||||||
|
assert.deepEqual(generated, {
|
||||||
|
line: 1,
|
||||||
|
column: 5,
|
||||||
|
});
|
||||||
|
|
||||||
|
const ignored = isIgnored(tracer, 'input.js');
|
||||||
|
assert.equal(ignored, false);
|
||||||
|
```
|
||||||
|
|
||||||
|
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
|
||||||
|
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { traceSegment } from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
// line is 0-base.
|
||||||
|
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);
|
||||||
|
|
||||||
|
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
// Again, line is 0-base and so is sourceLine
|
||||||
|
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
|
||||||
|
```
|
||||||
|
|
||||||
|
### SectionedSourceMaps
|
||||||
|
|
||||||
|
The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
|
||||||
|
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
|
||||||
|
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
|
||||||
|
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
|
||||||
|
`TraceMap` instance:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { AnyMap } from '@jridgewell/trace-mapping';
|
||||||
|
const fooOutput = 'foo';
|
||||||
|
const barOutput = 'bar';
|
||||||
|
const output = [fooOutput, barOutput].join('\n');
|
||||||
|
|
||||||
|
const sectioned = new AnyMap({
|
||||||
|
version: 3,
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
// 0-base line and column
|
||||||
|
offset: { line: 0, column: 0 },
|
||||||
|
// fooOutput's sourcemap
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['foo.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
// barOutput's sourcemap will not affect the first line, only the second
|
||||||
|
offset: { line: 1, column: 0 },
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['bar.js'],
|
||||||
|
names: ['bar'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const traced = originalPositionFor(sectioned, {
|
||||||
|
line: 2,
|
||||||
|
column: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'bar.js',
|
||||||
|
line: 1,
|
||||||
|
column: 0,
|
||||||
|
name: 'bar',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map - 45120 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 562400 bytes
|
||||||
|
trace-mapping encoded 5706544 bytes
|
||||||
|
source-map-js 10717664 bytes
|
||||||
|
source-map-0.6.1 17446384 bytes
|
||||||
|
source-map-0.8.0 9701757 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map - 347793 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 89832 bytes
|
||||||
|
trace-mapping encoded 35474640 bytes
|
||||||
|
source-map-js 51257176 bytes
|
||||||
|
source-map-0.6.1 63515664 bytes
|
||||||
|
source-map-0.8.0 42933752 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map - 1992 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 37128 bytes
|
||||||
|
trace-mapping encoded 247280 bytes
|
||||||
|
source-map-js 1143536 bytes
|
||||||
|
source-map-0.6.1 1290992 bytes
|
||||||
|
source-map-0.8.0 96544 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map - 5726 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 16176 bytes
|
||||||
|
trace-mapping encoded 681552 bytes
|
||||||
|
source-map-js 2418352 bytes
|
||||||
|
source-map-0.6.1 2443672 bytes
|
||||||
|
source-map-0.8.0 111768 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
```
|
||||||
|
|
||||||
|
[source-map]: https://www.npmjs.com/package/source-map
|
580
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
580
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@ -0,0 +1,580 @@
|
|||||||
|
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||||
|
import resolveUri from '@jridgewell/resolve-uri';
|
||||||
|
|
||||||
|
function resolve(input, base) {
|
||||||
|
// The base is always treated as a directory, if it's not empty.
|
||||||
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
|
if (base && !base.endsWith('/'))
|
||||||
|
base += '/';
|
||||||
|
return resolveUri(input, base);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
function stripFilename(path) {
|
||||||
|
if (!path)
|
||||||
|
return '';
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
const REV_GENERATED_LINE = 1;
|
||||||
|
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
function maybeSort(mappings, owned) {
|
||||||
|
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||||
|
if (unsortedIndex === mappings.length)
|
||||||
|
return mappings;
|
||||||
|
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||||
|
// not, we do not want to modify the consumer's input array.
|
||||||
|
if (!owned)
|
||||||
|
mappings = mappings.slice();
|
||||||
|
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||||
|
mappings[i] = sortSegments(mappings[i], owned);
|
||||||
|
}
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
function nextUnsortedSegmentLine(mappings, start) {
|
||||||
|
for (let i = start; i < mappings.length; i++) {
|
||||||
|
if (!isSorted(mappings[i]))
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
return mappings.length;
|
||||||
|
}
|
||||||
|
function isSorted(line) {
|
||||||
|
for (let j = 1; j < line.length; j++) {
|
||||||
|
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function sortSegments(line, owned) {
|
||||||
|
if (!owned)
|
||||||
|
line = line.slice();
|
||||||
|
return line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[COLUMN] - b[COLUMN];
|
||||||
|
}
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
function binarySearch(haystack, needle, low, high) {
|
||||||
|
while (low <= high) {
|
||||||
|
const mid = low + ((high - low) >> 1);
|
||||||
|
const cmp = haystack[mid][COLUMN] - needle;
|
||||||
|
if (cmp === 0) {
|
||||||
|
found = true;
|
||||||
|
return mid;
|
||||||
|
}
|
||||||
|
if (cmp < 0) {
|
||||||
|
low = mid + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = mid - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
found = false;
|
||||||
|
return low - 1;
|
||||||
|
}
|
||||||
|
function upperBound(haystack, needle, index) {
|
||||||
|
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function lowerBound(haystack, needle, index) {
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function memoizedState() {
|
||||||
|
return {
|
||||||
|
lastKey: -1,
|
||||||
|
lastNeedle: -1,
|
||||||
|
lastIndex: -1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||||
|
const { lastKey, lastNeedle, lastIndex } = state;
|
||||||
|
let low = 0;
|
||||||
|
let high = haystack.length - 1;
|
||||||
|
if (key === lastKey) {
|
||||||
|
if (needle === lastNeedle) {
|
||||||
|
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||||
|
return lastIndex;
|
||||||
|
}
|
||||||
|
if (needle >= lastNeedle) {
|
||||||
|
// lastIndex may be -1 if the previous needle was not found.
|
||||||
|
low = lastIndex === -1 ? 0 : lastIndex;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = lastIndex;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.lastKey = key;
|
||||||
|
state.lastNeedle = needle;
|
||||||
|
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||||
|
// of generated line/column.
|
||||||
|
function buildBySources(decoded, memos) {
|
||||||
|
const sources = memos.map(buildNullArray);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
if (seg.length === 1)
|
||||||
|
continue;
|
||||||
|
const sourceIndex = seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
const originalSource = sources[sourceIndex];
|
||||||
|
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||||
|
const memo = memos[sourceIndex];
|
||||||
|
// The binary search either found a match, or it found the left-index just before where the
|
||||||
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
|
// generated segments associated with an original location, so there may need to move several
|
||||||
|
// indexes before we find where we need to insert.
|
||||||
|
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
|
memo.lastIndex = ++index;
|
||||||
|
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sources;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||||
|
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||||
|
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||||
|
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||||
|
// order when iterating with for-in.
|
||||||
|
function buildNullArray() {
|
||||||
|
return { __proto__: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
const AnyMap = function (map, mapUrl) {
|
||||||
|
const parsed = parse(map);
|
||||||
|
if (!('sections' in parsed)) {
|
||||||
|
return new TraceMap(parsed, mapUrl);
|
||||||
|
}
|
||||||
|
const mappings = [];
|
||||||
|
const sources = [];
|
||||||
|
const sourcesContent = [];
|
||||||
|
const names = [];
|
||||||
|
const ignoreList = [];
|
||||||
|
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||||
|
const joined = {
|
||||||
|
version: 3,
|
||||||
|
file: parsed.file,
|
||||||
|
names,
|
||||||
|
sources,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList,
|
||||||
|
};
|
||||||
|
return presortedDecodedMap(joined);
|
||||||
|
};
|
||||||
|
function parse(map) {
|
||||||
|
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
|
}
|
||||||
|
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const { sections } = input;
|
||||||
|
for (let i = 0; i < sections.length; i++) {
|
||||||
|
const { map, offset } = sections[i];
|
||||||
|
let sl = stopLine;
|
||||||
|
let sc = stopColumn;
|
||||||
|
if (i + 1 < sections.length) {
|
||||||
|
const nextOffset = sections[i + 1].offset;
|
||||||
|
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||||
|
if (sl === stopLine) {
|
||||||
|
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||||
|
}
|
||||||
|
else if (sl < stopLine) {
|
||||||
|
sc = columnOffset + nextOffset.column;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const parsed = parse(input);
|
||||||
|
if ('sections' in parsed)
|
||||||
|
return recurse(...arguments);
|
||||||
|
const map = new TraceMap(parsed, mapUrl);
|
||||||
|
const sourcesOffset = sources.length;
|
||||||
|
const namesOffset = names.length;
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||||
|
append(sources, resolvedSources);
|
||||||
|
append(names, map.names);
|
||||||
|
if (contents)
|
||||||
|
append(sourcesContent, contents);
|
||||||
|
else
|
||||||
|
for (let i = 0; i < resolvedSources.length; i++)
|
||||||
|
sourcesContent.push(null);
|
||||||
|
if (ignores)
|
||||||
|
for (let i = 0; i < ignores.length; i++)
|
||||||
|
ignoreList.push(ignores[i] + sourcesOffset);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const lineI = lineOffset + i;
|
||||||
|
// We can only add so many lines before we step into the range that the next section's map
|
||||||
|
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||||
|
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||||
|
// still need to check that we don't overstep lines, too.
|
||||||
|
if (lineI > stopLine)
|
||||||
|
return;
|
||||||
|
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||||
|
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||||
|
const out = getLine(mappings, lineI);
|
||||||
|
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||||
|
// map can be multiple lines), it doesn't.
|
||||||
|
const cOffset = i === 0 ? columnOffset : 0;
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const column = cOffset + seg[COLUMN];
|
||||||
|
// If this segment steps into the column range that the next section's map controls, we need
|
||||||
|
// to stop early.
|
||||||
|
if (lineI === stopLine && column >= stopColumn)
|
||||||
|
return;
|
||||||
|
if (seg.length === 1) {
|
||||||
|
out.push([column]);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
out.push(seg.length === 4
|
||||||
|
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||||
|
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function append(arr, other) {
|
||||||
|
for (let i = 0; i < other.length; i++)
|
||||||
|
arr.push(other[i]);
|
||||||
|
}
|
||||||
|
function getLine(arr, index) {
|
||||||
|
for (let i = arr.length; i <= index; i++)
|
||||||
|
arr[i] = [];
|
||||||
|
return arr[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||||
|
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||||
|
const LEAST_UPPER_BOUND = -1;
|
||||||
|
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
class TraceMap {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const isString = typeof map === 'string';
|
||||||
|
if (!isString && map._decodedMemo)
|
||||||
|
return map;
|
||||||
|
const parsed = (isString ? JSON.parse(map) : map);
|
||||||
|
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||||
|
this.version = version;
|
||||||
|
this.file = file;
|
||||||
|
this.names = names || [];
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this.sources = sources;
|
||||||
|
this.sourcesContent = sourcesContent;
|
||||||
|
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||||
|
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||||
|
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||||
|
const { mappings } = parsed;
|
||||||
|
if (typeof mappings === 'string') {
|
||||||
|
this._encoded = mappings;
|
||||||
|
this._decoded = undefined;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this._encoded = undefined;
|
||||||
|
this._decoded = maybeSort(mappings, isString);
|
||||||
|
}
|
||||||
|
this._decodedMemo = memoizedState();
|
||||||
|
this._bySources = undefined;
|
||||||
|
this._bySourceMemos = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function encodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
var _b;
|
||||||
|
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function decodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
function traceSegment(map, line, column) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return null;
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
return index === -1 ? null : segments[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(map, needle) {
|
||||||
|
let { line, column, bias } = needle;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (index === -1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
if (segment.length === 1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function generatedPositionFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function allGeneratedPositionsFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||||
|
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
function eachMapping(map, cb) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generatedLine = i + 1;
|
||||||
|
const generatedColumn = seg[0];
|
||||||
|
let source = null;
|
||||||
|
let originalLine = null;
|
||||||
|
let originalColumn = null;
|
||||||
|
let name = null;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = resolvedSources[seg[1]];
|
||||||
|
originalLine = seg[2] + 1;
|
||||||
|
originalColumn = seg[3];
|
||||||
|
}
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names[seg[4]];
|
||||||
|
cb({
|
||||||
|
generatedLine,
|
||||||
|
generatedColumn,
|
||||||
|
source,
|
||||||
|
originalLine,
|
||||||
|
originalColumn,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function sourceIndex(map, source) {
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let index = sources.indexOf(source);
|
||||||
|
if (index === -1)
|
||||||
|
index = resolvedSources.indexOf(source);
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
function sourceContentFor(map, source) {
|
||||||
|
const { sourcesContent } = map;
|
||||||
|
if (sourcesContent == null)
|
||||||
|
return null;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? null : sourcesContent[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
function isIgnored(map, source) {
|
||||||
|
const { ignoreList } = map;
|
||||||
|
if (ignoreList == null)
|
||||||
|
return false;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? false : ignoreList.includes(index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
function presortedDecodedMap(map, mapUrl) {
|
||||||
|
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||||
|
cast(tracer)._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function decodedMap(map) {
|
||||||
|
return clone(map, decodedMappings(map));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function encodedMap(map) {
|
||||||
|
return clone(map, encodedMappings(map));
|
||||||
|
}
|
||||||
|
function clone(map, mappings) {
|
||||||
|
return {
|
||||||
|
version: map.version,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function OMapping(source, line, column, name) {
|
||||||
|
return { source, line, column, name };
|
||||||
|
}
|
||||||
|
function GMapping(line, column) {
|
||||||
|
return { line, column };
|
||||||
|
}
|
||||||
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
|
if (found) {
|
||||||
|
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||||
|
}
|
||||||
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
|
index++;
|
||||||
|
if (index === -1 || index === segments.length)
|
||||||
|
return -1;
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
||||||
|
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
||||||
|
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
||||||
|
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
||||||
|
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
||||||
|
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
||||||
|
// match LEAST_UPPER_BOUND.
|
||||||
|
if (!found && bias === LEAST_UPPER_BOUND)
|
||||||
|
min++;
|
||||||
|
if (min === -1 || min === segments.length)
|
||||||
|
return [];
|
||||||
|
// We may have found the segment that started at an earlier column. If this is the case, then we
|
||||||
|
// need to slice all generated segments that match _that_ column, because all such segments span
|
||||||
|
// to our desired column.
|
||||||
|
const matchedColumn = found ? column : segments[min][COLUMN];
|
||||||
|
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
||||||
|
if (!found)
|
||||||
|
min = lowerBound(segments, matchedColumn, min);
|
||||||
|
const max = upperBound(segments, matchedColumn, min);
|
||||||
|
const result = [];
|
||||||
|
for (; min <= max; min++) {
|
||||||
|
const segment = segments[min];
|
||||||
|
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function generatedPosition(map, source, line, column, bias, all) {
|
||||||
|
var _a;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let sourceIndex = sources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
sourceIndex = resolvedSources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
||||||
|
const segments = generated[sourceIndex][line];
|
||||||
|
if (segments == null)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const memo = cast(map)._bySourceMemos[sourceIndex];
|
||||||
|
if (all)
|
||||||
|
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
||||||
|
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
||||||
|
if (index === -1)
|
||||||
|
return GMapping(null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
||||||
|
}
|
||||||
|
|
||||||
|
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
||||||
|
//# sourceMappingURL=trace-mapping.mjs.map
|
1
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
600
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
600
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,600 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||||
|
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||||
|
|
||||||
|
function resolve(input, base) {
|
||||||
|
// The base is always treated as a directory, if it's not empty.
|
||||||
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
|
if (base && !base.endsWith('/'))
|
||||||
|
base += '/';
|
||||||
|
return resolveUri(input, base);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
function stripFilename(path) {
|
||||||
|
if (!path)
|
||||||
|
return '';
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
const REV_GENERATED_LINE = 1;
|
||||||
|
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
function maybeSort(mappings, owned) {
|
||||||
|
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||||
|
if (unsortedIndex === mappings.length)
|
||||||
|
return mappings;
|
||||||
|
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||||
|
// not, we do not want to modify the consumer's input array.
|
||||||
|
if (!owned)
|
||||||
|
mappings = mappings.slice();
|
||||||
|
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||||
|
mappings[i] = sortSegments(mappings[i], owned);
|
||||||
|
}
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
function nextUnsortedSegmentLine(mappings, start) {
|
||||||
|
for (let i = start; i < mappings.length; i++) {
|
||||||
|
if (!isSorted(mappings[i]))
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
return mappings.length;
|
||||||
|
}
|
||||||
|
function isSorted(line) {
|
||||||
|
for (let j = 1; j < line.length; j++) {
|
||||||
|
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function sortSegments(line, owned) {
|
||||||
|
if (!owned)
|
||||||
|
line = line.slice();
|
||||||
|
return line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[COLUMN] - b[COLUMN];
|
||||||
|
}
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
function binarySearch(haystack, needle, low, high) {
|
||||||
|
while (low <= high) {
|
||||||
|
const mid = low + ((high - low) >> 1);
|
||||||
|
const cmp = haystack[mid][COLUMN] - needle;
|
||||||
|
if (cmp === 0) {
|
||||||
|
found = true;
|
||||||
|
return mid;
|
||||||
|
}
|
||||||
|
if (cmp < 0) {
|
||||||
|
low = mid + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = mid - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
found = false;
|
||||||
|
return low - 1;
|
||||||
|
}
|
||||||
|
function upperBound(haystack, needle, index) {
|
||||||
|
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function lowerBound(haystack, needle, index) {
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function memoizedState() {
|
||||||
|
return {
|
||||||
|
lastKey: -1,
|
||||||
|
lastNeedle: -1,
|
||||||
|
lastIndex: -1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||||
|
const { lastKey, lastNeedle, lastIndex } = state;
|
||||||
|
let low = 0;
|
||||||
|
let high = haystack.length - 1;
|
||||||
|
if (key === lastKey) {
|
||||||
|
if (needle === lastNeedle) {
|
||||||
|
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||||
|
return lastIndex;
|
||||||
|
}
|
||||||
|
if (needle >= lastNeedle) {
|
||||||
|
// lastIndex may be -1 if the previous needle was not found.
|
||||||
|
low = lastIndex === -1 ? 0 : lastIndex;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = lastIndex;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.lastKey = key;
|
||||||
|
state.lastNeedle = needle;
|
||||||
|
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||||
|
// of generated line/column.
|
||||||
|
function buildBySources(decoded, memos) {
|
||||||
|
const sources = memos.map(buildNullArray);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
if (seg.length === 1)
|
||||||
|
continue;
|
||||||
|
const sourceIndex = seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
const originalSource = sources[sourceIndex];
|
||||||
|
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||||
|
const memo = memos[sourceIndex];
|
||||||
|
// The binary search either found a match, or it found the left-index just before where the
|
||||||
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
|
// generated segments associated with an original location, so there may need to move several
|
||||||
|
// indexes before we find where we need to insert.
|
||||||
|
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
|
memo.lastIndex = ++index;
|
||||||
|
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sources;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||||
|
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||||
|
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||||
|
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||||
|
// order when iterating with for-in.
|
||||||
|
function buildNullArray() {
|
||||||
|
return { __proto__: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
const AnyMap = function (map, mapUrl) {
|
||||||
|
const parsed = parse(map);
|
||||||
|
if (!('sections' in parsed)) {
|
||||||
|
return new TraceMap(parsed, mapUrl);
|
||||||
|
}
|
||||||
|
const mappings = [];
|
||||||
|
const sources = [];
|
||||||
|
const sourcesContent = [];
|
||||||
|
const names = [];
|
||||||
|
const ignoreList = [];
|
||||||
|
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||||
|
const joined = {
|
||||||
|
version: 3,
|
||||||
|
file: parsed.file,
|
||||||
|
names,
|
||||||
|
sources,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList,
|
||||||
|
};
|
||||||
|
return presortedDecodedMap(joined);
|
||||||
|
};
|
||||||
|
function parse(map) {
|
||||||
|
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
|
}
|
||||||
|
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const { sections } = input;
|
||||||
|
for (let i = 0; i < sections.length; i++) {
|
||||||
|
const { map, offset } = sections[i];
|
||||||
|
let sl = stopLine;
|
||||||
|
let sc = stopColumn;
|
||||||
|
if (i + 1 < sections.length) {
|
||||||
|
const nextOffset = sections[i + 1].offset;
|
||||||
|
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||||
|
if (sl === stopLine) {
|
||||||
|
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||||
|
}
|
||||||
|
else if (sl < stopLine) {
|
||||||
|
sc = columnOffset + nextOffset.column;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const parsed = parse(input);
|
||||||
|
if ('sections' in parsed)
|
||||||
|
return recurse(...arguments);
|
||||||
|
const map = new TraceMap(parsed, mapUrl);
|
||||||
|
const sourcesOffset = sources.length;
|
||||||
|
const namesOffset = names.length;
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||||
|
append(sources, resolvedSources);
|
||||||
|
append(names, map.names);
|
||||||
|
if (contents)
|
||||||
|
append(sourcesContent, contents);
|
||||||
|
else
|
||||||
|
for (let i = 0; i < resolvedSources.length; i++)
|
||||||
|
sourcesContent.push(null);
|
||||||
|
if (ignores)
|
||||||
|
for (let i = 0; i < ignores.length; i++)
|
||||||
|
ignoreList.push(ignores[i] + sourcesOffset);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const lineI = lineOffset + i;
|
||||||
|
// We can only add so many lines before we step into the range that the next section's map
|
||||||
|
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||||
|
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||||
|
// still need to check that we don't overstep lines, too.
|
||||||
|
if (lineI > stopLine)
|
||||||
|
return;
|
||||||
|
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||||
|
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||||
|
const out = getLine(mappings, lineI);
|
||||||
|
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||||
|
// map can be multiple lines), it doesn't.
|
||||||
|
const cOffset = i === 0 ? columnOffset : 0;
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const column = cOffset + seg[COLUMN];
|
||||||
|
// If this segment steps into the column range that the next section's map controls, we need
|
||||||
|
// to stop early.
|
||||||
|
if (lineI === stopLine && column >= stopColumn)
|
||||||
|
return;
|
||||||
|
if (seg.length === 1) {
|
||||||
|
out.push([column]);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
out.push(seg.length === 4
|
||||||
|
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||||
|
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function append(arr, other) {
|
||||||
|
for (let i = 0; i < other.length; i++)
|
||||||
|
arr.push(other[i]);
|
||||||
|
}
|
||||||
|
function getLine(arr, index) {
|
||||||
|
for (let i = arr.length; i <= index; i++)
|
||||||
|
arr[i] = [];
|
||||||
|
return arr[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||||
|
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||||
|
const LEAST_UPPER_BOUND = -1;
|
||||||
|
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
class TraceMap {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const isString = typeof map === 'string';
|
||||||
|
if (!isString && map._decodedMemo)
|
||||||
|
return map;
|
||||||
|
const parsed = (isString ? JSON.parse(map) : map);
|
||||||
|
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||||
|
this.version = version;
|
||||||
|
this.file = file;
|
||||||
|
this.names = names || [];
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this.sources = sources;
|
||||||
|
this.sourcesContent = sourcesContent;
|
||||||
|
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||||
|
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||||
|
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||||
|
const { mappings } = parsed;
|
||||||
|
if (typeof mappings === 'string') {
|
||||||
|
this._encoded = mappings;
|
||||||
|
this._decoded = undefined;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this._encoded = undefined;
|
||||||
|
this._decoded = maybeSort(mappings, isString);
|
||||||
|
}
|
||||||
|
this._decodedMemo = memoizedState();
|
||||||
|
this._bySources = undefined;
|
||||||
|
this._bySourceMemos = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function encodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
var _b;
|
||||||
|
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = sourcemapCodec.encode(cast(map)._decoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function decodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
return ((_a = cast(map))._decoded || (_a._decoded = sourcemapCodec.decode(cast(map)._encoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
function traceSegment(map, line, column) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return null;
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
return index === -1 ? null : segments[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(map, needle) {
|
||||||
|
let { line, column, bias } = needle;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (index === -1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
if (segment.length === 1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function generatedPositionFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function allGeneratedPositionsFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||||
|
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
function eachMapping(map, cb) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generatedLine = i + 1;
|
||||||
|
const generatedColumn = seg[0];
|
||||||
|
let source = null;
|
||||||
|
let originalLine = null;
|
||||||
|
let originalColumn = null;
|
||||||
|
let name = null;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = resolvedSources[seg[1]];
|
||||||
|
originalLine = seg[2] + 1;
|
||||||
|
originalColumn = seg[3];
|
||||||
|
}
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names[seg[4]];
|
||||||
|
cb({
|
||||||
|
generatedLine,
|
||||||
|
generatedColumn,
|
||||||
|
source,
|
||||||
|
originalLine,
|
||||||
|
originalColumn,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function sourceIndex(map, source) {
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let index = sources.indexOf(source);
|
||||||
|
if (index === -1)
|
||||||
|
index = resolvedSources.indexOf(source);
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
function sourceContentFor(map, source) {
|
||||||
|
const { sourcesContent } = map;
|
||||||
|
if (sourcesContent == null)
|
||||||
|
return null;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? null : sourcesContent[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
function isIgnored(map, source) {
|
||||||
|
const { ignoreList } = map;
|
||||||
|
if (ignoreList == null)
|
||||||
|
return false;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? false : ignoreList.includes(index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
function presortedDecodedMap(map, mapUrl) {
|
||||||
|
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||||
|
cast(tracer)._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function decodedMap(map) {
|
||||||
|
return clone(map, decodedMappings(map));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function encodedMap(map) {
|
||||||
|
return clone(map, encodedMappings(map));
|
||||||
|
}
|
||||||
|
function clone(map, mappings) {
|
||||||
|
return {
|
||||||
|
version: map.version,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function OMapping(source, line, column, name) {
|
||||||
|
return { source, line, column, name };
|
||||||
|
}
|
||||||
|
function GMapping(line, column) {
|
||||||
|
return { line, column };
|
||||||
|
}
|
||||||
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
|
if (found) {
|
||||||
|
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||||
|
}
|
||||||
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
|
index++;
|
||||||
|
if (index === -1 || index === segments.length)
|
||||||
|
return -1;
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
||||||
|
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
||||||
|
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
||||||
|
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
||||||
|
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
||||||
|
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
||||||
|
// match LEAST_UPPER_BOUND.
|
||||||
|
if (!found && bias === LEAST_UPPER_BOUND)
|
||||||
|
min++;
|
||||||
|
if (min === -1 || min === segments.length)
|
||||||
|
return [];
|
||||||
|
// We may have found the segment that started at an earlier column. If this is the case, then we
|
||||||
|
// need to slice all generated segments that match _that_ column, because all such segments span
|
||||||
|
// to our desired column.
|
||||||
|
const matchedColumn = found ? column : segments[min][COLUMN];
|
||||||
|
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
||||||
|
if (!found)
|
||||||
|
min = lowerBound(segments, matchedColumn, min);
|
||||||
|
const max = upperBound(segments, matchedColumn, min);
|
||||||
|
const result = [];
|
||||||
|
for (; min <= max; min++) {
|
||||||
|
const segment = segments[min];
|
||||||
|
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function generatedPosition(map, source, line, column, bias, all) {
|
||||||
|
var _a;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let sourceIndex = sources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
sourceIndex = resolvedSources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
||||||
|
const segments = generated[sourceIndex][line];
|
||||||
|
if (segments == null)
|
||||||
|
return all ? [] : GMapping(null, null);
|
||||||
|
const memo = cast(map)._bySourceMemos[sourceIndex];
|
||||||
|
if (all)
|
||||||
|
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
||||||
|
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
||||||
|
if (index === -1)
|
||||||
|
return GMapping(null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.AnyMap = AnyMap;
|
||||||
|
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
|
||||||
|
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
|
||||||
|
exports.TraceMap = TraceMap;
|
||||||
|
exports.allGeneratedPositionsFor = allGeneratedPositionsFor;
|
||||||
|
exports.decodedMap = decodedMap;
|
||||||
|
exports.decodedMappings = decodedMappings;
|
||||||
|
exports.eachMapping = eachMapping;
|
||||||
|
exports.encodedMap = encodedMap;
|
||||||
|
exports.encodedMappings = encodedMappings;
|
||||||
|
exports.generatedPositionFor = generatedPositionFor;
|
||||||
|
exports.isIgnored = isIgnored;
|
||||||
|
exports.originalPositionFor = originalPositionFor;
|
||||||
|
exports.presortedDecodedMap = presortedDecodedMap;
|
||||||
|
exports.sourceContentFor = sourceContentFor;
|
||||||
|
exports.traceSegment = traceSegment;
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=trace-mapping.umd.js.map
|
1
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
8
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
import { TraceMap } from './trace-mapping';
|
||||||
|
import type { SectionedSourceMapInput } from './types';
|
||||||
|
type AnyMap = {
|
||||||
|
new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
|
||||||
|
(map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
|
||||||
|
};
|
||||||
|
export declare const AnyMap: AnyMap;
|
||||||
|
export {};
|
32
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
32
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
|
||||||
|
export type MemoState = {
|
||||||
|
lastKey: number;
|
||||||
|
lastNeedle: number;
|
||||||
|
lastIndex: number;
|
||||||
|
};
|
||||||
|
export declare let found: boolean;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
|
||||||
|
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||||
|
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||||
|
export declare function memoizedState(): MemoState;
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
|
7
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
7
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { MemoState } from './binary-search';
|
||||||
|
export type Source = {
|
||||||
|
__proto__: null;
|
||||||
|
[line: number]: Exclude<ReverseSegment, [number]>[];
|
||||||
|
};
|
||||||
|
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
|
1
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
1
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
export default function resolve(input: string, base: string | undefined): string;
|
2
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
2
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
|
16
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
16
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
type GeneratedColumn = number;
|
||||||
|
type SourcesIndex = number;
|
||||||
|
type SourceLine = number;
|
||||||
|
type SourceColumn = number;
|
||||||
|
type NamesIndex = number;
|
||||||
|
type GeneratedLine = number;
|
||||||
|
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
|
||||||
|
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
|
||||||
|
export declare const COLUMN = 0;
|
||||||
|
export declare const SOURCES_INDEX = 1;
|
||||||
|
export declare const SOURCE_LINE = 2;
|
||||||
|
export declare const SOURCE_COLUMN = 3;
|
||||||
|
export declare const NAMES_INDEX = 4;
|
||||||
|
export declare const REV_GENERATED_LINE = 1;
|
||||||
|
export declare const REV_GENERATED_COLUMN = 2;
|
||||||
|
export {};
|
4
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
4
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
export default function stripFilename(path: string | undefined | null): string;
|
79
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
79
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
|
||||||
|
export type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, XInput, EncodedSourceMapXInput, DecodedSourceMapXInput, SectionedSourceMapXInput, SectionXInput, } from './types';
|
||||||
|
export declare const LEAST_UPPER_BOUND = -1;
|
||||||
|
export declare const GREATEST_LOWER_BOUND = 1;
|
||||||
|
export { AnyMap } from './any-map';
|
||||||
|
export declare class TraceMap implements SourceMap {
|
||||||
|
version: SourceMapV3['version'];
|
||||||
|
file: SourceMapV3['file'];
|
||||||
|
names: SourceMapV3['names'];
|
||||||
|
sourceRoot: SourceMapV3['sourceRoot'];
|
||||||
|
sources: SourceMapV3['sources'];
|
||||||
|
sourcesContent: SourceMapV3['sourcesContent'];
|
||||||
|
ignoreList: SourceMapV3['ignoreList'];
|
||||||
|
resolvedSources: string[];
|
||||||
|
private _encoded;
|
||||||
|
private _decoded;
|
||||||
|
private _decodedMemo;
|
||||||
|
private _bySources;
|
||||||
|
private _bySourceMemos;
|
||||||
|
constructor(map: SourceMapInput, mapUrl?: string | null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
export declare function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'];
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
export declare function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']>;
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
export declare function traceSegment(map: TraceMap, line: number, column: number): Readonly<SourceMapSegment> | null;
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
export declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
export declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
export declare function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[];
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
export declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
export declare function sourceContentFor(map: TraceMap, source: string): string | null;
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
export declare function isIgnored(map: TraceMap, source: string): boolean;
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
export declare function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare function decodedMap(map: TraceMap): Omit<DecodedSourceMap, 'mappings'> & {
|
||||||
|
mappings: readonly SourceMapSegment[][];
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare function encodedMap(map: TraceMap): EncodedSourceMap;
|
99
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
99
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,99 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
|
||||||
|
export interface SourceMapV3 {
|
||||||
|
file?: string | null;
|
||||||
|
names: string[];
|
||||||
|
sourceRoot?: string;
|
||||||
|
sources: (string | null)[];
|
||||||
|
sourcesContent?: (string | null)[];
|
||||||
|
version: 3;
|
||||||
|
ignoreList?: number[];
|
||||||
|
}
|
||||||
|
export interface EncodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: string;
|
||||||
|
}
|
||||||
|
export interface DecodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: SourceMapSegment[][];
|
||||||
|
}
|
||||||
|
export interface Section {
|
||||||
|
offset: {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
};
|
||||||
|
map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
|
||||||
|
}
|
||||||
|
export interface SectionedSourceMap {
|
||||||
|
file?: string | null;
|
||||||
|
sections: Section[];
|
||||||
|
version: 3;
|
||||||
|
}
|
||||||
|
export type OriginalMapping = {
|
||||||
|
source: string | null;
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
name: string | null;
|
||||||
|
};
|
||||||
|
export type InvalidOriginalMapping = {
|
||||||
|
source: null;
|
||||||
|
line: null;
|
||||||
|
column: null;
|
||||||
|
name: null;
|
||||||
|
};
|
||||||
|
export type GeneratedMapping = {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
};
|
||||||
|
export type InvalidGeneratedMapping = {
|
||||||
|
line: null;
|
||||||
|
column: null;
|
||||||
|
};
|
||||||
|
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
|
||||||
|
export type XInput = {
|
||||||
|
x_google_ignoreList?: SourceMapV3['ignoreList'];
|
||||||
|
};
|
||||||
|
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
|
||||||
|
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
|
||||||
|
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
|
||||||
|
sections: SectionXInput[];
|
||||||
|
};
|
||||||
|
export type SectionXInput = Omit<Section, 'map'> & {
|
||||||
|
map: SectionedSourceMapInput;
|
||||||
|
};
|
||||||
|
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
|
||||||
|
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
|
||||||
|
export type Needle = {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
bias?: Bias;
|
||||||
|
};
|
||||||
|
export type SourceNeedle = {
|
||||||
|
source: string;
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
bias?: Bias;
|
||||||
|
};
|
||||||
|
export type EachMapping = {
|
||||||
|
generatedLine: number;
|
||||||
|
generatedColumn: number;
|
||||||
|
source: null;
|
||||||
|
originalLine: null;
|
||||||
|
originalColumn: null;
|
||||||
|
name: null;
|
||||||
|
} | {
|
||||||
|
generatedLine: number;
|
||||||
|
generatedColumn: number;
|
||||||
|
source: string | null;
|
||||||
|
originalLine: number;
|
||||||
|
originalColumn: number;
|
||||||
|
name: string | null;
|
||||||
|
};
|
||||||
|
export declare abstract class SourceMap {
|
||||||
|
version: SourceMapV3['version'];
|
||||||
|
file: SourceMapV3['file'];
|
||||||
|
names: SourceMapV3['names'];
|
||||||
|
sourceRoot: SourceMapV3['sourceRoot'];
|
||||||
|
sources: SourceMapV3['sources'];
|
||||||
|
sourcesContent: SourceMapV3['sourcesContent'];
|
||||||
|
resolvedSources: SourceMapV3['sources'];
|
||||||
|
ignoreList: SourceMapV3['ignoreList'];
|
||||||
|
}
|
77
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
77
node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/trace-mapping",
|
||||||
|
"version": "0.3.25",
|
||||||
|
"description": "Trace the original position through a source map",
|
||||||
|
"keywords": [
|
||||||
|
"source",
|
||||||
|
"map"
|
||||||
|
],
|
||||||
|
"main": "dist/trace-mapping.umd.js",
|
||||||
|
"module": "dist/trace-mapping.mjs",
|
||||||
|
"types": "dist/types/trace-mapping.d.ts",
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/trace-mapping.d.ts",
|
||||||
|
"browser": "./dist/trace-mapping.umd.js",
|
||||||
|
"require": "./dist/trace-mapping.umd.js",
|
||||||
|
"import": "./dist/trace-mapping.mjs"
|
||||||
|
},
|
||||||
|
"./dist/trace-mapping.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jridgewell/trace-mapping.git"
|
||||||
|
},
|
||||||
|
"license": "MIT",
|
||||||
|
"scripts": {
|
||||||
|
"benchmark": "run-s build:rollup benchmark:*",
|
||||||
|
"benchmark:install": "cd benchmark && npm install",
|
||||||
|
"benchmark:only": "node --expose-gc benchmark/index.mjs",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.mjs",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "11.1.6",
|
||||||
|
"@types/mocha": "10.0.6",
|
||||||
|
"@types/node": "20.11.20",
|
||||||
|
"@typescript-eslint/eslint-plugin": "6.18.1",
|
||||||
|
"@typescript-eslint/parser": "6.18.1",
|
||||||
|
"benchmark": "2.1.4",
|
||||||
|
"c8": "9.0.0",
|
||||||
|
"esbuild": "0.19.11",
|
||||||
|
"eslint": "8.56.0",
|
||||||
|
"eslint-config-prettier": "9.1.0",
|
||||||
|
"eslint-plugin-no-only-tests": "3.1.0",
|
||||||
|
"mocha": "10.3.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "3.1.1",
|
||||||
|
"rollup": "4.9.4",
|
||||||
|
"tsx": "4.7.0",
|
||||||
|
"typescript": "5.3.3"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/resolve-uri": "^3.1.0",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.14"
|
||||||
|
}
|
||||||
|
}
|
76
node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
Normal file
76
node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/gen-mapping",
|
||||||
|
"version": "0.3.8",
|
||||||
|
"description": "Generate source maps",
|
||||||
|
"keywords": [
|
||||||
|
"source",
|
||||||
|
"map"
|
||||||
|
],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/gen-mapping",
|
||||||
|
"main": "dist/gen-mapping.umd.js",
|
||||||
|
"module": "dist/gen-mapping.mjs",
|
||||||
|
"types": "dist/types/gen-mapping.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/gen-mapping.d.ts",
|
||||||
|
"browser": "./dist/gen-mapping.umd.js",
|
||||||
|
"require": "./dist/gen-mapping.umd.js",
|
||||||
|
"import": "./dist/gen-mapping.mjs"
|
||||||
|
},
|
||||||
|
"./dist/gen-mapping.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"benchmark": "run-s build:rollup benchmark:*",
|
||||||
|
"benchmark:install": "cd benchmark && npm install",
|
||||||
|
"benchmark:only": "node benchmark/index.mjs",
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.2",
|
||||||
|
"@types/mocha": "9.1.1",
|
||||||
|
"@types/node": "17.0.29",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.21.0",
|
||||||
|
"@typescript-eslint/parser": "5.21.0",
|
||||||
|
"benchmark": "2.1.4",
|
||||||
|
"c8": "7.11.2",
|
||||||
|
"eslint": "8.14.0",
|
||||||
|
"eslint-config-prettier": "8.5.0",
|
||||||
|
"mocha": "9.2.2",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.6.2",
|
||||||
|
"rollup": "2.70.2",
|
||||||
|
"tsx": "4.7.1",
|
||||||
|
"typescript": "4.6.3"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/set-array": "^1.2.1",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.24"
|
||||||
|
}
|
||||||
|
}
|
19
node_modules/@jridgewell/set-array/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/set-array/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
37
node_modules/@jridgewell/set-array/README.md
generated
vendored
Normal file
37
node_modules/@jridgewell/set-array/README.md
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
# @jridgewell/set-array
|
||||||
|
|
||||||
|
> Like a Set, but provides the index of the `key` in the backing array
|
||||||
|
|
||||||
|
This is designed to allow synchronizing a second array with the contents of the backing array, like
|
||||||
|
how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there
|
||||||
|
are never duplicates.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/set-array
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { SetArray, get, put, pop } from '@jridgewell/set-array';
|
||||||
|
|
||||||
|
const sa = new SetArray();
|
||||||
|
|
||||||
|
let index = put(sa, 'first');
|
||||||
|
assert.strictEqual(index, 0);
|
||||||
|
|
||||||
|
index = put(sa, 'second');
|
||||||
|
assert.strictEqual(index, 1);
|
||||||
|
|
||||||
|
assert.deepEqual(sa.array, [ 'first', 'second' ]);
|
||||||
|
|
||||||
|
index = get(sa, 'first');
|
||||||
|
assert.strictEqual(index, 0);
|
||||||
|
|
||||||
|
pop(sa);
|
||||||
|
index = get(sa, 'second');
|
||||||
|
assert.strictEqual(index, undefined);
|
||||||
|
assert.deepEqual(sa.array, [ 'first' ]);
|
||||||
|
```
|
69
node_modules/@jridgewell/set-array/dist/set-array.mjs
generated
vendored
Normal file
69
node_modules/@jridgewell/set-array/dist/set-array.mjs
generated
vendored
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
class SetArray {
|
||||||
|
constructor() {
|
||||||
|
this._indexes = { __proto__: null };
|
||||||
|
this.array = [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(set) {
|
||||||
|
return set;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
function get(setarr, key) {
|
||||||
|
return cast(setarr)._indexes[key];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
function put(setarr, key) {
|
||||||
|
// The key may or may not be present. If it is present, it's a number.
|
||||||
|
const index = get(setarr, key);
|
||||||
|
if (index !== undefined)
|
||||||
|
return index;
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
const length = array.push(key);
|
||||||
|
return (indexes[key] = length - 1);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
function pop(setarr) {
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
if (array.length === 0)
|
||||||
|
return;
|
||||||
|
const last = array.pop();
|
||||||
|
indexes[last] = undefined;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Removes the key, if it exists in the set.
|
||||||
|
*/
|
||||||
|
function remove(setarr, key) {
|
||||||
|
const index = get(setarr, key);
|
||||||
|
if (index === undefined)
|
||||||
|
return;
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
for (let i = index + 1; i < array.length; i++) {
|
||||||
|
const k = array[i];
|
||||||
|
array[i - 1] = k;
|
||||||
|
indexes[k]--;
|
||||||
|
}
|
||||||
|
indexes[key] = undefined;
|
||||||
|
array.pop();
|
||||||
|
}
|
||||||
|
|
||||||
|
export { SetArray, get, pop, put, remove };
|
||||||
|
//# sourceMappingURL=set-array.mjs.map
|
1
node_modules/@jridgewell/set-array/dist/set-array.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/set-array/dist/set-array.mjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n array.pop();\n}\n"],"names":[],"mappings":"AAEA;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CACF;AAOD;;;;AAIA,SAAS,IAAI,CAAgB,GAAgB;IAC3C,OAAO,GAAU,CAAC;AACpB,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;IAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AACpC,CAAC;AAED;;;;SAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;IAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO,KAAK,CAAC;IAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;AACrC,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB;IACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO;IAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;IAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;AAC5B,CAAC;AAED;;;SAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;IAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO;IAEh
C,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;QACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;KACf;IACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;IACzB,KAAK,CAAC,GAAG,EAAE,CAAC;AACd;;;;"}
|
83
node_modules/@jridgewell/set-array/dist/set-array.umd.js
generated
vendored
Normal file
83
node_modules/@jridgewell/set-array/dist/set-array.umd.js
generated
vendored
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {}));
|
||||||
|
})(this, (function (exports) { 'use strict';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
class SetArray {
|
||||||
|
constructor() {
|
||||||
|
this._indexes = { __proto__: null };
|
||||||
|
this.array = [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(set) {
|
||||||
|
return set;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
function get(setarr, key) {
|
||||||
|
return cast(setarr)._indexes[key];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
function put(setarr, key) {
|
||||||
|
// The key may or may not be present. If it is present, it's a number.
|
||||||
|
const index = get(setarr, key);
|
||||||
|
if (index !== undefined)
|
||||||
|
return index;
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
const length = array.push(key);
|
||||||
|
return (indexes[key] = length - 1);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
function pop(setarr) {
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
if (array.length === 0)
|
||||||
|
return;
|
||||||
|
const last = array.pop();
|
||||||
|
indexes[last] = undefined;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Removes the key, if it exists in the set.
|
||||||
|
*/
|
||||||
|
function remove(setarr, key) {
|
||||||
|
const index = get(setarr, key);
|
||||||
|
if (index === undefined)
|
||||||
|
return;
|
||||||
|
const { array, _indexes: indexes } = cast(setarr);
|
||||||
|
for (let i = index + 1; i < array.length; i++) {
|
||||||
|
const k = array[i];
|
||||||
|
array[i - 1] = k;
|
||||||
|
indexes[k]--;
|
||||||
|
}
|
||||||
|
indexes[key] = undefined;
|
||||||
|
array.pop();
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.SetArray = SetArray;
|
||||||
|
exports.get = get;
|
||||||
|
exports.pop = pop;
|
||||||
|
exports.put = put;
|
||||||
|
exports.remove = remove;
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=set-array.umd.js.map
|
1
node_modules/@jridgewell/set-array/dist/set-array.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/set-array/dist/set-array.umd.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n array.pop();\n}\n"],"names":[],"mappings":";;;;;;IAEA;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KACF;IAOD;;;;IAIA,SAAS,IAAI,CAAgB,GAAgB;QAC3C,OAAO,GAAU,CAAC;IACpB,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IACpC,CAAC;IAED;;;;aAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;QAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;IACrC,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB;QACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;QAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;IAC5B,CAAC;IAED;;;aAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;QAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAA
O;QAEhC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;YACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;SACf;QACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;QACzB,KAAK,CAAC,GAAG,EAAE,CAAC;IACd;;;;;;;;;;;;;;"}
|
32
node_modules/@jridgewell/set-array/dist/types/set-array.d.ts
generated
vendored
Normal file
32
node_modules/@jridgewell/set-array/dist/types/set-array.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
declare type Key = string | number | symbol;
|
||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
export declare class SetArray<T extends Key = Key> {
|
||||||
|
private _indexes;
|
||||||
|
array: readonly T[];
|
||||||
|
constructor();
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
|
||||||
|
/**
|
||||||
|
* Removes the key, if it exists in the set.
|
||||||
|
*/
|
||||||
|
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
|
||||||
|
export {};
|
65
node_modules/@jridgewell/set-array/package.json
generated
vendored
Normal file
65
node_modules/@jridgewell/set-array/package.json
generated
vendored
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/set-array",
|
||||||
|
"version": "1.2.1",
|
||||||
|
"description": "Like a Set, but provides the index of the `key` in the backing array",
|
||||||
|
"keywords": [],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/set-array",
|
||||||
|
"main": "dist/set-array.umd.js",
|
||||||
|
"module": "dist/set-array.mjs",
|
||||||
|
"typings": "dist/types/set-array.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/set-array.d.ts",
|
||||||
|
"browser": "./dist/set-array.umd.js",
|
||||||
|
"require": "./dist/set-array.umd.js",
|
||||||
|
"import": "./dist/set-array.mjs"
|
||||||
|
},
|
||||||
|
"./dist/set-array.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "mocha",
|
||||||
|
"test:coverage": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@types/mocha": "9.1.1",
|
||||||
|
"@types/node": "17.0.29",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"c8": "7.11.0",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"mocha": "9.2.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.66.0",
|
||||||
|
"tsx": "4.7.1",
|
||||||
|
"typescript": "4.5.5"
|
||||||
|
}
|
||||||
|
}
|
19
node_modules/@jridgewell/source-map/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/source-map/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2019 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
184
node_modules/@jridgewell/source-map/README.md
generated
vendored
Normal file
184
node_modules/@jridgewell/source-map/README.md
generated
vendored
Normal file
@ -0,0 +1,184 @@
|
|||||||
|
# @jridgewell/source-map
|
||||||
|
|
||||||
|
> Packages `@jridgewell/trace-mapping` and `@jridgewell/gen-mapping` into the familiar source-map API
|
||||||
|
|
||||||
|
This isn't the full API, but it's the core functionality. This wraps
|
||||||
|
[@jridgewell/trace-mapping][trace-mapping] and [@jridgewell/gen-mapping][gen-mapping]
|
||||||
|
implementations.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/source-map
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
||||||
|
### SourceMapConsumer
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { SourceMapConsumer } from '@jridgewell/source-map';
|
||||||
|
const smc = new SourceMapConsumer({
|
||||||
|
version: 3,
|
||||||
|
names: ['foo'],
|
||||||
|
sources: ['input.js'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.fromSourceMap(mapGenerator[, mapUrl])
|
||||||
|
|
||||||
|
Transforms a `SourceMapGenerator` into a `SourceMapConsumer`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
|
||||||
|
const smc = SourceMapConsumer.fromSourceMap(map);
|
||||||
|
smc.originalPositionFor({ line: 1, column: 0 });
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.originalPositionFor({ line: 1, column: 0 });
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.mappings
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.mappings; // AAAA
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.allGeneratedpositionsfor({ line: 1, column: 5, source: "baz.ts" });
|
||||||
|
// [
|
||||||
|
// { line: 2, column: 8 }
|
||||||
|
// ]
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.eachMapping(callback[, context[, order]])
|
||||||
|
|
||||||
|
> This implementation currently does not support the "order" parameter.
|
||||||
|
> This function can only iterate in Generated order.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.eachMapping((mapping) => {
|
||||||
|
// { source: 'baz.ts',
|
||||||
|
// generatedLine: 4,
|
||||||
|
// generatedColumn: 5,
|
||||||
|
// originalLine: 4,
|
||||||
|
// originalColumn: 5,
|
||||||
|
// name: null }
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.generatedPositionFor({ line: 1, column: 5, source: "baz.ts" });
|
||||||
|
// { line: 2, column: 8 }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.hasContentsOfAllSources()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.hasContentsOfAllSources();
|
||||||
|
// true
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.generatedPositionFor("baz.ts");
|
||||||
|
// "export default ..."
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.version
|
||||||
|
|
||||||
|
Returns the source map's version
|
||||||
|
|
||||||
|
### SourceMapGenerator
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { SourceMapGenerator } from '@jridgewell/source-map';
|
||||||
|
const smg = new SourceMapGenerator({
|
||||||
|
file: 'output.js',
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.fromSourceMap(map)
|
||||||
|
|
||||||
|
Transform a `SourceMapConsumer` into a `SourceMapGenerator`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer();
|
||||||
|
const smg = SourceMapGenerator.fromSourceMap(smc);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
|
||||||
|
|
||||||
|
> This method is not implemented yet
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.addMapping(mapping)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.addMapping({
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.setSourceContent('input.js', 'foobar');
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.toJSON()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.toJSON(); // { version: 3, names: [], sources: [], mappings: '' }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.toString()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.toJSON(); // "{version:3,names:[],sources:[],mappings:''}"
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.toDecodedMap()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.toDecodedMap(); // { version: 3, names: [], sources: [], mappings: [] }
|
||||||
|
```
|
||||||
|
|
||||||
|
## Known differences with other implementations
|
||||||
|
|
||||||
|
This implementation has some differences with `source-map` and `source-map-js`.
|
||||||
|
|
||||||
|
- `SourceMapConsumer.prototype.eachMapping()`
|
||||||
|
- Does not support the `order` argument
|
||||||
|
- `SourceMapGenerator.prototype.applySourceMap()`
|
||||||
|
- Not implemented
|
||||||
|
|
||||||
|
[trace-mapping]: https://github.com/jridgewell/trace-mapping/
|
||||||
|
[gen-mapping]: https://github.com/jridgewell/gen-mapping/
|
95
node_modules/@jridgewell/source-map/dist/source-map.cjs
generated
vendored
Normal file
95
node_modules/@jridgewell/source-map/dist/source-map.cjs
generated
vendored
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
var traceMapping = require('@jridgewell/trace-mapping');
|
||||||
|
var genMapping = require('@jridgewell/gen-mapping');
|
||||||
|
|
||||||
|
class SourceMapConsumer {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const trace = (this._map = new traceMapping.AnyMap(map, mapUrl));
|
||||||
|
this.file = trace.file;
|
||||||
|
this.names = trace.names;
|
||||||
|
this.sourceRoot = trace.sourceRoot;
|
||||||
|
this.sources = trace.resolvedSources;
|
||||||
|
this.sourcesContent = trace.sourcesContent;
|
||||||
|
this.version = trace.version;
|
||||||
|
}
|
||||||
|
static fromSourceMap(map, mapUrl) {
|
||||||
|
// This is more performant if we receive
|
||||||
|
// a @jridgewell/source-map SourceMapGenerator
|
||||||
|
if (map.toDecodedMap) {
|
||||||
|
return new SourceMapConsumer(map.toDecodedMap(), mapUrl);
|
||||||
|
}
|
||||||
|
// This is a fallback for `source-map` and `source-map-js`
|
||||||
|
return new SourceMapConsumer(map.toJSON(), mapUrl);
|
||||||
|
}
|
||||||
|
get mappings() {
|
||||||
|
return traceMapping.encodedMappings(this._map);
|
||||||
|
}
|
||||||
|
originalPositionFor(needle) {
|
||||||
|
return traceMapping.originalPositionFor(this._map, needle);
|
||||||
|
}
|
||||||
|
generatedPositionFor(originalPosition) {
|
||||||
|
return traceMapping.generatedPositionFor(this._map, originalPosition);
|
||||||
|
}
|
||||||
|
allGeneratedPositionsFor(originalPosition) {
|
||||||
|
return traceMapping.allGeneratedPositionsFor(this._map, originalPosition);
|
||||||
|
}
|
||||||
|
hasContentsOfAllSources() {
|
||||||
|
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
for (const content of this.sourcesContent) {
|
||||||
|
if (content == null) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
sourceContentFor(source, nullOnMissing) {
|
||||||
|
const sourceContent = traceMapping.sourceContentFor(this._map, source);
|
||||||
|
if (sourceContent != null) {
|
||||||
|
return sourceContent;
|
||||||
|
}
|
||||||
|
if (nullOnMissing) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
throw new Error(`"${source}" is not in the SourceMap.`);
|
||||||
|
}
|
||||||
|
eachMapping(callback, context /*, order?: number*/) {
|
||||||
|
// order is ignored as @jridgewell/trace-map doesn't implement it
|
||||||
|
traceMapping.eachMapping(this._map, context ? callback.bind(context) : callback);
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
// noop.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
class SourceMapGenerator {
|
||||||
|
constructor(opts) {
|
||||||
|
// TODO :: should this be duck-typed ?
|
||||||
|
this._map = opts instanceof genMapping.GenMapping ? opts : new genMapping.GenMapping(opts);
|
||||||
|
}
|
||||||
|
static fromSourceMap(consumer) {
|
||||||
|
return new SourceMapGenerator(genMapping.fromMap(consumer));
|
||||||
|
}
|
||||||
|
addMapping(mapping) {
|
||||||
|
genMapping.maybeAddMapping(this._map, mapping);
|
||||||
|
}
|
||||||
|
setSourceContent(source, content) {
|
||||||
|
genMapping.setSourceContent(this._map, source, content);
|
||||||
|
}
|
||||||
|
toJSON() {
|
||||||
|
return genMapping.toEncodedMap(this._map);
|
||||||
|
}
|
||||||
|
toString() {
|
||||||
|
return JSON.stringify(this.toJSON());
|
||||||
|
}
|
||||||
|
toDecodedMap() {
|
||||||
|
return genMapping.toDecodedMap(this._map);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.SourceMapConsumer = SourceMapConsumer;
|
||||||
|
exports.SourceMapGenerator = SourceMapGenerator;
|
||||||
|
//# sourceMappingURL=source-map.cjs.map
|
1
node_modules/@jridgewell/source-map/dist/source-map.cjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/source-map/dist/source-map.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
90
node_modules/@jridgewell/source-map/dist/source-map.mjs
generated
vendored
Normal file
90
node_modules/@jridgewell/source-map/dist/source-map.mjs
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
|||||||
|
import { AnyMap, encodedMappings, originalPositionFor, generatedPositionFor, allGeneratedPositionsFor, sourceContentFor, eachMapping } from '@jridgewell/trace-mapping';
|
||||||
|
import { GenMapping, fromMap, maybeAddMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
class SourceMapConsumer {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const trace = (this._map = new AnyMap(map, mapUrl));
|
||||||
|
this.file = trace.file;
|
||||||
|
this.names = trace.names;
|
||||||
|
this.sourceRoot = trace.sourceRoot;
|
||||||
|
this.sources = trace.resolvedSources;
|
||||||
|
this.sourcesContent = trace.sourcesContent;
|
||||||
|
this.version = trace.version;
|
||||||
|
}
|
||||||
|
static fromSourceMap(map, mapUrl) {
|
||||||
|
// This is more performant if we receive
|
||||||
|
// a @jridgewell/source-map SourceMapGenerator
|
||||||
|
if (map.toDecodedMap) {
|
||||||
|
return new SourceMapConsumer(map.toDecodedMap(), mapUrl);
|
||||||
|
}
|
||||||
|
// This is a fallback for `source-map` and `source-map-js`
|
||||||
|
return new SourceMapConsumer(map.toJSON(), mapUrl);
|
||||||
|
}
|
||||||
|
get mappings() {
|
||||||
|
return encodedMappings(this._map);
|
||||||
|
}
|
||||||
|
originalPositionFor(needle) {
|
||||||
|
return originalPositionFor(this._map, needle);
|
||||||
|
}
|
||||||
|
generatedPositionFor(originalPosition) {
|
||||||
|
return generatedPositionFor(this._map, originalPosition);
|
||||||
|
}
|
||||||
|
allGeneratedPositionsFor(originalPosition) {
|
||||||
|
return allGeneratedPositionsFor(this._map, originalPosition);
|
||||||
|
}
|
||||||
|
hasContentsOfAllSources() {
|
||||||
|
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
for (const content of this.sourcesContent) {
|
||||||
|
if (content == null) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
sourceContentFor(source, nullOnMissing) {
|
||||||
|
const sourceContent = sourceContentFor(this._map, source);
|
||||||
|
if (sourceContent != null) {
|
||||||
|
return sourceContent;
|
||||||
|
}
|
||||||
|
if (nullOnMissing) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
throw new Error(`"${source}" is not in the SourceMap.`);
|
||||||
|
}
|
||||||
|
eachMapping(callback, context /*, order?: number*/) {
|
||||||
|
// order is ignored as @jridgewell/trace-map doesn't implement it
|
||||||
|
eachMapping(this._map, context ? callback.bind(context) : callback);
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
// noop.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
class SourceMapGenerator {
|
||||||
|
constructor(opts) {
|
||||||
|
// TODO :: should this be duck-typed ?
|
||||||
|
this._map = opts instanceof GenMapping ? opts : new GenMapping(opts);
|
||||||
|
}
|
||||||
|
static fromSourceMap(consumer) {
|
||||||
|
return new SourceMapGenerator(fromMap(consumer));
|
||||||
|
}
|
||||||
|
addMapping(mapping) {
|
||||||
|
maybeAddMapping(this._map, mapping);
|
||||||
|
}
|
||||||
|
setSourceContent(source, content) {
|
||||||
|
setSourceContent(this._map, source, content);
|
||||||
|
}
|
||||||
|
toJSON() {
|
||||||
|
return toEncodedMap(this._map);
|
||||||
|
}
|
||||||
|
toString() {
|
||||||
|
return JSON.stringify(this.toJSON());
|
||||||
|
}
|
||||||
|
toDecodedMap() {
|
||||||
|
return toDecodedMap(this._map);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { SourceMapConsumer, SourceMapGenerator };
|
||||||
|
//# sourceMappingURL=source-map.mjs.map
|
1
node_modules/@jridgewell/source-map/dist/source-map.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/source-map/dist/source-map.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1242
node_modules/@jridgewell/source-map/dist/source-map.umd.js
generated
vendored
Normal file
1242
node_modules/@jridgewell/source-map/dist/source-map.umd.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
node_modules/@jridgewell/source-map/dist/source-map.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/source-map/dist/source-map.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
35
node_modules/@jridgewell/source-map/dist/types/source-map.d.ts
generated
vendored
Normal file
35
node_modules/@jridgewell/source-map/dist/types/source-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
import { AnyMap, originalPositionFor, generatedPositionFor, eachMapping } from '@jridgewell/trace-mapping';
|
||||||
|
import { GenMapping, maybeAddMapping, toDecodedMap, toEncodedMap, setSourceContent } from '@jridgewell/gen-mapping';
|
||||||
|
import type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap } from '@jridgewell/trace-mapping';
|
||||||
|
export type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap };
|
||||||
|
import type { Mapping, EncodedSourceMap } from '@jridgewell/gen-mapping';
|
||||||
|
export type { Mapping, EncodedSourceMap };
|
||||||
|
export declare class SourceMapConsumer {
|
||||||
|
private _map;
|
||||||
|
file: TraceMap['file'];
|
||||||
|
names: TraceMap['names'];
|
||||||
|
sourceRoot: TraceMap['sourceRoot'];
|
||||||
|
sources: TraceMap['sources'];
|
||||||
|
sourcesContent: TraceMap['sourcesContent'];
|
||||||
|
version: TraceMap['version'];
|
||||||
|
constructor(map: ConstructorParameters<typeof AnyMap>[0], mapUrl: Parameters<typeof AnyMap>[1]);
|
||||||
|
static fromSourceMap(map: SourceMapGenerator, mapUrl: Parameters<typeof AnyMap>[1]): SourceMapConsumer;
|
||||||
|
get mappings(): string;
|
||||||
|
originalPositionFor(needle: Parameters<typeof originalPositionFor>[1]): ReturnType<typeof originalPositionFor>;
|
||||||
|
generatedPositionFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>;
|
||||||
|
allGeneratedPositionsFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>[];
|
||||||
|
hasContentsOfAllSources(): boolean;
|
||||||
|
sourceContentFor(source: string, nullOnMissing?: boolean): string | null;
|
||||||
|
eachMapping(callback: Parameters<typeof eachMapping>[1], context?: any): void;
|
||||||
|
destroy(): void;
|
||||||
|
}
|
||||||
|
export declare class SourceMapGenerator {
|
||||||
|
private _map;
|
||||||
|
constructor(opts: ConstructorParameters<typeof GenMapping>[0] | GenMapping);
|
||||||
|
static fromSourceMap(consumer: SourceMapConsumer): SourceMapGenerator;
|
||||||
|
addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping>;
|
||||||
|
setSourceContent(source: Parameters<typeof setSourceContent>[1], content: Parameters<typeof setSourceContent>[2]): ReturnType<typeof setSourceContent>;
|
||||||
|
toJSON(): ReturnType<typeof toEncodedMap>;
|
||||||
|
toString(): string;
|
||||||
|
toDecodedMap(): ReturnType<typeof toDecodedMap>;
|
||||||
|
}
|
19
node_modules/@jridgewell/source-map/node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
19
node_modules/@jridgewell/source-map/node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
257
node_modules/@jridgewell/source-map/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
257
node_modules/@jridgewell/source-map/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
@ -0,0 +1,257 @@
|
|||||||
|
# @jridgewell/trace-mapping
|
||||||
|
|
||||||
|
> Trace the original position through a source map
|
||||||
|
|
||||||
|
`trace-mapping` allows you to take the line and column of an output file and trace it to the
|
||||||
|
original location in the source file through a source map.
|
||||||
|
|
||||||
|
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
|
||||||
|
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/trace-mapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
TraceMap,
|
||||||
|
originalPositionFor,
|
||||||
|
generatedPositionFor,
|
||||||
|
sourceContentFor,
|
||||||
|
isIgnored,
|
||||||
|
} from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
const tracer = new TraceMap({
|
||||||
|
version: 3,
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['content of input.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'KAyCIA',
|
||||||
|
ignoreList: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
// Lines start at line 1, columns at column 0.
|
||||||
|
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = sourceContentFor(tracer, traced.source);
|
||||||
|
assert.strictEqual(content, 'content for input.js');
|
||||||
|
|
||||||
|
const generated = generatedPositionFor(tracer, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
});
|
||||||
|
assert.deepEqual(generated, {
|
||||||
|
line: 1,
|
||||||
|
column: 5,
|
||||||
|
});
|
||||||
|
|
||||||
|
const ignored = isIgnored(tracer, 'input.js');
|
||||||
|
assert.equal(ignored, false);
|
||||||
|
```
|
||||||
|
|
||||||
|
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
|
||||||
|
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { traceSegment } from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
// line is 0-base.
|
||||||
|
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);
|
||||||
|
|
||||||
|
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
// Again, line is 0-base and so is sourceLine
|
||||||
|
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
|
||||||
|
```
|
||||||
|
|
||||||
|
### SectionedSourceMaps
|
||||||
|
|
||||||
|
The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
|
||||||
|
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
|
||||||
|
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
|
||||||
|
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
|
||||||
|
`TraceMap` instance:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { AnyMap } from '@jridgewell/trace-mapping';
|
||||||
|
const fooOutput = 'foo';
|
||||||
|
const barOutput = 'bar';
|
||||||
|
const output = [fooOutput, barOutput].join('\n');
|
||||||
|
|
||||||
|
const sectioned = new AnyMap({
|
||||||
|
version: 3,
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
// 0-base line and column
|
||||||
|
offset: { line: 0, column: 0 },
|
||||||
|
// fooOutput's sourcemap
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['foo.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
// barOutput's sourcemap will not affect the first line, only the second
|
||||||
|
offset: { line: 1, column: 0 },
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['bar.js'],
|
||||||
|
names: ['bar'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const traced = originalPositionFor(sectioned, {
|
||||||
|
line: 2,
|
||||||
|
column: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'bar.js',
|
||||||
|
line: 1,
|
||||||
|
column: 0,
|
||||||
|
name: 'bar',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map - 45120 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 562400 bytes
|
||||||
|
trace-mapping encoded 5706544 bytes
|
||||||
|
source-map-js 10717664 bytes
|
||||||
|
source-map-0.6.1 17446384 bytes
|
||||||
|
source-map-0.8.0 9701757 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map - 347793 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 89832 bytes
|
||||||
|
trace-mapping encoded 35474640 bytes
|
||||||
|
source-map-js 51257176 bytes
|
||||||
|
source-map-0.6.1 63515664 bytes
|
||||||
|
source-map-0.8.0 42933752 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map - 1992 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 37128 bytes
|
||||||
|
trace-mapping encoded 247280 bytes
|
||||||
|
source-map-js 1143536 bytes
|
||||||
|
source-map-0.6.1 1290992 bytes
|
||||||
|
source-map-0.8.0 96544 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map - 5726 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 16176 bytes
|
||||||
|
trace-mapping encoded 681552 bytes
|
||||||
|
source-map-js 2418352 bytes
|
||||||
|
source-map-0.6.1 2443672 bytes
|
||||||
|
source-map-0.8.0 111768 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
```
|
||||||
|
|
||||||
|
[source-map]: https://www.npmjs.com/package/source-map
|
580
node_modules/@jridgewell/source-map/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
580
node_modules/@jridgewell/source-map/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@ -0,0 +1,580 @@
|
|||||||
|
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||||
|
import resolveUri from '@jridgewell/resolve-uri';
|
||||||
|
|
||||||
|
function resolve(input, base) {
|
||||||
|
// The base is always treated as a directory, if it's not empty.
|
||||||
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
|
if (base && !base.endsWith('/'))
|
||||||
|
base += '/';
|
||||||
|
return resolveUri(input, base);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
function stripFilename(path) {
|
||||||
|
if (!path)
|
||||||
|
return '';
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
const REV_GENERATED_LINE = 1;
|
||||||
|
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
function maybeSort(mappings, owned) {
|
||||||
|
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||||
|
if (unsortedIndex === mappings.length)
|
||||||
|
return mappings;
|
||||||
|
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||||
|
// not, we do not want to modify the consumer's input array.
|
||||||
|
if (!owned)
|
||||||
|
mappings = mappings.slice();
|
||||||
|
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||||
|
mappings[i] = sortSegments(mappings[i], owned);
|
||||||
|
}
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
function nextUnsortedSegmentLine(mappings, start) {
|
||||||
|
for (let i = start; i < mappings.length; i++) {
|
||||||
|
if (!isSorted(mappings[i]))
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
return mappings.length;
|
||||||
|
}
|
||||||
|
function isSorted(line) {
|
||||||
|
for (let j = 1; j < line.length; j++) {
|
||||||
|
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function sortSegments(line, owned) {
|
||||||
|
if (!owned)
|
||||||
|
line = line.slice();
|
||||||
|
return line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[COLUMN] - b[COLUMN];
|
||||||
|
}
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
function binarySearch(haystack, needle, low, high) {
|
||||||
|
while (low <= high) {
|
||||||
|
const mid = low + ((high - low) >> 1);
|
||||||
|
const cmp = haystack[mid][COLUMN] - needle;
|
||||||
|
if (cmp === 0) {
|
||||||
|
found = true;
|
||||||
|
return mid;
|
||||||
|
}
|
||||||
|
if (cmp < 0) {
|
||||||
|
low = mid + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = mid - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
found = false;
|
||||||
|
return low - 1;
|
||||||
|
}
|
||||||
|
function upperBound(haystack, needle, index) {
|
||||||
|
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function lowerBound(haystack, needle, index) {
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function memoizedState() {
|
||||||
|
return {
|
||||||
|
lastKey: -1,
|
||||||
|
lastNeedle: -1,
|
||||||
|
lastIndex: -1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||||
|
const { lastKey, lastNeedle, lastIndex } = state;
|
||||||
|
let low = 0;
|
||||||
|
let high = haystack.length - 1;
|
||||||
|
if (key === lastKey) {
|
||||||
|
if (needle === lastNeedle) {
|
||||||
|
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||||
|
return lastIndex;
|
||||||
|
}
|
||||||
|
if (needle >= lastNeedle) {
|
||||||
|
// lastIndex may be -1 if the previous needle was not found.
|
||||||
|
low = lastIndex === -1 ? 0 : lastIndex;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = lastIndex;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.lastKey = key;
|
||||||
|
state.lastNeedle = needle;
|
||||||
|
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||||
|
// of generated line/column.
|
||||||
|
function buildBySources(decoded, memos) {
|
||||||
|
const sources = memos.map(buildNullArray);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
if (seg.length === 1)
|
||||||
|
continue;
|
||||||
|
const sourceIndex = seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
const originalSource = sources[sourceIndex];
|
||||||
|
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||||
|
const memo = memos[sourceIndex];
|
||||||
|
// The binary search either found a match, or it found the left-index just before where the
|
||||||
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
|
// generated segments associated with an original location, so there may need to move several
|
||||||
|
// indexes before we find where we need to insert.
|
||||||
|
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
|
memo.lastIndex = ++index;
|
||||||
|
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sources;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||||
|
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||||
|
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||||
|
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||||
|
// order when iterating with for-in.
|
||||||
|
function buildNullArray() {
|
||||||
|
return { __proto__: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
const AnyMap = function (map, mapUrl) {
|
||||||
|
const parsed = parse(map);
|
||||||
|
if (!('sections' in parsed)) {
|
||||||
|
return new TraceMap(parsed, mapUrl);
|
||||||
|
}
|
||||||
|
const mappings = [];
|
||||||
|
const sources = [];
|
||||||
|
const sourcesContent = [];
|
||||||
|
const names = [];
|
||||||
|
const ignoreList = [];
|
||||||
|
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||||
|
const joined = {
|
||||||
|
version: 3,
|
||||||
|
file: parsed.file,
|
||||||
|
names,
|
||||||
|
sources,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList,
|
||||||
|
};
|
||||||
|
return presortedDecodedMap(joined);
|
||||||
|
};
|
||||||
|
function parse(map) {
|
||||||
|
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
|
}
|
||||||
|
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const { sections } = input;
|
||||||
|
for (let i = 0; i < sections.length; i++) {
|
||||||
|
const { map, offset } = sections[i];
|
||||||
|
let sl = stopLine;
|
||||||
|
let sc = stopColumn;
|
||||||
|
if (i + 1 < sections.length) {
|
||||||
|
const nextOffset = sections[i + 1].offset;
|
||||||
|
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||||
|
if (sl === stopLine) {
|
||||||
|
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||||
|
}
|
||||||
|
else if (sl < stopLine) {
|
||||||
|
sc = columnOffset + nextOffset.column;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const parsed = parse(input);
|
||||||
|
if ('sections' in parsed)
|
||||||
|
return recurse(...arguments);
|
||||||
|
const map = new TraceMap(parsed, mapUrl);
|
||||||
|
const sourcesOffset = sources.length;
|
||||||
|
const namesOffset = names.length;
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||||
|
append(sources, resolvedSources);
|
||||||
|
append(names, map.names);
|
||||||
|
if (contents)
|
||||||
|
append(sourcesContent, contents);
|
||||||
|
else
|
||||||
|
for (let i = 0; i < resolvedSources.length; i++)
|
||||||
|
sourcesContent.push(null);
|
||||||
|
if (ignores)
|
||||||
|
for (let i = 0; i < ignores.length; i++)
|
||||||
|
ignoreList.push(ignores[i] + sourcesOffset);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const lineI = lineOffset + i;
|
||||||
|
// We can only add so many lines before we step into the range that the next section's map
|
||||||
|
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||||
|
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||||
|
// still need to check that we don't overstep lines, too.
|
||||||
|
if (lineI > stopLine)
|
||||||
|
return;
|
||||||
|
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||||
|
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||||
|
const out = getLine(mappings, lineI);
|
||||||
|
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||||
|
// map can be multiple lines), it doesn't.
|
||||||
|
const cOffset = i === 0 ? columnOffset : 0;
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const column = cOffset + seg[COLUMN];
|
||||||
|
// If this segment steps into the column range that the next section's map controls, we need
|
||||||
|
// to stop early.
|
||||||
|
if (lineI === stopLine && column >= stopColumn)
|
||||||
|
return;
|
||||||
|
if (seg.length === 1) {
|
||||||
|
out.push([column]);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
out.push(seg.length === 4
|
||||||
|
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||||
|
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function append(arr, other) {
|
||||||
|
for (let i = 0; i < other.length; i++)
|
||||||
|
arr.push(other[i]);
|
||||||
|
}
|
||||||
|
function getLine(arr, index) {
|
||||||
|
for (let i = arr.length; i <= index; i++)
|
||||||
|
arr[i] = [];
|
||||||
|
return arr[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||||
|
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||||
|
const LEAST_UPPER_BOUND = -1;
|
||||||
|
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
class TraceMap {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const isString = typeof map === 'string';
|
||||||
|
if (!isString && map._decodedMemo)
|
||||||
|
return map;
|
||||||
|
const parsed = (isString ? JSON.parse(map) : map);
|
||||||
|
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||||
|
this.version = version;
|
||||||
|
this.file = file;
|
||||||
|
this.names = names || [];
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
this.sources = sources;
|
||||||
|
this.sourcesContent = sourcesContent;
|
||||||
|
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||||
|
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||||
|
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||||
|
const { mappings } = parsed;
|
||||||
|
if (typeof mappings === 'string') {
|
||||||
|
this._encoded = mappings;
|
||||||
|
this._decoded = undefined;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this._encoded = undefined;
|
||||||
|
this._decoded = maybeSort(mappings, isString);
|
||||||
|
}
|
||||||
|
this._decodedMemo = memoizedState();
|
||||||
|
this._bySources = undefined;
|
||||||
|
this._bySourceMemos = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||||
|
* with public access modifiers.
|
||||||
|
*/
|
||||||
|
function cast(map) {
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function encodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
var _b;
|
||||||
|
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
function decodedMappings(map) {
|
||||||
|
var _a;
|
||||||
|
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
function traceSegment(map, line, column) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return null;
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
return index === -1 ? null : segments[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(map, needle) {
|
||||||
|
let { line, column, bias } = needle;
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segments = decoded[line];
|
||||||
|
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (index === -1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const segment = segments[index];
|
||||||
|
if (segment.length === 1)
|
||||||
|
return OMapping(null, null, null, null);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds the generated line/column position of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function generatedPositionFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||||
|
*/
|
||||||
|
function allGeneratedPositionsFor(map, needle) {
|
||||||
|
const { source, line, column, bias } = needle;
|
||||||
|
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||||
|
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
function eachMapping(map, cb) {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generatedLine = i + 1;
|
||||||
|
const generatedColumn = seg[0];
|
||||||
|
let source = null;
|
||||||
|
let originalLine = null;
|
||||||
|
let originalColumn = null;
|
||||||
|
let name = null;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = resolvedSources[seg[1]];
|
||||||
|
originalLine = seg[2] + 1;
|
||||||
|
originalColumn = seg[3];
|
||||||
|
}
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names[seg[4]];
|
||||||
|
cb({
|
||||||
|
generatedLine,
|
||||||
|
generatedColumn,
|
||||||
|
source,
|
||||||
|
originalLine,
|
||||||
|
originalColumn,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function sourceIndex(map, source) {
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let index = sources.indexOf(source);
|
||||||
|
if (index === -1)
|
||||||
|
index = resolvedSources.indexOf(source);
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||||
|
*/
|
||||||
|
function sourceContentFor(map, source) {
|
||||||
|
const { sourcesContent } = map;
|
||||||
|
if (sourcesContent == null)
|
||||||
|
return null;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? null : sourcesContent[index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Determines if the source is marked to ignore by the source map.
|
||||||
|
*/
|
||||||
|
function isIgnored(map, source) {
|
||||||
|
const { ignoreList } = map;
|
||||||
|
if (ignoreList == null)
|
||||||
|
return false;
|
||||||
|
const index = sourceIndex(map, source);
|
||||||
|
return index === -1 ? false : ignoreList.includes(index);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
function presortedDecodedMap(map, mapUrl) {
|
||||||
|
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||||
|
cast(tracer)._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function decodedMap(map) {
|
||||||
|
return clone(map, decodedMappings(map));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
function encodedMap(map) {
|
||||||
|
return clone(map, encodedMappings(map));
|
||||||
|
}
|
||||||
|
function clone(map, mappings) {
|
||||||
|
return {
|
||||||
|
version: map.version,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings,
|
||||||
|
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function OMapping(source, line, column, name) {
|
||||||
|
return { source, line, column, name };
|
||||||
|
}
|
||||||
|
function GMapping(line, column) {
|
||||||
|
return { line, column };
|
||||||
|
}
|
||||||
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
|
if (found) {
|
||||||
|
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||||
|
}
|
||||||
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
|
index++;
|
||||||
|
if (index === -1 || index === segments.length)
|
||||||
|
return -1;
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
/**
 * Collects every generated position that maps back to the given original
 * `line`/`column`, scanning the reverse-mapping `segments` for one source
 * line.
 *
 * NOTE(review): like traceSegmentInternal, this reads the module-level
 * `found` flag set by the most recent binary search — presumably the one
 * performed inside the traceSegmentInternal call below; confirm.
 *
 * Returns an array of GMapping records (1-based generated lines), or an
 * empty array when nothing matches.
 */
function sliceGeneratedPositions(segments, memo, line, column, bias) {
    let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
    // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
    // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
    // still need to call `lowerBound()` to find the first segment, which is slower than just looking
    // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
    // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
    // match LEAST_UPPER_BOUND.
    if (!found && bias === LEAST_UPPER_BOUND)
        min++;
    if (min === -1 || min === segments.length)
        return [];
    // We may have found the segment that started at an earlier column. If this is the case, then we
    // need to slice all generated segments that match _that_ column, because all such segments span
    // to our desired column.
    const matchedColumn = found ? column : segments[min][COLUMN];
    // The binary search is not guaranteed to find the lower bound when a match wasn't found.
    if (!found)
        min = lowerBound(segments, matchedColumn, min);
    const max = upperBound(segments, matchedColumn, min);
    // Emit one GMapping per segment in [min, max]; stored lines are 0-based,
    // reported lines are 1-based.
    const result = [];
    for (; min <= max; min++) {
        const segment = segments[min];
        result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
    }
    return result;
}
|
||||||
|
/**
 * Maps an original position back to its generated position(s).
 *
 * @param map    TraceMap-like object exposing `sources`/`resolvedSources`
 *               and lazily-built reverse-lookup caches (via `cast`).
 * @param source Source filename; matched against `map.sources` first, then
 *               `map.resolvedSources`.
 * @param line   1-based original line.
 * @param column 0-based original column.
 * @param bias   LEAST_UPPER_BOUND or GREATEST_LOWER_BOUND search bias.
 * @param all    When true, returns an array of every matching generated
 *               position; when false, a single GMapping (null fields on
 *               miss).
 * @throws Error when line < 1 or column < 0.
 */
function generatedPosition(map, source, line, column, bias, all) {
    var _a;
    // Public API is 1-based lines; everything below is 0-based.
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const { sources, resolvedSources } = map;
    // Accept either the raw source name or its resolved form.
    let sourceIndex = sources.indexOf(source);
    if (sourceIndex === -1)
        sourceIndex = resolvedSources.indexOf(source);
    if (sourceIndex === -1)
        return all ? [] : GMapping(null, null);
    // Lazily build — and cache on the map itself — the per-source reverse
    // index, along with one binary-search memo per source.
    const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
    const segments = generated[sourceIndex][line];
    if (segments == null)
        return all ? [] : GMapping(null, null);
    const memo = cast(map)._bySourceMemos[sourceIndex];
    if (all)
        return sliceGeneratedPositions(segments, memo, line, column, bias);
    const index = traceSegmentInternal(segments, memo, line, column, bias);
    if (index === -1)
        return GMapping(null, null);
    const segment = segments[index];
    // Stored generated lines are 0-based; report 1-based.
    return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}
|
||||||
|
|
||||||
|
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
||||||
|
//# sourceMappingURL=trace-mapping.mjs.map
|
1
node_modules/@jridgewell/source-map/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/source-map/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user