first add
commit 41fa1390ae
185  dist/matrix-demo.js  vendored  Normal file
@ -0,0 +1,185 @@
"use strict";
class Matrix3 {
    constructor() {
        this.elements = [
            1, 0, 0,
            0, 1, 0,
            0, 0, 1
        ];
    }
    multiply(m) {
        const a = this.elements;
        const b = m.elements;
        return new Matrix3().set([
            a[0] * b[0] + a[1] * b[3] + a[2] * b[6],
            a[0] * b[1] + a[1] * b[4] + a[2] * b[7],
            a[0] * b[2] + a[1] * b[5] + a[2] * b[8],
            a[3] * b[0] + a[4] * b[3] + a[5] * b[6],
            a[3] * b[1] + a[4] * b[4] + a[5] * b[7],
            a[3] * b[2] + a[4] * b[5] + a[5] * b[8],
            a[6] * b[0] + a[7] * b[3] + a[8] * b[6],
            a[6] * b[1] + a[7] * b[4] + a[8] * b[7],
            a[6] * b[2] + a[7] * b[5] + a[8] * b[8],
        ]);
    }
    translate(tx, ty) {
        return this.multiply(new Matrix3().set([
            1, 0, tx,
            0, 1, ty,
            0, 0, 1
        ]));
    }
    scale(sx, sy) {
        return this.multiply(new Matrix3().set([
            sx, 0, 0,
            0, sy, 0,
            0, 0, 1
        ]));
    }
    rotate(angle) {
        const c = Math.cos(angle);
        const s = Math.sin(angle);
        return this.multiply(new Matrix3().set([
            c, -s, 0,
            s, c, 0,
            0, 0, 1
        ]));
    }
    set(values) {
        this.elements = [...values];
        return this;
    }
    transform(ctx) {
        const [a, b, c, d, e, f] = [
            this.elements[0],
            this.elements[3],
            this.elements[1],
            this.elements[4],
            this.elements[2],
            this.elements[5]
        ];
        //console.log('Applying transform:', { a, b, c, d, e, f });
        ctx.transform(a, b, c, d, e, f);
    }
}
class MapDemo {
    constructor() {
        this.matrix = new Matrix3();
        this.canvas = document.createElement('canvas');
        this.ctx = this.canvas.getContext('2d');
        this.initCanvas();
        this.initControls();
        this.draw();
    }
    initCanvas() {
        this.canvas.width = 800;
        this.canvas.height = 600;
        this.canvas.style.border = '1px solid black';
        document.body.appendChild(this.canvas);
        const dpr = window.devicePixelRatio || 1;
        this.canvas.width *= dpr;
        this.canvas.height *= dpr;
        this.ctx.scale(dpr, dpr);
    }
    initControls() {
        document.addEventListener('keydown', (e) => {
            //console.log('Key pressed:', e.key);
            const step = 20;
            const scaleFactor = 0.1;
            const rotateAngle = Math.PI / 18;
            //console.log('Before transform - Matrix:', this.matrix);
            switch (e.key.toLowerCase()) {
                case 'w':
                case 'arrowup':
                    this.matrix = this.matrix.translate(0, -step);
                    break;
                case 's':
                case 'arrowdown':
                    this.matrix = this.matrix.translate(0, step);
                    break;
                case 'a':
                case 'arrowleft':
                    this.matrix = this.matrix.translate(-step, 0);
                    break;
                case 'd':
                case 'arrowright':
                    this.matrix = this.matrix.translate(step, 0);
                    break;
                case 'z':
                    this.matrix = this.matrix.scale(1 + scaleFactor, 1 + scaleFactor);
                    break;
                case 'x':
                    this.matrix = this.matrix.scale(1 - scaleFactor, 1 - scaleFactor);
                    break;
                case 'q':
                    this.matrix = this.matrix.rotate(-rotateAngle);
                    break;
                case 'e':
                    this.matrix = this.matrix.rotate(rotateAngle);
                    break;
            }
            //console.log('After transform - Matrix:', this.matrix);
            e.preventDefault();
            this.draw();
        });
    }
    drawGrid() {
        const ctx = this.ctx;
        ctx.strokeStyle = '#ddd';
        ctx.lineWidth = 1;
        // Draw the grid
        for (let x = -500; x <= 500; x += 50) {
            ctx.beginPath();
            ctx.moveTo(x, -500);
            ctx.lineTo(x, 500);
            ctx.stroke();
            ctx.beginPath();
            ctx.moveTo(-500, x);
            ctx.lineTo(500, x);
            ctx.stroke();
        }
        // Draw the axes
        ctx.strokeStyle = 'black';
        ctx.lineWidth = 2;
        ctx.beginPath();
        ctx.moveTo(-500, 0);
        ctx.lineTo(500, 0);
        ctx.moveTo(0, -500);
        ctx.lineTo(0, 500);
        ctx.stroke();
        // Draw coordinate labels
        ctx.fillStyle = 'black';
        ctx.font = '12px Arial';
        for (let x = -500; x <= 500; x += 100) {
            ctx.fillText(x.toString(), x + 5, 15);
            ctx.fillText(x.toString(), 5, -x + 5);
        }
    }
    draw() {
        const ctx = this.ctx;
        // Save the current state and reset the transform
        ctx.save();
        ctx.setTransform(1, 0, 0, 1, 0, 0);
        ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
        ctx.restore();
        // Apply the current matrix transform
        ctx.save();
        this.matrix.transform(ctx);
        // Draw the scene content
        this.drawGrid();
        ctx.restore();
    }
}
// Initialization code
document.addEventListener('DOMContentLoaded', () => {
    new MapDemo();
    const info = document.createElement('div');
    info.innerHTML = `
        <h3>Controls:</h3>
        <p>Pan: W/A/S/D or arrow keys</p>
        <p>Zoom: Z/X</p>
        <p>Rotate: Q/E</p>
    `;
    document.body.appendChild(info);
});
//# sourceMappingURL=matrix-demo.js.map
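A note on the convention used in the compiled file above: `Matrix3` keeps its nine elements in row-major order, and `transform(ctx)` remaps them onto the canvas 2D matrix, whose `ctx.transform(a, b, c, d, e, f)` parameters are column-ordered (`a`/`d` scale, `b`/`c` shear, `e`/`f` translate). The TypeScript sketch below restates that remapping; it is illustrative only, is not part of the commit, and the helper name `applyRowMajor` is made up here.

```typescript
// Illustrative sketch (not from this commit): apply a row-major 3x3
// homogeneous matrix m = [m00, m01, m02, m10, m11, m12, m20, m21, m22]
// to a CanvasRenderingContext2D, mirroring Matrix3.transform above.
function applyRowMajor(ctx: CanvasRenderingContext2D, m: number[]): void {
    // Canvas maps points as x' = a*x + c*y + e and y' = b*x + d*y + f,
    // so a = m00, b = m10, c = m01, d = m11, e = m02, f = m12.
    ctx.transform(m[0], m[3], m[1], m[4], m[2], m[5]);
}
```

For quick manual testing, the demo's keyboard handler can also be driven from the browser console, e.g. `document.dispatchEvent(new KeyboardEvent('keydown', { key: 'd' }))` pans the grid one step to the right.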
1  dist/matrix-demo.js.map  vendored  Normal file
File diff suppressed because one or more lines are too long
19  index.html  Normal file
@ -0,0 +1,19 @@
<!DOCTYPE html>
<html>
<head>
    <title>Matrix Map Demo (TS)</title>
    <style>
        body {
            display: flex;
            flex-direction: column;
            align-items: center;
            font-family: Arial, sans-serif;
        }
        canvas { margin: 20px; }
        div { margin: 10px; }
    </style>
</head>
<body>
    <script src="../dist/matrix-demo.js"></script>
</body>
</html>
11  node/map-interaction/package.json  Normal file
@ -0,0 +1,11 @@
{
    "test": "echo \"Error: no test specified\" && exit 1",
    "build": "tsc",
    "dev": "concurrently \"tsc -w\" \"live-server\""
},
"devDependencies": {
    "@types/node": "^22.12.0",
    "typescript": "^5.7.3",
    "concurrently": "^8.2.2",
    "live-server": "^1.2.2"
}
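For reference, the scripts above give the intended workflow: `npm run build` performs a one-off `tsc` compile, while `npm run dev` uses `concurrently` to run `tsc -w` and `live-server` together so edits rebuild and reload in the browser.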
14  node/map-interaction/tsconfig.json  Normal file
@ -0,0 +1,14 @@
{
    "compilerOptions": {
        "target": "ES6",
        "module": "ES6",
        "outDir": "./dist",
        "rootDir": "./src",
        "strict": true,
        "esModuleInterop": true,
        "skipLibCheck": true,
        "forceConsistentCasingInFileNames": true
    },
    "include": [
        "src/**/*"
    ]
}
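Given `rootDir: "./src"` and `outDir: "./dist"`, running `npm run build` (i.e. `tsc`) would emit the committed `dist/matrix-demo.js` from a corresponding source under `src/`, e.g. a hypothetical `src/matrix-demo.ts`; the `sourceMappingURL` footer in the compiled file suggests source-map output is enabled as well, though that flag is not among the options shown here.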
1  node_modules/.bin/acorn  generated  vendored  Symbolic link
@ -0,0 +1 @@
../acorn/bin/acorn

1  node_modules/.bin/nodemon  generated  vendored  Symbolic link
@ -0,0 +1 @@
../nodemon/bin/nodemon.js

1  node_modules/.bin/nodetouch  generated  vendored  Symbolic link
@ -0,0 +1 @@
../touch/bin/nodetouch.js

1  node_modules/.bin/semver  generated  vendored  Symbolic link
@ -0,0 +1 @@
../semver/bin/semver.js

1  node_modules/.bin/ts-node  generated  vendored  Symbolic link
@ -0,0 +1 @@
../ts-node/dist/bin.js

1  node_modules/.bin/ts-node-cwd  generated  vendored  Symbolic link
@ -0,0 +1 @@
../ts-node/dist/bin-cwd.js

1  node_modules/.bin/ts-node-esm  generated  vendored  Symbolic link
@ -0,0 +1 @@
../ts-node/dist/bin-esm.js

1  node_modules/.bin/ts-node-script  generated  vendored  Symbolic link
@ -0,0 +1 @@
../ts-node/dist/bin-script.js

1  node_modules/.bin/ts-node-transpile-only  generated  vendored  Symbolic link
@ -0,0 +1 @@
../ts-node/dist/bin-transpile.js

1  node_modules/.bin/ts-script  generated  vendored  Symbolic link
@ -0,0 +1 @@
../ts-node/dist/bin-script-deprecated.js

1  node_modules/.bin/tsc  generated  vendored  Symbolic link
@ -0,0 +1 @@
../typescript/bin/tsc

1  node_modules/.bin/tsserver  generated  vendored  Symbolic link
@ -0,0 +1 @@
../typescript/bin/tsserver
549  node_modules/.package-lock.json  generated  vendored  Normal file
@ -0,0 +1,549 @@
|
||||
{
|
||||
"name": "map-interaction",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/@cspotcode/source-map-support": {
|
||||
"version": "0.8.1",
|
||||
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
||||
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/trace-mapping": "0.3.9"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/resolve-uri": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
|
||||
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/sourcemap-codec": {
|
||||
"version": "1.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz",
|
||||
"integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@jridgewell/trace-mapping": {
|
||||
"version": "0.3.9",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
|
||||
"integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/resolve-uri": "^3.0.3",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tsconfig/node10": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz",
|
||||
"integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@tsconfig/node12": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
|
||||
"integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@tsconfig/node14": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
|
||||
"integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@tsconfig/node16": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
|
||||
"integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "22.12.0",
|
||||
"resolved": "https://registry.npmmirror.com/@types/node/-/node-22.12.0.tgz",
|
||||
"integrity": "sha512-Fll2FZ1riMjNmlmJOdAyY5pUbkftXslB5DgEzlIuNaiWhXd00FhWxVC/r4yV/4wBb9JfImTu+jiSvXTkJ7F/gA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"undici-types": "~6.20.0"
|
||||
}
|
||||
},
|
||||
"node_modules/acorn": {
|
||||
"version": "8.14.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
|
||||
"integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/acorn-walk": {
|
||||
"version": "8.3.4",
|
||||
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz",
|
||||
"integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"acorn": "^8.11.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/anymatch": {
|
||||
"version": "3.1.3",
|
||||
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
|
||||
"integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"normalize-path": "^3.0.0",
|
||||
"picomatch": "^2.0.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/arg": {
|
||||
"version": "4.1.3",
|
||||
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
|
||||
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/balanced-match": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/binary-extensions": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
|
||||
"integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/braces": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
|
||||
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"fill-range": "^7.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/chokidar": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
|
||||
"integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"anymatch": "~3.1.2",
|
||||
"braces": "~3.0.2",
|
||||
"glob-parent": "~5.1.2",
|
||||
"is-binary-path": "~2.1.0",
|
||||
"is-glob": "~4.0.1",
|
||||
"normalize-path": "~3.0.0",
|
||||
"readdirp": "~3.6.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8.10.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://paulmillr.com/funding/"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "~2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/create-require": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
|
||||
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
|
||||
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/diff": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
|
||||
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.3.1"
|
||||
}
|
||||
},
|
||||
"node_modules/fill-range": {
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
|
||||
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"to-regex-range": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/fsevents": {
|
||||
"version": "2.3.3",
|
||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
|
||||
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/glob-parent": {
|
||||
"version": "5.1.2",
|
||||
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
|
||||
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"is-glob": "^4.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/has-flag": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
|
||||
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/ignore-by-default": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
|
||||
"integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/is-binary-path": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
|
||||
"integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"binary-extensions": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/is-extglob": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
|
||||
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-glob": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
|
||||
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"is-extglob": "^2.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-number": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
|
||||
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.12.0"
|
||||
}
|
||||
},
|
||||
"node_modules/make-error": {
|
||||
"version": "1.3.6",
|
||||
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
|
||||
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/minimatch": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
|
||||
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"brace-expansion": "^1.1.7"
|
||||
},
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/nodemon": {
|
||||
"version": "3.1.9",
|
||||
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.9.tgz",
|
||||
"integrity": "sha512-hdr1oIb2p6ZSxu3PB2JWWYS7ZQ0qvaZsc3hK8DR8f02kRzc8rjYmxAIvdz+aYC+8F2IjNaB7HMcSDg8nQpJxyg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"chokidar": "^3.5.2",
|
||||
"debug": "^4",
|
||||
"ignore-by-default": "^1.0.1",
|
||||
"minimatch": "^3.1.2",
|
||||
"pstree.remy": "^1.1.8",
|
||||
"semver": "^7.5.3",
|
||||
"simple-update-notifier": "^2.0.0",
|
||||
"supports-color": "^5.5.0",
|
||||
"touch": "^3.1.0",
|
||||
"undefsafe": "^2.0.5"
|
||||
},
|
||||
"bin": {
|
||||
"nodemon": "bin/nodemon.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/nodemon"
|
||||
}
|
||||
},
|
||||
"node_modules/normalize-path": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
|
||||
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/picomatch": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
|
||||
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=8.6"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/jonschlinkert"
|
||||
}
|
||||
},
|
||||
"node_modules/pstree.remy": {
|
||||
"version": "1.1.8",
|
||||
"resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz",
|
||||
"integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/readdirp": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
|
||||
"integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"picomatch": "^2.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "7.7.0",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.0.tgz",
|
||||
"integrity": "sha512-DrfFnPzblFmNrIZzg5RzHegbiRWg7KMR7btwi2yjHwx06zsUbO5g613sVwEV7FTwmzJu+Io0lJe2GJ3LxqpvBQ==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/simple-update-notifier": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz",
|
||||
"integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"semver": "^7.5.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/supports-color": {
|
||||
"version": "5.5.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
|
||||
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"has-flag": "^3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/to-regex-range": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
|
||||
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"is-number": "^7.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/touch": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz",
|
||||
"integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"nodetouch": "bin/nodetouch.js"
|
||||
}
|
||||
},
|
||||
"node_modules/ts-node": {
|
||||
"version": "10.9.2",
|
||||
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
|
||||
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@cspotcode/source-map-support": "^0.8.0",
|
||||
"@tsconfig/node10": "^1.0.7",
|
||||
"@tsconfig/node12": "^1.0.7",
|
||||
"@tsconfig/node14": "^1.0.0",
|
||||
"@tsconfig/node16": "^1.0.2",
|
||||
"acorn": "^8.4.1",
|
||||
"acorn-walk": "^8.1.1",
|
||||
"arg": "^4.1.0",
|
||||
"create-require": "^1.1.0",
|
||||
"diff": "^4.0.1",
|
||||
"make-error": "^1.1.1",
|
||||
"v8-compile-cache-lib": "^3.0.1",
|
||||
"yn": "3.1.1"
|
||||
},
|
||||
"bin": {
|
||||
"ts-node": "dist/bin.js",
|
||||
"ts-node-cwd": "dist/bin-cwd.js",
|
||||
"ts-node-esm": "dist/bin-esm.js",
|
||||
"ts-node-script": "dist/bin-script.js",
|
||||
"ts-node-transpile-only": "dist/bin-transpile.js",
|
||||
"ts-script": "dist/bin-script-deprecated.js"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@swc/core": ">=1.2.50",
|
||||
"@swc/wasm": ">=1.2.50",
|
||||
"@types/node": "*",
|
||||
"typescript": ">=2.7"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@swc/core": {
|
||||
"optional": true
|
||||
},
|
||||
"@swc/wasm": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.7.3",
|
||||
"resolved": "https://registry.npmmirror.com/typescript/-/typescript-5.7.3.tgz",
|
||||
"integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/undefsafe": {
|
||||
"version": "2.0.5",
|
||||
"resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz",
|
||||
"integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
"version": "6.20.0",
|
||||
"resolved": "https://registry.npmmirror.com/undici-types/-/undici-types-6.20.0.tgz",
|
||||
"integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/v8-compile-cache-lib": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
|
||||
"integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/yn": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
|
||||
"integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
21  node_modules/@cspotcode/source-map-support/LICENSE.md  generated  vendored  Normal file
@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014 Evan Wallace

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
289  node_modules/@cspotcode/source-map-support/README.md  generated  vendored  Normal file
@ -0,0 +1,289 @@
|
||||
# Source Map Support
|
||||
|
||||
[](https://npmjs.org/package/@cspotcode/source-map-support)
|
||||
[](https://npmjs.org/package/@cspotcode/source-map-support)
|
||||
[](https://github.com/cspotcode/node-source-map-support/actions?query=workflow%3A%22Continuous+Integration%22)
|
||||
|
||||
This module provides source map support for stack traces in node via the [V8 stack trace API](https://github.com/v8/v8/wiki/Stack-Trace-API). It uses the [source-map](https://github.com/mozilla/source-map) module to replace the paths and line numbers of source-mapped files with their original paths and line numbers. The output mimics node's stack trace format with the goal of making every compile-to-JS language more of a first-class citizen. Source maps are completely general (not specific to any one language) so you can use source maps with multiple compile-to-JS languages in the same node process.
|
||||
|
||||
## Installation and Usage
|
||||
|
||||
#### Node support
|
||||
|
||||
```
|
||||
$ npm install @cspotcode/source-map-support
|
||||
```
|
||||
|
||||
Source maps can be generated using libraries such as [source-map-index-generator](https://github.com/twolfson/source-map-index-generator). Once you have a valid source map, place a source mapping comment somewhere in the file (usually done automatically or with an option by your transpiler):
|
||||
|
||||
```
|
||||
//# sourceMappingURL=path/to/source.map
|
||||
```
|
||||
|
||||
If multiple sourceMappingURL comments exist in one file, the last sourceMappingURL comment will be
|
||||
respected (e.g. if a file mentions the comment in code, or went through multiple transpilers).
|
||||
The path should either be absolute or relative to the compiled file.
|
||||
|
||||
From here you have two options.
|
||||
|
||||
##### CLI Usage
|
||||
|
||||
```bash
|
||||
node -r @cspotcode/source-map-support/register compiled.js
|
||||
# Or to enable hookRequire
|
||||
node -r @cspotcode/source-map-support/register-hook-require compiled.js
|
||||
```
|
||||
|
||||
##### Programmatic Usage
|
||||
|
||||
Put the following line at the top of the compiled file.
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install();
|
||||
```
|
||||
|
||||
It is also possible to install the source map support directly by
|
||||
requiring the `register` module which can be handy with ES6:
|
||||
|
||||
```js
|
||||
import '@cspotcode/source-map-support/register'
|
||||
|
||||
// Instead of:
|
||||
import sourceMapSupport from '@cspotcode/source-map-support'
|
||||
sourceMapSupport.install()
|
||||
```
|
||||
Note: if you're using babel-register, it includes source-map-support already.
|
||||
|
||||
It is also very useful with Mocha:
|
||||
|
||||
```
|
||||
$ mocha --require @cspotcode/source-map-support/register tests/
|
||||
```
|
||||
|
||||
#### Browser support
|
||||
|
||||
This library also works in Chrome. While the DevTools console already supports source maps, the V8 engine doesn't and `Error.prototype.stack` will be incorrect without this library. Everything will just work if you deploy your source files using [browserify](http://browserify.org/). Just make sure to pass the `--debug` flag to the browserify command so your source maps are included in the bundled code.
|
||||
|
||||
This library also works if you use another build process or just include the source files directly. In this case, include the file `browser-source-map-support.js` in your page and call `sourceMapSupport.install()`. It contains the whole library already bundled for the browser using browserify.
|
||||
|
||||
```html
|
||||
<script src="browser-source-map-support.js"></script>
|
||||
<script>sourceMapSupport.install();</script>
|
||||
```
|
||||
|
||||
This library also works if you use AMD (Asynchronous Module Definition), which is used in tools like [RequireJS](http://requirejs.org/). Just list `browser-source-map-support` as a dependency:
|
||||
|
||||
```html
|
||||
<script>
|
||||
define(['browser-source-map-support'], function(sourceMapSupport) {
|
||||
sourceMapSupport.install();
|
||||
});
|
||||
</script>
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
This module installs two things: a change to the `stack` property on `Error` objects and a handler for uncaught exceptions that mimics node's default exception handler (the handler can be seen in the demos below). You may want to disable the handler if you have your own uncaught exception handler. This can be done by passing an argument to the installer:
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install({
|
||||
handleUncaughtExceptions: false
|
||||
});
|
||||
```
|
||||
|
||||
This module loads source maps from the filesystem by default. You can provide alternate loading behavior through a callback as shown below. For example, [Meteor](https://github.com/meteor) keeps all source maps cached in memory to avoid disk access.
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install({
|
||||
retrieveSourceMap: function(source) {
|
||||
if (source === 'compiled.js') {
|
||||
return {
|
||||
url: 'original.js',
|
||||
map: fs.readFileSync('compiled.js.map', 'utf8')
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
The module will by default assume a browser environment if XMLHttpRequest and window are defined. If either of these do not exist it will instead assume a node environment.
|
||||
In some rare cases, e.g. when running a browser emulation and where both variables are also set, you can explictly specify the environment to be either 'browser' or 'node'.
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install({
|
||||
environment: 'node'
|
||||
});
|
||||
```
|
||||
|
||||
To support files with inline source maps, the `hookRequire` options can be specified, which will monitor all source files for inline source maps.
|
||||
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install({
|
||||
hookRequire: true
|
||||
});
|
||||
```
|
||||
|
||||
This monkey patches the `require` module loading chain, so is not enabled by default and is not recommended for any sort of production usage.
|
||||
|
||||
## Demos
|
||||
|
||||
#### Basic Demo
|
||||
|
||||
original.js:
|
||||
|
||||
```js
|
||||
throw new Error('test'); // This is the original code
|
||||
```
|
||||
|
||||
compiled.js:
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install();
|
||||
|
||||
throw new Error('test'); // This is the compiled code
|
||||
// The next line defines the sourceMapping.
|
||||
//# sourceMappingURL=compiled.js.map
|
||||
```
|
||||
|
||||
compiled.js.map:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": 3,
|
||||
"file": "compiled.js",
|
||||
"sources": ["original.js"],
|
||||
"names": [],
|
||||
"mappings": ";;AAAA,MAAM,IAAI"
|
||||
}
|
||||
```
|
||||
|
||||
Run compiled.js using node (notice how the stack trace uses original.js instead of compiled.js):
|
||||
|
||||
```
|
||||
$ node compiled.js
|
||||
|
||||
original.js:1
|
||||
throw new Error('test'); // This is the original code
|
||||
^
|
||||
Error: test
|
||||
at Object.<anonymous> (original.js:1:7)
|
||||
at Module._compile (module.js:456:26)
|
||||
at Object.Module._extensions..js (module.js:474:10)
|
||||
at Module.load (module.js:356:32)
|
||||
at Function.Module._load (module.js:312:12)
|
||||
at Function.Module.runMain (module.js:497:10)
|
||||
at startup (node.js:119:16)
|
||||
at node.js:901:3
|
||||
```
|
||||
|
||||
#### TypeScript Demo
|
||||
|
||||
demo.ts:
|
||||
|
||||
```typescript
|
||||
declare function require(name: string);
|
||||
require('@cspotcode/source-map-support').install();
|
||||
class Foo {
|
||||
constructor() { this.bar(); }
|
||||
bar() { throw new Error('this is a demo'); }
|
||||
}
|
||||
new Foo();
|
||||
```
|
||||
|
||||
Compile and run the file using the TypeScript compiler from the terminal:
|
||||
|
||||
```
|
||||
$ npm install source-map-support typescript
|
||||
$ node_modules/typescript/bin/tsc -sourcemap demo.ts
|
||||
$ node demo.js
|
||||
|
||||
demo.ts:5
|
||||
bar() { throw new Error('this is a demo'); }
|
||||
^
|
||||
Error: this is a demo
|
||||
at Foo.bar (demo.ts:5:17)
|
||||
at new Foo (demo.ts:4:24)
|
||||
at Object.<anonymous> (demo.ts:7:1)
|
||||
at Module._compile (module.js:456:26)
|
||||
at Object.Module._extensions..js (module.js:474:10)
|
||||
at Module.load (module.js:356:32)
|
||||
at Function.Module._load (module.js:312:12)
|
||||
at Function.Module.runMain (module.js:497:10)
|
||||
at startup (node.js:119:16)
|
||||
at node.js:901:3
|
||||
```
|
||||
|
||||
There is also the option to use `-r source-map-support/register` with typescript, without the need add the `require('@cspotcode/source-map-support').install()` in the code base:
|
||||
|
||||
```
|
||||
$ npm install source-map-support typescript
|
||||
$ node_modules/typescript/bin/tsc -sourcemap demo.ts
|
||||
$ node -r source-map-support/register demo.js
|
||||
|
||||
demo.ts:5
|
||||
bar() { throw new Error('this is a demo'); }
|
||||
^
|
||||
Error: this is a demo
|
||||
at Foo.bar (demo.ts:5:17)
|
||||
at new Foo (demo.ts:4:24)
|
||||
at Object.<anonymous> (demo.ts:7:1)
|
||||
at Module._compile (module.js:456:26)
|
||||
at Object.Module._extensions..js (module.js:474:10)
|
||||
at Module.load (module.js:356:32)
|
||||
at Function.Module._load (module.js:312:12)
|
||||
at Function.Module.runMain (module.js:497:10)
|
||||
at startup (node.js:119:16)
|
||||
at node.js:901:3
|
||||
```
|
||||
|
||||
#### CoffeeScript Demo
|
||||
|
||||
demo.coffee:
|
||||
|
||||
```coffee
|
||||
require('@cspotcode/source-map-support').install()
|
||||
foo = ->
|
||||
bar = -> throw new Error 'this is a demo'
|
||||
bar()
|
||||
foo()
|
||||
```
|
||||
|
||||
Compile and run the file using the CoffeeScript compiler from the terminal:
|
||||
|
||||
```sh
|
||||
$ npm install @cspotcode/source-map-support coffeescript
|
||||
$ node_modules/.bin/coffee --map --compile demo.coffee
|
||||
$ node demo.js
|
||||
|
||||
demo.coffee:3
|
||||
bar = -> throw new Error 'this is a demo'
|
||||
^
|
||||
Error: this is a demo
|
||||
at bar (demo.coffee:3:22)
|
||||
at foo (demo.coffee:4:3)
|
||||
at Object.<anonymous> (demo.coffee:5:1)
|
||||
at Object.<anonymous> (demo.coffee:1:1)
|
||||
at Module._compile (module.js:456:26)
|
||||
at Object.Module._extensions..js (module.js:474:10)
|
||||
at Module.load (module.js:356:32)
|
||||
at Function.Module._load (module.js:312:12)
|
||||
at Function.Module.runMain (module.js:497:10)
|
||||
at startup (node.js:119:16)
|
||||
```
|
||||
|
||||
## Tests
|
||||
|
||||
This repo contains both automated tests for node and manual tests for the browser. The automated tests can be run using mocha (type `mocha` in the root directory). To run the manual tests:
|
||||
|
||||
* Build the tests using `build.js`
|
||||
* Launch the HTTP server (`npm run serve-tests`) and visit
|
||||
* http://127.0.0.1:1336/amd-test
|
||||
* http://127.0.0.1:1336/browser-test
|
||||
* http://127.0.0.1:1336/browserify-test - **Currently not working** due to a bug with browserify (see [pull request #66](https://github.com/evanw/node-source-map-support/pull/66) for details).
|
||||
* For `header-test`, run `server.js` inside that directory and visit http://127.0.0.1:1337/
|
||||
|
||||
## License
|
||||
|
||||
This code is available under the [MIT license](http://opensource.org/licenses/MIT).
|
114  node_modules/@cspotcode/source-map-support/browser-source-map-support.js  generated  vendored  Normal file
@ -0,0 +1,114 @@
|
||||
/*
|
||||
* Support for source maps in V8 stack traces
|
||||
* https://github.com/evanw/node-source-map-support
|
||||
*/
|
||||
/*
|
||||
The buffer module from node.js, for the browser.
|
||||
|
||||
@author Feross Aboukhadijeh <feross@feross.org> <http://feross.org>
|
||||
license MIT
|
||||
*/
|
||||
(this.define||function(R,U){this.sourceMapSupport=U()})("browser-source-map-support",function(R){(function e(C,J,A){function p(f,c){if(!J[f]){if(!C[f]){var l="function"==typeof require&&require;if(!c&&l)return l(f,!0);if(t)return t(f,!0);throw Error("Cannot find module '"+f+"'");}l=J[f]={exports:{}};C[f][0].call(l.exports,function(q){var r=C[f][1][q];return p(r?r:q)},l,l.exports,e,C,J,A)}return J[f].exports}for(var t="function"==typeof require&&require,m=0;m<A.length;m++)p(A[m]);return p})({1:[function(C,
|
||||
J,A){R=C("./source-map-support")},{"./source-map-support":21}],2:[function(C,J,A){(function(e){function p(m){m=m.charCodeAt(0);if(43===m)return 62;if(47===m)return 63;if(48>m)return-1;if(58>m)return m-48+52;if(91>m)return m-65;if(123>m)return m-97+26}var t="undefined"!==typeof Uint8Array?Uint8Array:Array;e.toByteArray=function(m){function f(d){q[k++]=d}if(0<m.length%4)throw Error("Invalid string. Length must be a multiple of 4");var c=m.length;var l="="===m.charAt(c-2)?2:"="===m.charAt(c-1)?1:0;var q=
|
||||
new t(3*m.length/4-l);var r=0<l?m.length-4:m.length;var k=0;for(c=0;c<r;c+=4){var u=p(m.charAt(c))<<18|p(m.charAt(c+1))<<12|p(m.charAt(c+2))<<6|p(m.charAt(c+3));f((u&16711680)>>16);f((u&65280)>>8);f(u&255)}2===l?(u=p(m.charAt(c))<<2|p(m.charAt(c+1))>>4,f(u&255)):1===l&&(u=p(m.charAt(c))<<10|p(m.charAt(c+1))<<4|p(m.charAt(c+2))>>2,f(u>>8&255),f(u&255));return q};e.fromByteArray=function(m){var f=m.length%3,c="",l;var q=0;for(l=m.length-f;q<l;q+=3){var r=(m[q]<<16)+(m[q+1]<<8)+m[q+2];r="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>
|
||||
18&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>12&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>6&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r&63);c+=r}switch(f){case 1:r=m[m.length-1];c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>2);c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r<<4&63);c+="==";break;case 2:r=(m[m.length-2]<<8)+
|
||||
m[m.length-1],c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>10),c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>4&63),c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r<<2&63),c+="="}return c}})("undefined"===typeof A?this.base64js={}:A)},{}],3:[function(C,J,A){},{}],4:[function(C,J,A){(function(e){var p=Object.prototype.toString,t="function"===typeof e.alloc&&"function"===typeof e.allocUnsafe&&"function"===
|
||||
typeof e.from;J.exports=function(m,f,c){if("number"===typeof m)throw new TypeError('"value" argument must not be a number');if("ArrayBuffer"===p.call(m).slice(8,-1)){f>>>=0;var l=m.byteLength-f;if(0>l)throw new RangeError("'offset' is out of bounds");if(void 0===c)c=l;else if(c>>>=0,c>l)throw new RangeError("'length' is out of bounds");return t?e.from(m.slice(f,f+c)):new e(new Uint8Array(m.slice(f,f+c)))}if("string"===typeof m){c=f;if("string"!==typeof c||""===c)c="utf8";if(!e.isEncoding(c))throw new TypeError('"encoding" must be a valid string encoding');
|
||||
return t?e.from(m,c):new e(m,c)}return t?e.from(m):new e(m)}}).call(this,C("buffer").Buffer)},{buffer:5}],5:[function(C,J,A){function e(a,b,h){if(!(this instanceof e))return new e(a,b,h);var w=typeof a;if("number"===w)var y=0<a?a>>>0:0;else if("string"===w){if("base64"===b)for(a=(a.trim?a.trim():a.replace(/^\s+|\s+$/g,"")).replace(L,"");0!==a.length%4;)a+="=";y=e.byteLength(a,b)}else if("object"===w&&null!==a)"Buffer"===a.type&&z(a.data)&&(a=a.data),y=0<+a.length?Math.floor(+a.length):0;else throw new TypeError("must start with number, buffer, array or string");
|
||||
if(this.length>G)throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+G.toString(16)+" bytes");if(e.TYPED_ARRAY_SUPPORT)var I=e._augment(new Uint8Array(y));else I=this,I.length=y,I._isBuffer=!0;if(e.TYPED_ARRAY_SUPPORT&&"number"===typeof a.byteLength)I._set(a);else{var K=a;if(z(K)||e.isBuffer(K)||K&&"object"===typeof K&&"number"===typeof K.length)if(e.isBuffer(a))for(b=0;b<y;b++)I[b]=a.readUInt8(b);else for(b=0;b<y;b++)I[b]=(a[b]%256+256)%256;else if("string"===w)I.write(a,
|
||||
0,b);else if("number"===w&&!e.TYPED_ARRAY_SUPPORT&&!h)for(b=0;b<y;b++)I[b]=0}return I}function p(a,b,h){var w="";for(h=Math.min(a.length,h);b<h;b++)w+=String.fromCharCode(a[b]);return w}function t(a,b,h){if(0!==a%1||0>a)throw new RangeError("offset is not uint");if(a+b>h)throw new RangeError("Trying to access beyond buffer length");}function m(a,b,h,w,y,I){if(!e.isBuffer(a))throw new TypeError("buffer must be a Buffer instance");if(b>y||b<I)throw new TypeError("value is out of bounds");if(h+w>a.length)throw new TypeError("index out of range");
|
||||
}function f(a,b,h,w){0>b&&(b=65535+b+1);for(var y=0,I=Math.min(a.length-h,2);y<I;y++)a[h+y]=(b&255<<8*(w?y:1-y))>>>8*(w?y:1-y)}function c(a,b,h,w){0>b&&(b=4294967295+b+1);for(var y=0,I=Math.min(a.length-h,4);y<I;y++)a[h+y]=b>>>8*(w?y:3-y)&255}function l(a,b,h,w,y,I){if(b>y||b<I)throw new TypeError("value is out of bounds");if(h+w>a.length)throw new TypeError("index out of range");}function q(a,b,h,w,y){y||l(a,b,h,4,3.4028234663852886E38,-3.4028234663852886E38);v.write(a,b,h,w,23,4);return h+4}function r(a,
|
||||
b,h,w,y){y||l(a,b,h,8,1.7976931348623157E308,-1.7976931348623157E308);v.write(a,b,h,w,52,8);return h+8}function k(a){for(var b=[],h=0;h<a.length;h++){var w=a.charCodeAt(h);if(127>=w)b.push(w);else{var y=h;55296<=w&&57343>=w&&h++;w=encodeURIComponent(a.slice(y,h+1)).substr(1).split("%");for(y=0;y<w.length;y++)b.push(parseInt(w[y],16))}}return b}function u(a){for(var b=[],h=0;h<a.length;h++)b.push(a.charCodeAt(h)&255);return b}function d(a,b,h,w,y){y&&(w-=w%y);for(y=0;y<w&&!(y+h>=b.length||y>=a.length);y++)b[y+
|
||||
h]=a[y];return y}function g(a){try{return decodeURIComponent(a)}catch(b){return String.fromCharCode(65533)}}var n=C("base64-js"),v=C("ieee754"),z=C("is-array");A.Buffer=e;A.SlowBuffer=e;A.INSPECT_MAX_BYTES=50;e.poolSize=8192;var G=1073741823;e.TYPED_ARRAY_SUPPORT=function(){try{var a=new ArrayBuffer(0),b=new Uint8Array(a);b.foo=function(){return 42};return 42===b.foo()&&"function"===typeof b.subarray&&0===(new Uint8Array(1)).subarray(1,1).byteLength}catch(h){return!1}}();e.isBuffer=function(a){return!(null==
|
||||
a||!a._isBuffer)};e.compare=function(a,b){if(!e.isBuffer(a)||!e.isBuffer(b))throw new TypeError("Arguments must be Buffers");for(var h=a.length,w=b.length,y=0,I=Math.min(h,w);y<I&&a[y]===b[y];y++);y!==I&&(h=a[y],w=b[y]);return h<w?-1:w<h?1:0};e.isEncoding=function(a){switch(String(a).toLowerCase()){case "hex":case "utf8":case "utf-8":case "ascii":case "binary":case "base64":case "raw":case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":return!0;default:return!1}};e.concat=function(a,b){if(!z(a))throw new TypeError("Usage: Buffer.concat(list[, length])");
|
||||
if(0===a.length)return new e(0);if(1===a.length)return a[0];var h;if(void 0===b)for(h=b=0;h<a.length;h++)b+=a[h].length;var w=new e(b),y=0;for(h=0;h<a.length;h++){var I=a[h];I.copy(w,y);y+=I.length}return w};e.byteLength=function(a,b){a+="";switch(b||"utf8"){case "ascii":case "binary":case "raw":var h=a.length;break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":h=2*a.length;break;case "hex":h=a.length>>>1;break;case "utf8":case "utf-8":h=k(a).length;break;case "base64":h=n.toByteArray(a).length;
|
||||
break;default:h=a.length}return h};e.prototype.length=void 0;e.prototype.parent=void 0;e.prototype.toString=function(a,b,h){var w=!1;b>>>=0;h=void 0===h||Infinity===h?this.length:h>>>0;a||(a="utf8");0>b&&(b=0);h>this.length&&(h=this.length);if(h<=b)return"";for(;;)switch(a){case "hex":a=b;b=h;h=this.length;if(!a||0>a)a=0;if(!b||0>b||b>h)b=h;w="";for(h=a;h<b;h++)a=w,w=this[h],w=16>w?"0"+w.toString(16):w.toString(16),w=a+w;return w;case "utf8":case "utf-8":w=a="";for(h=Math.min(this.length,h);b<h;b++)127>=
|
||||
this[b]?(a+=g(w)+String.fromCharCode(this[b]),w=""):w+="%"+this[b].toString(16);return a+g(w);case "ascii":return p(this,b,h);case "binary":return p(this,b,h);case "base64":return b=0===b&&h===this.length?n.fromByteArray(this):n.fromByteArray(this.slice(b,h)),b;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":b=this.slice(b,h);h="";for(a=0;a<b.length;a+=2)h+=String.fromCharCode(b[a]+256*b[a+1]);return h;default:if(w)throw new TypeError("Unknown encoding: "+a);a=(a+"").toLowerCase();w=!0}};
|
||||
e.prototype.equals=function(a){if(!e.isBuffer(a))throw new TypeError("Argument must be a Buffer");return 0===e.compare(this,a)};e.prototype.inspect=function(){var a="",b=A.INSPECT_MAX_BYTES;0<this.length&&(a=this.toString("hex",0,b).match(/.{2}/g).join(" "),this.length>b&&(a+=" ... "));return"<Buffer "+a+">"};e.prototype.compare=function(a){if(!e.isBuffer(a))throw new TypeError("Argument must be a Buffer");return e.compare(this,a)};e.prototype.get=function(a){console.log(".get() is deprecated. Access using array indexes instead.");
|
||||
return this.readUInt8(a)};e.prototype.set=function(a,b){console.log(".set() is deprecated. Access using array indexes instead.");return this.writeUInt8(a,b)};e.prototype.write=function(a,b,h,w){if(isFinite(b))isFinite(h)||(w=h,h=void 0);else{var y=w;w=b;b=h;h=y}b=Number(b)||0;y=this.length-b;h?(h=Number(h),h>y&&(h=y)):h=y;w=String(w||"utf8").toLowerCase();switch(w){case "hex":b=Number(b)||0;w=this.length-b;h?(h=Number(h),h>w&&(h=w)):h=w;w=a.length;if(0!==w%2)throw Error("Invalid hex string");h>w/
|
||||
2&&(h=w/2);for(w=0;w<h;w++){y=parseInt(a.substr(2*w,2),16);if(isNaN(y))throw Error("Invalid hex string");this[b+w]=y}a=w;break;case "utf8":case "utf-8":a=d(k(a),this,b,h);break;case "ascii":a=d(u(a),this,b,h);break;case "binary":a=d(u(a),this,b,h);break;case "base64":a=d(n.toByteArray(a),this,b,h);break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":y=[];for(var I=0;I<a.length;I++){var K=a.charCodeAt(I);w=K>>8;K%=256;y.push(K);y.push(w)}a=d(y,this,b,h,2);break;default:throw new TypeError("Unknown encoding: "+
|
||||
w);}return a};e.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};e.prototype.slice=function(a,b){var h=this.length;a=~~a;b=void 0===b?h:~~b;0>a?(a+=h,0>a&&(a=0)):a>h&&(a=h);0>b?(b+=h,0>b&&(b=0)):b>h&&(b=h);b<a&&(b=a);if(e.TYPED_ARRAY_SUPPORT)return e._augment(this.subarray(a,b));h=b-a;for(var w=new e(h,void 0,!0),y=0;y<h;y++)w[y]=this[y+a];return w};e.prototype.readUInt8=function(a,b){b||t(a,1,this.length);return this[a]};e.prototype.readUInt16LE=
|
||||
function(a,b){b||t(a,2,this.length);return this[a]|this[a+1]<<8};e.prototype.readUInt16BE=function(a,b){b||t(a,2,this.length);return this[a]<<8|this[a+1]};e.prototype.readUInt32LE=function(a,b){b||t(a,4,this.length);return(this[a]|this[a+1]<<8|this[a+2]<<16)+16777216*this[a+3]};e.prototype.readUInt32BE=function(a,b){b||t(a,4,this.length);return 16777216*this[a]+(this[a+1]<<16|this[a+2]<<8|this[a+3])};e.prototype.readInt8=function(a,b){b||t(a,1,this.length);return this[a]&128?-1*(255-this[a]+1):this[a]};
|
||||
e.prototype.readInt16LE=function(a,b){b||t(a,2,this.length);var h=this[a]|this[a+1]<<8;return h&32768?h|4294901760:h};e.prototype.readInt16BE=function(a,b){b||t(a,2,this.length);var h=this[a+1]|this[a]<<8;return h&32768?h|4294901760:h};e.prototype.readInt32LE=function(a,b){b||t(a,4,this.length);return this[a]|this[a+1]<<8|this[a+2]<<16|this[a+3]<<24};e.prototype.readInt32BE=function(a,b){b||t(a,4,this.length);return this[a]<<24|this[a+1]<<16|this[a+2]<<8|this[a+3]};e.prototype.readFloatLE=function(a,
|
||||
b){b||t(a,4,this.length);return v.read(this,a,!0,23,4)};e.prototype.readFloatBE=function(a,b){b||t(a,4,this.length);return v.read(this,a,!1,23,4)};e.prototype.readDoubleLE=function(a,b){b||t(a,8,this.length);return v.read(this,a,!0,52,8)};e.prototype.readDoubleBE=function(a,b){b||t(a,8,this.length);return v.read(this,a,!1,52,8)};e.prototype.writeUInt8=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,1,255,0);e.TYPED_ARRAY_SUPPORT||(a=Math.floor(a));this[b]=a;return b+1};e.prototype.writeUInt16LE=function(a,
|
||||
b,h){a=+a;b>>>=0;h||m(this,a,b,2,65535,0);e.TYPED_ARRAY_SUPPORT?(this[b]=a,this[b+1]=a>>>8):f(this,a,b,!0);return b+2};e.prototype.writeUInt16BE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,2,65535,0);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>8,this[b+1]=a):f(this,a,b,!1);return b+2};e.prototype.writeUInt32LE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,4,4294967295,0);e.TYPED_ARRAY_SUPPORT?(this[b+3]=a>>>24,this[b+2]=a>>>16,this[b+1]=a>>>8,this[b]=a):c(this,a,b,!0);return b+4};e.prototype.writeUInt32BE=function(a,
|
||||
b,h){a=+a;b>>>=0;h||m(this,a,b,4,4294967295,0);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>24,this[b+1]=a>>>16,this[b+2]=a>>>8,this[b+3]=a):c(this,a,b,!1);return b+4};e.prototype.writeInt8=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,1,127,-128);e.TYPED_ARRAY_SUPPORT||(a=Math.floor(a));0>a&&(a=255+a+1);this[b]=a;return b+1};e.prototype.writeInt16LE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,2,32767,-32768);e.TYPED_ARRAY_SUPPORT?(this[b]=a,this[b+1]=a>>>8):f(this,a,b,!0);return b+2};e.prototype.writeInt16BE=function(a,
|
||||
b,h){a=+a;b>>>=0;h||m(this,a,b,2,32767,-32768);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>8,this[b+1]=a):f(this,a,b,!1);return b+2};e.prototype.writeInt32LE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,4,2147483647,-2147483648);e.TYPED_ARRAY_SUPPORT?(this[b]=a,this[b+1]=a>>>8,this[b+2]=a>>>16,this[b+3]=a>>>24):c(this,a,b,!0);return b+4};e.prototype.writeInt32BE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,4,2147483647,-2147483648);0>a&&(a=4294967295+a+1);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>24,this[b+1]=a>>>16,this[b+
2]=a>>>8,this[b+3]=a):c(this,a,b,!1);return b+4};e.prototype.writeFloatLE=function(a,b,h){return q(this,a,b,!0,h)};e.prototype.writeFloatBE=function(a,b,h){return q(this,a,b,!1,h)};e.prototype.writeDoubleLE=function(a,b,h){return r(this,a,b,!0,h)};e.prototype.writeDoubleBE=function(a,b,h){return r(this,a,b,!1,h)};e.prototype.copy=function(a,b,h,w){h||(h=0);w||0===w||(w=this.length);b||(b=0);if(w!==h&&0!==a.length&&0!==this.length){if(w<h)throw new TypeError("sourceEnd < sourceStart");if(0>b||b>=a.length)throw new TypeError("targetStart out of bounds");
if(0>h||h>=this.length)throw new TypeError("sourceStart out of bounds");if(0>w||w>this.length)throw new TypeError("sourceEnd out of bounds");w>this.length&&(w=this.length);a.length-b<w-h&&(w=a.length-b+h);w-=h;if(1E3>w||!e.TYPED_ARRAY_SUPPORT)for(var y=0;y<w;y++)a[y+b]=this[y+h];else a._set(this.subarray(h,h+w),b)}};e.prototype.fill=function(a,b,h){a||(a=0);b||(b=0);h||(h=this.length);if(h<b)throw new TypeError("end < start");if(h!==b&&0!==this.length){if(0>b||b>=this.length)throw new TypeError("start out of bounds");
if(0>h||h>this.length)throw new TypeError("end out of bounds");if("number"===typeof a)for(;b<h;b++)this[b]=a;else{a=k(a.toString());for(var w=a.length;b<h;b++)this[b]=a[b%w]}return this}};e.prototype.toArrayBuffer=function(){if("undefined"!==typeof Uint8Array){if(e.TYPED_ARRAY_SUPPORT)return(new e(this)).buffer;for(var a=new Uint8Array(this.length),b=0,h=a.length;b<h;b+=1)a[b]=this[b];return a.buffer}throw new TypeError("Buffer.toArrayBuffer not supported in this browser");};var D=e.prototype;e._augment=
function(a){a.constructor=e;a._isBuffer=!0;a._get=a.get;a._set=a.set;a.get=D.get;a.set=D.set;a.write=D.write;a.toString=D.toString;a.toLocaleString=D.toString;a.toJSON=D.toJSON;a.equals=D.equals;a.compare=D.compare;a.copy=D.copy;a.slice=D.slice;a.readUInt8=D.readUInt8;a.readUInt16LE=D.readUInt16LE;a.readUInt16BE=D.readUInt16BE;a.readUInt32LE=D.readUInt32LE;a.readUInt32BE=D.readUInt32BE;a.readInt8=D.readInt8;a.readInt16LE=D.readInt16LE;a.readInt16BE=D.readInt16BE;a.readInt32LE=D.readInt32LE;a.readInt32BE=
D.readInt32BE;a.readFloatLE=D.readFloatLE;a.readFloatBE=D.readFloatBE;a.readDoubleLE=D.readDoubleLE;a.readDoubleBE=D.readDoubleBE;a.writeUInt8=D.writeUInt8;a.writeUInt16LE=D.writeUInt16LE;a.writeUInt16BE=D.writeUInt16BE;a.writeUInt32LE=D.writeUInt32LE;a.writeUInt32BE=D.writeUInt32BE;a.writeInt8=D.writeInt8;a.writeInt16LE=D.writeInt16LE;a.writeInt16BE=D.writeInt16BE;a.writeInt32LE=D.writeInt32LE;a.writeInt32BE=D.writeInt32BE;a.writeFloatLE=D.writeFloatLE;a.writeFloatBE=D.writeFloatBE;a.writeDoubleLE=
D.writeDoubleLE;a.writeDoubleBE=D.writeDoubleBE;a.fill=D.fill;a.inspect=D.inspect;a.toArrayBuffer=D.toArrayBuffer;return a};var L=/[^+\/0-9A-z]/g},{"base64-js":2,ieee754:6,"is-array":7}],6:[function(C,J,A){A.read=function(e,p,t,m,f){var c=8*f-m-1;var l=(1<<c)-1,q=l>>1,r=-7;f=t?f-1:0;var k=t?-1:1,u=e[p+f];f+=k;t=u&(1<<-r)-1;u>>=-r;for(r+=c;0<r;t=256*t+e[p+f],f+=k,r-=8);c=t&(1<<-r)-1;t>>=-r;for(r+=m;0<r;c=256*c+e[p+f],f+=k,r-=8);if(0===t)t=1-q;else{if(t===l)return c?NaN:Infinity*(u?-1:1);c+=Math.pow(2,
m);t-=q}return(u?-1:1)*c*Math.pow(2,t-m)};A.write=function(e,p,t,m,f,c){var l,q=8*c-f-1,r=(1<<q)-1,k=r>>1,u=23===f?Math.pow(2,-24)-Math.pow(2,-77):0;c=m?0:c-1;var d=m?1:-1,g=0>p||0===p&&0>1/p?1:0;p=Math.abs(p);isNaN(p)||Infinity===p?(p=isNaN(p)?1:0,m=r):(m=Math.floor(Math.log(p)/Math.LN2),1>p*(l=Math.pow(2,-m))&&(m--,l*=2),p=1<=m+k?p+u/l:p+u*Math.pow(2,1-k),2<=p*l&&(m++,l/=2),m+k>=r?(p=0,m=r):1<=m+k?(p=(p*l-1)*Math.pow(2,f),m+=k):(p=p*Math.pow(2,k-1)*Math.pow(2,f),m=0));for(;8<=f;e[t+c]=p&255,c+=
d,p/=256,f-=8);m=m<<f|p;for(q+=f;0<q;e[t+c]=m&255,c+=d,m/=256,q-=8);e[t+c-d]|=128*g}},{}],7:[function(C,J,A){var e=Object.prototype.toString;J.exports=Array.isArray||function(p){return!!p&&"[object Array]"==e.call(p)}},{}],8:[function(C,J,A){(function(e){function p(c,l){for(var q=0,r=c.length-1;0<=r;r--){var k=c[r];"."===k?c.splice(r,1):".."===k?(c.splice(r,1),q++):q&&(c.splice(r,1),q--)}if(l)for(;q--;q)c.unshift("..");return c}function t(c,l){if(c.filter)return c.filter(l);for(var q=[],r=0;r<c.length;r++)l(c[r],
r,c)&&q.push(c[r]);return q}var m=/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/;A.resolve=function(){for(var c="",l=!1,q=arguments.length-1;-1<=q&&!l;q--){var r=0<=q?arguments[q]:e.cwd();if("string"!==typeof r)throw new TypeError("Arguments to path.resolve must be strings");r&&(c=r+"/"+c,l="/"===r.charAt(0))}c=p(t(c.split("/"),function(k){return!!k}),!l).join("/");return(l?"/":"")+c||"."};A.normalize=function(c){var l=A.isAbsolute(c),q="/"===f(c,-1);(c=p(t(c.split("/"),function(r){return!!r}),
!l).join("/"))||l||(c=".");c&&q&&(c+="/");return(l?"/":"")+c};A.isAbsolute=function(c){return"/"===c.charAt(0)};A.join=function(){var c=Array.prototype.slice.call(arguments,0);return A.normalize(t(c,function(l,q){if("string"!==typeof l)throw new TypeError("Arguments to path.join must be strings");return l}).join("/"))};A.relative=function(c,l){function q(n){for(var v=0;v<n.length&&""===n[v];v++);for(var z=n.length-1;0<=z&&""===n[z];z--);return v>z?[]:n.slice(v,z-v+1)}c=A.resolve(c).substr(1);l=A.resolve(l).substr(1);
for(var r=q(c.split("/")),k=q(l.split("/")),u=Math.min(r.length,k.length),d=u,g=0;g<u;g++)if(r[g]!==k[g]){d=g;break}u=[];for(g=d;g<r.length;g++)u.push("..");u=u.concat(k.slice(d));return u.join("/")};A.sep="/";A.delimiter=":";A.dirname=function(c){var l=m.exec(c).slice(1);c=l[0];l=l[1];if(!c&&!l)return".";l&&(l=l.substr(0,l.length-1));return c+l};A.basename=function(c,l){var q=m.exec(c).slice(1)[2];l&&q.substr(-1*l.length)===l&&(q=q.substr(0,q.length-l.length));return q};A.extname=function(c){return m.exec(c).slice(1)[3]};
var f="b"==="ab".substr(-1)?function(c,l,q){return c.substr(l,q)}:function(c,l,q){0>l&&(l=c.length+l);return c.substr(l,q)}}).call(this,C("g5I+bs"))},{"g5I+bs":9}],9:[function(C,J,A){function e(){}C=J.exports={};C.nextTick=function(){if("undefined"!==typeof window&&window.setImmediate)return function(t){return window.setImmediate(t)};if("undefined"!==typeof window&&window.postMessage&&window.addEventListener){var p=[];window.addEventListener("message",function(t){var m=t.source;m!==window&&null!==
m||"process-tick"!==t.data||(t.stopPropagation(),0<p.length&&p.shift()())},!0);return function(t){p.push(t);window.postMessage("process-tick","*")}}return function(t){setTimeout(t,0)}}();C.title="browser";C.browser=!0;C.env={};C.argv=[];C.on=e;C.addListener=e;C.once=e;C.off=e;C.removeListener=e;C.removeAllListeners=e;C.emit=e;C.binding=function(p){throw Error("process.binding is not supported");};C.cwd=function(){return"/"};C.chdir=function(p){throw Error("process.chdir is not supported");}},{}],
10:[function(C,J,A){function e(){this._array=[];this._set=m?new Map:Object.create(null)}var p=C("./util"),t=Object.prototype.hasOwnProperty,m="undefined"!==typeof Map;e.fromArray=function(f,c){for(var l=new e,q=0,r=f.length;q<r;q++)l.add(f[q],c);return l};e.prototype.size=function(){return m?this._set.size:Object.getOwnPropertyNames(this._set).length};e.prototype.add=function(f,c){var l=m?f:p.toSetString(f),q=m?this.has(f):t.call(this._set,l),r=this._array.length;q&&!c||this._array.push(f);q||(m?
this._set.set(f,r):this._set[l]=r)};e.prototype.has=function(f){if(m)return this._set.has(f);f=p.toSetString(f);return t.call(this._set,f)};e.prototype.indexOf=function(f){if(m){var c=this._set.get(f);if(0<=c)return c}else if(c=p.toSetString(f),t.call(this._set,c))return this._set[c];throw Error('"'+f+'" is not in the set.');};e.prototype.at=function(f){if(0<=f&&f<this._array.length)return this._array[f];throw Error("No element indexed by "+f);};e.prototype.toArray=function(){return this._array.slice()};
A.ArraySet=e},{"./util":19}],11:[function(C,J,A){var e=C("./base64");A.encode=function(p){var t="",m=0>p?(-p<<1)+1:p<<1;do p=m&31,m>>>=5,0<m&&(p|=32),t+=e.encode(p);while(0<m);return t};A.decode=function(p,t,m){var f=p.length,c=0,l=0;do{if(t>=f)throw Error("Expected more digits in base 64 VLQ value.");var q=e.decode(p.charCodeAt(t++));if(-1===q)throw Error("Invalid base64 digit: "+p.charAt(t-1));var r=!!(q&32);q&=31;c+=q<<l;l+=5}while(r);p=c>>1;m.value=1===(c&1)?-p:p;m.rest=t}},{"./base64":12}],12:[function(C,
J,A){var e="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");A.encode=function(p){if(0<=p&&p<e.length)return e[p];throw new TypeError("Must be between 0 and 63: "+p);};A.decode=function(p){return 65<=p&&90>=p?p-65:97<=p&&122>=p?p-97+26:48<=p&&57>=p?p-48+52:43==p?62:47==p?63:-1}},{}],13:[function(C,J,A){function e(p,t,m,f,c,l){var q=Math.floor((t-p)/2)+p,r=c(m,f[q],!0);return 0===r?q:0<r?1<t-q?e(q,t,m,f,c,l):l==A.LEAST_UPPER_BOUND?t<f.length?t:-1:q:1<q-p?e(p,q,m,f,c,l):l==
A.LEAST_UPPER_BOUND?q:0>p?-1:p}A.GREATEST_LOWER_BOUND=1;A.LEAST_UPPER_BOUND=2;A.search=function(p,t,m,f){if(0===t.length)return-1;p=e(-1,t.length,p,t,m,f||A.GREATEST_LOWER_BOUND);if(0>p)return-1;for(;0<=p-1&&0===m(t[p],t[p-1],!0);)--p;return p}},{}],14:[function(C,J,A){function e(){this._array=[];this._sorted=!0;this._last={generatedLine:-1,generatedColumn:0}}var p=C("./util");e.prototype.unsortedForEach=function(t,m){this._array.forEach(t,m)};e.prototype.add=function(t){var m=this._last,f=m.generatedLine,
c=t.generatedLine,l=m.generatedColumn,q=t.generatedColumn;c>f||c==f&&q>=l||0>=p.compareByGeneratedPositionsInflated(m,t)?this._last=t:this._sorted=!1;this._array.push(t)};e.prototype.toArray=function(){this._sorted||(this._array.sort(p.compareByGeneratedPositionsInflated),this._sorted=!0);return this._array};A.MappingList=e},{"./util":19}],15:[function(C,J,A){function e(t,m,f){var c=t[m];t[m]=t[f];t[f]=c}function p(t,m,f,c){if(f<c){var l=f-1;e(t,Math.round(f+Math.random()*(c-f)),c);for(var q=t[c],
r=f;r<c;r++)0>=m(t[r],q)&&(l+=1,e(t,l,r));e(t,l+1,r);l+=1;p(t,m,f,l-1);p(t,m,l+1,c)}}A.quickSort=function(t,m){p(t,m,0,t.length-1)}},{}],16:[function(C,J,A){function e(k,u){var d=k;"string"===typeof k&&(d=f.parseSourceMapInput(k));return null!=d.sections?new m(d,u):new p(d,u)}function p(k,u){var d=k;"string"===typeof k&&(d=f.parseSourceMapInput(k));var g=f.getArg(d,"version"),n=f.getArg(d,"sources"),v=f.getArg(d,"names",[]),z=f.getArg(d,"sourceRoot",null),G=f.getArg(d,"sourcesContent",null),D=f.getArg(d,
"mappings");d=f.getArg(d,"file",null);if(g!=this._version)throw Error("Unsupported version: "+g);z&&(z=f.normalize(z));n=n.map(String).map(f.normalize).map(function(L){return z&&f.isAbsolute(z)&&f.isAbsolute(L)?f.relative(z,L):L});this._names=l.fromArray(v.map(String),!0);this._sources=l.fromArray(n,!0);this.sourceRoot=z;this.sourcesContent=G;this._mappings=D;this._sourceMapURL=u;this.file=d}function t(){this.generatedColumn=this.generatedLine=0;this.name=this.originalColumn=this.originalLine=this.source=
null}function m(k,u){var d=k;"string"===typeof k&&(d=f.parseSourceMapInput(k));var g=f.getArg(d,"version");d=f.getArg(d,"sections");if(g!=this._version)throw Error("Unsupported version: "+g);this._sources=new l;this._names=new l;var n={line:-1,column:0};this._sections=d.map(function(v){if(v.url)throw Error("Support for url field in sections not implemented.");var z=f.getArg(v,"offset"),G=f.getArg(z,"line"),D=f.getArg(z,"column");if(G<n.line||G===n.line&&D<n.column)throw Error("Section offsets must be ordered and non-overlapping.");
n=z;return{generatedOffset:{generatedLine:G+1,generatedColumn:D+1},consumer:new e(f.getArg(v,"map"),u)}})}var f=C("./util"),c=C("./binary-search"),l=C("./array-set").ArraySet,q=C("./base64-vlq"),r=C("./quick-sort").quickSort;e.fromSourceMap=function(k){return p.fromSourceMap(k)};e.prototype._version=3;e.prototype.__generatedMappings=null;Object.defineProperty(e.prototype,"_generatedMappings",{configurable:!0,enumerable:!0,get:function(){this.__generatedMappings||this._parseMappings(this._mappings,
this.sourceRoot);return this.__generatedMappings}});e.prototype.__originalMappings=null;Object.defineProperty(e.prototype,"_originalMappings",{configurable:!0,enumerable:!0,get:function(){this.__originalMappings||this._parseMappings(this._mappings,this.sourceRoot);return this.__originalMappings}});e.prototype._charIsMappingSeparator=function(k,u){var d=k.charAt(u);return";"===d||","===d};e.prototype._parseMappings=function(k,u){throw Error("Subclasses must implement _parseMappings");};e.GENERATED_ORDER=
1;e.ORIGINAL_ORDER=2;e.GREATEST_LOWER_BOUND=1;e.LEAST_UPPER_BOUND=2;e.prototype.eachMapping=function(k,u,d){u=u||null;switch(d||e.GENERATED_ORDER){case e.GENERATED_ORDER:d=this._generatedMappings;break;case e.ORIGINAL_ORDER:d=this._originalMappings;break;default:throw Error("Unknown order of iteration.");}var g=this.sourceRoot;d.map(function(n){var v=null===n.source?null:this._sources.at(n.source);v=f.computeSourceURL(g,v,this._sourceMapURL);return{source:v,generatedLine:n.generatedLine,generatedColumn:n.generatedColumn,
originalLine:n.originalLine,originalColumn:n.originalColumn,name:null===n.name?null:this._names.at(n.name)}},this).forEach(k,u)};e.prototype.allGeneratedPositionsFor=function(k){var u=f.getArg(k,"line"),d={source:f.getArg(k,"source"),originalLine:u,originalColumn:f.getArg(k,"column",0)};null!=this.sourceRoot&&(d.source=f.relative(this.sourceRoot,d.source));if(!this._sources.has(d.source))return[];d.source=this._sources.indexOf(d.source);var g=[];d=this._findMapping(d,this._originalMappings,"originalLine",
"originalColumn",f.compareByOriginalPositions,c.LEAST_UPPER_BOUND);if(0<=d){var n=this._originalMappings[d];if(void 0===k.column)for(u=n.originalLine;n&&n.originalLine===u;)g.push({line:f.getArg(n,"generatedLine",null),column:f.getArg(n,"generatedColumn",null),lastColumn:f.getArg(n,"lastGeneratedColumn",null)}),n=this._originalMappings[++d];else for(k=n.originalColumn;n&&n.originalLine===u&&n.originalColumn==k;)g.push({line:f.getArg(n,"generatedLine",null),column:f.getArg(n,"generatedColumn",null),
lastColumn:f.getArg(n,"lastGeneratedColumn",null)}),n=this._originalMappings[++d]}return g};A.SourceMapConsumer=e;p.prototype=Object.create(e.prototype);p.prototype.consumer=e;p.fromSourceMap=function(k,u){var d=Object.create(p.prototype),g=d._names=l.fromArray(k._names.toArray(),!0),n=d._sources=l.fromArray(k._sources.toArray(),!0);d.sourceRoot=k._sourceRoot;d.sourcesContent=k._generateSourcesContent(d._sources.toArray(),d.sourceRoot);d.file=k._file;d._sourceMapURL=u;for(var v=k._mappings.toArray().slice(),
z=d.__generatedMappings=[],G=d.__originalMappings=[],D=0,L=v.length;D<L;D++){var a=v[D],b=new t;b.generatedLine=a.generatedLine;b.generatedColumn=a.generatedColumn;a.source&&(b.source=n.indexOf(a.source),b.originalLine=a.originalLine,b.originalColumn=a.originalColumn,a.name&&(b.name=g.indexOf(a.name)),G.push(b));z.push(b)}r(d.__originalMappings,f.compareByOriginalPositions);return d};p.prototype._version=3;Object.defineProperty(p.prototype,"sources",{get:function(){return this._sources.toArray().map(function(k){return f.computeSourceURL(this.sourceRoot,
k,this._sourceMapURL)},this)}});p.prototype._parseMappings=function(k,u){for(var d=1,g=0,n=0,v=0,z=0,G=0,D=k.length,L=0,a={},b={},h=[],w=[],y,I,K,N,P;L<D;)if(";"===k.charAt(L))d++,L++,g=0;else if(","===k.charAt(L))L++;else{y=new t;y.generatedLine=d;for(N=L;N<D&&!this._charIsMappingSeparator(k,N);N++);I=k.slice(L,N);if(K=a[I])L+=I.length;else{for(K=[];L<N;)q.decode(k,L,b),P=b.value,L=b.rest,K.push(P);if(2===K.length)throw Error("Found a source, but no line and column");if(3===K.length)throw Error("Found a source and line, but no column");
a[I]=K}y.generatedColumn=g+K[0];g=y.generatedColumn;1<K.length&&(y.source=z+K[1],z+=K[1],y.originalLine=n+K[2],n=y.originalLine,y.originalLine+=1,y.originalColumn=v+K[3],v=y.originalColumn,4<K.length&&(y.name=G+K[4],G+=K[4]));w.push(y);"number"===typeof y.originalLine&&h.push(y)}r(w,f.compareByGeneratedPositionsDeflated);this.__generatedMappings=w;r(h,f.compareByOriginalPositions);this.__originalMappings=h};p.prototype._findMapping=function(k,u,d,g,n,v){if(0>=k[d])throw new TypeError("Line must be greater than or equal to 1, got "+
k[d]);if(0>k[g])throw new TypeError("Column must be greater than or equal to 0, got "+k[g]);return c.search(k,u,n,v)};p.prototype.computeColumnSpans=function(){for(var k=0;k<this._generatedMappings.length;++k){var u=this._generatedMappings[k];if(k+1<this._generatedMappings.length){var d=this._generatedMappings[k+1];if(u.generatedLine===d.generatedLine){u.lastGeneratedColumn=d.generatedColumn-1;continue}}u.lastGeneratedColumn=Infinity}};p.prototype.originalPositionFor=function(k){var u={generatedLine:f.getArg(k,
"line"),generatedColumn:f.getArg(k,"column")};k=this._findMapping(u,this._generatedMappings,"generatedLine","generatedColumn",f.compareByGeneratedPositionsDeflated,f.getArg(k,"bias",e.GREATEST_LOWER_BOUND));if(0<=k&&(k=this._generatedMappings[k],k.generatedLine===u.generatedLine)){u=f.getArg(k,"source",null);null!==u&&(u=this._sources.at(u),u=f.computeSourceURL(this.sourceRoot,u,this._sourceMapURL));var d=f.getArg(k,"name",null);null!==d&&(d=this._names.at(d));return{source:u,line:f.getArg(k,"originalLine",
null),column:f.getArg(k,"originalColumn",null),name:d}}return{source:null,line:null,column:null,name:null}};p.prototype.hasContentsOfAllSources=function(){return this.sourcesContent?this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some(function(k){return null==k}):!1};p.prototype.sourceContentFor=function(k,u){if(!this.sourcesContent)return null;var d=k;null!=this.sourceRoot&&(d=f.relative(this.sourceRoot,d));if(this._sources.has(d))return this.sourcesContent[this._sources.indexOf(d)];
var g=this.sources,n;for(n=0;n<g.length;++n)if(g[n]==k)return this.sourcesContent[n];var v;if(null!=this.sourceRoot&&(v=f.urlParse(this.sourceRoot))){g=d.replace(/^file:\/\//,"");if("file"==v.scheme&&this._sources.has(g))return this.sourcesContent[this._sources.indexOf(g)];if((!v.path||"/"==v.path)&&this._sources.has("/"+d))return this.sourcesContent[this._sources.indexOf("/"+d)]}if(u)return null;throw Error('"'+d+'" is not in the SourceMap.');};p.prototype.generatedPositionFor=function(k){var u=
f.getArg(k,"source");null!=this.sourceRoot&&(u=f.relative(this.sourceRoot,u));if(!this._sources.has(u))return{line:null,column:null,lastColumn:null};u=this._sources.indexOf(u);u={source:u,originalLine:f.getArg(k,"line"),originalColumn:f.getArg(k,"column")};k=this._findMapping(u,this._originalMappings,"originalLine","originalColumn",f.compareByOriginalPositions,f.getArg(k,"bias",e.GREATEST_LOWER_BOUND));return 0<=k&&(k=this._originalMappings[k],k.source===u.source)?{line:f.getArg(k,"generatedLine",
null),column:f.getArg(k,"generatedColumn",null),lastColumn:f.getArg(k,"lastGeneratedColumn",null)}:{line:null,column:null,lastColumn:null}};A.BasicSourceMapConsumer=p;m.prototype=Object.create(e.prototype);m.prototype.constructor=e;m.prototype._version=3;Object.defineProperty(m.prototype,"sources",{get:function(){for(var k=[],u=0;u<this._sections.length;u++)for(var d=0;d<this._sections[u].consumer.sources.length;d++)k.push(this._sections[u].consumer.sources[d]);return k}});m.prototype.originalPositionFor=
function(k){var u={generatedLine:f.getArg(k,"line"),generatedColumn:f.getArg(k,"column")},d=c.search(u,this._sections,function(g,n){var v=g.generatedLine-n.generatedOffset.generatedLine;return v?v:g.generatedColumn-n.generatedOffset.generatedColumn});return(d=this._sections[d])?d.consumer.originalPositionFor({line:u.generatedLine-(d.generatedOffset.generatedLine-1),column:u.generatedColumn-(d.generatedOffset.generatedLine===u.generatedLine?d.generatedOffset.generatedColumn-1:0),bias:k.bias}):{source:null,
line:null,column:null,name:null}};m.prototype.hasContentsOfAllSources=function(){return this._sections.every(function(k){return k.consumer.hasContentsOfAllSources()})};m.prototype.sourceContentFor=function(k,u){for(var d=0;d<this._sections.length;d++){var g=this._sections[d].consumer.sourceContentFor(k,!0);if(g)return g}if(u)return null;throw Error('"'+k+'" is not in the SourceMap.');};m.prototype.generatedPositionFor=function(k){for(var u=0;u<this._sections.length;u++){var d=this._sections[u];if(-1!==
d.consumer.sources.indexOf(f.getArg(k,"source"))){var g=d.consumer.generatedPositionFor(k);if(g)return{line:g.line+(d.generatedOffset.generatedLine-1),column:g.column+(d.generatedOffset.generatedLine===g.line?d.generatedOffset.generatedColumn-1:0)}}}return{line:null,column:null}};m.prototype._parseMappings=function(k,u){this.__generatedMappings=[];this.__originalMappings=[];for(var d=0;d<this._sections.length;d++)for(var g=this._sections[d],n=g.consumer._generatedMappings,v=0;v<n.length;v++){var z=
n[v],G=g.consumer._sources.at(z.source);G=f.computeSourceURL(g.consumer.sourceRoot,G,this._sourceMapURL);this._sources.add(G);G=this._sources.indexOf(G);var D=null;z.name&&(D=g.consumer._names.at(z.name),this._names.add(D),D=this._names.indexOf(D));z={source:G,generatedLine:z.generatedLine+(g.generatedOffset.generatedLine-1),generatedColumn:z.generatedColumn+(g.generatedOffset.generatedLine===z.generatedLine?g.generatedOffset.generatedColumn-1:0),originalLine:z.originalLine,originalColumn:z.originalColumn,
name:D};this.__generatedMappings.push(z);"number"===typeof z.originalLine&&this.__originalMappings.push(z)}r(this.__generatedMappings,f.compareByGeneratedPositionsDeflated);r(this.__originalMappings,f.compareByOriginalPositions)};A.IndexedSourceMapConsumer=m},{"./array-set":10,"./base64-vlq":11,"./binary-search":13,"./quick-sort":15,"./util":19}],17:[function(C,J,A){function e(c){c||(c={});this._file=t.getArg(c,"file",null);this._sourceRoot=t.getArg(c,"sourceRoot",null);this._skipValidation=t.getArg(c,
"skipValidation",!1);this._sources=new m;this._names=new m;this._mappings=new f;this._sourcesContents=null}var p=C("./base64-vlq"),t=C("./util"),m=C("./array-set").ArraySet,f=C("./mapping-list").MappingList;e.prototype._version=3;e.fromSourceMap=function(c){var l=c.sourceRoot,q=new e({file:c.file,sourceRoot:l});c.eachMapping(function(r){var k={generated:{line:r.generatedLine,column:r.generatedColumn}};null!=r.source&&(k.source=r.source,null!=l&&(k.source=t.relative(l,k.source)),k.original={line:r.originalLine,
column:r.originalColumn},null!=r.name&&(k.name=r.name));q.addMapping(k)});c.sources.forEach(function(r){var k=r;null!==l&&(k=t.relative(l,r));q._sources.has(k)||q._sources.add(k);k=c.sourceContentFor(r);null!=k&&q.setSourceContent(r,k)});return q};e.prototype.addMapping=function(c){var l=t.getArg(c,"generated"),q=t.getArg(c,"original",null),r=t.getArg(c,"source",null);c=t.getArg(c,"name",null);this._skipValidation||this._validateMapping(l,q,r,c);null!=r&&(r=String(r),this._sources.has(r)||this._sources.add(r));
null!=c&&(c=String(c),this._names.has(c)||this._names.add(c));this._mappings.add({generatedLine:l.line,generatedColumn:l.column,originalLine:null!=q&&q.line,originalColumn:null!=q&&q.column,source:r,name:c})};e.prototype.setSourceContent=function(c,l){var q=c;null!=this._sourceRoot&&(q=t.relative(this._sourceRoot,q));null!=l?(this._sourcesContents||(this._sourcesContents=Object.create(null)),this._sourcesContents[t.toSetString(q)]=l):this._sourcesContents&&(delete this._sourcesContents[t.toSetString(q)],
0===Object.keys(this._sourcesContents).length&&(this._sourcesContents=null))};e.prototype.applySourceMap=function(c,l,q){var r=l;if(null==l){if(null==c.file)throw Error('SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, or the source map\'s "file" property. Both were omitted.');r=c.file}var k=this._sourceRoot;null!=k&&(r=t.relative(k,r));var u=new m,d=new m;this._mappings.unsortedForEach(function(g){if(g.source===r&&null!=g.originalLine){var n=c.originalPositionFor({line:g.originalLine,
column:g.originalColumn});null!=n.source&&(g.source=n.source,null!=q&&(g.source=t.join(q,g.source)),null!=k&&(g.source=t.relative(k,g.source)),g.originalLine=n.line,g.originalColumn=n.column,null!=n.name&&(g.name=n.name))}n=g.source;null==n||u.has(n)||u.add(n);g=g.name;null==g||d.has(g)||d.add(g)},this);this._sources=u;this._names=d;c.sources.forEach(function(g){var n=c.sourceContentFor(g);null!=n&&(null!=q&&(g=t.join(q,g)),null!=k&&(g=t.relative(k,g)),this.setSourceContent(g,n))},this)};e.prototype._validateMapping=
function(c,l,q,r){if(l&&"number"!==typeof l.line&&"number"!==typeof l.column)throw Error("original.line and original.column are not numbers -- you probably meant to omit the original mapping entirely and only map the generated position. If so, pass null for the original mapping instead of an object with empty or null values.");if(!(c&&"line"in c&&"column"in c&&0<c.line&&0<=c.column&&!l&&!q&&!r||c&&"line"in c&&"column"in c&&l&&"line"in l&&"column"in l&&0<c.line&&0<=c.column&&0<l.line&&0<=l.column&&
q))throw Error("Invalid mapping: "+JSON.stringify({generated:c,source:q,original:l,name:r}));};e.prototype._serializeMappings=function(){for(var c=0,l=1,q=0,r=0,k=0,u=0,d="",g,n,v,z=this._mappings.toArray(),G=0,D=z.length;G<D;G++){n=z[G];g="";if(n.generatedLine!==l)for(c=0;n.generatedLine!==l;)g+=";",l++;else if(0<G){if(!t.compareByGeneratedPositionsInflated(n,z[G-1]))continue;g+=","}g+=p.encode(n.generatedColumn-c);c=n.generatedColumn;null!=n.source&&(v=this._sources.indexOf(n.source),g+=p.encode(v-
u),u=v,g+=p.encode(n.originalLine-1-r),r=n.originalLine-1,g+=p.encode(n.originalColumn-q),q=n.originalColumn,null!=n.name&&(n=this._names.indexOf(n.name),g+=p.encode(n-k),k=n));d+=g}return d};e.prototype._generateSourcesContent=function(c,l){return c.map(function(q){if(!this._sourcesContents)return null;null!=l&&(q=t.relative(l,q));q=t.toSetString(q);return Object.prototype.hasOwnProperty.call(this._sourcesContents,q)?this._sourcesContents[q]:null},this)};e.prototype.toJSON=function(){var c={version:this._version,
sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};null!=this._file&&(c.file=this._file);null!=this._sourceRoot&&(c.sourceRoot=this._sourceRoot);this._sourcesContents&&(c.sourcesContent=this._generateSourcesContent(c.sources,c.sourceRoot));return c};e.prototype.toString=function(){return JSON.stringify(this.toJSON())};A.SourceMapGenerator=e},{"./array-set":10,"./base64-vlq":11,"./mapping-list":14,"./util":19}],18:[function(C,J,A){function e(f,c,l,q,r){this.children=
[];this.sourceContents={};this.line=null==f?null:f;this.column=null==c?null:c;this.source=null==l?null:l;this.name=null==r?null:r;this.$$$isSourceNode$$$=!0;null!=q&&this.add(q)}var p=C("./source-map-generator").SourceMapGenerator,t=C("./util"),m=/(\r?\n)/;e.fromStringWithSourceMap=function(f,c,l){function q(z,G){if(null===z||void 0===z.source)r.add(G);else{var D=l?t.join(l,z.source):z.source;r.add(new e(z.originalLine,z.originalColumn,D,G,z.name))}}var r=new e,k=f.split(m),u=0,d=function(){var z=
u<k.length?k[u++]:void 0,G=(u<k.length?k[u++]:void 0)||"";return z+G},g=1,n=0,v=null;c.eachMapping(function(z){if(null!==v)if(g<z.generatedLine)q(v,d()),g++,n=0;else{var G=k[u]||"",D=G.substr(0,z.generatedColumn-n);k[u]=G.substr(z.generatedColumn-n);n=z.generatedColumn;q(v,D);v=z;return}for(;g<z.generatedLine;)r.add(d()),g++;n<z.generatedColumn&&(G=k[u]||"",r.add(G.substr(0,z.generatedColumn)),k[u]=G.substr(z.generatedColumn),n=z.generatedColumn);v=z},this);u<k.length&&(v&&q(v,d()),r.add(k.splice(u).join("")));
c.sources.forEach(function(z){var G=c.sourceContentFor(z);null!=G&&(null!=l&&(z=t.join(l,z)),r.setSourceContent(z,G))});return r};e.prototype.add=function(f){if(Array.isArray(f))f.forEach(function(c){this.add(c)},this);else if(f.$$$isSourceNode$$$||"string"===typeof f)f&&this.children.push(f);else throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+f);return this};e.prototype.prepend=function(f){if(Array.isArray(f))for(var c=f.length-1;0<=c;c--)this.prepend(f[c]);
else if(f.$$$isSourceNode$$$||"string"===typeof f)this.children.unshift(f);else throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+f);return this};e.prototype.walk=function(f){for(var c,l=0,q=this.children.length;l<q;l++)c=this.children[l],c.$$$isSourceNode$$$?c.walk(f):""!==c&&f(c,{source:this.source,line:this.line,column:this.column,name:this.name})};e.prototype.join=function(f){var c,l=this.children.length;if(0<l){var q=[];for(c=0;c<l-1;c++)q.push(this.children[c]),
q.push(f);q.push(this.children[c]);this.children=q}return this};e.prototype.replaceRight=function(f,c){var l=this.children[this.children.length-1];l.$$$isSourceNode$$$?l.replaceRight(f,c):"string"===typeof l?this.children[this.children.length-1]=l.replace(f,c):this.children.push("".replace(f,c));return this};e.prototype.setSourceContent=function(f,c){this.sourceContents[t.toSetString(f)]=c};e.prototype.walkSourceContents=function(f){for(var c=0,l=this.children.length;c<l;c++)this.children[c].$$$isSourceNode$$$&&
this.children[c].walkSourceContents(f);var q=Object.keys(this.sourceContents);c=0;for(l=q.length;c<l;c++)f(t.fromSetString(q[c]),this.sourceContents[q[c]])};e.prototype.toString=function(){var f="";this.walk(function(c){f+=c});return f};e.prototype.toStringWithSourceMap=function(f){var c="",l=1,q=0,r=new p(f),k=!1,u=null,d=null,g=null,n=null;this.walk(function(v,z){c+=v;null!==z.source&&null!==z.line&&null!==z.column?(u===z.source&&d===z.line&&g===z.column&&n===z.name||r.addMapping({source:z.source,
original:{line:z.line,column:z.column},generated:{line:l,column:q},name:z.name}),u=z.source,d=z.line,g=z.column,n=z.name,k=!0):k&&(r.addMapping({generated:{line:l,column:q}}),u=null,k=!1);for(var G=0,D=v.length;G<D;G++)10===v.charCodeAt(G)?(l++,q=0,G+1===D?(u=null,k=!1):k&&r.addMapping({source:z.source,original:{line:z.line,column:z.column},generated:{line:l,column:q},name:z.name})):q++});this.walkSourceContents(function(v,z){r.setSourceContent(v,z)});return{code:c,map:r}};A.SourceNode=e},{"./source-map-generator":17,
"./util":19}],19:[function(C,J,A){function e(d){return(d=d.match(k))?{scheme:d[1],auth:d[2],host:d[3],port:d[4],path:d[5]}:null}function p(d){var g="";d.scheme&&(g+=d.scheme+":");g+="//";d.auth&&(g+=d.auth+"@");d.host&&(g+=d.host);d.port&&(g+=":"+d.port);d.path&&(g+=d.path);return g}function t(d){var g=d,n=e(d);if(n){if(!n.path)return d;g=n.path}d=A.isAbsolute(g);g=g.split(/\/+/);for(var v,z=0,G=g.length-1;0<=G;G--)v=g[G],"."===v?g.splice(G,1):".."===v?z++:0<z&&(""===v?(g.splice(G+1,z),z=0):(g.splice(G,
2),z--));g=g.join("/");""===g&&(g=d?"/":".");return n?(n.path=g,p(n)):g}function m(d,g){""===d&&(d=".");""===g&&(g=".");var n=e(g),v=e(d);v&&(d=v.path||"/");if(n&&!n.scheme)return v&&(n.scheme=v.scheme),p(n);if(n||g.match(u))return g;if(v&&!v.host&&!v.path)return v.host=g,p(v);n="/"===g.charAt(0)?g:t(d.replace(/\/+$/,"")+"/"+g);return v?(v.path=n,p(v)):n}function f(d){return d}function c(d){return q(d)?"$"+d:d}function l(d){return q(d)?d.slice(1):d}function q(d){if(!d)return!1;var g=d.length;if(9>
g||95!==d.charCodeAt(g-1)||95!==d.charCodeAt(g-2)||111!==d.charCodeAt(g-3)||116!==d.charCodeAt(g-4)||111!==d.charCodeAt(g-5)||114!==d.charCodeAt(g-6)||112!==d.charCodeAt(g-7)||95!==d.charCodeAt(g-8)||95!==d.charCodeAt(g-9))return!1;for(g-=10;0<=g;g--)if(36!==d.charCodeAt(g))return!1;return!0}function r(d,g){return d===g?0:null===d?1:null===g?-1:d>g?1:-1}A.getArg=function(d,g,n){if(g in d)return d[g];if(3===arguments.length)return n;throw Error('"'+g+'" is a required argument.');};var k=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/,
u=/^data:.+,.+$/;A.urlParse=e;A.urlGenerate=p;A.normalize=t;A.join=m;A.isAbsolute=function(d){return"/"===d.charAt(0)||k.test(d)};A.relative=function(d,g){""===d&&(d=".");d=d.replace(/\/$/,"");for(var n=0;0!==g.indexOf(d+"/");){var v=d.lastIndexOf("/");if(0>v)return g;d=d.slice(0,v);if(d.match(/^([^\/]+:\/)?\/*$/))return g;++n}return Array(n+1).join("../")+g.substr(d.length+1)};C=!("__proto__"in Object.create(null));A.toSetString=C?f:c;A.fromSetString=C?f:l;A.compareByOriginalPositions=function(d,
g,n){var v=r(d.source,g.source);if(0!==v)return v;v=d.originalLine-g.originalLine;if(0!==v)return v;v=d.originalColumn-g.originalColumn;if(0!==v||n)return v;v=d.generatedColumn-g.generatedColumn;if(0!==v)return v;v=d.generatedLine-g.generatedLine;return 0!==v?v:r(d.name,g.name)};A.compareByGeneratedPositionsDeflated=function(d,g,n){var v=d.generatedLine-g.generatedLine;if(0!==v)return v;v=d.generatedColumn-g.generatedColumn;if(0!==v||n)return v;v=r(d.source,g.source);if(0!==v)return v;v=d.originalLine-
g.originalLine;if(0!==v)return v;v=d.originalColumn-g.originalColumn;return 0!==v?v:r(d.name,g.name)};A.compareByGeneratedPositionsInflated=function(d,g){var n=d.generatedLine-g.generatedLine;if(0!==n)return n;n=d.generatedColumn-g.generatedColumn;if(0!==n)return n;n=r(d.source,g.source);if(0!==n)return n;n=d.originalLine-g.originalLine;if(0!==n)return n;n=d.originalColumn-g.originalColumn;return 0!==n?n:r(d.name,g.name)};A.parseSourceMapInput=function(d){return JSON.parse(d.replace(/^\)]}'[^\n]*\n/,
""))};A.computeSourceURL=function(d,g,n){g=g||"";d&&("/"!==d[d.length-1]&&"/"!==g[0]&&(d+="/"),g=d+g);if(n){d=e(n);if(!d)throw Error("sourceMapURL could not be parsed");d.path&&(n=d.path.lastIndexOf("/"),0<=n&&(d.path=d.path.substring(0,n+1)));g=m(p(d),g)}return t(g)}},{}],20:[function(C,J,A){A.SourceMapGenerator=C("./lib/source-map-generator").SourceMapGenerator;A.SourceMapConsumer=C("./lib/source-map-consumer").SourceMapConsumer;A.SourceNode=C("./lib/source-node").SourceNode},{"./lib/source-map-consumer":16,
"./lib/source-map-generator":17,"./lib/source-node":18}],21:[function(C,J,A){(function(e){function p(){return"browser"===a?!0:"node"===a?!1:"undefined"!==typeof window&&"function"===typeof XMLHttpRequest&&!(window.require&&window.module&&window.process&&"renderer"===window.process.type)}function t(x){return function(B){for(var F=0;F<x.length;F++){var E=x[F](B);if(E)return E}return null}}function m(x,B){if(!x)return B;var F=n.dirname(x),E=/^\w+:\/\/[^\/]*/.exec(F);E=E?E[0]:"";var H=F.slice(E.length);
return E&&/^\/\w:/.test(H)?(E+="/",E+n.resolve(F.slice(E.length),B).replace(/\\/g,"/")):E+n.resolve(F.slice(E.length),B)}function f(x){var B=h[x.source];if(!B){var F=N(x.source);F?(B=h[x.source]={url:F.url,map:new g(F.map)},B.map.sourcesContent&&B.map.sources.forEach(function(E,H){var M=B.map.sourcesContent[H];if(M){var S=m(B.url,E);b[S]=M}})):B=h[x.source]={url:null,map:null}}return B&&B.map&&"function"===typeof B.map.originalPositionFor&&(F=B.map.originalPositionFor(x),null!==F.source)?(F.source=
m(B.url,F.source),F):x}function c(x){var B=/^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(x);return B?(x=f({source:B[2],line:+B[3],column:B[4]-1}),"eval at "+B[1]+" ("+x.source+":"+x.line+":"+(x.column+1)+")"):(B=/^eval at ([^(]+) \((.+)\)$/.exec(x))?"eval at "+B[1]+" ("+c(B[2])+")":x}function l(){var x="";if(this.isNative())x="native";else{var B=this.getScriptNameOrSourceURL();!B&&this.isEval()&&(x=this.getEvalOrigin(),x+=", ");x=B?x+B:x+"<anonymous>";B=this.getLineNumber();null!=B&&(x+=":"+B,(B=
this.getColumnNumber())&&(x+=":"+B))}B="";var F=this.getFunctionName(),E=!0,H=this.isConstructor();if(this.isToplevel()||H)H?B+="new "+(F||"<anonymous>"):F?B+=F:(B+=x,E=!1);else{H=this.getTypeName();"[object Object]"===H&&(H="null");var M=this.getMethodName();F?(H&&0!=F.indexOf(H)&&(B+=H+"."),B+=F,M&&F.indexOf("."+M)!=F.length-M.length-1&&(B+=" [as "+M+"]")):B+=H+"."+(M||"<anonymous>")}E&&(B+=" ("+x+")");return B}function q(x){var B={};Object.getOwnPropertyNames(Object.getPrototypeOf(x)).forEach(function(F){B[F]=
/^(?:is|get)/.test(F)?function(){return x[F].call(x)}:x[F]});B.toString=l;return B}function r(x,B){void 0===B&&(B={nextPosition:null,curPosition:null});if(x.isNative())return B.curPosition=null,x;var F=x.getFileName()||x.getScriptNameOrSourceURL();if(F){var E=x.getLineNumber(),H=x.getColumnNumber()-1,M=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/,S=M.test;var V="object"===typeof e&&null!==e?e.version:"";M=S.call(M,V)?0:62;1===E&&H>M&&!p()&&!x.isEval()&&(H-=M);var O=
f({source:F,line:E,column:H});B.curPosition=O;x=q(x);var T=x.getFunctionName;x.getFunctionName=function(){return null==B.nextPosition?T():B.nextPosition.name||T()};x.getFileName=function(){return O.source};x.getLineNumber=function(){return O.line};x.getColumnNumber=function(){return O.column+1};x.getScriptNameOrSourceURL=function(){return O.source};return x}var Q=x.isEval()&&x.getEvalOrigin();Q&&(Q=c(Q),x=q(x),x.getEvalOrigin=function(){return Q});return x}function k(x,B){L&&(b={},h={});for(var F=
(x.name||"Error")+": "+(x.message||""),E={nextPosition:null,curPosition:null},H=[],M=B.length-1;0<=M;M--)H.push("\n at "+r(B[M],E)),E.nextPosition=E.curPosition;E.curPosition=E.nextPosition=null;return F+H.reverse().join("")}function u(x){var B=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(x.stack);if(B){x=B[1];var F=+B[2];B=+B[3];var E=b[x];if(!E&&v&&v.existsSync(x))try{E=v.readFileSync(x,"utf8")}catch(H){E=""}if(E&&(E=E.split(/(?:\r\n|\r|\n)/)[F-1]))return x+":"+F+"\n"+E+"\n"+Array(B).join(" ")+
"^"}return null}function d(){var x=e.emit;e.emit=function(B){if("uncaughtException"===B){var F=arguments[1]&&arguments[1].stack,E=0<this.listeners(B).length;if(F&&!E){F=arguments[1];E=u(F);var H="object"===typeof e&&null!==e?e.stderr:void 0;H&&H._handle&&H._handle.setBlocking&&H._handle.setBlocking(!0);E&&(console.error(),console.error(E));console.error(F.stack);"object"===typeof e&&null!==e&&"function"===typeof e.exit&&e.exit(1);return}}return x.apply(this,arguments)}}var g=C("source-map").SourceMapConsumer,
n=C("path");try{var v=C("fs");v.existsSync&&v.readFileSync||(v=null)}catch(x){}var z=C("buffer-from"),G=!1,D=!1,L=!1,a="auto",b={},h={},w=/^data:application\/json[^,]+base64,/,y=[],I=[],K=t(y);y.push(function(x){x=x.trim();/^file:/.test(x)&&(x=x.replace(/file:\/\/\/(\w:)?/,function(E,H){return H?"":"/"}));if(x in b)return b[x];var B="";try{if(v)v.existsSync(x)&&(B=v.readFileSync(x,"utf8"));else{var F=new XMLHttpRequest;F.open("GET",x,!1);F.send(null);4===F.readyState&&200===F.status&&(B=F.responseText)}}catch(E){}return b[x]=
B});var N=t(I);I.push(function(x){a:{if(p())try{var B=new XMLHttpRequest;B.open("GET",x,!1);B.send(null);var F=B.getResponseHeader("SourceMap")||B.getResponseHeader("X-SourceMap");if(F){var E=F;break a}}catch(M){}E=K(x);B=/(?:\/\/[@#][\s]*sourceMappingURL=([^\s'"]+)[\s]*$)|(?:\/\*[@#][\s]*sourceMappingURL=([^\s*'"]+)[\s]*(?:\*\/)[\s]*$)/mg;for(var H;F=B.exec(E);)H=F;E=H?H[1]:null}if(!E)return null;w.test(E)?(H=E.slice(E.indexOf(",")+1),H=z(H,"base64").toString(),E=x):(E=m(x,E),H=K(E));return H?{url:E,
map:H}:null});var P=y.slice(0),W=I.slice(0);A.wrapCallSite=r;A.getErrorSource=u;A.mapSourcePosition=f;A.retrieveSourceMap=N;A.install=function(x){x=x||{};if(x.environment&&(a=x.environment,-1===["node","browser","auto"].indexOf(a)))throw Error("environment "+a+" was unknown. Available options are {auto, browser, node}");x.retrieveFile&&(x.overrideRetrieveFile&&(y.length=0),y.unshift(x.retrieveFile));x.retrieveSourceMap&&(x.overrideRetrieveSourceMap&&(I.length=0),I.unshift(x.retrieveSourceMap));if(x.hookRequire&&
!p()){var B=J.require("module"),F=B.prototype._compile;F.__sourceMapSupport||(B.prototype._compile=function(E,H){b[H]=E;h[H]=void 0;return F.call(this,E,H)},B.prototype._compile.__sourceMapSupport=!0)}L||(L="emptyCacheBetweenOperations"in x?x.emptyCacheBetweenOperations:!1);G||(G=!0,Error.prepareStackTrace=k);if(!D){x="handleUncaughtExceptions"in x?x.handleUncaughtExceptions:!0;try{!1===J.require("worker_threads").isMainThread&&(x=!1)}catch(E){}x&&"object"===typeof e&&null!==e&&"function"===typeof e.on&&
(D=!0,d())}};A.resetRetrieveHandlers=function(){y.length=0;I.length=0;y=P.slice(0);I=W.slice(0);N=t(I);K=t(y)}}).call(this,C("g5I+bs"))},{"buffer-from":4,fs:3,"g5I+bs":9,path:8,"source-map":20}]},{},[1]);return R});
50
node_modules/@cspotcode/source-map-support/package.json
generated
vendored
Normal file
@ -0,0 +1,50 @@
{
  "name": "@cspotcode/source-map-support",
  "description": "Fixes stack traces for files with source maps",
  "version": "0.8.1",
  "main": "./source-map-support.js",
  "types": "./source-map-support.d.ts",
  "scripts": {
    "build": "node build.js",
    "serve-tests": "http-server -p 1336",
    "test": "mocha"
  },
  "files": [
    "/register.d.ts",
    "/register.js",
    "/register-hook-require.d.ts",
    "/register-hook-require.js",
    "/source-map-support.d.ts",
    "/source-map-support.js",
    "/browser-source-map-support.js"
  ],
  "dependencies": {
    "@jridgewell/trace-mapping": "0.3.9"
  },
  "devDependencies": {
    "@types/lodash": "^4.14.182",
    "browserify": "^4.2.3",
    "coffeescript": "^1.12.7",
    "http-server": "^0.11.1",
    "lodash": "^4.17.21",
    "mocha": "^3.5.3",
    "semver": "^7.3.7",
    "source-map": "0.6.1",
    "webpack": "^1.15.0"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/cspotcode/node-source-map-support"
  },
  "bugs": {
    "url": "https://github.com/cspotcode/node-source-map-support/issues"
  },
  "license": "MIT",
  "engines": {
    "node": ">=12"
  },
  "volta": {
    "node": "16.11.0",
    "npm": "7.24.2"
  }
}
7
node_modules/@cspotcode/source-map-support/register-hook-require.d.ts
generated
vendored
Executable file
@ -0,0 +1,7 @@
// tslint:disable:no-useless-files

// For following usage:
// import '@cspotcode/source-map-support/register-hook-require'
// Instead of:
// import sourceMapSupport from '@cspotcode/source-map-support'
// sourceMapSupport.install({hookRequire: true})
3
node_modules/@cspotcode/source-map-support/register-hook-require.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
require('./').install({
  hookRequire: true
});
7
node_modules/@cspotcode/source-map-support/register.d.ts
generated
vendored
Executable file
@ -0,0 +1,7 @@
// tslint:disable:no-useless-files

// For following usage:
// import '@cspotcode/source-map-support/register'
// Instead of:
// import sourceMapSupport from '@cspotcode/source-map-support'
// sourceMapSupport.install()
1
node_modules/@cspotcode/source-map-support/register.js
generated
vendored
Normal file
@ -0,0 +1 @@
require('./').install();
76
node_modules/@cspotcode/source-map-support/source-map-support.d.ts
generated
vendored
Executable file
@ -0,0 +1,76 @@
// Type definitions for source-map-support 0.5
// Project: https://github.com/evanw/node-source-map-support
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>
//                 Jason Cheatham <https://github.com/jason0x43>
//                 Alcedo Nathaniel De Guzman Jr <https://github.com/natealcedo>
//                 Griffin Yourick <https://github.com/tough-griff>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

export interface RawSourceMap {
    version: 3;
    sources: string[];
    names: string[];
    sourceRoot?: string;
    sourcesContent?: string[];
    mappings: string;
    file: string;
}

/**
 * Output of retrieveSourceMap().
 * From source-map-support:
 * The map field may be either a string or the parsed JSON object (i.e.,
 * it must be a valid argument to the SourceMapConsumer constructor).
 */
export interface UrlAndMap {
    url: string;
    map: string | RawSourceMap;
}

/**
 * Options to install().
 */
export interface Options {
    handleUncaughtExceptions?: boolean | undefined;
    hookRequire?: boolean | undefined;
    emptyCacheBetweenOperations?: boolean | undefined;
    environment?: 'auto' | 'browser' | 'node' | undefined;
    overrideRetrieveFile?: boolean | undefined;
    overrideRetrieveSourceMap?: boolean | undefined;
    retrieveFile?(path: string): string;
    retrieveSourceMap?(source: string): UrlAndMap | null;
    /**
     * Set false to disable redirection of require / import `source-map-support` to `@cspotcode/source-map-support`
     */
    redirectConflictingLibrary?: boolean;
    /**
     * Callback will be called every time we redirect due to `redirectConflictingLibrary`
     * This allows consumers to log helpful warnings if they choose.
     * @param parent NodeJS.Module which made the require() or require.resolve() call
     * @param options options object internally passed to node's `_resolveFilename` hook
     */
    onConflictingLibraryRedirect?: (request: string, parent: any, isMain: boolean, options: any, redirectedRequest: string) => void;
}

export interface Position {
    source: string;
    line: number;
    column: number;
}

export function wrapCallSite(frame: any /* StackFrame */): any /* StackFrame */;
export function getErrorSource(error: Error): string | null;
export function mapSourcePosition(position: Position): Position;
export function retrieveSourceMap(source: string): UrlAndMap | null;
export function resetRetrieveHandlers(): void;

/**
 * Install SourceMap support.
 * @param options Can be used to e.g. disable uncaughtException handler.
 */
export function install(options?: Options): void;

/**
 * Uninstall SourceMap support.
 */
export function uninstall(): void;
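The declarations above are the whole editor-facing surface of the package. As a quick illustration (not part of this commit), the sketch below shows how install() and a custom retrieveSourceMap handler fit together; the module specifier is the real package name, but the 'virtual:demo.js' source and its inline map are invented for the example, and returning null hands unknown sources back to the library's built-in lookup.

// Hypothetical usage sketch of the API declared in source-map-support.d.ts (TypeScript).
import { install } from '@cspotcode/source-map-support';

install({
  environment: 'node',            // skip the browser XHR code path
  handleUncaughtExceptions: true, // print mapped stacks for fatal errors
  retrieveSourceMap(source) {
    // Serve a map for one made-up virtual module; anything else falls
    // through (null) to the default file/URL handlers.
    if (source === 'virtual:demo.js') {
      return {
        url: 'virtual:demo.js',
        map: '{"version":3,"sources":["demo.ts"],"names":[],"mappings":"AAAA","file":"demo.js"}',
      };
    }
    return null;
  },
});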
938
node_modules/@cspotcode/source-map-support/source-map-support.js
generated
vendored
Normal file
@ -0,0 +1,938 @@
|
||||
const { TraceMap, originalPositionFor, AnyMap } = require('@jridgewell/trace-mapping');
|
||||
var path = require('path');
|
||||
const { fileURLToPath, pathToFileURL } = require('url');
|
||||
var util = require('util');
|
||||
|
||||
var fs;
|
||||
try {
|
||||
fs = require('fs');
|
||||
if (!fs.existsSync || !fs.readFileSync) {
|
||||
// fs doesn't have all methods we need
|
||||
fs = null;
|
||||
}
|
||||
} catch (err) {
|
||||
/* nop */
|
||||
}
|
||||
|
||||
/**
|
||||
* Requires a module which is protected against bundler minification.
|
||||
*
|
||||
* @param {NodeModule} mod
|
||||
* @param {string} request
|
||||
*/
|
||||
function dynamicRequire(mod, request) {
|
||||
return mod.require(request);
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* enabled: boolean;
|
||||
* originalValue: any;
|
||||
* installedValue: any;
|
||||
* }} HookState
|
||||
* Used for installing and uninstalling hooks
|
||||
*/
|
||||
|
||||
// Increment this if the format of sharedData changes in a breaking way.
|
||||
var sharedDataVersion = 1;
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {T} defaults
|
||||
* @returns {T}
|
||||
*/
|
||||
function initializeSharedData(defaults) {
|
||||
var sharedDataKey = 'source-map-support/sharedData';
|
||||
if (typeof Symbol !== 'undefined') {
|
||||
sharedDataKey = Symbol.for(sharedDataKey);
|
||||
}
|
||||
var sharedData = this[sharedDataKey];
|
||||
if (!sharedData) {
|
||||
sharedData = { version: sharedDataVersion };
|
||||
if (Object.defineProperty) {
|
||||
Object.defineProperty(this, sharedDataKey, { value: sharedData });
|
||||
} else {
|
||||
this[sharedDataKey] = sharedData;
|
||||
}
|
||||
}
|
||||
if (sharedDataVersion !== sharedData.version) {
|
||||
throw new Error("Multiple incompatible instances of source-map-support were loaded");
|
||||
}
|
||||
for (var key in defaults) {
|
||||
if (!(key in sharedData)) {
|
||||
sharedData[key] = defaults[key];
|
||||
}
|
||||
}
|
||||
return sharedData;
|
||||
}
|
||||
|
||||
// If multiple instances of source-map-support are loaded into the same
|
||||
// context, they shouldn't overwrite each other. By storing handlers, caches,
|
||||
// and other state on a shared object, different instances of
|
||||
// source-map-support can work together in a limited way. This does require
|
||||
// that future versions of source-map-support continue to support the fields on
|
||||
// this object. If this internal contract ever needs to be broken, increment
|
||||
// sharedDataVersion. (This version number is not the same as any of the
|
||||
// package's version numbers, which should reflect the *external* API of
|
||||
// source-map-support.)
|
||||
var sharedData = initializeSharedData({
|
||||
|
||||
// Only install once if called multiple times
|
||||
// Remember how the environment looked before installation so we can restore if able
|
||||
/** @type {HookState} */
|
||||
errorPrepareStackTraceHook: undefined,
|
||||
/** @type {HookState} */
|
||||
processEmitHook: undefined,
|
||||
/** @type {HookState} */
|
||||
moduleResolveFilenameHook: undefined,
|
||||
|
||||
/** @type {Array<(request: string, parent: any, isMain: boolean, options: any, redirectedRequest: string) => void>} */
|
||||
onConflictingLibraryRedirectArr: [],
|
||||
|
||||
// If true, the caches are reset before a stack trace formatting operation
|
||||
emptyCacheBetweenOperations: false,
|
||||
|
||||
// Maps a file path to a string containing the file contents
|
||||
fileContentsCache: Object.create(null),
|
||||
|
||||
// Maps a file path to a source map for that file
|
||||
/** @type {Record<string, {url: string, map: TraceMap}} */
|
||||
sourceMapCache: Object.create(null),
|
||||
|
||||
// Priority list of retrieve handlers
|
||||
retrieveFileHandlers: [],
|
||||
retrieveMapHandlers: [],
|
||||
|
||||
// Priority list of internally-implemented handlers.
|
||||
// When resetting state, we must keep these.
|
||||
internalRetrieveFileHandlers: [],
|
||||
internalRetrieveMapHandlers: [],
|
||||
|
||||
});
|
||||
|
||||
// Supports {browser, node, auto}
|
||||
var environment = "auto";
|
||||
|
||||
// Regex for detecting source maps
|
||||
var reSourceMap = /^data:application\/json[^,]+base64,/;
|
||||
|
||||
function isInBrowser() {
|
||||
if (environment === "browser")
|
||||
return true;
|
||||
if (environment === "node")
|
||||
return false;
|
||||
return ((typeof window !== 'undefined') && (typeof XMLHttpRequest === 'function') && !(window.require && window.module && window.process && window.process.type === "renderer"));
|
||||
}
|
||||
|
||||
function hasGlobalProcessEventEmitter() {
|
||||
return ((typeof process === 'object') && (process !== null) && (typeof process.on === 'function'));
|
||||
}
|
||||
|
||||
function tryFileURLToPath(v) {
|
||||
if(isFileUrl(v)) {
|
||||
return fileURLToPath(v);
|
||||
}
|
||||
return v;
|
||||
}
|
||||
|
||||
// TODO un-copy these from resolve-uri; see if they can be exported from that lib
|
||||
function isFileUrl(input) {
|
||||
return input.startsWith('file:');
|
||||
}
|
||||
function isAbsoluteUrl(input) {
|
||||
return schemeRegex.test(input);
|
||||
}
|
||||
// Matches the scheme of a URL, eg "http://"
|
||||
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||
function isSchemeRelativeUrl(input) {
|
||||
return input.startsWith('//');
|
||||
}
|
||||
|
||||
// #region Caches
|
||||
/** @param {string} pathOrFileUrl */
|
||||
function getCacheKey(pathOrFileUrl) {
|
||||
if(pathOrFileUrl.startsWith('node:')) return pathOrFileUrl;
|
||||
if(isFileUrl(pathOrFileUrl)) {
|
||||
// Must normalize spaces to %20, stuff like that
|
||||
return new URL(pathOrFileUrl).toString();
|
||||
} else {
|
||||
try {
|
||||
return pathToFileURL(pathOrFileUrl).toString();
|
||||
} catch {
|
||||
return pathOrFileUrl;
|
||||
}
|
||||
}
|
||||
}
|
||||
function getFileContentsCache(key) {
|
||||
return sharedData.fileContentsCache[getCacheKey(key)];
|
||||
}
|
||||
function hasFileContentsCacheFromKey(key) {
|
||||
return Object.prototype.hasOwnProperty.call(sharedData.fileContentsCache, key);
|
||||
}
|
||||
function getFileContentsCacheFromKey(key) {
|
||||
return sharedData.fileContentsCache[key];
|
||||
}
|
||||
function setFileContentsCache(key, value) {
|
||||
return sharedData.fileContentsCache[getCacheKey(key)] = value;
|
||||
}
|
||||
function getSourceMapCache(key) {
|
||||
return sharedData.sourceMapCache[getCacheKey(key)];
|
||||
}
|
||||
function setSourceMapCache(key, value) {
|
||||
return sharedData.sourceMapCache[getCacheKey(key)] = value;
|
||||
}
|
||||
function clearCaches() {
|
||||
sharedData.fileContentsCache = Object.create(null);
|
||||
sharedData.sourceMapCache = Object.create(null);
|
||||
}
|
||||
// #endregion Caches
|
||||
|
||||
function handlerExec(list, internalList) {
|
||||
return function(arg) {
|
||||
for (var i = 0; i < list.length; i++) {
|
||||
var ret = list[i](arg);
|
||||
if (ret) {
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
for (var i = 0; i < internalList.length; i++) {
|
||||
var ret = internalList[i](arg);
|
||||
if (ret) {
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
}
|
||||
|
||||
var retrieveFile = handlerExec(sharedData.retrieveFileHandlers, sharedData.internalRetrieveFileHandlers);
|
||||
|
||||
sharedData.internalRetrieveFileHandlers.push(function(path) {
|
||||
// Trim the path to make sure there is no extra whitespace.
|
||||
path = path.trim();
|
||||
if (/^file:/.test(path)) {
|
||||
// existsSync/readFileSync can't handle file protocol, but once stripped, it works
|
||||
path = path.replace(/file:\/\/\/(\w:)?/, function(protocol, drive) {
|
||||
return drive ?
|
||||
'' : // file:///C:/dir/file -> C:/dir/file
|
||||
'/'; // file:///root-dir/file -> /root-dir/file
|
||||
});
|
||||
}
|
||||
const key = getCacheKey(path);
|
||||
if(hasFileContentsCacheFromKey(key)) {
|
||||
return getFileContentsCacheFromKey(key);
|
||||
}
|
||||
|
||||
var contents = '';
|
||||
try {
|
||||
if (!fs) {
|
||||
// Use SJAX if we are in the browser
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', path, /** async */ false);
|
||||
xhr.send(null);
|
||||
if (xhr.readyState === 4 && xhr.status === 200) {
|
||||
contents = xhr.responseText;
|
||||
}
|
||||
} else if (fs.existsSync(path)) {
|
||||
// Otherwise, use the filesystem
|
||||
contents = fs.readFileSync(path, 'utf8');
|
||||
}
|
||||
} catch (er) {
|
||||
/* ignore any errors */
|
||||
}
|
||||
|
||||
return setFileContentsCache(path, contents);
|
||||
});
|
||||
|
||||
// Support URLs relative to a directory, but be careful about a protocol prefix
// in case we are in the browser (i.e. directories may start with "http://" or "file:///")
function supportRelativeURL(file, url) {
  if(!file) return url;
  // given that this happens within error formatting codepath, probably best to
  // fallback instead of throwing if anything goes wrong
  try {
    // if should output a URL
    if(isAbsoluteUrl(file) || isSchemeRelativeUrl(file)) {
      if(isAbsoluteUrl(url) || isSchemeRelativeUrl(url)) {
        return new URL(url, file).toString();
      }
      if(path.isAbsolute(url)) {
        return new URL(pathToFileURL(url), file).toString();
      }
      // url is relative path or URL
      return new URL(url.replace(/\\/g, '/'), file).toString();
    }
    // if should output a path (unless URL is something like https://)
    if(path.isAbsolute(file)) {
      if(isFileUrl(url)) {
        return fileURLToPath(url);
      }
      if(isSchemeRelativeUrl(url)) {
        return fileURLToPath(new URL(url, 'file://'));
      }
      if(isAbsoluteUrl(url)) {
        // url is a non-file URL
        // Go with the URL
        return url;
      }
      if(path.isAbsolute(url)) {
        // Normalize at all? decodeURI or normalize slashes?
        return path.normalize(url);
      }
      // url is relative path or URL
      return path.join(file, '..', decodeURI(url));
    }
    // If we get here, file is relative.
    // Shouldn't happen since node identifies modules with absolute paths or URLs.
    // But we can take a stab at returning something meaningful anyway.
    if(isAbsoluteUrl(url) || isSchemeRelativeUrl(url)) {
      return url;
    }
    return path.join(file, '..', url);
  } catch(e) {
    return url;
  }
}
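// ---------------------------------------------------------------------------
// Illustrative examples (assumed behavior, not executed): supportRelativeURL()
// keeps its output in the same style as `file`.
//   supportRelativeURL('file:///proj/dist/app.js', 'app.js.map')
//     -> 'file:///proj/dist/app.js.map'      (URL in, URL out)
//   supportRelativeURL('/proj/dist/app.js', 'app.js.map')
//     -> '/proj/dist/app.js.map'             (native path in, native path out)
//   supportRelativeURL('/proj/dist/app.js', 'https://cdn.example.com/app.js.map')
//     -> 'https://cdn.example.com/app.js.map' (non-file absolute URLs pass through)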
// Return pathOrUrl in the same style as matchStyleOf: either a file URL or a native path
function matchStyleOfPathOrUrl(matchStyleOf, pathOrUrl) {
  try {
    if(isAbsoluteUrl(matchStyleOf) || isSchemeRelativeUrl(matchStyleOf)) {
      if(isAbsoluteUrl(pathOrUrl) || isSchemeRelativeUrl(pathOrUrl)) return pathOrUrl;
      if(path.isAbsolute(pathOrUrl)) return pathToFileURL(pathOrUrl).toString();
    } else if(path.isAbsolute(matchStyleOf)) {
      if(isAbsoluteUrl(pathOrUrl) || isSchemeRelativeUrl(pathOrUrl)) {
        return fileURLToPath(new URL(pathOrUrl, 'file://'));
      }
    }
    return pathOrUrl;
  } catch(e) {
    return pathOrUrl;
  }
}
function retrieveSourceMapURL(source) {
  var fileData;

  if (isInBrowser()) {
    try {
      var xhr = new XMLHttpRequest();
      xhr.open('GET', source, false);
      xhr.send(null);
      fileData = xhr.readyState === 4 ? xhr.responseText : null;

      // Support providing a sourceMappingURL via the SourceMap header
      var sourceMapHeader = xhr.getResponseHeader("SourceMap") ||
                            xhr.getResponseHeader("X-SourceMap");
      if (sourceMapHeader) {
        return sourceMapHeader;
      }
    } catch (e) {
    }
  }

  // Get the URL of the source map
  fileData = retrieveFile(tryFileURLToPath(source));
  var re = /(?:\/\/[@#][\s]*sourceMappingURL=([^\s'"]+)[\s]*$)|(?:\/\*[@#][\s]*sourceMappingURL=([^\s*'"]+)[\s]*(?:\*\/)[\s]*$)/mg;
  // Keep executing the search to find the *last* sourceMappingURL to avoid
  // picking up sourceMappingURLs from comments, strings, etc.
  var lastMatch, match;
  while (match = re.exec(fileData)) lastMatch = match;
  if (!lastMatch) return null;
  return lastMatch[1];
}
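// ---------------------------------------------------------------------------
// Illustrative inputs (assumptions, not executed): the regex above accepts
// both comment forms of the directive, e.g.
//   //# sourceMappingURL=app.js.map
//   //@ sourceMappingURL=app.js.map
//   /*# sourceMappingURL=app.js.map */
// and, because the loop keeps the *last* match, an earlier lookalike in the
// file does not shadow the real directive at the end.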
// Can be overridden by the retrieveSourceMap option to install. Takes a
// generated source filename; returns a {map, optional url} object, or null if
// there is no source map. The map field may be either a string or the parsed
// JSON object (ie, it must be a valid argument to the SourceMapConsumer
// constructor).
/** @type {(source: string) => import('./source-map-support').UrlAndMap | null} */
var retrieveSourceMap = handlerExec(sharedData.retrieveMapHandlers, sharedData.internalRetrieveMapHandlers);
sharedData.internalRetrieveMapHandlers.push(function(source) {
  var sourceMappingURL = retrieveSourceMapURL(source);
  if (!sourceMappingURL) return null;

  // Read the contents of the source map
  var sourceMapData;
  if (reSourceMap.test(sourceMappingURL)) {
    // Support source map URL as a data url
    var rawData = sourceMappingURL.slice(sourceMappingURL.indexOf(',') + 1);
    sourceMapData = Buffer.from(rawData, "base64").toString();
    sourceMappingURL = source;
  } else {
    // Support source map URLs relative to the source URL
    sourceMappingURL = supportRelativeURL(source, sourceMappingURL);
    sourceMapData = retrieveFile(tryFileURLToPath(sourceMappingURL));
  }

  if (!sourceMapData) {
    return null;
  }

  return {
    url: sourceMappingURL,
    map: sourceMapData
  };
});
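// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library): a custom retrieveSourceMap
// handler, registered via install() (see the options handling in
// exports.install below), must return { url, map } or null. The bundler
// lookup table here is hypothetical.
function exampleCustomRetrieveSourceMap(sourceMapSupport, bundledMaps) {
  sourceMapSupport.install({
    retrieveSourceMap: function(source) {
      var map = bundledMaps[source];
      if (!map) return null; // fall through to the default handlers
      return { url: source, map: map }; // map may be a JSON string or an object
    }
  });
}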
function mapSourcePosition(position) {
  var sourceMap = getSourceMapCache(position.source);
  if (!sourceMap) {
    // Call the (overrideable) retrieveSourceMap function to get the source map.
    var urlAndMap = retrieveSourceMap(position.source);
    if (urlAndMap) {
      sourceMap = setSourceMapCache(position.source, {
        url: urlAndMap.url,
        map: new AnyMap(urlAndMap.map, urlAndMap.url)
      });

      // Overwrite trace-mapping's resolutions, because they do not handle
      // Windows paths the way we want.
      // TODO Remove now that windows path support was added to resolve-uri and thus trace-mapping?
      sourceMap.map.resolvedSources = sourceMap.map.sources.map(s => supportRelativeURL(sourceMap.url, s));

      // Load all sources stored inline with the source map into the file cache
      // to pretend like they are already loaded. They may not exist on disk.
      if (sourceMap.map.sourcesContent) {
        sourceMap.map.resolvedSources.forEach(function(resolvedSource, i) {
          var contents = sourceMap.map.sourcesContent[i];
          if (contents) {
            setFileContentsCache(resolvedSource, contents);
          }
        });
      }
    } else {
      sourceMap = setSourceMapCache(position.source, {
        url: null,
        map: null
      });
    }
  }

  // Resolve the source URL relative to the URL of the source map
  if (sourceMap && sourceMap.map) {
    var originalPosition = originalPositionFor(sourceMap.map, position);

    // Only return the original position if a matching line was found. If no
    // matching line is found then we return position instead, which will cause
    // the stack trace to print the path and line for the compiled file. It is
    // better to give a precise location in the compiled file than a vague
    // location in the original file.
    if (originalPosition.source !== null) {
      // originalPosition.source has *already* been resolved against sourceMap.url
      // so is *already* as absolute as possible.
      // However, we want to ensure we output in same format as input: URL or native path
      originalPosition.source = matchStyleOfPathOrUrl(
        position.source, originalPosition.source);
      return originalPosition;
    }
  }

  return position;
}
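// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library): mapSourcePosition() is
// exported below. Given a generated position (1-based line, 0-based column as
// used by the callers in this file) it returns the original position when a
// mapping is found, otherwise the input unchanged. The file name is
// hypothetical.
function exampleMapSourcePosition(sourceMapSupport) {
  return sourceMapSupport.mapSourcePosition({
    source: '/app/dist/index.js',
    line: 1,
    column: 42
  }); // e.g. { source: '/app/src/index.ts', line: 17, column: 10, ... }
}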
// Parses code generated by FormatEvalOrigin(), a function inside V8:
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js
function mapEvalOrigin(origin) {
  // Most eval() calls are in this format
  var match = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin);
  if (match) {
    var position = mapSourcePosition({
      source: match[2],
      line: +match[3],
      column: match[4] - 1
    });
    return 'eval at ' + match[1] + ' (' + position.source + ':' +
      position.line + ':' + (position.column + 1) + ')';
  }

  // Parse nested eval() calls using recursion
  match = /^eval at ([^(]+) \((.+)\)$/.exec(origin);
  if (match) {
    return 'eval at ' + match[1] + ' (' + mapEvalOrigin(match[2]) + ')';
  }

  // Make sure we still return useful information if we didn't find anything
  return origin;
}
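// ---------------------------------------------------------------------------
// Illustrative inputs (assumptions, not executed): origins handled above.
//   'eval at <anonymous> (/app/dist/index.js:1:42)'
//     -> the file/line/column part is source-mapped
//   'eval at outer (eval at inner (/app/dist/index.js:1:42))'
//     -> the nested form is unwrapped recursively
//   anything else is returned unchanged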
// This is copied almost verbatim from the V8 source code at
|
||||
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js
|
||||
// Update 2022-04-29:
|
||||
// https://github.com/v8/v8/blob/98f6f100c5ab8e390e51422747c4ef644d5ac6f2/src/builtins/builtins-callsite.cc#L175-L179
|
||||
// https://github.com/v8/v8/blob/98f6f100c5ab8e390e51422747c4ef644d5ac6f2/src/objects/call-site-info.cc#L795-L804
|
||||
// https://github.com/v8/v8/blob/98f6f100c5ab8e390e51422747c4ef644d5ac6f2/src/objects/call-site-info.cc#L717-L750
|
||||
// The implementation of wrapCallSite() used to just forward to the actual source
|
||||
// code of CallSite.prototype.toString but unfortunately a new release of V8
|
||||
// did something to the prototype chain and broke the shim. The only fix I
|
||||
// could find was copy/paste.
|
||||
function CallSiteToString() {
|
||||
var fileName;
|
||||
var fileLocation = "";
|
||||
if (this.isNative()) {
|
||||
fileLocation = "native";
|
||||
} else {
|
||||
fileName = this.getScriptNameOrSourceURL();
|
||||
if (!fileName && this.isEval()) {
|
||||
fileLocation = this.getEvalOrigin();
|
||||
fileLocation += ", "; // Expecting source position to follow.
|
||||
}
|
||||
|
||||
if (fileName) {
|
||||
fileLocation += fileName;
|
||||
} else {
|
||||
// Source code does not originate from a file and is not native, but we
|
||||
// can still get the source position inside the source string, e.g. in
|
||||
// an eval string.
|
||||
fileLocation += "<anonymous>";
|
||||
}
|
||||
var lineNumber = this.getLineNumber();
|
||||
if (lineNumber != null) {
|
||||
fileLocation += ":" + lineNumber;
|
||||
var columnNumber = this.getColumnNumber();
|
||||
if (columnNumber) {
|
||||
fileLocation += ":" + columnNumber;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var line = "";
|
||||
var isAsync = this.isAsync ? this.isAsync() : false;
|
||||
if(isAsync) {
|
||||
line += 'async ';
|
||||
var isPromiseAll = this.isPromiseAll ? this.isPromiseAll() : false;
|
||||
var isPromiseAny = this.isPromiseAny ? this.isPromiseAny() : false;
|
||||
if(isPromiseAny || isPromiseAll) {
|
||||
line += isPromiseAll ? 'Promise.all (index ' : 'Promise.any (index ';
|
||||
var promiseIndex = this.getPromiseIndex();
|
||||
line += promiseIndex + ')';
|
||||
}
|
||||
}
|
||||
var functionName = this.getFunctionName();
|
||||
var addSuffix = true;
|
||||
var isConstructor = this.isConstructor();
|
||||
var isMethodCall = !(this.isToplevel() || isConstructor);
|
||||
if (isMethodCall) {
|
||||
var typeName = this.getTypeName();
|
||||
// Fixes shim to be backward compatible with Node v0 to v4
|
||||
if (typeName === "[object Object]") {
|
||||
typeName = "null";
|
||||
}
|
||||
var methodName = this.getMethodName();
|
||||
if (functionName) {
|
||||
if (typeName && functionName.indexOf(typeName) != 0) {
|
||||
line += typeName + ".";
|
||||
}
|
||||
line += functionName;
|
||||
if (methodName && functionName.indexOf("." + methodName) != functionName.length - methodName.length - 1) {
|
||||
line += " [as " + methodName + "]";
|
||||
}
|
||||
} else {
|
||||
line += typeName + "." + (methodName || "<anonymous>");
|
||||
}
|
||||
} else if (isConstructor) {
|
||||
line += "new " + (functionName || "<anonymous>");
|
||||
} else if (functionName) {
|
||||
line += functionName;
|
||||
} else {
|
||||
line += fileLocation;
|
||||
addSuffix = false;
|
||||
}
|
||||
if (addSuffix) {
|
||||
line += " (" + fileLocation + ")";
|
||||
}
|
||||
return line;
|
||||
}
|
||||
|
||||
function cloneCallSite(frame) {
|
||||
var object = {};
|
||||
Object.getOwnPropertyNames(Object.getPrototypeOf(frame)).forEach(function(name) {
|
||||
object[name] = /^(?:is|get)/.test(name) ? function() { return frame[name].call(frame); } : frame[name];
|
||||
});
|
||||
object.toString = CallSiteToString;
|
||||
return object;
|
||||
}
|
||||
|
||||
function wrapCallSite(frame, state) {
|
||||
// provides interface backward compatibility
|
||||
if (state === undefined) {
|
||||
state = { nextPosition: null, curPosition: null }
|
||||
}
|
||||
if(frame.isNative()) {
|
||||
state.curPosition = null;
|
||||
return frame;
|
||||
}
|
||||
|
||||
// Most call sites will return the source file from getFileName(), but code
|
||||
// passed to eval() ending in "//# sourceURL=..." will return the source file
|
||||
// from getScriptNameOrSourceURL() instead
|
||||
var source = frame.getFileName() || frame.getScriptNameOrSourceURL();
|
||||
if (source) {
|
||||
// v8 does not expose its internal isWasm, etc methods, so we do this instead.
|
||||
if(source.startsWith('wasm://')) {
|
||||
state.curPosition = null;
|
||||
return frame;
|
||||
}
|
||||
|
||||
var line = frame.getLineNumber();
|
||||
var column = frame.getColumnNumber() - 1;
|
||||
|
||||
// Fix position in Node where some (internal) code is prepended.
|
||||
// See https://github.com/evanw/node-source-map-support/issues/36
|
||||
// Header removed in node at ^10.16 || >=11.11.0
|
||||
// v11 is not an LTS candidate, we can just test the one version with it.
|
||||
// Test node versions for: 10.16-19, 10.20+, 12-19, 20-99, 100+, or 11.11
|
||||
var noHeader = /^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;
|
||||
var headerLength = noHeader.test(process.version) ? 0 : 62;
|
||||
if (line === 1 && column > headerLength && !isInBrowser() && !frame.isEval()) {
|
||||
column -= headerLength;
|
||||
}
|
||||
|
||||
var position = mapSourcePosition({
|
||||
source: source,
|
||||
line: line,
|
||||
column: column
|
||||
});
|
||||
state.curPosition = position;
|
||||
frame = cloneCallSite(frame);
|
||||
var originalFunctionName = frame.getFunctionName;
|
||||
frame.getFunctionName = function() {
|
||||
if (state.nextPosition == null) {
|
||||
return originalFunctionName();
|
||||
}
|
||||
return state.nextPosition.name || originalFunctionName();
|
||||
};
|
||||
frame.getFileName = function() { return position.source; };
|
||||
frame.getLineNumber = function() { return position.line; };
|
||||
frame.getColumnNumber = function() { return position.column + 1; };
|
||||
frame.getScriptNameOrSourceURL = function() { return position.source; };
|
||||
return frame;
|
||||
}
|
||||
|
||||
// Code called using eval() needs special handling
|
||||
var origin = frame.isEval() && frame.getEvalOrigin();
|
||||
if (origin) {
|
||||
origin = mapEvalOrigin(origin);
|
||||
frame = cloneCallSite(frame);
|
||||
frame.getEvalOrigin = function() { return origin; };
|
||||
return frame;
|
||||
}
|
||||
|
||||
// If we get here then we were unable to change the source position
|
||||
return frame;
|
||||
}
|
||||
|
||||
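// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library): wrapCallSite() is exported
// below for consumers that install their own Error.prepareStackTrace but
// still want source-mapped frames. This assumes the standard V8 stack-trace
// API; passing no state uses the backward-compatible per-frame default.
function examplePrepareStackTrace(sourceMapSupport) {
  Error.prepareStackTrace = function(error, stack) {
    return String(error) + stack.map(function(frame) {
      return '\n    at ' + sourceMapSupport.wrapCallSite(frame);
    }).join('');
  };
}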
var kIsNodeError = undefined;
|
||||
try {
|
||||
// Get a deliberate ERR_INVALID_ARG_TYPE
|
||||
// TODO is there a better way to reliably get an instance of NodeError?
|
||||
path.resolve(123);
|
||||
} catch(e) {
|
||||
const symbols = Object.getOwnPropertySymbols(e);
|
||||
const symbol = symbols.find(function (s) {return s.toString().indexOf('kIsNodeError') >= 0});
|
||||
if(symbol) kIsNodeError = symbol;
|
||||
}
|
||||
|
||||
const ErrorPrototypeToString = (err) =>Error.prototype.toString.call(err);
|
||||
|
||||
/** @param {HookState} hookState */
|
||||
function createPrepareStackTrace(hookState) {
|
||||
return prepareStackTrace;
|
||||
|
||||
// This function is part of the V8 stack trace API, for more info see:
|
||||
// https://v8.dev/docs/stack-trace-api
|
||||
function prepareStackTrace(error, stack) {
|
||||
if(!hookState.enabled) return hookState.originalValue.apply(this, arguments);
|
||||
|
||||
if (sharedData.emptyCacheBetweenOperations) {
|
||||
clearCaches();
|
||||
}
|
||||
|
||||
// node gives its own errors special treatment. Mimic that behavior
|
||||
// https://github.com/nodejs/node/blob/3cbaabc4622df1b4009b9d026a1a970bdbae6e89/lib/internal/errors.js#L118-L128
|
||||
// https://github.com/nodejs/node/pull/39182
|
||||
var errorString;
|
||||
if (kIsNodeError) {
|
||||
if(kIsNodeError in error) {
|
||||
errorString = `${error.name} [${error.code}]: ${error.message}`;
|
||||
} else {
|
||||
errorString = ErrorPrototypeToString(error);
|
||||
}
|
||||
} else {
|
||||
var name = error.name || 'Error';
|
||||
var message = error.message || '';
|
||||
errorString = message ? name + ": " + message : name;
|
||||
}
|
||||
|
||||
var state = { nextPosition: null, curPosition: null };
|
||||
var processedStack = [];
|
||||
for (var i = stack.length - 1; i >= 0; i--) {
|
||||
processedStack.push('\n at ' + wrapCallSite(stack[i], state));
|
||||
state.nextPosition = state.curPosition;
|
||||
}
|
||||
state.curPosition = state.nextPosition = null;
|
||||
return errorString + processedStack.reverse().join('');
|
||||
}
|
||||
}
|
||||
|
||||
// Generate position and snippet of original source with pointer
|
||||
function getErrorSource(error) {
|
||||
var match = /\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(error.stack);
|
||||
if (match) {
|
||||
var source = match[1];
|
||||
var line = +match[2];
|
||||
var column = +match[3];
|
||||
|
||||
// Support the inline sourceContents inside the source map
|
||||
var contents = getFileContentsCache(source);
|
||||
|
||||
const sourceAsPath = tryFileURLToPath(source);
|
||||
|
||||
// Support files on disk
|
||||
if (!contents && fs && fs.existsSync(sourceAsPath)) {
|
||||
try {
|
||||
contents = fs.readFileSync(sourceAsPath, 'utf8');
|
||||
} catch (er) {
|
||||
contents = '';
|
||||
}
|
||||
}
|
||||
|
||||
// Format the line from the original source code like node does
|
||||
if (contents) {
|
||||
var code = contents.split(/(?:\r\n|\r|\n)/)[line - 1];
|
||||
if (code) {
|
||||
return source + ':' + line + '\n' + code + '\n' +
|
||||
new Array(column).join(' ') + '^';
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
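// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library): getErrorSource() is exported
// below; a consumer-side handler could print the offending original-source
// line before the stack, similar to printFatalErrorUponExit() further down.
function exampleLogErrorSource(sourceMapSupport, error) {
  var snippet = sourceMapSupport.getErrorSource(error);
  if (snippet) {
    console.error(snippet);
  }
  console.error(error.stack);
}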
function printFatalErrorUponExit (error) {
|
||||
var source = getErrorSource(error);
|
||||
|
||||
// Ensure error is printed synchronously and not truncated
|
||||
if (process.stderr._handle && process.stderr._handle.setBlocking) {
|
||||
process.stderr._handle.setBlocking(true);
|
||||
}
|
||||
|
||||
if (source) {
|
||||
console.error(source);
|
||||
}
|
||||
|
||||
// Matches node's behavior for colorized output
|
||||
console.error(
|
||||
util.inspect(error, {
|
||||
customInspect: false,
|
||||
colors: process.stderr.isTTY
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
function shimEmitUncaughtException () {
|
||||
const originalValue = process.emit;
|
||||
var hook = sharedData.processEmitHook = {
|
||||
enabled: true,
|
||||
originalValue,
|
||||
installedValue: undefined
|
||||
};
|
||||
var isTerminatingDueToFatalException = false;
|
||||
var fatalException;
|
||||
|
||||
process.emit = sharedData.processEmitHook.installedValue = function (type) {
|
||||
const hadListeners = originalValue.apply(this, arguments);
|
||||
if(hook.enabled) {
|
||||
if (type === 'uncaughtException' && !hadListeners) {
|
||||
isTerminatingDueToFatalException = true;
|
||||
fatalException = arguments[1];
|
||||
process.exit(1);
|
||||
}
|
||||
if (type === 'exit' && isTerminatingDueToFatalException) {
|
||||
printFatalErrorUponExit(fatalException);
|
||||
}
|
||||
}
|
||||
return hadListeners;
|
||||
};
|
||||
}
|
||||
|
||||
var originalRetrieveFileHandlers = sharedData.retrieveFileHandlers.slice(0);
|
||||
var originalRetrieveMapHandlers = sharedData.retrieveMapHandlers.slice(0);
|
||||
|
||||
exports.wrapCallSite = wrapCallSite;
|
||||
exports.getErrorSource = getErrorSource;
|
||||
exports.mapSourcePosition = mapSourcePosition;
|
||||
exports.retrieveSourceMap = retrieveSourceMap;
|
||||
|
||||
exports.install = function(options) {
|
||||
options = options || {};
|
||||
|
||||
if (options.environment) {
|
||||
environment = options.environment;
|
||||
if (["node", "browser", "auto"].indexOf(environment) === -1) {
|
||||
throw new Error("environment " + environment + " was unknown. Available options are {auto, browser, node}")
|
||||
}
|
||||
}
|
||||
|
||||
// Use dynamicRequire to avoid including in browser bundles
|
||||
var Module = dynamicRequire(module, 'module');
|
||||
|
||||
// Redirect subsequent imports of "source-map-support"
|
||||
// to this package
|
||||
const {redirectConflictingLibrary = true, onConflictingLibraryRedirect} = options;
|
||||
if(redirectConflictingLibrary) {
|
||||
if (!sharedData.moduleResolveFilenameHook) {
|
||||
const originalValue = Module._resolveFilename;
|
||||
const moduleResolveFilenameHook = sharedData.moduleResolveFilenameHook = {
|
||||
enabled: true,
|
||||
originalValue,
|
||||
installedValue: undefined,
|
||||
}
|
||||
Module._resolveFilename = sharedData.moduleResolveFilenameHook.installedValue = function (request, parent, isMain, options) {
|
||||
if (moduleResolveFilenameHook.enabled) {
|
||||
// Match all source-map-support entrypoints: source-map-support, source-map-support/register
|
||||
let requestRedirect;
|
||||
if (request === 'source-map-support') {
|
||||
requestRedirect = './';
|
||||
} else if (request === 'source-map-support/register') {
|
||||
requestRedirect = './register';
|
||||
}
|
||||
|
||||
if (requestRedirect !== undefined) {
|
||||
const newRequest = require.resolve(requestRedirect);
|
||||
for (const cb of sharedData.onConflictingLibraryRedirectArr) {
|
||||
cb(request, parent, isMain, options, newRequest);
|
||||
}
|
||||
request = newRequest;
|
||||
}
|
||||
}
|
||||
|
||||
return originalValue.call(this, request, parent, isMain, options);
|
||||
}
|
||||
}
|
||||
if (onConflictingLibraryRedirect) {
|
||||
sharedData.onConflictingLibraryRedirectArr.push(onConflictingLibraryRedirect);
|
||||
}
|
||||
}
|
||||
|
||||
// Allow sources to be found by methods other than reading the files
|
||||
// directly from disk.
|
||||
if (options.retrieveFile) {
|
||||
if (options.overrideRetrieveFile) {
|
||||
sharedData.retrieveFileHandlers.length = 0;
|
||||
}
|
||||
|
||||
sharedData.retrieveFileHandlers.unshift(options.retrieveFile);
|
||||
}
|
||||
|
||||
// Allow source maps to be found by methods other than reading the files
|
||||
// directly from disk.
|
||||
if (options.retrieveSourceMap) {
|
||||
if (options.overrideRetrieveSourceMap) {
|
||||
sharedData.retrieveMapHandlers.length = 0;
|
||||
}
|
||||
|
||||
sharedData.retrieveMapHandlers.unshift(options.retrieveSourceMap);
|
||||
}
|
||||
|
||||
// Support runtime transpilers that include inline source maps
|
||||
if (options.hookRequire && !isInBrowser()) {
|
||||
var $compile = Module.prototype._compile;
|
||||
|
||||
if (!$compile.__sourceMapSupport) {
|
||||
Module.prototype._compile = function(content, filename) {
|
||||
setFileContentsCache(filename, content);
|
||||
setSourceMapCache(filename, undefined);
|
||||
return $compile.call(this, content, filename);
|
||||
};
|
||||
|
||||
Module.prototype._compile.__sourceMapSupport = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Configure options
|
||||
if (!sharedData.emptyCacheBetweenOperations) {
|
||||
sharedData.emptyCacheBetweenOperations = 'emptyCacheBetweenOperations' in options ?
|
||||
options.emptyCacheBetweenOperations : false;
|
||||
}
|
||||
|
||||
|
||||
// Install the error reformatter
|
||||
if (!sharedData.errorPrepareStackTraceHook) {
|
||||
const originalValue = Error.prepareStackTrace;
|
||||
sharedData.errorPrepareStackTraceHook = {
|
||||
enabled: true,
|
||||
originalValue,
|
||||
installedValue: undefined
|
||||
};
|
||||
Error.prepareStackTrace = sharedData.errorPrepareStackTraceHook.installedValue = createPrepareStackTrace(sharedData.errorPrepareStackTraceHook);
|
||||
}
|
||||
|
||||
if (!sharedData.processEmitHook) {
|
||||
var installHandler = 'handleUncaughtExceptions' in options ?
|
||||
options.handleUncaughtExceptions : true;
|
||||
|
||||
// Do not override 'uncaughtException' with our own handler in Node.js
|
||||
// Worker threads. Workers pass the error to the main thread as an event,
|
||||
// rather than printing something to stderr and exiting.
|
||||
try {
|
||||
// We need to use `dynamicRequire` because `require` on its own will be optimized by WebPack/Browserify.
|
||||
var worker_threads = dynamicRequire(module, 'worker_threads');
|
||||
if (worker_threads.isMainThread === false) {
|
||||
installHandler = false;
|
||||
}
|
||||
} catch(e) {}
|
||||
|
||||
// Provide the option to not install the uncaught exception handler. This is
|
||||
// to support other uncaught exception handlers (in test frameworks, for
|
||||
// example). If this handler is not installed and there are no other uncaught
|
||||
// exception handlers, uncaught exceptions will be caught by node's built-in
|
||||
// exception handler and the process will still be terminated. However, the
|
||||
// generated JavaScript code will be shown above the stack trace instead of
|
||||
// the original source code.
|
||||
if (installHandler && hasGlobalProcessEventEmitter()) {
|
||||
shimEmitUncaughtException();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
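// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library): a typical consumer-side
// installation using the options handled above. Values shown are the choices
// this file documents; defaults may differ per project.
function exampleInstall(sourceMapSupport) {
  sourceMapSupport.install({
    environment: 'node',              // or 'browser' / 'auto'
    hookRequire: true,                // capture inline maps from runtime transpilers
    handleUncaughtExceptions: true,   // shim process.emit to print fatal errors
    emptyCacheBetweenOperations: false
  });
}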
exports.uninstall = function() {
|
||||
if(sharedData.processEmitHook) {
|
||||
// Disable behavior
|
||||
sharedData.processEmitHook.enabled = false;
|
||||
// If possible, remove our hook function. May not be possible if subsequent third-party hooks have wrapped around us.
|
||||
if(process.emit === sharedData.processEmitHook.installedValue) {
|
||||
process.emit = sharedData.processEmitHook.originalValue;
|
||||
}
|
||||
sharedData.processEmitHook = undefined;
|
||||
}
|
||||
if(sharedData.errorPrepareStackTraceHook) {
|
||||
// Disable behavior
|
||||
sharedData.errorPrepareStackTraceHook.enabled = false;
|
||||
// If possible or necessary, remove our hook function.
|
||||
// In vanilla environments, prepareStackTrace is `undefined`.
|
||||
// We cannot delegate to `undefined` the way we can to a function w/`.apply()`; our only option is to remove the function.
|
||||
// If we are the *first* hook installed, and another was installed on top of us, we have no choice but to remove both.
|
||||
if(Error.prepareStackTrace === sharedData.errorPrepareStackTraceHook.installedValue || typeof sharedData.errorPrepareStackTraceHook.originalValue !== 'function') {
|
||||
Error.prepareStackTrace = sharedData.errorPrepareStackTraceHook.originalValue;
|
||||
}
|
||||
sharedData.errorPrepareStackTraceHook = undefined;
|
||||
}
|
||||
if (sharedData.moduleResolveFilenameHook) {
|
||||
// Disable behavior
|
||||
sharedData.moduleResolveFilenameHook.enabled = false;
|
||||
// If possible, remove our hook function. May not be possible if subsequent third-party hooks have wrapped around us.
|
||||
var Module = dynamicRequire(module, 'module');
|
||||
if(Module._resolveFilename === sharedData.moduleResolveFilenameHook.installedValue) {
|
||||
Module._resolveFilename = sharedData.moduleResolveFilenameHook.originalValue;
|
||||
}
|
||||
sharedData.moduleResolveFilenameHook = undefined;
|
||||
}
|
||||
sharedData.onConflictingLibraryRedirectArr.length = 0;
|
||||
}
|
||||
|
||||
exports.resetRetrieveHandlers = function() {
|
||||
sharedData.retrieveFileHandlers.length = 0;
|
||||
sharedData.retrieveMapHandlers.length = 0;
|
||||
}
|
19
node_modules/@jridgewell/resolve-uri/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
Copyright 2019 Justin Ridgewell <jridgewell@google.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
40
node_modules/@jridgewell/resolve-uri/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
# @jridgewell/resolve-uri
|
||||
|
||||
> Resolve a URI relative to an optional base URI
|
||||
|
||||
Resolve any combination of absolute URIs, protocol-relative URIs, absolute paths, or relative paths.
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install @jridgewell/resolve-uri
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
function resolve(input: string, base?: string): string;
|
||||
```
|
||||
|
||||
```js
|
||||
import resolve from '@jridgewell/resolve-uri';
|
||||
|
||||
resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
|
||||
```
|
||||
|
||||
| Input | Base | Resolution | Explanation |
|
||||
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
|
||||
| `https://example.com` | _any_ | `https://example.com/` | Input is normalized only |
|
||||
| `//example.com` | `https://base.com/` | `https://example.com/` | Input inherits the base's protocol |
|
||||
| `//example.com` | _rest_ | `//example.com/` | Input is normalized only |
|
||||
| `/example` | `https://base.com/` | `https://base.com/example` | Input inherits the base's origin |
|
||||
| `/example` | `//base.com/` | `//base.com/example` | Input inherits the base's host and remains protocol relative |
|
||||
| `/example` | _rest_ | `/example` | Input is normalized only |
|
||||
| `example` | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base |
|
||||
| `example` | `https://base.com/file` | `https://base.com/example` | Input is joined with the base without its file |
|
||||
| `example` | `//base.com/dir/` | `//base.com/dir/example` | Input is joined with the base's last directory |
|
||||
| `example` | `//base.com/file` | `//base.com/example` | Input is joined with the base without its file |
|
||||
| `example` | `/base/dir/` | `/base/dir/example` | Input is joined with the base's last directory |
|
||||
| `example` | `/base/file` | `/base/example` | Input is joined with the base without its file |
|
||||
| `example` | `base/dir/` | `base/dir/example` | Input is joined with the base's last directory |
|
||||
| `example` | `base/file` | `base/example` | Input is joined with the base without its file |
|
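A few more calls, derived from the rows of the table above (illustrative only):

```js
import resolve from '@jridgewell/resolve-uri';

resolve('/example', 'https://base.com/');    // => 'https://base.com/example'
resolve('example', 'https://base.com/dir/'); // => 'https://base.com/dir/example'
resolve('example', 'https://base.com/file'); // => 'https://base.com/example'
resolve('example', '/base/dir/');            // => '/base/dir/example'
```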
232
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs
generated
vendored
Normal file
@ -0,0 +1,232 @@
|
||||
// Matches the scheme of a URL, eg "http://"
|
||||
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||
/**
|
||||
* Matches the parts of a URL:
|
||||
* 1. Scheme, including ":", guaranteed.
|
||||
* 2. User/password, including "@", optional.
|
||||
* 3. Host, guaranteed.
|
||||
* 4. Port, including ":", optional.
|
||||
* 5. Path, including "/", optional.
|
||||
* 6. Query, including "?", optional.
|
||||
* 7. Hash, including "#", optional.
|
||||
*/
|
||||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
||||
/**
|
||||
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
|
||||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
||||
*
|
||||
* 1. Host, optional.
|
||||
* 2. Path, which may include "/", guaranteed.
|
||||
* 3. Query, including "?", optional.
|
||||
* 4. Hash, including "#", optional.
|
||||
*/
|
||||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
||||
function isAbsoluteUrl(input) {
|
||||
return schemeRegex.test(input);
|
||||
}
|
||||
function isSchemeRelativeUrl(input) {
|
||||
return input.startsWith('//');
|
||||
}
|
||||
function isAbsolutePath(input) {
|
||||
return input.startsWith('/');
|
||||
}
|
||||
function isFileUrl(input) {
|
||||
return input.startsWith('file:');
|
||||
}
|
||||
function isRelative(input) {
|
||||
return /^[.?#]/.test(input);
|
||||
}
|
||||
function parseAbsoluteUrl(input) {
|
||||
const match = urlRegex.exec(input);
|
||||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
||||
}
|
||||
function parseFileUrl(input) {
|
||||
const match = fileRegex.exec(input);
|
||||
const path = match[2];
|
||||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
||||
}
|
||||
function makeUrl(scheme, user, host, port, path, query, hash) {
|
||||
return {
|
||||
scheme,
|
||||
user,
|
||||
host,
|
||||
port,
|
||||
path,
|
||||
query,
|
||||
hash,
|
||||
type: 7 /* Absolute */,
|
||||
};
|
||||
}
|
||||
function parseUrl(input) {
|
||||
if (isSchemeRelativeUrl(input)) {
|
||||
const url = parseAbsoluteUrl('http:' + input);
|
||||
url.scheme = '';
|
||||
url.type = 6 /* SchemeRelative */;
|
||||
return url;
|
||||
}
|
||||
if (isAbsolutePath(input)) {
|
||||
const url = parseAbsoluteUrl('http://foo.com' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = 5 /* AbsolutePath */;
|
||||
return url;
|
||||
}
|
||||
if (isFileUrl(input))
|
||||
return parseFileUrl(input);
|
||||
if (isAbsoluteUrl(input))
|
||||
return parseAbsoluteUrl(input);
|
||||
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = input
|
||||
? input.startsWith('?')
|
||||
? 3 /* Query */
|
||||
: input.startsWith('#')
|
||||
? 2 /* Hash */
|
||||
: 4 /* RelativePath */
|
||||
: 1 /* Empty */;
|
||||
return url;
|
||||
}
|
||||
function stripPathFilename(path) {
|
||||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
||||
// paths. It's not a file, so we can't strip it.
|
||||
if (path.endsWith('/..'))
|
||||
return path;
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
function mergePaths(url, base) {
|
||||
normalizePath(base, base.type);
|
||||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
||||
// path).
|
||||
if (url.path === '/') {
|
||||
url.path = base.path;
|
||||
}
|
||||
else {
|
||||
// Resolution happens relative to the base path's directory, not the file.
|
||||
url.path = stripPathFilename(base.path) + url.path;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
||||
* "foo/.". We need to normalize to a standard representation.
|
||||
*/
|
||||
function normalizePath(url, type) {
|
||||
const rel = type <= 4 /* RelativePath */;
|
||||
const pieces = url.path.split('/');
|
||||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
||||
// pieces[0] is an empty string.
|
||||
let pointer = 1;
|
||||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
||||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
||||
let positive = 0;
|
||||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
||||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
||||
// real directory, we won't need to append, unless the other conditions happen again.
|
||||
let addTrailingSlash = false;
|
||||
for (let i = 1; i < pieces.length; i++) {
|
||||
const piece = pieces[i];
|
||||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
||||
if (!piece) {
|
||||
addTrailingSlash = true;
|
||||
continue;
|
||||
}
|
||||
// If we encounter a real directory, then we don't need to append anymore.
|
||||
addTrailingSlash = false;
|
||||
// A current directory, which we can always drop.
|
||||
if (piece === '.')
|
||||
continue;
|
||||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
||||
// have an excess of parents, and we'll need to keep the "..".
|
||||
if (piece === '..') {
|
||||
if (positive) {
|
||||
addTrailingSlash = true;
|
||||
positive--;
|
||||
pointer--;
|
||||
}
|
||||
else if (rel) {
|
||||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
||||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
||||
pieces[pointer++] = piece;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
||||
// any popped or dropped directories.
|
||||
pieces[pointer++] = piece;
|
||||
positive++;
|
||||
}
|
||||
let path = '';
|
||||
for (let i = 1; i < pointer; i++) {
|
||||
path += '/' + pieces[i];
|
||||
}
|
||||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
||||
path += '/';
|
||||
}
|
||||
url.path = path;
|
||||
}
|
||||
/**
|
||||
* Attempts to resolve `input` URL/path relative to `base`.
|
||||
*/
|
||||
function resolve(input, base) {
|
||||
if (!input && !base)
|
||||
return '';
|
||||
const url = parseUrl(input);
|
||||
let inputType = url.type;
|
||||
if (base && inputType !== 7 /* Absolute */) {
|
||||
const baseUrl = parseUrl(base);
|
||||
const baseType = baseUrl.type;
|
||||
switch (inputType) {
|
||||
case 1 /* Empty */:
|
||||
url.hash = baseUrl.hash;
|
||||
// fall through
|
||||
case 2 /* Hash */:
|
||||
url.query = baseUrl.query;
|
||||
// fall through
|
||||
case 3 /* Query */:
|
||||
case 4 /* RelativePath */:
|
||||
mergePaths(url, baseUrl);
|
||||
// fall through
|
||||
case 5 /* AbsolutePath */:
|
||||
// The host, user, and port are joined, you can't copy one without the others.
|
||||
url.user = baseUrl.user;
|
||||
url.host = baseUrl.host;
|
||||
url.port = baseUrl.port;
|
||||
// fall through
|
||||
case 6 /* SchemeRelative */:
|
||||
// The input doesn't have a scheme, at least, so we need to copy at least that over.
|
||||
url.scheme = baseUrl.scheme;
|
||||
}
|
||||
if (baseType > inputType)
|
||||
inputType = baseType;
|
||||
}
|
||||
normalizePath(url, inputType);
|
||||
const queryHash = url.query + url.hash;
|
||||
switch (inputType) {
|
||||
// This is impossible, because of the empty checks at the start of the function.
|
||||
// case UrlType.Empty:
|
||||
case 2 /* Hash */:
|
||||
case 3 /* Query */:
|
||||
return queryHash;
|
||||
case 4 /* RelativePath */: {
|
||||
// The first char is always a "/", and we need it to be relative.
|
||||
const path = url.path.slice(1);
|
||||
if (!path)
|
||||
return queryHash || '.';
|
||||
if (isRelative(base || input) && !isRelative(path)) {
|
||||
// If base started with a leading ".", or there is no base and input started with a ".",
|
||||
// then we need to ensure that the relative path starts with a ".". We don't know if
|
||||
// relative starts with a "..", though, so check before prepending.
|
||||
return './' + path + queryHash;
|
||||
}
|
||||
return path + queryHash;
|
||||
}
|
||||
case 5 /* AbsolutePath */:
|
||||
return url.path + queryHash;
|
||||
default:
|
||||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||
}
|
||||
}
|
||||
|
||||
export { resolve as default };
|
||||
//# sourceMappingURL=resolve-uri.mjs.map
|
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
240
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js
generated
vendored
Normal file
@ -0,0 +1,240 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||
typeof define === 'function' && define.amd ? define(factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
|
||||
})(this, (function () { 'use strict';
|
||||
|
||||
// Matches the scheme of a URL, eg "http://"
|
||||
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||
/**
|
||||
* Matches the parts of a URL:
|
||||
* 1. Scheme, including ":", guaranteed.
|
||||
* 2. User/password, including "@", optional.
|
||||
* 3. Host, guaranteed.
|
||||
* 4. Port, including ":", optional.
|
||||
* 5. Path, including "/", optional.
|
||||
* 6. Query, including "?", optional.
|
||||
* 7. Hash, including "#", optional.
|
||||
*/
|
||||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
||||
/**
|
||||
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
|
||||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
||||
*
|
||||
* 1. Host, optional.
|
||||
* 2. Path, which may include "/", guaranteed.
|
||||
* 3. Query, including "?", optional.
|
||||
* 4. Hash, including "#", optional.
|
||||
*/
|
||||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
||||
function isAbsoluteUrl(input) {
|
||||
return schemeRegex.test(input);
|
||||
}
|
||||
function isSchemeRelativeUrl(input) {
|
||||
return input.startsWith('//');
|
||||
}
|
||||
function isAbsolutePath(input) {
|
||||
return input.startsWith('/');
|
||||
}
|
||||
function isFileUrl(input) {
|
||||
return input.startsWith('file:');
|
||||
}
|
||||
function isRelative(input) {
|
||||
return /^[.?#]/.test(input);
|
||||
}
|
||||
function parseAbsoluteUrl(input) {
|
||||
const match = urlRegex.exec(input);
|
||||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
||||
}
|
||||
function parseFileUrl(input) {
|
||||
const match = fileRegex.exec(input);
|
||||
const path = match[2];
|
||||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
||||
}
|
||||
function makeUrl(scheme, user, host, port, path, query, hash) {
|
||||
return {
|
||||
scheme,
|
||||
user,
|
||||
host,
|
||||
port,
|
||||
path,
|
||||
query,
|
||||
hash,
|
||||
type: 7 /* Absolute */,
|
||||
};
|
||||
}
|
||||
function parseUrl(input) {
|
||||
if (isSchemeRelativeUrl(input)) {
|
||||
const url = parseAbsoluteUrl('http:' + input);
|
||||
url.scheme = '';
|
||||
url.type = 6 /* SchemeRelative */;
|
||||
return url;
|
||||
}
|
||||
if (isAbsolutePath(input)) {
|
||||
const url = parseAbsoluteUrl('http://foo.com' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = 5 /* AbsolutePath */;
|
||||
return url;
|
||||
}
|
||||
if (isFileUrl(input))
|
||||
return parseFileUrl(input);
|
||||
if (isAbsoluteUrl(input))
|
||||
return parseAbsoluteUrl(input);
|
||||
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = input
|
||||
? input.startsWith('?')
|
||||
? 3 /* Query */
|
||||
: input.startsWith('#')
|
||||
? 2 /* Hash */
|
||||
: 4 /* RelativePath */
|
||||
: 1 /* Empty */;
|
||||
return url;
|
||||
}
|
||||
function stripPathFilename(path) {
|
||||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
||||
// paths. It's not a file, so we can't strip it.
|
||||
if (path.endsWith('/..'))
|
||||
return path;
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
function mergePaths(url, base) {
|
||||
normalizePath(base, base.type);
|
||||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
||||
// path).
|
||||
if (url.path === '/') {
|
||||
url.path = base.path;
|
||||
}
|
||||
else {
|
||||
// Resolution happens relative to the base path's directory, not the file.
|
||||
url.path = stripPathFilename(base.path) + url.path;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
||||
* "foo/.". We need to normalize to a standard representation.
|
||||
*/
|
||||
function normalizePath(url, type) {
|
||||
const rel = type <= 4 /* RelativePath */;
|
||||
const pieces = url.path.split('/');
|
||||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
||||
// pieces[0] is an empty string.
|
||||
let pointer = 1;
|
||||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
||||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
||||
let positive = 0;
|
||||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
||||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
||||
// real directory, we won't need to append, unless the other conditions happen again.
|
||||
let addTrailingSlash = false;
|
||||
for (let i = 1; i < pieces.length; i++) {
|
||||
const piece = pieces[i];
|
||||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
||||
if (!piece) {
|
||||
addTrailingSlash = true;
|
||||
continue;
|
||||
}
|
||||
// If we encounter a real directory, then we don't need to append anymore.
|
||||
addTrailingSlash = false;
|
||||
// A current directory, which we can always drop.
|
||||
if (piece === '.')
|
||||
continue;
|
||||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
||||
// have an excess of parents, and we'll need to keep the "..".
|
||||
if (piece === '..') {
|
||||
if (positive) {
|
||||
addTrailingSlash = true;
|
||||
positive--;
|
||||
pointer--;
|
||||
}
|
||||
else if (rel) {
|
||||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
||||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
||||
pieces[pointer++] = piece;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
||||
// any popped or dropped directories.
|
||||
pieces[pointer++] = piece;
|
||||
positive++;
|
||||
}
|
||||
let path = '';
|
||||
for (let i = 1; i < pointer; i++) {
|
||||
path += '/' + pieces[i];
|
||||
}
|
||||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
||||
path += '/';
|
||||
}
|
||||
url.path = path;
|
||||
}
|
||||
/**
|
||||
* Attempts to resolve `input` URL/path relative to `base`.
|
||||
*/
|
||||
function resolve(input, base) {
|
||||
if (!input && !base)
|
||||
return '';
|
||||
const url = parseUrl(input);
|
||||
let inputType = url.type;
|
||||
if (base && inputType !== 7 /* Absolute */) {
|
||||
const baseUrl = parseUrl(base);
|
||||
const baseType = baseUrl.type;
|
||||
switch (inputType) {
|
||||
case 1 /* Empty */:
|
||||
url.hash = baseUrl.hash;
|
||||
// fall through
|
||||
case 2 /* Hash */:
|
||||
url.query = baseUrl.query;
|
||||
// fall through
|
||||
case 3 /* Query */:
|
||||
case 4 /* RelativePath */:
|
||||
mergePaths(url, baseUrl);
|
||||
// fall through
|
||||
case 5 /* AbsolutePath */:
|
||||
// The host, user, and port are joined, you can't copy one without the others.
|
||||
url.user = baseUrl.user;
|
||||
url.host = baseUrl.host;
|
||||
url.port = baseUrl.port;
|
||||
// fall through
|
||||
case 6 /* SchemeRelative */:
|
||||
// The input doesn't have a scheme, at least, so we need to copy at least that over.
|
||||
url.scheme = baseUrl.scheme;
|
||||
}
|
||||
if (baseType > inputType)
|
||||
inputType = baseType;
|
||||
}
|
||||
normalizePath(url, inputType);
|
||||
const queryHash = url.query + url.hash;
|
||||
switch (inputType) {
|
||||
// This is impossible, because of the empty checks at the start of the function.
|
||||
// case UrlType.Empty:
|
||||
case 2 /* Hash */:
|
||||
case 3 /* Query */:
|
||||
return queryHash;
|
||||
case 4 /* RelativePath */: {
|
||||
// The first char is always a "/", and we need it to be relative.
|
||||
const path = url.path.slice(1);
|
||||
if (!path)
|
||||
return queryHash || '.';
|
||||
if (isRelative(base || input) && !isRelative(path)) {
|
||||
// If base started with a leading ".", or there is no base and input started with a ".",
|
||||
// then we need to ensure that the relative path starts with a ".". We don't know if
|
||||
// relative starts with a "..", though, so check before prepending.
|
||||
return './' + path + queryHash;
|
||||
}
|
||||
return path + queryHash;
|
||||
}
|
||||
case 5 /* AbsolutePath */:
|
||||
return url.path + queryHash;
|
||||
default:
|
||||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||
}
|
||||
}
|
||||
|
||||
return resolve;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=resolve-uri.umd.js.map
|
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
/**
|
||||
* Attempts to resolve `input` URL/path relative to `base`.
|
||||
*/
|
||||
export default function resolve(input: string, base: string | undefined): string;
|
69
node_modules/@jridgewell/resolve-uri/package.json
generated
vendored
Normal file
@ -0,0 +1,69 @@
|
||||
{
|
||||
"name": "@jridgewell/resolve-uri",
|
||||
"version": "3.1.2",
|
||||
"description": "Resolve a URI relative to an optional base URI",
|
||||
"keywords": [
|
||||
"resolve",
|
||||
"uri",
|
||||
"url",
|
||||
"path"
|
||||
],
|
||||
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/jridgewell/resolve-uri",
|
||||
"main": "dist/resolve-uri.umd.js",
|
||||
"module": "dist/resolve-uri.mjs",
|
||||
"types": "dist/types/resolve-uri.d.ts",
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"types": "./dist/types/resolve-uri.d.ts",
|
||||
"browser": "./dist/resolve-uri.umd.js",
|
||||
"require": "./dist/resolve-uri.umd.js",
|
||||
"import": "./dist/resolve-uri.mjs"
|
||||
},
|
||||
"./dist/resolve-uri.umd.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"prebuild": "rm -rf dist",
|
||||
"build": "run-s -n build:*",
|
||||
"build:rollup": "rollup -c rollup.config.js",
|
||||
"build:ts": "tsc --project tsconfig.build.json",
|
||||
"lint": "run-s -n lint:*",
|
||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||
"pretest": "run-s build:rollup",
|
||||
"test": "run-s -n test:lint test:only",
|
||||
"test:debug": "mocha --inspect-brk",
|
||||
"test:lint": "run-s -n test:lint:*",
|
||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||
"test:only": "mocha",
|
||||
"test:coverage": "c8 mocha",
|
||||
"test:watch": "mocha --watch",
|
||||
"prepublishOnly": "npm run preversion",
|
||||
"preversion": "run-s test build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
|
||||
"@rollup/plugin-typescript": "8.3.0",
|
||||
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||
"@typescript-eslint/parser": "5.10.0",
|
||||
"c8": "7.11.0",
|
||||
"eslint": "8.7.0",
|
||||
"eslint-config-prettier": "8.3.0",
|
||||
"mocha": "9.2.0",
|
||||
"npm-run-all": "4.1.5",
|
||||
"prettier": "2.5.1",
|
||||
"rollup": "2.66.0",
|
||||
"typescript": "4.5.5"
|
||||
}
|
||||
}
|
21
node_modules/@jridgewell/sourcemap-codec/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2015 Rich Harris
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
264
node_modules/@jridgewell/sourcemap-codec/README.md
generated
vendored
Normal file
@ -0,0 +1,264 @@
|
||||
# @jridgewell/sourcemap-codec
|
||||
|
||||
Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).
|
||||
|
||||
|
||||
## Why?
|
||||
|
||||
Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.
|
||||
|
||||
This package makes the process slightly easier.
|
||||
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install @jridgewell/sourcemap-codec
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||
|
||||
var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||
|
||||
assert.deepEqual( decoded, [
|
||||
// the first line (of the generated code) has no mappings,
|
||||
// as shown by the starting semi-colon (which separates lines)
|
||||
[],
|
||||
|
||||
// the second line contains four (comma-separated) segments
|
||||
[
|
||||
// segments are encoded as you'd expect:
|
||||
// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]
|
||||
|
||||
// i.e. the first segment begins at column 2, and maps back to the second column
|
||||
// of the second line (both zero-based) of the 0th source, and uses the 0th
|
||||
// name in the `map.names` array
|
||||
[ 2, 0, 2, 2, 0 ],
|
||||
|
||||
// the remaining segments are 4-length rather than 5-length,
|
||||
// because they don't map a name
|
||||
[ 4, 0, 2, 4 ],
|
||||
[ 6, 0, 2, 5 ],
|
||||
[ 7, 0, 2, 7 ]
|
||||
],
|
||||
|
||||
// the final line contains two segments
|
||||
[
|
||||
[ 2, 1, 10, 19 ],
|
||||
[ 12, 1, 11, 20 ]
|
||||
]
|
||||
]);
|
||||
|
||||
var encoded = encode( decoded );
|
||||
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||
```
|
||||
|
||||
## Benchmarks

```
node v20.10.0

amp.js.map - 45120 segments

Decode Memory Usage:
local code 5815135 bytes
@jridgewell/sourcemap-codec 1.4.15 5868160 bytes
sourcemap-codec 5492584 bytes
source-map-0.6.1 13569984 bytes
source-map-0.8.0 6390584 bytes
chrome dev tools 8011136 bytes
Smallest memory usage is sourcemap-codec

Decode speed:
decode: local code x 492 ops/sec ±1.22% (90 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 499 ops/sec ±1.16% (89 runs sampled)
decode: sourcemap-codec x 376 ops/sec ±1.66% (89 runs sampled)
decode: source-map-0.6.1 x 34.99 ops/sec ±0.94% (48 runs sampled)
decode: source-map-0.8.0 x 351 ops/sec ±0.07% (95 runs sampled)
chrome dev tools x 165 ops/sec ±0.91% (86 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15

Encode Memory Usage:
local code 444248 bytes
@jridgewell/sourcemap-codec 1.4.15 623024 bytes
sourcemap-codec 8696280 bytes
source-map-0.6.1 8745176 bytes
source-map-0.8.0 8736624 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 796 ops/sec ±0.11% (97 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 795 ops/sec ±0.25% (98 runs sampled)
encode: sourcemap-codec x 231 ops/sec ±0.83% (86 runs sampled)
encode: source-map-0.6.1 x 166 ops/sec ±0.57% (86 runs sampled)
encode: source-map-0.8.0 x 203 ops/sec ±0.45% (88 runs sampled)
Fastest is encode: local code,encode: @jridgewell/sourcemap-codec 1.4.15

***

babel.min.js.map - 347793 segments

Decode Memory Usage:
local code 35424960 bytes
@jridgewell/sourcemap-codec 1.4.15 35424696 bytes
sourcemap-codec 36033464 bytes
source-map-0.6.1 62253704 bytes
source-map-0.8.0 43843920 bytes
chrome dev tools 45111400 bytes
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15

Decode speed:
decode: local code x 38.18 ops/sec ±5.44% (52 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 38.36 ops/sec ±5.02% (52 runs sampled)
decode: sourcemap-codec x 34.05 ops/sec ±4.45% (47 runs sampled)
decode: source-map-0.6.1 x 4.31 ops/sec ±2.76% (15 runs sampled)
decode: source-map-0.8.0 x 55.60 ops/sec ±0.13% (73 runs sampled)
chrome dev tools x 16.94 ops/sec ±3.78% (46 runs sampled)
Fastest is decode: source-map-0.8.0

Encode Memory Usage:
local code 2606016 bytes
@jridgewell/sourcemap-codec 1.4.15 2626440 bytes
sourcemap-codec 21152576 bytes
source-map-0.6.1 25023928 bytes
source-map-0.8.0 25256448 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 127 ops/sec ±0.18% (83 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 128 ops/sec ±0.26% (83 runs sampled)
encode: sourcemap-codec x 29.31 ops/sec ±2.55% (53 runs sampled)
encode: source-map-0.6.1 x 18.85 ops/sec ±3.19% (36 runs sampled)
encode: source-map-0.8.0 x 19.34 ops/sec ±1.97% (36 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15

***

preact.js.map - 1992 segments

Decode Memory Usage:
local code 261696 bytes
@jridgewell/sourcemap-codec 1.4.15 244296 bytes
sourcemap-codec 302816 bytes
source-map-0.6.1 939176 bytes
source-map-0.8.0 336 bytes
chrome dev tools 587368 bytes
Smallest memory usage is source-map-0.8.0

Decode speed:
decode: local code x 17,782 ops/sec ±0.32% (97 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 17,863 ops/sec ±0.40% (100 runs sampled)
decode: sourcemap-codec x 12,453 ops/sec ±0.27% (101 runs sampled)
decode: source-map-0.6.1 x 1,288 ops/sec ±1.05% (96 runs sampled)
decode: source-map-0.8.0 x 9,289 ops/sec ±0.27% (101 runs sampled)
chrome dev tools x 4,769 ops/sec ±0.18% (100 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15

Encode Memory Usage:
local code 262944 bytes
@jridgewell/sourcemap-codec 1.4.15 25544 bytes
sourcemap-codec 323048 bytes
source-map-0.6.1 507808 bytes
source-map-0.8.0 507480 bytes
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15

Encode speed:
encode: local code x 24,207 ops/sec ±0.79% (95 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 24,288 ops/sec ±0.48% (96 runs sampled)
encode: sourcemap-codec x 6,761 ops/sec ±0.21% (100 runs sampled)
encode: source-map-0.6.1 x 5,374 ops/sec ±0.17% (99 runs sampled)
encode: source-map-0.8.0 x 5,633 ops/sec ±0.32% (99 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15,encode: local code

***

react.js.map - 5726 segments

Decode Memory Usage:
local code 678816 bytes
@jridgewell/sourcemap-codec 1.4.15 678816 bytes
sourcemap-codec 816400 bytes
source-map-0.6.1 2288864 bytes
source-map-0.8.0 721360 bytes
chrome dev tools 1012512 bytes
Smallest memory usage is local code

Decode speed:
decode: local code x 6,178 ops/sec ±0.19% (98 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 6,261 ops/sec ±0.22% (100 runs sampled)
decode: sourcemap-codec x 4,472 ops/sec ±0.90% (99 runs sampled)
decode: source-map-0.6.1 x 449 ops/sec ±0.31% (95 runs sampled)
decode: source-map-0.8.0 x 3,219 ops/sec ±0.13% (100 runs sampled)
chrome dev tools x 1,743 ops/sec ±0.20% (99 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15

Encode Memory Usage:
local code 140960 bytes
@jridgewell/sourcemap-codec 1.4.15 159808 bytes
sourcemap-codec 969304 bytes
source-map-0.6.1 930520 bytes
source-map-0.8.0 930248 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 8,013 ops/sec ±0.19% (100 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 7,989 ops/sec ±0.20% (101 runs sampled)
encode: sourcemap-codec x 2,472 ops/sec ±0.21% (99 runs sampled)
encode: source-map-0.6.1 x 2,200 ops/sec ±0.17% (99 runs sampled)
encode: source-map-0.8.0 x 2,220 ops/sec ±0.37% (99 runs sampled)
Fastest is encode: local code

***

vscode.map - 2141001 segments

Decode Memory Usage:
local code 198955264 bytes
@jridgewell/sourcemap-codec 1.4.15 199175352 bytes
sourcemap-codec 199102688 bytes
source-map-0.6.1 386323432 bytes
source-map-0.8.0 244116432 bytes
chrome dev tools 293734280 bytes
Smallest memory usage is local code

Decode speed:
decode: local code x 3.90 ops/sec ±22.21% (15 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 3.95 ops/sec ±23.53% (15 runs sampled)
decode: sourcemap-codec x 3.82 ops/sec ±17.94% (14 runs sampled)
decode: source-map-0.6.1 x 0.61 ops/sec ±7.81% (6 runs sampled)
decode: source-map-0.8.0 x 9.54 ops/sec ±0.28% (28 runs sampled)
chrome dev tools x 2.18 ops/sec ±10.58% (10 runs sampled)
Fastest is decode: source-map-0.8.0

Encode Memory Usage:
local code 13509880 bytes
@jridgewell/sourcemap-codec 1.4.15 13537648 bytes
sourcemap-codec 32540104 bytes
source-map-0.6.1 127531040 bytes
source-map-0.8.0 127535312 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 20.10 ops/sec ±0.19% (38 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 20.26 ops/sec ±0.32% (38 runs sampled)
encode: sourcemap-codec x 5.44 ops/sec ±1.64% (18 runs sampled)
encode: source-map-0.6.1 x 2.30 ops/sec ±4.79% (10 runs sampled)
encode: source-map-0.8.0 x 2.46 ops/sec ±6.53% (10 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15
```
# License

MIT
424
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
Normal file
@@ -0,0 +1,424 @@
|
||||
const comma = ','.charCodeAt(0);
|
||||
const semicolon = ';'.charCodeAt(0);
|
||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||
for (let i = 0; i < chars.length; i++) {
|
||||
const c = chars.charCodeAt(i);
|
||||
intToChar[i] = c;
|
||||
charToInt[c] = i;
|
||||
}
|
||||
function decodeInteger(reader, relative) {
|
||||
let value = 0;
|
||||
let shift = 0;
|
||||
let integer = 0;
|
||||
do {
|
||||
const c = reader.next();
|
||||
integer = charToInt[c];
|
||||
value |= (integer & 31) << shift;
|
||||
shift += 5;
|
||||
} while (integer & 32);
|
||||
const shouldNegate = value & 1;
|
||||
value >>>= 1;
|
||||
if (shouldNegate) {
|
||||
value = -0x80000000 | -value;
|
||||
}
|
||||
return relative + value;
|
||||
}
|
||||
function encodeInteger(builder, num, relative) {
|
||||
let delta = num - relative;
|
||||
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
|
||||
do {
|
||||
let clamped = delta & 0b011111;
|
||||
delta >>>= 5;
|
||||
if (delta > 0)
|
||||
clamped |= 0b100000;
|
||||
builder.write(intToChar[clamped]);
|
||||
} while (delta > 0);
|
||||
return num;
|
||||
}
|
||||
function hasMoreVlq(reader, max) {
|
||||
if (reader.pos >= max)
|
||||
return false;
|
||||
return reader.peek() !== comma;
|
||||
}
|
||||
|
||||
const bufLength = 1024 * 16;
|
||||
// Provide a fallback for older environments.
|
||||
const td = typeof TextDecoder !== 'undefined'
|
||||
? /* #__PURE__ */ new TextDecoder()
|
||||
: typeof Buffer !== 'undefined'
|
||||
? {
|
||||
decode(buf) {
|
||||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
return out.toString();
|
||||
},
|
||||
}
|
||||
: {
|
||||
decode(buf) {
|
||||
let out = '';
|
||||
for (let i = 0; i < buf.length; i++) {
|
||||
out += String.fromCharCode(buf[i]);
|
||||
}
|
||||
return out;
|
||||
},
|
||||
};
|
||||
class StringWriter {
|
||||
constructor() {
|
||||
this.pos = 0;
|
||||
this.out = '';
|
||||
this.buffer = new Uint8Array(bufLength);
|
||||
}
|
||||
write(v) {
|
||||
const { buffer } = this;
|
||||
buffer[this.pos++] = v;
|
||||
if (this.pos === bufLength) {
|
||||
this.out += td.decode(buffer);
|
||||
this.pos = 0;
|
||||
}
|
||||
}
|
||||
flush() {
|
||||
const { buffer, out, pos } = this;
|
||||
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
|
||||
}
|
||||
}
|
||||
class StringReader {
|
||||
constructor(buffer) {
|
||||
this.pos = 0;
|
||||
this.buffer = buffer;
|
||||
}
|
||||
next() {
|
||||
return this.buffer.charCodeAt(this.pos++);
|
||||
}
|
||||
peek() {
|
||||
return this.buffer.charCodeAt(this.pos);
|
||||
}
|
||||
indexOf(char) {
|
||||
const { buffer, pos } = this;
|
||||
const idx = buffer.indexOf(char, pos);
|
||||
return idx === -1 ? buffer.length : idx;
|
||||
}
|
||||
}
|
||||
|
||||
const EMPTY = [];
|
||||
function decodeOriginalScopes(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const scopes = [];
|
||||
const stack = [];
|
||||
let line = 0;
|
||||
for (; reader.pos < length; reader.pos++) {
|
||||
line = decodeInteger(reader, line);
|
||||
const column = decodeInteger(reader, 0);
|
||||
if (!hasMoreVlq(reader, length)) {
|
||||
const last = stack.pop();
|
||||
last[2] = line;
|
||||
last[3] = column;
|
||||
continue;
|
||||
}
|
||||
const kind = decodeInteger(reader, 0);
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasName = fields & 0b0001;
|
||||
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
|
||||
let vars = EMPTY;
|
||||
if (hasMoreVlq(reader, length)) {
|
||||
vars = [];
|
||||
do {
|
||||
const varsIndex = decodeInteger(reader, 0);
|
||||
vars.push(varsIndex);
|
||||
} while (hasMoreVlq(reader, length));
|
||||
}
|
||||
scope.vars = vars;
|
||||
scopes.push(scope);
|
||||
stack.push(scope);
|
||||
}
|
||||
return scopes;
|
||||
}
|
||||
function encodeOriginalScopes(scopes) {
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < scopes.length;) {
|
||||
i = _encodeOriginalScopes(scopes, i, writer, [0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeOriginalScopes(scopes, index, writer, state) {
|
||||
const scope = scopes[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
|
||||
if (index > 0)
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, startLine, state[0]);
|
||||
encodeInteger(writer, startColumn, 0);
|
||||
encodeInteger(writer, kind, 0);
|
||||
const fields = scope.length === 6 ? 0b0001 : 0;
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (scope.length === 6)
|
||||
encodeInteger(writer, scope[5], 0);
|
||||
for (const v of vars) {
|
||||
encodeInteger(writer, v, 0);
|
||||
}
|
||||
for (index++; index < scopes.length;) {
|
||||
const next = scopes[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeOriginalScopes(scopes, index, writer, state);
|
||||
}
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, endLine, state[0]);
|
||||
encodeInteger(writer, endColumn, 0);
|
||||
return index;
|
||||
}
|
||||
function decodeGeneratedRanges(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const ranges = [];
|
||||
const stack = [];
|
||||
let genLine = 0;
|
||||
let definitionSourcesIndex = 0;
|
||||
let definitionScopeIndex = 0;
|
||||
let callsiteSourcesIndex = 0;
|
||||
let callsiteLine = 0;
|
||||
let callsiteColumn = 0;
|
||||
let bindingLine = 0;
|
||||
let bindingColumn = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
let genColumn = 0;
|
||||
for (; reader.pos < semi; reader.pos++) {
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (!hasMoreVlq(reader, semi)) {
|
||||
const last = stack.pop();
|
||||
last[2] = genLine;
|
||||
last[3] = genColumn;
|
||||
continue;
|
||||
}
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasDefinition = fields & 0b0001;
|
||||
const hasCallsite = fields & 0b0010;
|
||||
const hasScope = fields & 0b0100;
|
||||
let callsite = null;
|
||||
let bindings = EMPTY;
|
||||
let range;
|
||||
if (hasDefinition) {
|
||||
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
|
||||
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
|
||||
definitionSourcesIndex = defSourcesIndex;
|
||||
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
|
||||
}
|
||||
else {
|
||||
range = [genLine, genColumn, 0, 0];
|
||||
}
|
||||
range.isScope = !!hasScope;
|
||||
if (hasCallsite) {
|
||||
const prevCsi = callsiteSourcesIndex;
|
||||
const prevLine = callsiteLine;
|
||||
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
|
||||
const sameSource = prevCsi === callsiteSourcesIndex;
|
||||
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
|
||||
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
|
||||
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
|
||||
}
|
||||
range.callsite = callsite;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
bindings = [];
|
||||
do {
|
||||
bindingLine = genLine;
|
||||
bindingColumn = genColumn;
|
||||
const expressionsCount = decodeInteger(reader, 0);
|
||||
let expressionRanges;
|
||||
if (expressionsCount < -1) {
|
||||
expressionRanges = [[decodeInteger(reader, 0)]];
|
||||
for (let i = -1; i > expressionsCount; i--) {
|
||||
const prevBl = bindingLine;
|
||||
bindingLine = decodeInteger(reader, bindingLine);
|
||||
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
|
||||
const expression = decodeInteger(reader, 0);
|
||||
expressionRanges.push([expression, bindingLine, bindingColumn]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
expressionRanges = [[expressionsCount]];
|
||||
}
|
||||
bindings.push(expressionRanges);
|
||||
} while (hasMoreVlq(reader, semi));
|
||||
}
|
||||
range.bindings = bindings;
|
||||
ranges.push(range);
|
||||
stack.push(range);
|
||||
}
|
||||
genLine++;
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos < length);
|
||||
return ranges;
|
||||
}
|
||||
function encodeGeneratedRanges(ranges) {
|
||||
if (ranges.length === 0)
|
||||
return '';
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < ranges.length;) {
|
||||
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeGeneratedRanges(ranges, index, writer, state) {
|
||||
const range = ranges[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
|
||||
if (state[0] < startLine) {
|
||||
catchupLine(writer, state[0], startLine);
|
||||
state[0] = startLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else if (index > 0) {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, range[1], state[1]);
|
||||
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (range.length === 6) {
|
||||
const { 4: sourcesIndex, 5: scopesIndex } = range;
|
||||
if (sourcesIndex !== state[2]) {
|
||||
state[3] = 0;
|
||||
}
|
||||
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
|
||||
state[3] = encodeInteger(writer, scopesIndex, state[3]);
|
||||
}
|
||||
if (callsite) {
|
||||
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
|
||||
if (sourcesIndex !== state[4]) {
|
||||
state[5] = 0;
|
||||
state[6] = 0;
|
||||
}
|
||||
else if (callLine !== state[5]) {
|
||||
state[6] = 0;
|
||||
}
|
||||
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
|
||||
state[5] = encodeInteger(writer, callLine, state[5]);
|
||||
state[6] = encodeInteger(writer, callColumn, state[6]);
|
||||
}
|
||||
if (bindings) {
|
||||
for (const binding of bindings) {
|
||||
if (binding.length > 1)
|
||||
encodeInteger(writer, -binding.length, 0);
|
||||
const expression = binding[0][0];
|
||||
encodeInteger(writer, expression, 0);
|
||||
let bindingStartLine = startLine;
|
||||
let bindingStartColumn = startColumn;
|
||||
for (let i = 1; i < binding.length; i++) {
|
||||
const expRange = binding[i];
|
||||
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
|
||||
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
|
||||
encodeInteger(writer, expRange[0], 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (index++; index < ranges.length;) {
|
||||
const next = ranges[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeGeneratedRanges(ranges, index, writer, state);
|
||||
}
|
||||
if (state[0] < endLine) {
|
||||
catchupLine(writer, state[0], endLine);
|
||||
state[0] = endLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, endColumn, state[1]);
|
||||
return index;
|
||||
}
|
||||
function catchupLine(writer, lastLine, line) {
|
||||
do {
|
||||
writer.write(semicolon);
|
||||
} while (++lastLine < line);
|
||||
}
|
||||
|
||||
function decode(mappings) {
|
||||
const { length } = mappings;
|
||||
const reader = new StringReader(mappings);
|
||||
const decoded = [];
|
||||
let genColumn = 0;
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
const line = [];
|
||||
let sorted = true;
|
||||
let lastCol = 0;
|
||||
genColumn = 0;
|
||||
while (reader.pos < semi) {
|
||||
let seg;
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (genColumn < lastCol)
|
||||
sorted = false;
|
||||
lastCol = genColumn;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
||||
sourceLine = decodeInteger(reader, sourceLine);
|
||||
sourceColumn = decodeInteger(reader, sourceColumn);
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
namesIndex = decodeInteger(reader, namesIndex);
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
||||
}
|
||||
else {
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
||||
}
|
||||
}
|
||||
else {
|
||||
seg = [genColumn];
|
||||
}
|
||||
line.push(seg);
|
||||
reader.pos++;
|
||||
}
|
||||
if (!sorted)
|
||||
sort(line);
|
||||
decoded.push(line);
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos <= length);
|
||||
return decoded;
|
||||
}
|
||||
function sort(line) {
|
||||
line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[0] - b[0];
|
||||
}
|
||||
function encode(decoded) {
|
||||
const writer = new StringWriter();
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
if (i > 0)
|
||||
writer.write(semicolon);
|
||||
if (line.length === 0)
|
||||
continue;
|
||||
let genColumn = 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const segment = line[j];
|
||||
if (j > 0)
|
||||
writer.write(comma);
|
||||
genColumn = encodeInteger(writer, segment[0], genColumn);
|
||||
if (segment.length === 1)
|
||||
continue;
|
||||
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
|
||||
sourceLine = encodeInteger(writer, segment[2], sourceLine);
|
||||
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
|
||||
if (segment.length === 4)
|
||||
continue;
|
||||
namesIndex = encodeInteger(writer, segment[4], namesIndex);
|
||||
}
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
|
||||
export { decode, decodeGeneratedRanges, decodeOriginalScopes, encode, encodeGeneratedRanges, encodeOriginalScopes };
|
||||
//# sourceMappingURL=sourcemap-codec.mjs.map
|
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
439
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
@@ -0,0 +1,439 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
|
||||
})(this, (function (exports) { 'use strict';
|
||||
|
||||
const comma = ','.charCodeAt(0);
|
||||
const semicolon = ';'.charCodeAt(0);
|
||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||
for (let i = 0; i < chars.length; i++) {
|
||||
const c = chars.charCodeAt(i);
|
||||
intToChar[i] = c;
|
||||
charToInt[c] = i;
|
||||
}
|
||||
function decodeInteger(reader, relative) {
|
||||
let value = 0;
|
||||
let shift = 0;
|
||||
let integer = 0;
|
||||
do {
|
||||
const c = reader.next();
|
||||
integer = charToInt[c];
|
||||
value |= (integer & 31) << shift;
|
||||
shift += 5;
|
||||
} while (integer & 32);
|
||||
const shouldNegate = value & 1;
|
||||
value >>>= 1;
|
||||
if (shouldNegate) {
|
||||
value = -0x80000000 | -value;
|
||||
}
|
||||
return relative + value;
|
||||
}
|
||||
function encodeInteger(builder, num, relative) {
|
||||
let delta = num - relative;
|
||||
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
|
||||
do {
|
||||
let clamped = delta & 0b011111;
|
||||
delta >>>= 5;
|
||||
if (delta > 0)
|
||||
clamped |= 0b100000;
|
||||
builder.write(intToChar[clamped]);
|
||||
} while (delta > 0);
|
||||
return num;
|
||||
}
|
||||
function hasMoreVlq(reader, max) {
|
||||
if (reader.pos >= max)
|
||||
return false;
|
||||
return reader.peek() !== comma;
|
||||
}
|
||||
|
||||
const bufLength = 1024 * 16;
|
||||
// Provide a fallback for older environments.
|
||||
const td = typeof TextDecoder !== 'undefined'
|
||||
? /* #__PURE__ */ new TextDecoder()
|
||||
: typeof Buffer !== 'undefined'
|
||||
? {
|
||||
decode(buf) {
|
||||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
return out.toString();
|
||||
},
|
||||
}
|
||||
: {
|
||||
decode(buf) {
|
||||
let out = '';
|
||||
for (let i = 0; i < buf.length; i++) {
|
||||
out += String.fromCharCode(buf[i]);
|
||||
}
|
||||
return out;
|
||||
},
|
||||
};
|
||||
class StringWriter {
|
||||
constructor() {
|
||||
this.pos = 0;
|
||||
this.out = '';
|
||||
this.buffer = new Uint8Array(bufLength);
|
||||
}
|
||||
write(v) {
|
||||
const { buffer } = this;
|
||||
buffer[this.pos++] = v;
|
||||
if (this.pos === bufLength) {
|
||||
this.out += td.decode(buffer);
|
||||
this.pos = 0;
|
||||
}
|
||||
}
|
||||
flush() {
|
||||
const { buffer, out, pos } = this;
|
||||
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
|
||||
}
|
||||
}
|
||||
class StringReader {
|
||||
constructor(buffer) {
|
||||
this.pos = 0;
|
||||
this.buffer = buffer;
|
||||
}
|
||||
next() {
|
||||
return this.buffer.charCodeAt(this.pos++);
|
||||
}
|
||||
peek() {
|
||||
return this.buffer.charCodeAt(this.pos);
|
||||
}
|
||||
indexOf(char) {
|
||||
const { buffer, pos } = this;
|
||||
const idx = buffer.indexOf(char, pos);
|
||||
return idx === -1 ? buffer.length : idx;
|
||||
}
|
||||
}
|
||||
|
||||
const EMPTY = [];
|
||||
function decodeOriginalScopes(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const scopes = [];
|
||||
const stack = [];
|
||||
let line = 0;
|
||||
for (; reader.pos < length; reader.pos++) {
|
||||
line = decodeInteger(reader, line);
|
||||
const column = decodeInteger(reader, 0);
|
||||
if (!hasMoreVlq(reader, length)) {
|
||||
const last = stack.pop();
|
||||
last[2] = line;
|
||||
last[3] = column;
|
||||
continue;
|
||||
}
|
||||
const kind = decodeInteger(reader, 0);
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasName = fields & 0b0001;
|
||||
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
|
||||
let vars = EMPTY;
|
||||
if (hasMoreVlq(reader, length)) {
|
||||
vars = [];
|
||||
do {
|
||||
const varsIndex = decodeInteger(reader, 0);
|
||||
vars.push(varsIndex);
|
||||
} while (hasMoreVlq(reader, length));
|
||||
}
|
||||
scope.vars = vars;
|
||||
scopes.push(scope);
|
||||
stack.push(scope);
|
||||
}
|
||||
return scopes;
|
||||
}
|
||||
function encodeOriginalScopes(scopes) {
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < scopes.length;) {
|
||||
i = _encodeOriginalScopes(scopes, i, writer, [0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeOriginalScopes(scopes, index, writer, state) {
|
||||
const scope = scopes[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
|
||||
if (index > 0)
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, startLine, state[0]);
|
||||
encodeInteger(writer, startColumn, 0);
|
||||
encodeInteger(writer, kind, 0);
|
||||
const fields = scope.length === 6 ? 0b0001 : 0;
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (scope.length === 6)
|
||||
encodeInteger(writer, scope[5], 0);
|
||||
for (const v of vars) {
|
||||
encodeInteger(writer, v, 0);
|
||||
}
|
||||
for (index++; index < scopes.length;) {
|
||||
const next = scopes[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeOriginalScopes(scopes, index, writer, state);
|
||||
}
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, endLine, state[0]);
|
||||
encodeInteger(writer, endColumn, 0);
|
||||
return index;
|
||||
}
|
||||
function decodeGeneratedRanges(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const ranges = [];
|
||||
const stack = [];
|
||||
let genLine = 0;
|
||||
let definitionSourcesIndex = 0;
|
||||
let definitionScopeIndex = 0;
|
||||
let callsiteSourcesIndex = 0;
|
||||
let callsiteLine = 0;
|
||||
let callsiteColumn = 0;
|
||||
let bindingLine = 0;
|
||||
let bindingColumn = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
let genColumn = 0;
|
||||
for (; reader.pos < semi; reader.pos++) {
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (!hasMoreVlq(reader, semi)) {
|
||||
const last = stack.pop();
|
||||
last[2] = genLine;
|
||||
last[3] = genColumn;
|
||||
continue;
|
||||
}
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasDefinition = fields & 0b0001;
|
||||
const hasCallsite = fields & 0b0010;
|
||||
const hasScope = fields & 0b0100;
|
||||
let callsite = null;
|
||||
let bindings = EMPTY;
|
||||
let range;
|
||||
if (hasDefinition) {
|
||||
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
|
||||
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
|
||||
definitionSourcesIndex = defSourcesIndex;
|
||||
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
|
||||
}
|
||||
else {
|
||||
range = [genLine, genColumn, 0, 0];
|
||||
}
|
||||
range.isScope = !!hasScope;
|
||||
if (hasCallsite) {
|
||||
const prevCsi = callsiteSourcesIndex;
|
||||
const prevLine = callsiteLine;
|
||||
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
|
||||
const sameSource = prevCsi === callsiteSourcesIndex;
|
||||
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
|
||||
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
|
||||
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
|
||||
}
|
||||
range.callsite = callsite;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
bindings = [];
|
||||
do {
|
||||
bindingLine = genLine;
|
||||
bindingColumn = genColumn;
|
||||
const expressionsCount = decodeInteger(reader, 0);
|
||||
let expressionRanges;
|
||||
if (expressionsCount < -1) {
|
||||
expressionRanges = [[decodeInteger(reader, 0)]];
|
||||
for (let i = -1; i > expressionsCount; i--) {
|
||||
const prevBl = bindingLine;
|
||||
bindingLine = decodeInteger(reader, bindingLine);
|
||||
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
|
||||
const expression = decodeInteger(reader, 0);
|
||||
expressionRanges.push([expression, bindingLine, bindingColumn]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
expressionRanges = [[expressionsCount]];
|
||||
}
|
||||
bindings.push(expressionRanges);
|
||||
} while (hasMoreVlq(reader, semi));
|
||||
}
|
||||
range.bindings = bindings;
|
||||
ranges.push(range);
|
||||
stack.push(range);
|
||||
}
|
||||
genLine++;
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos < length);
|
||||
return ranges;
|
||||
}
|
||||
function encodeGeneratedRanges(ranges) {
|
||||
if (ranges.length === 0)
|
||||
return '';
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < ranges.length;) {
|
||||
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeGeneratedRanges(ranges, index, writer, state) {
|
||||
const range = ranges[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
|
||||
if (state[0] < startLine) {
|
||||
catchupLine(writer, state[0], startLine);
|
||||
state[0] = startLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else if (index > 0) {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, range[1], state[1]);
|
||||
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (range.length === 6) {
|
||||
const { 4: sourcesIndex, 5: scopesIndex } = range;
|
||||
if (sourcesIndex !== state[2]) {
|
||||
state[3] = 0;
|
||||
}
|
||||
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
|
||||
state[3] = encodeInteger(writer, scopesIndex, state[3]);
|
||||
}
|
||||
if (callsite) {
|
||||
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
|
||||
if (sourcesIndex !== state[4]) {
|
||||
state[5] = 0;
|
||||
state[6] = 0;
|
||||
}
|
||||
else if (callLine !== state[5]) {
|
||||
state[6] = 0;
|
||||
}
|
||||
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
|
||||
state[5] = encodeInteger(writer, callLine, state[5]);
|
||||
state[6] = encodeInteger(writer, callColumn, state[6]);
|
||||
}
|
||||
if (bindings) {
|
||||
for (const binding of bindings) {
|
||||
if (binding.length > 1)
|
||||
encodeInteger(writer, -binding.length, 0);
|
||||
const expression = binding[0][0];
|
||||
encodeInteger(writer, expression, 0);
|
||||
let bindingStartLine = startLine;
|
||||
let bindingStartColumn = startColumn;
|
||||
for (let i = 1; i < binding.length; i++) {
|
||||
const expRange = binding[i];
|
||||
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
|
||||
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
|
||||
encodeInteger(writer, expRange[0], 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (index++; index < ranges.length;) {
|
||||
const next = ranges[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeGeneratedRanges(ranges, index, writer, state);
|
||||
}
|
||||
if (state[0] < endLine) {
|
||||
catchupLine(writer, state[0], endLine);
|
||||
state[0] = endLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, endColumn, state[1]);
|
||||
return index;
|
||||
}
|
||||
function catchupLine(writer, lastLine, line) {
|
||||
do {
|
||||
writer.write(semicolon);
|
||||
} while (++lastLine < line);
|
||||
}
|
||||
|
||||
function decode(mappings) {
|
||||
const { length } = mappings;
|
||||
const reader = new StringReader(mappings);
|
||||
const decoded = [];
|
||||
let genColumn = 0;
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
const line = [];
|
||||
let sorted = true;
|
||||
let lastCol = 0;
|
||||
genColumn = 0;
|
||||
while (reader.pos < semi) {
|
||||
let seg;
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (genColumn < lastCol)
|
||||
sorted = false;
|
||||
lastCol = genColumn;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
||||
sourceLine = decodeInteger(reader, sourceLine);
|
||||
sourceColumn = decodeInteger(reader, sourceColumn);
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
namesIndex = decodeInteger(reader, namesIndex);
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
||||
}
|
||||
else {
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
||||
}
|
||||
}
|
||||
else {
|
||||
seg = [genColumn];
|
||||
}
|
||||
line.push(seg);
|
||||
reader.pos++;
|
||||
}
|
||||
if (!sorted)
|
||||
sort(line);
|
||||
decoded.push(line);
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos <= length);
|
||||
return decoded;
|
||||
}
|
||||
function sort(line) {
|
||||
line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[0] - b[0];
|
||||
}
|
||||
function encode(decoded) {
|
||||
const writer = new StringWriter();
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
if (i > 0)
|
||||
writer.write(semicolon);
|
||||
if (line.length === 0)
|
||||
continue;
|
||||
let genColumn = 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const segment = line[j];
|
||||
if (j > 0)
|
||||
writer.write(comma);
|
||||
genColumn = encodeInteger(writer, segment[0], genColumn);
|
||||
if (segment.length === 1)
|
||||
continue;
|
||||
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
|
||||
sourceLine = encodeInteger(writer, segment[2], sourceLine);
|
||||
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
|
||||
if (segment.length === 4)
|
||||
continue;
|
||||
namesIndex = encodeInteger(writer, segment[4], namesIndex);
|
||||
}
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
|
||||
exports.decode = decode;
|
||||
exports.decodeGeneratedRanges = decodeGeneratedRanges;
|
||||
exports.decodeOriginalScopes = decodeOriginalScopes;
|
||||
exports.encode = encode;
|
||||
exports.encodeGeneratedRanges = encodeGeneratedRanges;
|
||||
exports.encodeOriginalScopes = encodeOriginalScopes;
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=sourcemap-codec.umd.js.map
|
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
49
node_modules/@jridgewell/sourcemap-codec/dist/types/scopes.d.ts
generated
vendored
Normal file
@@ -0,0 +1,49 @@
declare type Line = number;
declare type Column = number;
declare type Kind = number;
declare type Name = number;
declare type Var = number;
declare type SourcesIndex = number;
declare type ScopesIndex = number;
declare type Mix<A, B, O> = (A & O) | (B & O);
export declare type OriginalScope = Mix<[
    Line,
    Column,
    Line,
    Column,
    Kind
], [
    Line,
    Column,
    Line,
    Column,
    Kind,
    Name
], {
    vars: Var[];
}>;
export declare type GeneratedRange = Mix<[
    Line,
    Column,
    Line,
    Column
], [
    Line,
    Column,
    Line,
    Column,
    SourcesIndex,
    ScopesIndex
], {
    callsite: CallSite | null;
    bindings: Binding[];
    isScope: boolean;
}>;
export declare type CallSite = [SourcesIndex, Line, Column];
declare type Binding = BindingExpressionRange[];
export declare type BindingExpressionRange = [Name] | [Name, Line, Column];
export declare function decodeOriginalScopes(input: string): OriginalScope[];
export declare function encodeOriginalScopes(scopes: OriginalScope[]): string;
export declare function decodeGeneratedRanges(input: string): GeneratedRange[];
export declare function encodeGeneratedRanges(ranges: GeneratedRange[]): string;
export {};
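The `OriginalScope` shape declared above is a plain array with a `vars` property attached. A hedged sketch (these scope APIs are not covered by the package README, and the values here are made up) showing a round trip through the scope codec:

```js
import { encodeOriginalScopes, decodeOriginalScopes } from '@jridgewell/sourcemap-codec';

// [startLine, startColumn, endLine, endColumn, kind], plus the indices of the
// variable names this scope declares, attached as a `vars` property.
const scope = Object.assign([0, 0, 2, 10, 1], { vars: [0, 1] });

const encoded = encodeOriginalScopes([scope]);
const decoded = decodeOriginalScopes(encoded);
console.log([...decoded[0]], decoded[0].vars); // [ 0, 0, 2, 10, 1 ] [ 0, 1 ]
```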
8
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
export { decodeOriginalScopes, encodeOriginalScopes, decodeGeneratedRanges, encodeGeneratedRanges, } from './scopes';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes';
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export declare type SourceMapLine = SourceMapSegment[];
export declare type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
15
node_modules/@jridgewell/sourcemap-codec/dist/types/strings.d.ts
generated
vendored
Normal file
@@ -0,0 +1,15 @@
export declare class StringWriter {
    pos: number;
    private out;
    private buffer;
    write(v: number): void;
    flush(): string;
}
export declare class StringReader {
    pos: number;
    private buffer;
    constructor(buffer: string);
    next(): number;
    peek(): number;
    indexOf(char: string): number;
}
6
node_modules/@jridgewell/sourcemap-codec/dist/types/vlq.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
import type { StringReader, StringWriter } from './strings';
export declare const comma: number;
export declare const semicolon: number;
export declare function decodeInteger(reader: StringReader, relative: number): number;
export declare function encodeInteger(builder: StringWriter, num: number, relative: number): number;
export declare function hasMoreVlq(reader: StringReader, max: number): boolean;
75
node_modules/@jridgewell/sourcemap-codec/package.json
generated
vendored
Normal file
@@ -0,0 +1,75 @@
{
  "name": "@jridgewell/sourcemap-codec",
  "version": "1.5.0",
  "description": "Encode/decode sourcemap mappings",
  "keywords": [
    "sourcemap",
    "vlq"
  ],
  "main": "dist/sourcemap-codec.umd.js",
  "module": "dist/sourcemap-codec.mjs",
  "types": "dist/types/sourcemap-codec.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/sourcemap-codec.d.ts",
        "browser": "./dist/sourcemap-codec.umd.js",
        "require": "./dist/sourcemap-codec.umd.js",
        "import": "./dist/sourcemap-codec.mjs"
      },
      "./dist/sourcemap-codec.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.js",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/sourcemap-codec.git"
  },
  "author": "Rich Harris",
  "license": "MIT",
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/mocha": "10.0.6",
    "@types/node": "17.0.15",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "benchmark": "2.1.4",
    "c8": "7.11.2",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.64.0",
    "source-map": "0.6.1",
    "source-map-js": "1.0.2",
    "sourcemap-codec": "1.4.8",
    "tsx": "4.7.1",
    "typescript": "4.5.4"
  }
}
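The `exports` map above is what decides which build a consumer gets: the `import` condition resolves to the ESM build and `require` to the UMD build. A hedged illustration (standard Node resolution behaviour assumed, not stated by the package itself):

```js
// ESM: the "import" condition resolves to dist/sourcemap-codec.mjs.
import { decode } from '@jridgewell/sourcemap-codec';

// CommonJS (reached here via createRequire): the "require" condition resolves
// to dist/sourcemap-codec.umd.js.
import { createRequire } from 'node:module';
const require = createRequire(import.meta.url);
const { encode } = require('@jridgewell/sourcemap-codec');

console.log(typeof decode, typeof encode); // => function function
```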
19
node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
193
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
@@ -0,0 +1,193 @@
# @jridgewell/trace-mapping

> Trace the original position through a source map

`trace-mapping` allows you to take the line and column of an output file and trace it to the
original location in the source file through a source map.

You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
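In practice that usually means loading a generated file's `.map` and resolving a position from a stack trace. A quick sketch (the file names are hypothetical; the constructor also accepts the map's JSON text as a string, which is what the "JSON input" rows in the Benchmarks section below refer to):

```typescript
import { readFileSync } from 'node:fs';
import { TraceMap, originalPositionFor } from '@jridgewell/trace-mapping';

// Given a stack frame pointing at out.js:10:42, ask where that position lives
// in the original sources. Fields come back as nulls if the position is unmapped.
const tracer = new TraceMap(readFileSync('out.js.map', 'utf8'));
const original = originalPositionFor(tracer, { line: 10, column: 42 });
console.log(original); // { source, line, column, name }
```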
## Installation

```sh
npm install @jridgewell/trace-mapping
```
## Usage

```typescript
import { TraceMap, originalPositionFor, generatedPositionFor } from '@jridgewell/trace-mapping';

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  names: ['foo'],
  mappings: 'KAyCIA',
});

// Lines start at line 1, columns at column 0.
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
assert.deepEqual(traced, {
  source: 'input.js',
  line: 42,
  column: 4,
  name: 'foo',
});

const generated = generatedPositionFor(tracer, {
  source: 'input.js',
  line: 42,
  column: 4,
});
assert.deepEqual(generated, {
  line: 1,
  column: 5,
});
```
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:

```typescript
import { traceSegment } from '@jridgewell/trace-mapping';

// line is 0-base.
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);

// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
// Again, line is 0-base and so is sourceLine
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
```
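If you need the friendlier `{ source, line, column, name }` shape but started from a raw segment, it can be rebuilt by hand from the `TraceMap`'s own `sources` and `names` arrays. A hedged sketch (not from the upstream README), reusing `tracer` and `traced` from above:

```typescript
// traceSegment returns null for unmapped positions and shorter tuples for
// segments without a source or name, so narrow before indexing.
if (traced && traced.length === 5) {
  const manual = {
    source: tracer.sources[traced[1]], // 'input.js'
    line: traced[2] + 1,               // 42 (back to the 1-based lines used above)
    column: traced[3],                 // 4
    name: tracer.names[traced[4]],     // 'foo'
  };
  console.log(manual);
}
```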
### SectionedSourceMaps

The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
`TraceMap` instance:

```typescript
import { AnyMap } from '@jridgewell/trace-mapping';
const fooOutput = 'foo';
const barOutput = 'bar';
const output = [fooOutput, barOutput].join('\n');

const sectioned = new AnyMap({
  version: 3,
  sections: [
    {
      // 0-base line and column
      offset: { line: 0, column: 0 },
      // fooOutput's sourcemap
      map: {
        version: 3,
        sources: ['foo.js'],
        names: ['foo'],
        mappings: 'AAAAA',
      },
    },
    {
      // barOutput's sourcemap will not affect the first line, only the second
      offset: { line: 1, column: 0 },
      map: {
        version: 3,
        sources: ['bar.js'],
        names: ['bar'],
        mappings: 'AAAAA',
      },
    },
  ],
});

const traced = originalPositionFor(sectioned, {
  line: 2,
  column: 0,
});

assert.deepEqual(traced, {
  source: 'bar.js',
  line: 1,
  column: 0,
  name: 'bar',
});
```
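The reverse lookup works on the joined map as well, since `AnyMap` hands back an ordinary `TraceMap`. A hedged sketch (not part of the upstream README), reusing `sectioned` from above:

```typescript
import { generatedPositionFor } from '@jridgewell/trace-mapping';

// Where did bar.js line 1, column 0 end up in the concatenated output?
const generated = generatedPositionFor(sectioned, {
  source: 'bar.js',
  line: 1,
  column: 0,
});

assert.deepEqual(generated, { line: 2, column: 0 });
```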
## Benchmarks

```
node v18.0.0

amp.js.map
trace-mapping: decoded JSON input x 183 ops/sec ±0.41% (87 runs sampled)
trace-mapping: encoded JSON input x 384 ops/sec ±0.89% (89 runs sampled)
trace-mapping: decoded Object input x 3,085 ops/sec ±0.24% (100 runs sampled)
trace-mapping: encoded Object input x 452 ops/sec ±0.80% (84 runs sampled)
source-map-js: encoded Object input x 88.82 ops/sec ±0.45% (77 runs sampled)
source-map-0.6.1: encoded Object input x 38.39 ops/sec ±1.88% (52 runs sampled)
Fastest is trace-mapping: decoded Object input

trace-mapping: decoded originalPositionFor x 4,025,347 ops/sec ±0.15% (97 runs sampled)
trace-mapping: encoded originalPositionFor x 3,333,136 ops/sec ±1.26% (90 runs sampled)
source-map-js: encoded originalPositionFor x 824,978 ops/sec ±1.06% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 741,300 ops/sec ±0.93% (92 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 2,587,603 ops/sec ±0.75% (97 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor

***

babel.min.js.map
trace-mapping: decoded JSON input x 17.43 ops/sec ±8.81% (33 runs sampled)
trace-mapping: encoded JSON input x 34.18 ops/sec ±4.67% (50 runs sampled)
trace-mapping: decoded Object input x 1,010 ops/sec ±0.41% (98 runs sampled)
trace-mapping: encoded Object input x 39.45 ops/sec ±4.01% (52 runs sampled)
source-map-js: encoded Object input x 6.57 ops/sec ±3.04% (21 runs sampled)
source-map-0.6.1: encoded Object input x 4.23 ops/sec ±2.93% (15 runs sampled)
Fastest is trace-mapping: decoded Object input

trace-mapping: decoded originalPositionFor x 7,576,265 ops/sec ±0.74% (96 runs sampled)
trace-mapping: encoded originalPositionFor x 5,019,743 ops/sec ±0.74% (94 runs sampled)
source-map-js: encoded originalPositionFor x 3,396,137 ops/sec ±42.32% (95 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 3,753,176 ops/sec ±0.72% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 6,423,633 ops/sec ±0.74% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor

***

preact.js.map
trace-mapping: decoded JSON input x 3,499 ops/sec ±0.18% (98 runs sampled)
trace-mapping: encoded JSON input x 6,078 ops/sec ±0.25% (99 runs sampled)
trace-mapping: decoded Object input x 254,788 ops/sec ±0.13% (100 runs sampled)
trace-mapping: encoded Object input x 14,063 ops/sec ±0.27% (94 runs sampled)
source-map-js: encoded Object input x 2,465 ops/sec ±0.25% (98 runs sampled)
source-map-0.6.1: encoded Object input x 1,174 ops/sec ±1.90% (95 runs sampled)
Fastest is trace-mapping: decoded Object input

trace-mapping: decoded originalPositionFor x 7,720,171 ops/sec ±0.14% (97 runs sampled)
trace-mapping: encoded originalPositionFor x 6,864,485 ops/sec ±0.16% (101 runs sampled)
source-map-js: encoded originalPositionFor x 2,387,219 ops/sec ±0.28% (98 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 1,565,339 ops/sec ±0.32% (101 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 3,819,732 ops/sec ±0.38% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor

***

react.js.map
trace-mapping: decoded JSON input x 1,719 ops/sec ±0.19% (99 runs sampled)
trace-mapping: encoded JSON input x 4,284 ops/sec ±0.51% (99 runs sampled)
trace-mapping: decoded Object input x 94,668 ops/sec ±0.08% (99 runs sampled)
trace-mapping: encoded Object input x 5,287 ops/sec ±0.24% (99 runs sampled)
source-map-js: encoded Object input x 814 ops/sec ±0.20% (98 runs sampled)
source-map-0.6.1: encoded Object input x 429 ops/sec ±0.24% (94 runs sampled)
Fastest is trace-mapping: decoded Object input

trace-mapping: decoded originalPositionFor x 28,927,989 ops/sec ±0.61% (94 runs sampled)
trace-mapping: encoded originalPositionFor x 27,394,475 ops/sec ±0.55% (97 runs sampled)
source-map-js: encoded originalPositionFor x 16,856,730 ops/sec ±0.45% (96 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 12,258,950 ops/sec ±0.41% (97 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 22,272,990 ops/sec ±0.58% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
```

[source-map]: https://www.npmjs.com/package/source-map
514
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@@ -0,0 +1,514 @@
|
||||
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||
import resolveUri from '@jridgewell/resolve-uri';
|
||||
|
||||
function resolve(input, base) {
|
||||
// The base is always treated as a directory, if it's not empty.
|
||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||
if (base && !base.endsWith('/'))
|
||||
base += '/';
|
||||
return resolveUri(input, base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
function stripFilename(path) {
|
||||
if (!path)
|
||||
return '';
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
const REV_GENERATED_LINE = 1;
|
||||
const REV_GENERATED_COLUMN = 2;
|
||||
|
||||
function maybeSort(mappings, owned) {
|
||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||
if (unsortedIndex === mappings.length)
|
||||
return mappings;
|
||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||
// not, we do not want to modify the consumer's input array.
|
||||
if (!owned)
|
||||
mappings = mappings.slice();
|
||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||
mappings[i] = sortSegments(mappings[i], owned);
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
function nextUnsortedSegmentLine(mappings, start) {
|
||||
for (let i = start; i < mappings.length; i++) {
|
||||
if (!isSorted(mappings[i]))
|
||||
return i;
|
||||
}
|
||||
return mappings.length;
|
||||
}
|
||||
function isSorted(line) {
|
||||
for (let j = 1; j < line.length; j++) {
|
||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function sortSegments(line, owned) {
|
||||
if (!owned)
|
||||
line = line.slice();
|
||||
return line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[COLUMN] - b[COLUMN];
|
||||
}
|
||||
|
||||
let found = false;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
function binarySearch(haystack, needle, low, high) {
|
||||
while (low <= high) {
|
||||
const mid = low + ((high - low) >> 1);
|
||||
const cmp = haystack[mid][COLUMN] - needle;
|
||||
if (cmp === 0) {
|
||||
found = true;
|
||||
return mid;
|
||||
}
|
||||
if (cmp < 0) {
|
||||
low = mid + 1;
|
||||
}
|
||||
else {
|
||||
high = mid - 1;
|
||||
}
|
||||
}
|
||||
found = false;
|
||||
return low - 1;
|
||||
}
|
||||
function upperBound(haystack, needle, index) {
|
||||
for (let i = index + 1; i < haystack.length; i++, index++) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function lowerBound(haystack, needle, index) {
|
||||
for (let i = index - 1; i >= 0; i--, index--) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function memoizedState() {
|
||||
return {
|
||||
lastKey: -1,
|
||||
lastNeedle: -1,
|
||||
lastIndex: -1,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||
const { lastKey, lastNeedle, lastIndex } = state;
|
||||
let low = 0;
|
||||
let high = haystack.length - 1;
|
||||
if (key === lastKey) {
|
||||
if (needle === lastNeedle) {
|
||||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||
return lastIndex;
|
||||
}
|
||||
if (needle >= lastNeedle) {
|
||||
// lastIndex may be -1 if the previous needle was not found.
|
||||
low = lastIndex === -1 ? 0 : lastIndex;
|
||||
}
|
||||
else {
|
||||
high = lastIndex;
|
||||
}
|
||||
}
|
||||
state.lastKey = key;
|
||||
state.lastNeedle = needle;
|
||||
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||
// of generated line/column.
|
||||
function buildBySources(decoded, memos) {
|
||||
const sources = memos.map(buildNullArray);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
if (seg.length === 1)
|
||||
continue;
|
||||
const sourceIndex = seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
const originalSource = sources[sourceIndex];
|
||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||
const memo = memos[sourceIndex];
|
||||
// The binary search either found a match, or it found the left-index just before where the
|
||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||
// generated segments associated with an original location, so there may need to move several
|
||||
// indexes before we find where we need to insert.
|
||||
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
||||
}
|
||||
}
|
||||
return sources;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||
// order when iterating with for-in.
|
||||
function buildNullArray() {
|
||||
return { __proto__: null };
|
||||
}
|
||||
|
||||
const AnyMap = function (map, mapUrl) {
|
||||
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
|
||||
if (!('sections' in parsed))
|
||||
return new TraceMap(parsed, mapUrl);
|
||||
const mappings = [];
|
||||
const sources = [];
|
||||
const sourcesContent = [];
|
||||
const names = [];
|
||||
const { sections } = parsed;
|
||||
let i = 0;
|
||||
for (; i < sections.length - 1; i++) {
|
||||
const no = sections[i + 1].offset;
|
||||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
|
||||
}
|
||||
if (sections.length > 0) {
|
||||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
|
||||
}
|
||||
const joined = {
|
||||
version: 3,
|
||||
file: parsed.file,
|
||||
names,
|
||||
sources,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
};
|
||||
return presortedDecodedMap(joined);
|
||||
};
|
||||
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
|
||||
const map = AnyMap(section.map, mapUrl);
|
||||
const { line: lineOffset, column: columnOffset } = section.offset;
|
||||
const sourcesOffset = sources.length;
|
||||
const namesOffset = names.length;
|
||||
const decoded = decodedMappings(map);
|
||||
const { resolvedSources } = map;
|
||||
append(sources, resolvedSources);
|
||||
append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
|
||||
append(names, map.names);
|
||||
// If this section jumps forwards several lines, we need to add lines to the output mappings to catch up.
|
||||
for (let i = mappings.length; i <= lineOffset; i++)
|
||||
mappings.push([]);
|
||||
// We can only add so many lines before we step into the range that the next section's map
|
||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||
// they've crossed into the column range.
|
||||
const stopI = stopLine - lineOffset;
|
||||
const len = Math.min(decoded.length, stopI + 1);
|
||||
for (let i = 0; i < len; i++) {
|
||||
const line = decoded[i];
|
||||
// On the 0th loop, the line will already exist due to a previous section, or the line catch up
|
||||
// loop above.
|
||||
const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
|
||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||
// map can be multiple lines), it doesn't.
|
||||
const cOffset = i === 0 ? columnOffset : 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const column = cOffset + seg[COLUMN];
|
||||
// If this segment steps into the column range that the next section's map controls, we need
|
||||
// to stop early.
|
||||
if (i === stopI && column >= stopColumn)
|
||||
break;
|
||||
if (seg.length === 1) {
|
||||
out.push([column]);
|
||||
continue;
|
||||
}
|
||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
if (seg.length === 4) {
|
||||
out.push([column, sourcesIndex, sourceLine, sourceColumn]);
|
||||
continue;
|
||||
}
|
||||
out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function append(arr, other) {
|
||||
for (let i = 0; i < other.length; i++)
|
||||
arr.push(other[i]);
|
||||
}
|
||||
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
|
||||
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
|
||||
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
|
||||
// sourcemap would desynchronize the sources/contents.
|
||||
function fillSourcesContent(len) {
|
||||
const sourcesContent = [];
|
||||
for (let i = 0; i < len; i++)
|
||||
sourcesContent[i] = null;
|
||||
return sourcesContent;
|
||||
}
|
||||
|
||||
const INVALID_ORIGINAL_MAPPING = Object.freeze({
|
||||
source: null,
|
||||
line: null,
|
||||
column: null,
|
||||
name: null,
|
||||
});
|
||||
const INVALID_GENERATED_MAPPING = Object.freeze({
|
||||
line: null,
|
||||
column: null,
|
||||
});
|
||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||
const LEAST_UPPER_BOUND = -1;
|
||||
const GREATEST_LOWER_BOUND = 1;
|
||||
/**
|
||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||
*/
|
||||
let encodedMappings;
|
||||
/**
|
||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||
*/
|
||||
let decodedMappings;
|
||||
/**
|
||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||
*/
|
||||
let traceSegment;
|
||||
/**
|
||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||
* `source-map` library.
|
||||
*/
|
||||
let originalPositionFor;
|
||||
/**
|
||||
* Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
|
||||
* the found mapping is from the same source and line as the originalPositionFor mapping.
|
||||
*
|
||||
* Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
|
||||
* using the same needle that would return `id` when calling `originalPositionFor`.
|
||||
*/
|
||||
let generatedPositionFor;
|
||||
/**
|
||||
* Iterates each mapping in generated position order.
|
||||
*/
|
||||
let eachMapping;
|
||||
/**
|
||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||
* maps.
|
||||
*/
|
||||
let presortedDecodedMap;
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
let decodedMap;
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
let encodedMap;
|
||||
class TraceMap {
|
||||
constructor(map, mapUrl) {
|
||||
this._decodedMemo = memoizedState();
|
||||
this._bySources = undefined;
|
||||
this._bySourceMemos = undefined;
|
||||
const isString = typeof map === 'string';
|
||||
if (!isString && map.constructor === TraceMap)
|
||||
return map;
|
||||
const parsed = (isString ? JSON.parse(map) : map);
|
||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||
this.version = version;
|
||||
this.file = file;
|
||||
this.names = names;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sources = sources;
|
||||
this.sourcesContent = sourcesContent;
|
||||
if (sourceRoot || mapUrl) {
|
||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||
}
|
||||
else {
|
||||
this.resolvedSources = sources.map((s) => s || '');
|
||||
}
|
||||
const { mappings } = parsed;
|
||||
if (typeof mappings === 'string') {
|
||||
this._encoded = mappings;
|
||||
this._decoded = undefined;
|
||||
}
|
||||
else {
|
||||
this._encoded = undefined;
|
||||
this._decoded = maybeSort(mappings, isString);
|
||||
}
|
||||
}
|
||||
}
|
||||
(() => {
|
||||
encodedMappings = (map) => {
|
||||
var _a;
|
||||
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = encode(map._decoded)));
|
||||
};
|
||||
decodedMappings = (map) => {
|
||||
return (map._decoded || (map._decoded = decode(map._encoded)));
|
||||
};
|
||||
traceSegment = (map, line, column) => {
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return null;
|
||||
return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||
};
|
||||
originalPositionFor = (map, { line, column, bias }) => {
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (segment == null)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
if (segment.length == 1)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
const { names, resolvedSources } = map;
|
||||
return {
|
||||
source: resolvedSources[segment[SOURCES_INDEX]],
|
||||
line: segment[SOURCE_LINE] + 1,
|
||||
column: segment[SOURCE_COLUMN],
|
||||
name: segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
|
||||
};
|
||||
};
|
||||
generatedPositionFor = (map, { source, line, column, bias }) => {
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const { sources, resolvedSources } = map;
|
||||
let sourceIndex = sources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
sourceIndex = resolvedSources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
|
||||
const memos = map._bySourceMemos;
|
||||
const segments = generated[sourceIndex][line];
|
||||
if (segments == null)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (segment == null)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
return {
|
||||
line: segment[REV_GENERATED_LINE] + 1,
|
||||
column: segment[REV_GENERATED_COLUMN],
|
||||
};
|
||||
};
|
||||
eachMapping = (map, cb) => {
|
||||
const decoded = decodedMappings(map);
|
||||
const { names, resolvedSources } = map;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generatedLine = i + 1;
|
||||
const generatedColumn = seg[0];
|
||||
let source = null;
|
||||
let originalLine = null;
|
||||
let originalColumn = null;
|
||||
let name = null;
|
||||
if (seg.length !== 1) {
|
||||
source = resolvedSources[seg[1]];
|
||||
originalLine = seg[2] + 1;
|
||||
originalColumn = seg[3];
|
||||
}
|
||||
if (seg.length === 5)
|
||||
name = names[seg[4]];
|
||||
cb({
|
||||
generatedLine,
|
||||
generatedColumn,
|
||||
source,
|
||||
originalLine,
|
||||
originalColumn,
|
||||
name,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
presortedDecodedMap = (map, mapUrl) => {
|
||||
const clone = Object.assign({}, map);
|
||||
clone.mappings = [];
|
||||
const tracer = new TraceMap(clone, mapUrl);
|
||||
tracer._decoded = map.mappings;
|
||||
return tracer;
|
||||
};
|
||||
decodedMap = (map) => {
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings: decodedMappings(map),
|
||||
};
|
||||
};
|
||||
encodedMap = (map) => {
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings: encodedMappings(map),
|
||||
};
|
||||
};
|
||||
})();
|
||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||
if (found) {
|
||||
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||
}
|
||||
else if (bias === LEAST_UPPER_BOUND)
|
||||
index++;
|
||||
if (index === -1 || index === segments.length)
|
||||
return null;
|
||||
return segments[index];
|
||||
}
|
||||
|
||||
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, traceSegment };
|
||||
//# sourceMappingURL=trace-mapping.mjs.map
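
For orientation, a minimal sketch of how the ESM build above is typically consumed; the map contents and the `input.js` name are hypothetical, while the imports match the exports at the end of this file:

```ts
import {
  TraceMap,
  originalPositionFor,
  generatedPositionFor,
} from '@jridgewell/trace-mapping';

// Hypothetical one-segment map: 'AAAA' decodes to [0, 0, 0, 0], i.e. generated
// line 1, column 0 maps to sources[0] ('input.js') line 1, column 0.
const tracer = new TraceMap({
  version: 3,
  names: [],
  sources: ['input.js'],
  mappings: 'AAAA',
});

// `line` is 1-based and `column` is 0-based, as the doc comments above note.
console.log(originalPositionFor(tracer, { line: 1, column: 0 }));
// => { source: 'input.js', line: 1, column: 0, name: null }

console.log(generatedPositionFor(tracer, { source: 'input.js', line: 1, column: 0 }));
// => { line: 1, column: 0 }
```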
|
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
528
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,528 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
||||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||
|
||||
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||
|
||||
var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri);
|
||||
|
||||
function resolve(input, base) {
|
||||
// The base is always treated as a directory, if it's not empty.
|
||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||
if (base && !base.endsWith('/'))
|
||||
base += '/';
|
||||
return resolveUri__default["default"](input, base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
function stripFilename(path) {
|
||||
if (!path)
|
||||
return '';
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
const REV_GENERATED_LINE = 1;
|
||||
const REV_GENERATED_COLUMN = 2;
|
||||
|
||||
function maybeSort(mappings, owned) {
|
||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||
if (unsortedIndex === mappings.length)
|
||||
return mappings;
|
||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||
// not, we do not want to modify the consumer's input array.
|
||||
if (!owned)
|
||||
mappings = mappings.slice();
|
||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||
mappings[i] = sortSegments(mappings[i], owned);
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
function nextUnsortedSegmentLine(mappings, start) {
|
||||
for (let i = start; i < mappings.length; i++) {
|
||||
if (!isSorted(mappings[i]))
|
||||
return i;
|
||||
}
|
||||
return mappings.length;
|
||||
}
|
||||
function isSorted(line) {
|
||||
for (let j = 1; j < line.length; j++) {
|
||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function sortSegments(line, owned) {
|
||||
if (!owned)
|
||||
line = line.slice();
|
||||
return line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[COLUMN] - b[COLUMN];
|
||||
}
|
||||
|
||||
let found = false;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
function binarySearch(haystack, needle, low, high) {
|
||||
while (low <= high) {
|
||||
const mid = low + ((high - low) >> 1);
|
||||
const cmp = haystack[mid][COLUMN] - needle;
|
||||
if (cmp === 0) {
|
||||
found = true;
|
||||
return mid;
|
||||
}
|
||||
if (cmp < 0) {
|
||||
low = mid + 1;
|
||||
}
|
||||
else {
|
||||
high = mid - 1;
|
||||
}
|
||||
}
|
||||
found = false;
|
||||
return low - 1;
|
||||
}
|
||||
function upperBound(haystack, needle, index) {
|
||||
for (let i = index + 1; i < haystack.length; i++, index++) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function lowerBound(haystack, needle, index) {
|
||||
for (let i = index - 1; i >= 0; i--, index--) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function memoizedState() {
|
||||
return {
|
||||
lastKey: -1,
|
||||
lastNeedle: -1,
|
||||
lastIndex: -1,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||
const { lastKey, lastNeedle, lastIndex } = state;
|
||||
let low = 0;
|
||||
let high = haystack.length - 1;
|
||||
if (key === lastKey) {
|
||||
if (needle === lastNeedle) {
|
||||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||
return lastIndex;
|
||||
}
|
||||
if (needle >= lastNeedle) {
|
||||
// lastIndex may be -1 if the previous needle was not found.
|
||||
low = lastIndex === -1 ? 0 : lastIndex;
|
||||
}
|
||||
else {
|
||||
high = lastIndex;
|
||||
}
|
||||
}
|
||||
state.lastKey = key;
|
||||
state.lastNeedle = needle;
|
||||
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||
// of generated line/column.
|
||||
function buildBySources(decoded, memos) {
|
||||
const sources = memos.map(buildNullArray);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
if (seg.length === 1)
|
||||
continue;
|
||||
const sourceIndex = seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
const originalSource = sources[sourceIndex];
|
||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||
const memo = memos[sourceIndex];
|
||||
// The binary search either found a match, or it found the left-index just before where the
|
||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||
// generated segments associated with an original location, so there may need to move several
|
||||
// indexes before we find where we need to insert.
|
||||
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
||||
}
|
||||
}
|
||||
return sources;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||
// order when iterating with for-in.
|
||||
function buildNullArray() {
|
||||
return { __proto__: null };
|
||||
}
|
||||
|
||||
const AnyMap = function (map, mapUrl) {
|
||||
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
|
||||
if (!('sections' in parsed))
|
||||
return new TraceMap(parsed, mapUrl);
|
||||
const mappings = [];
|
||||
const sources = [];
|
||||
const sourcesContent = [];
|
||||
const names = [];
|
||||
const { sections } = parsed;
|
||||
let i = 0;
|
||||
for (; i < sections.length - 1; i++) {
|
||||
const no = sections[i + 1].offset;
|
||||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
|
||||
}
|
||||
if (sections.length > 0) {
|
||||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
|
||||
}
|
||||
const joined = {
|
||||
version: 3,
|
||||
file: parsed.file,
|
||||
names,
|
||||
sources,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
};
|
||||
return exports.presortedDecodedMap(joined);
|
||||
};
|
||||
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
|
||||
const map = AnyMap(section.map, mapUrl);
|
||||
const { line: lineOffset, column: columnOffset } = section.offset;
|
||||
const sourcesOffset = sources.length;
|
||||
const namesOffset = names.length;
|
||||
const decoded = exports.decodedMappings(map);
|
||||
const { resolvedSources } = map;
|
||||
append(sources, resolvedSources);
|
||||
append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
|
||||
append(names, map.names);
|
||||
// If this section jumps forwards several lines, we need to add lines to the output mappings to catch up.
|
||||
for (let i = mappings.length; i <= lineOffset; i++)
|
||||
mappings.push([]);
|
||||
// We can only add so many lines before we step into the range that the next section's map
|
||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||
// they've crossed into the column range.
|
||||
const stopI = stopLine - lineOffset;
|
||||
const len = Math.min(decoded.length, stopI + 1);
|
||||
for (let i = 0; i < len; i++) {
|
||||
const line = decoded[i];
|
||||
// On the 0th loop, the line will already exist due to a previous section, or the line catch up
|
||||
// loop above.
|
||||
const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
|
||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||
// map can be multiple lines), it doesn't.
|
||||
const cOffset = i === 0 ? columnOffset : 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const column = cOffset + seg[COLUMN];
|
||||
// If this segment steps into the column range that the next section's map controls, we need
|
||||
// to stop early.
|
||||
if (i === stopI && column >= stopColumn)
|
||||
break;
|
||||
if (seg.length === 1) {
|
||||
out.push([column]);
|
||||
continue;
|
||||
}
|
||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
if (seg.length === 4) {
|
||||
out.push([column, sourcesIndex, sourceLine, sourceColumn]);
|
||||
continue;
|
||||
}
|
||||
out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function append(arr, other) {
|
||||
for (let i = 0; i < other.length; i++)
|
||||
arr.push(other[i]);
|
||||
}
|
||||
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
|
||||
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
|
||||
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
|
||||
// sourcemap would desynchronize the sources/contents.
|
||||
function fillSourcesContent(len) {
|
||||
const sourcesContent = [];
|
||||
for (let i = 0; i < len; i++)
|
||||
sourcesContent[i] = null;
|
||||
return sourcesContent;
|
||||
}
|
||||
|
||||
const INVALID_ORIGINAL_MAPPING = Object.freeze({
|
||||
source: null,
|
||||
line: null,
|
||||
column: null,
|
||||
name: null,
|
||||
});
|
||||
const INVALID_GENERATED_MAPPING = Object.freeze({
|
||||
line: null,
|
||||
column: null,
|
||||
});
|
||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||
const LEAST_UPPER_BOUND = -1;
|
||||
const GREATEST_LOWER_BOUND = 1;
|
||||
/**
|
||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||
*/
|
||||
exports.encodedMappings = void 0;
|
||||
/**
|
||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||
*/
|
||||
exports.decodedMappings = void 0;
|
||||
/**
|
||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||
*/
|
||||
exports.traceSegment = void 0;
|
||||
/**
|
||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||
* `source-map` library.
|
||||
*/
|
||||
exports.originalPositionFor = void 0;
|
||||
/**
|
||||
* Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
|
||||
* the found mapping is from the same source and line as the originalPositionFor mapping.
|
||||
*
|
||||
* Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
|
||||
* using the same needle that would return `id` when calling `originalPositionFor`.
|
||||
*/
|
||||
exports.generatedPositionFor = void 0;
|
||||
/**
|
||||
* Iterates each mapping in generated position order.
|
||||
*/
|
||||
exports.eachMapping = void 0;
|
||||
/**
|
||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||
* maps.
|
||||
*/
|
||||
exports.presortedDecodedMap = void 0;
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
exports.decodedMap = void 0;
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
exports.encodedMap = void 0;
|
||||
class TraceMap {
|
||||
constructor(map, mapUrl) {
|
||||
this._decodedMemo = memoizedState();
|
||||
this._bySources = undefined;
|
||||
this._bySourceMemos = undefined;
|
||||
const isString = typeof map === 'string';
|
||||
if (!isString && map.constructor === TraceMap)
|
||||
return map;
|
||||
const parsed = (isString ? JSON.parse(map) : map);
|
||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||
this.version = version;
|
||||
this.file = file;
|
||||
this.names = names;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sources = sources;
|
||||
this.sourcesContent = sourcesContent;
|
||||
if (sourceRoot || mapUrl) {
|
||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||
}
|
||||
else {
|
||||
this.resolvedSources = sources.map((s) => s || '');
|
||||
}
|
||||
const { mappings } = parsed;
|
||||
if (typeof mappings === 'string') {
|
||||
this._encoded = mappings;
|
||||
this._decoded = undefined;
|
||||
}
|
||||
else {
|
||||
this._encoded = undefined;
|
||||
this._decoded = maybeSort(mappings, isString);
|
||||
}
|
||||
}
|
||||
}
|
||||
(() => {
|
||||
exports.encodedMappings = (map) => {
|
||||
var _a;
|
||||
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = sourcemapCodec.encode(map._decoded)));
|
||||
};
|
||||
exports.decodedMappings = (map) => {
|
||||
return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded)));
|
||||
};
|
||||
exports.traceSegment = (map, line, column) => {
|
||||
const decoded = exports.decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return null;
|
||||
return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||
};
|
||||
exports.originalPositionFor = (map, { line, column, bias }) => {
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const decoded = exports.decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (segment == null)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
if (segment.length == 1)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
const { names, resolvedSources } = map;
|
||||
return {
|
||||
source: resolvedSources[segment[SOURCES_INDEX]],
|
||||
line: segment[SOURCE_LINE] + 1,
|
||||
column: segment[SOURCE_COLUMN],
|
||||
name: segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
|
||||
};
|
||||
};
|
||||
exports.generatedPositionFor = (map, { source, line, column, bias }) => {
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const { sources, resolvedSources } = map;
|
||||
let sourceIndex = sources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
sourceIndex = resolvedSources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
|
||||
const memos = map._bySourceMemos;
|
||||
const segments = generated[sourceIndex][line];
|
||||
if (segments == null)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (segment == null)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
return {
|
||||
line: segment[REV_GENERATED_LINE] + 1,
|
||||
column: segment[REV_GENERATED_COLUMN],
|
||||
};
|
||||
};
|
||||
exports.eachMapping = (map, cb) => {
|
||||
const decoded = exports.decodedMappings(map);
|
||||
const { names, resolvedSources } = map;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generatedLine = i + 1;
|
||||
const generatedColumn = seg[0];
|
||||
let source = null;
|
||||
let originalLine = null;
|
||||
let originalColumn = null;
|
||||
let name = null;
|
||||
if (seg.length !== 1) {
|
||||
source = resolvedSources[seg[1]];
|
||||
originalLine = seg[2] + 1;
|
||||
originalColumn = seg[3];
|
||||
}
|
||||
if (seg.length === 5)
|
||||
name = names[seg[4]];
|
||||
cb({
|
||||
generatedLine,
|
||||
generatedColumn,
|
||||
source,
|
||||
originalLine,
|
||||
originalColumn,
|
||||
name,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
exports.presortedDecodedMap = (map, mapUrl) => {
|
||||
const clone = Object.assign({}, map);
|
||||
clone.mappings = [];
|
||||
const tracer = new TraceMap(clone, mapUrl);
|
||||
tracer._decoded = map.mappings;
|
||||
return tracer;
|
||||
};
|
||||
exports.decodedMap = (map) => {
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings: exports.decodedMappings(map),
|
||||
};
|
||||
};
|
||||
exports.encodedMap = (map) => {
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings: exports.encodedMappings(map),
|
||||
};
|
||||
};
|
||||
})();
|
||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||
if (found) {
|
||||
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||
}
|
||||
else if (bias === LEAST_UPPER_BOUND)
|
||||
index++;
|
||||
if (index === -1 || index === segments.length)
|
||||
return null;
|
||||
return segments[index];
|
||||
}
|
||||
|
||||
exports.AnyMap = AnyMap;
|
||||
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
|
||||
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
|
||||
exports.TraceMap = TraceMap;
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=trace-mapping.umd.js.map
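
The UMD wrapper above exposes the same API to CommonJS and browser-global consumers; regardless of entry point the accessors behave the same. A sketch (ESM import for brevity, same hypothetical one-segment map) of `eachMapping`, `decodedMap`, and `encodedMap`:

```ts
import { TraceMap, eachMapping, decodedMap, encodedMap } from '@jridgewell/trace-mapping';

// Same hypothetical map as before; 'AAAA' decodes to [0, 0, 0, 0].
const tracer = new TraceMap({
  version: 3,
  names: [],
  sources: ['input.js'],
  mappings: 'AAAA',
});

// Visit every segment in generated position order.
eachMapping(tracer, (m) => {
  console.log(m.generatedLine, m.generatedColumn, m.source, m.originalLine, m.originalColumn);
});

// Round-trip back to plain source map objects (decoded or VLQ-encoded mappings).
console.log(decodedMap(tracer).mappings); // [[[0, 0, 0, 0]]]
console.log(encodedMap(tracer).mappings); // 'AAAA'
```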
|
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
declare type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};
|
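`AnyMap` accepts either a plain map or a sectioned ("indexed") map and flattens the latter into a `TraceMap`; a sketch with a single hypothetical section (the offsets, mappings, and `first.js` name are invented for illustration):

```ts
import { AnyMap, originalPositionFor } from '@jridgewell/trace-mapping';

// Each section's map is shifted in the generated file by its { line, column } offset.
const tracer = new AnyMap({
  version: 3,
  sections: [
    {
      offset: { line: 0, column: 0 },
      map: {
        version: 3,
        names: [],
        sources: ['first.js'],
        mappings: 'AAAA', // [0, 0, 0, 0]
      },
    },
  ],
});

console.log(originalPositionFor(tracer, { line: 1, column: 0 }));
// => { source: 'first.js', line: 1, column: 0, name: null }
```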
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
export declare type MemoState = {
    lastKey: number;
    lastNeedle: number;
    lastIndex: number;
};
export declare let found: boolean;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
7
node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
export declare type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
1
node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
export default function resolve(input: string, base: string | undefined): string;
2
node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
import type { SourceMapSegment } from './sourcemap-segment';
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
declare type GeneratedLine = number;
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
|
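These constants describe the segment tuple layout used throughout the library. A sketch reading a raw segment via the exported `traceSegment` (0-based line/column), using the same hypothetical one-segment map as earlier:

```ts
import { TraceMap, traceSegment } from '@jridgewell/trace-mapping';

// Hypothetical one-segment map; 'AAAA' decodes to [0, 0, 0, 0].
const tracer = new TraceMap({
  version: 3,
  names: [],
  sources: ['input.js'],
  mappings: 'AAAA',
});

// traceSegment is 0-based for both line and column and returns the raw tuple
// laid out as declared above: [generatedColumn, sourcesIndex, sourceLine, sourceColumn(, namesIndex)].
const seg = traceSegment(tracer, 0, 0);
if (seg !== null && seg.length !== 1) {
  console.log(seg[0], tracer.sources[seg[1]], seg[2], seg[3]); // => 0 'input.js' 0 0
}
```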
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
/**
 * Removes everything after the last "/", but leaves the slash.
 */
export default function stripFilename(path: string | undefined | null): string;
70
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,70 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
export type { SourceMapSegment } from './sourcemap-segment';
export type { SourceMapInput, SectionedSourceMapInput, DecodedSourceMap, EncodedSourceMap, SectionedSourceMap, InvalidOriginalMapping, OriginalMapping as Mapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, EachMapping, } from './types';
export declare const LEAST_UPPER_BOUND = -1;
export declare const GREATEST_LOWER_BOUND = 1;
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
export declare let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
export declare let decodedMappings: (map: TraceMap) => Readonly<DecodedSourceMap['mappings']>;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
export declare let traceSegment: (map: TraceMap, line: number, column: number) => Readonly<SourceMapSegment> | null;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
export declare let originalPositionFor: (map: TraceMap, needle: Needle) => OriginalMapping | InvalidOriginalMapping;
/**
 * Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
 * the found mapping is from the same source and line as the originalPositionFor mapping.
 *
 * Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
 * using the same needle that would return `id` when calling `originalPositionFor`.
 */
export declare let generatedPositionFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping | InvalidGeneratedMapping;
/**
 * Iterates each mapping in generated position order.
 */
export declare let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
export declare let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare let decodedMap: (map: TraceMap) => Omit<DecodedSourceMap, 'mappings'> & {
    mappings: readonly SourceMapSegment[][];
};
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare let encodedMap: (map: TraceMap) => EncodedSourceMap;
export { AnyMap } from './any-map';
export declare class TraceMap implements SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: string[];
    private _encoded;
    private _decoded;
    private _decodedMemo;
    private _bySources;
    private _bySourceMemos;
    constructor(map: SourceMapInput, mapUrl?: string | null);
}
85
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,85 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { TraceMap } from './trace-mapping';
export interface SourceMapV3 {
    file?: string | null;
    names: string[];
    sourceRoot?: string;
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: SourceMapSegment[][];
}
export interface Section {
    offset: {
        line: number;
        column: number;
    };
    map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
    file?: string | null;
    sections: Section[];
    version: 3;
}
export declare type OriginalMapping = {
    source: string | null;
    line: number;
    column: number;
    name: string | null;
};
export declare type InvalidOriginalMapping = {
    source: null;
    line: null;
    column: null;
    name: null;
};
export declare type GeneratedMapping = {
    line: number;
    column: number;
};
export declare type InvalidGeneratedMapping = {
    line: null;
    column: null;
};
export declare type SourceMapInput = string | EncodedSourceMap | DecodedSourceMap | TraceMap;
export declare type SectionedSourceMapInput = SourceMapInput | SectionedSourceMap;
export declare type Needle = {
    line: number;
    column: number;
    bias?: 1 | -1;
};
export declare type SourceNeedle = {
    source: string;
    line: number;
    column: number;
    bias?: 1 | -1;
};
export declare type EachMapping = {
    generatedLine: number;
    generatedColumn: number;
    source: null;
    originalLine: null;
    originalColumn: null;
    name: null;
} | {
    generatedLine: number;
    generatedColumn: number;
    source: string | null;
    originalLine: number;
    originalColumn: number;
    name: string | null;
};
export declare abstract class SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: SourceMapV3['sources'];
}
|
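The `bias` field of a needle selects which neighbouring segment wins when there is no exact column match, using the `GREATEST_LOWER_BOUND`/`LEAST_UPPER_BOUND` constants declared above; a sketch with a hypothetical two-segment line:

```ts
import {
  TraceMap,
  originalPositionFor,
  GREATEST_LOWER_BOUND,
  LEAST_UPPER_BOUND,
} from '@jridgewell/trace-mapping';

// Hypothetical map with two segments on line 1, at generated columns 0 and 5.
// 'AAAA,KACA' decodes to [[0, 0, 0, 0], [5, 0, 1, 0]].
const tracer = new TraceMap({
  version: 3,
  names: [],
  sources: ['input.js'],
  mappings: 'AAAA,KACA',
});

// Column 3 sits between the two segments; `bias` picks the neighbour.
console.log(originalPositionFor(tracer, { line: 1, column: 3, bias: GREATEST_LOWER_BOUND }));
// => { source: 'input.js', line: 1, column: 0, name: null }
console.log(originalPositionFor(tracer, { line: 1, column: 3, bias: LEAST_UPPER_BOUND }));
// => { source: 'input.js', line: 2, column: 0, name: null }
```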
70
node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
@ -0,0 +1,70 @@
{
  "name": "@jridgewell/trace-mapping",
  "version": "0.3.9",
  "description": "Trace the original position through a source map",
  "keywords": [
    "source",
    "map"
  ],
  "main": "dist/trace-mapping.umd.js",
  "module": "dist/trace-mapping.mjs",
  "typings": "dist/types/trace-mapping.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": {
      "browser": "./dist/trace-mapping.umd.js",
      "require": "./dist/trace-mapping.umd.js",
      "import": "./dist/trace-mapping.mjs"
    },
    "./package.json": "./package.json"
  },
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/trace-mapping.git"
  },
  "license": "MIT",
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node benchmark/index.mjs",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "ava debug",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "c8 ava",
    "test:watch": "ava --watch"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "ava": "4.0.1",
    "benchmark": "2.1.4",
    "c8": "7.11.0",
    "esbuild": "0.14.14",
    "esbuild-node-loader": "0.6.4",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.64.0",
    "typescript": "4.5.4"
  },
  "dependencies": {
    "@jridgewell/resolve-uri": "^3.0.3",
    "@jridgewell/sourcemap-codec": "^1.4.10"
  }
}
21
node_modules/@tsconfig/node10/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
MIT License

Copyright (c) Microsoft Corporation.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
38
node_modules/@tsconfig/node10/README.md
generated
vendored
Normal file
@ -0,0 +1,38 @@
### A base TSConfig for working with Node 10.

Add the package to your `"devDependencies"`:

```sh
npm install --save-dev @tsconfig/node10
yarn add --dev @tsconfig/node10
```

Add to your `tsconfig.json`:

```json
"extends": "@tsconfig/node10/tsconfig.json"
```

---

The `tsconfig.json`:

```jsonc
{
  "$schema": "https://json.schemastore.org/tsconfig",

  "compilerOptions": {
    "lib": ["es2018"],
    "module": "commonjs",
    "target": "es2018",

    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "moduleResolution": "node"
  }
}

```

You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node10.json).
15
node_modules/@tsconfig/node10/package.json
generated
vendored
Normal file
@ -0,0 +1,15 @@
{
  "name": "@tsconfig/node10",
  "repository": {
    "type": "git",
    "url": "https://github.com/tsconfig/bases.git",
    "directory": "bases"
  },
  "license": "MIT",
  "description": "A base TSConfig for working with Node 10.",
  "keywords": [
    "tsconfig",
    "node10"
  ],
  "version": "1.0.11"
}
14
node_modules/@tsconfig/node10/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,14 @@
{
  "$schema": "https://json.schemastore.org/tsconfig",

  "compilerOptions": {
    "lib": ["es2018"],
    "module": "commonjs",
    "target": "es2018",

    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "moduleResolution": "node"
  }
}
21
node_modules/@tsconfig/node12/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
MIT License

Copyright (c) Microsoft Corporation.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
40
node_modules/@tsconfig/node12/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
### A base TSConfig for working with Node 12.

Add the package to your `"devDependencies"`:

```sh
npm install --save-dev @tsconfig/node12
yarn add --dev @tsconfig/node12
```

Add to your `tsconfig.json`:

```json
"extends": "@tsconfig/node12/tsconfig.json"
```

---

The `tsconfig.json`:

```jsonc
{
  "$schema": "https://json.schemastore.org/tsconfig",
  "display": "Node 12",

  "compilerOptions": {
    "lib": ["es2019", "es2020.promise", "es2020.bigint", "es2020.string"],
    "module": "commonjs",
    "target": "es2019",

    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "moduleResolution": "node"
  }
}

```

You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node12.json).
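As a hedged aside (not part of the vendored README), the extra `es2020.*` entries in this base's `lib` array are exactly what make code like the following type-check while keeping `target` at es2019; each call below is only typed because of the corresponding lib file:

```ts
// Illustrative only; assumes a project whose tsconfig extends @tsconfig/node12/tsconfig.json
export async function summarize(input: string): Promise<string> {
  // es2020.promise adds Promise.allSettled to the type definitions
  const settled = await Promise.allSettled([Promise.resolve(input), Promise.reject(new Error("boom"))]);

  // es2020.bigint adds the BigInt global (bigint literals would need target es2020, so use the constructor)
  const big: bigint = BigInt(settled.length) * BigInt(10);

  // es2020.string adds String.prototype.matchAll
  const digits = Array.from(input.matchAll(/\d/g), (m) => m[0]);

  return `${digits.join("")}:${big.toString()}`;
}
```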
1
node_modules/@tsconfig/node12/package.json
generated
vendored
Normal file
@ -0,0 +1 @@
{"name":"@tsconfig/node12","repository":{"type":"git","url":"https://github.com/tsconfig/bases.git","directory":"bases"},"license":"MIT","description":"A base TSConfig for working with Node 12.","keywords":["tsconfig","node12"],"version":"1.0.11"}
16
node_modules/@tsconfig/node12/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
{
  "$schema": "https://json.schemastore.org/tsconfig",
  "display": "Node 12",

  "compilerOptions": {
    "lib": ["es2019", "es2020.promise", "es2020.bigint", "es2020.string"],
    "module": "commonjs",
    "target": "es2019",

    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "moduleResolution": "node"
  }
}
21
node_modules/@tsconfig/node14/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
MIT License

Copyright (c) Microsoft Corporation.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
40
node_modules/@tsconfig/node14/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
### A base TSConfig for working with Node 14.

Add the package to your `"devDependencies"`:

```sh
npm install --save-dev @tsconfig/node14
yarn add --dev @tsconfig/node14
```

Add to your `tsconfig.json`:

```json
"extends": "@tsconfig/node14/tsconfig.json"
```

---

The `tsconfig.json`:

```jsonc
{
  "$schema": "https://json.schemastore.org/tsconfig",
  "display": "Node 14",

  "compilerOptions": {
    "lib": ["es2020"],
    "module": "commonjs",
    "target": "es2020",

    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "moduleResolution": "node"
  }
}

```

You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node14.json).
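A brief, hedged illustration (not part of the vendored README, file name invented) of what the jump to `"target": "es2020"` changes compared with the node12 base: bigint literals and optional chaining are accepted as-is and emitted unchanged for the Node 14 runtime.

```ts
// Hypothetical src/budget.ts in a project whose tsconfig extends @tsconfig/node14/tsconfig.json
export function byteBudget(limits?: { perRequest?: number }): bigint {
  // Optional chaining and nullish coalescing are emitted natively at target es2020
  const perRequest = limits?.perRequest ?? 1024;

  // 8n is a bigint literal, which TypeScript only allows when target is es2020 or later
  return 8n * BigInt(perRequest);
}
```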
1
node_modules/@tsconfig/node14/package.json
generated
vendored
Normal file
@ -0,0 +1 @@
{"name":"@tsconfig/node14","repository":{"type":"git","url":"https://github.com/tsconfig/bases.git","directory":"bases"},"license":"MIT","description":"A base TSConfig for working with Node 14.","keywords":["tsconfig","node14"],"version":"1.0.3"}
16
node_modules/@tsconfig/node14/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
{
  "$schema": "https://json.schemastore.org/tsconfig",
  "display": "Node 14",

  "compilerOptions": {
    "lib": ["es2020"],
    "module": "commonjs",
    "target": "es2020",

    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "moduleResolution": "node"
  }
}
21
node_modules/@tsconfig/node16/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
MIT License

Copyright (c) Microsoft Corporation.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
40
node_modules/@tsconfig/node16/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
### A base TSConfig for working with Node 16.

Add the package to your `"devDependencies"`:

```sh
npm install --save-dev @tsconfig/node16
yarn add --dev @tsconfig/node16
```

Add to your `tsconfig.json`:

```json
"extends": "@tsconfig/node16/tsconfig.json"
```

---

The `tsconfig.json`:

```jsonc
{
  "$schema": "https://json.schemastore.org/tsconfig",
  "display": "Node 16",

  "compilerOptions": {
    "lib": ["es2021"],
    "module": "Node16",
    "target": "es2021",

    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "moduleResolution": "node"
  }
}

```

You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node16.json).
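One practical consequence of `"module": "Node16"` worth spelling out (my gloss, not the README's; the file names below are hypothetical): in a package whose package.json sets `"type": "module"`, relative imports are resolved the way Node itself resolves them, so specifiers need the emitted `.js` extension even though the neighbouring source file is a `.ts` file.

```ts
// Hypothetical src/main.ts in an ESM package ("type": "module") whose tsconfig
// extends @tsconfig/node16/tsconfig.json.
//
// Under "module": "Node16" this specifier must match what Node sees at runtime,
// i.e. "./banner.js", even though the sibling source file on disk is ./banner.ts.
import { makeBanner } from "./banner.js";

// "lib": ["es2021"] provides String.prototype.replaceAll
export const banner: string = makeBanner("node16").replaceAll("l", "L");
```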
15
node_modules/@tsconfig/node16/package.json
generated
vendored
Normal file
@ -0,0 +1,15 @@
{
  "name": "@tsconfig/node16",
  "repository": {
    "type": "git",
    "url": "https://github.com/tsconfig/bases.git",
    "directory": "bases"
  },
  "license": "MIT",
  "description": "A base TSConfig for working with Node 16.",
  "keywords": [
    "tsconfig",
    "node16"
  ],
  "version": "1.0.4"
}
16
node_modules/@tsconfig/node16/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
{
  "$schema": "https://json.schemastore.org/tsconfig",
  "display": "Node 16",

  "compilerOptions": {
    "lib": ["es2021"],
    "module": "Node16",
    "target": "es2021",

    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "moduleResolution": "node"
  }
}
21
node_modules/@types/node/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
MIT License

Copyright (c) Microsoft Corporation.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
15
node_modules/@types/node/README.md
generated
vendored
Normal file
@ -0,0 +1,15 @@
# Installation
> `npm install --save @types/node`

# Summary
This package contains type definitions for node (https://nodejs.org/).

# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node.

### Additional Details
* Last updated: Tue, 28 Jan 2025 02:01:18 GMT
* Dependencies: [undici-types](https://npmjs.com/package/undici-types)

# Credits
These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Victor Perin](https://github.com/victorperin), [NodeJS Contributors](https://github.com/NodeJS), [Linus Unnebäck](https://github.com/LinusU), [wafuwafu13](https://github.com/wafuwafu13), [Matteo Collina](https://github.com/mcollina), and [Dmitry Semigradsky](https://github.com/Semigradsky).
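A small usage sketch (mine, not DefinitelyTyped's; the file name is hypothetical) of what installing these definitions buys: the `node:` built-ins become fully typed, so the overload that takes an encoding is known to return a `string` rather than a `Buffer`.

```ts
// Hypothetical check-package.ts — relies only on the @types/node definitions added in this commit
import { readFileSync } from "node:fs";
import * as path from "node:path";

export function readVersion(dir: string): string {
  const manifest = path.join(dir, "package.json");
  // readFileSync is typed: with an encoding argument it returns string, not Buffer
  const raw: string = readFileSync(manifest, "utf8");
  const parsed: { version?: string } = JSON.parse(raw);
  return parsed.version ?? "0.0.0";
}
```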
1040
node_modules/@types/node/assert.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
8
node_modules/@types/node/assert/strict.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
declare module "assert/strict" {
    import { strict } from "node:assert";
    export = strict;
}
declare module "node:assert/strict" {
    import { strict } from "node:assert";
    export = strict;
}
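The two declarations above simply re-export the strict flavour of `node:assert` under the `assert/strict` specifiers. As a rough usage sketch (not part of the vendored file, and assuming a CommonJS-style `module` setting so the `import ... = require(...)` form matches the `export =` shape):

```ts
// Usage sketch for the "node:assert/strict" declaration above
import assert = require("node:assert/strict");

export function checkShape(value: unknown): void {
  // In the strict flavour, deepEqual carries deepStrictEqual semantics
  assert.deepEqual(value, { ok: true });
}
```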
541
node_modules/@types/node/async_hooks.d.ts
generated
vendored
Normal file
@ -0,0 +1,541 @@
/**
|
||||
* We strongly discourage the use of the `async_hooks` API.
|
||||
* Other APIs that can cover most of its use cases include:
|
||||
*
|
||||
* * [`AsyncLocalStorage`](https://nodejs.org/docs/latest-v22.x/api/async_context.html#class-asynclocalstorage) tracks async context
|
||||
* * [`process.getActiveResourcesInfo()`](https://nodejs.org/docs/latest-v22.x/api/process.html#processgetactiveresourcesinfo) tracks active resources
|
||||
*
|
||||
* The `node:async_hooks` module provides an API to track asynchronous resources.
|
||||
* It can be accessed using:
|
||||
*
|
||||
* ```js
|
||||
* import async_hooks from 'node:async_hooks';
|
||||
* ```
|
||||
* @experimental
|
||||
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/async_hooks.js)
|
||||
*/
|
||||
declare module "async_hooks" {
|
||||
/**
|
||||
* ```js
|
||||
* import { executionAsyncId } from 'node:async_hooks';
|
||||
* import fs from 'node:fs';
|
||||
*
|
||||
* console.log(executionAsyncId()); // 1 - bootstrap
|
||||
* const path = '.';
|
||||
* fs.open(path, 'r', (err, fd) => {
|
||||
* console.log(executionAsyncId()); // 6 - open()
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* The ID returned from `executionAsyncId()` is related to execution timing, not
|
||||
* causality (which is covered by `triggerAsyncId()`):
|
||||
*
|
||||
* ```js
|
||||
* const server = net.createServer((conn) => {
|
||||
* // Returns the ID of the server, not of the new connection, because the
|
||||
* // callback runs in the execution scope of the server's MakeCallback().
|
||||
* async_hooks.executionAsyncId();
|
||||
*
|
||||
* }).listen(port, () => {
|
||||
* // Returns the ID of a TickObject (process.nextTick()) because all
|
||||
* // callbacks passed to .listen() are wrapped in a nextTick().
|
||||
* async_hooks.executionAsyncId();
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* Promise contexts may not get precise `executionAsyncIds` by default.
|
||||
* See the section on [promise execution tracking](https://nodejs.org/docs/latest-v22.x/api/async_hooks.html#promise-execution-tracking).
|
||||
* @since v8.1.0
|
||||
* @return The `asyncId` of the current execution context. Useful to track when something calls.
|
||||
*/
|
||||
function executionAsyncId(): number;
|
||||
/**
|
||||
* Resource objects returned by `executionAsyncResource()` are most often internal
|
||||
* Node.js handle objects with undocumented APIs. Using any functions or properties
|
||||
* on the object is likely to crash your application and should be avoided.
|
||||
*
|
||||
* Using `executionAsyncResource()` in the top-level execution context will
|
||||
* return an empty object as there is no handle or request object to use,
|
||||
* but having an object representing the top-level can be helpful.
|
||||
*
|
||||
* ```js
|
||||
* import { open } from 'node:fs';
|
||||
* import { executionAsyncId, executionAsyncResource } from 'node:async_hooks';
|
||||
*
|
||||
* console.log(executionAsyncId(), executionAsyncResource()); // 1 {}
|
||||
* open(new URL(import.meta.url), 'r', (err, fd) => {
|
||||
* console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* This can be used to implement continuation local storage without the
|
||||
* use of a tracking `Map` to store the metadata:
|
||||
*
|
||||
* ```js
|
||||
* import { createServer } from 'node:http';
|
||||
* import {
|
||||
* executionAsyncId,
|
||||
* executionAsyncResource,
|
||||
* createHook,
|
||||
* } from 'node:async_hooks';
|
||||
* const sym = Symbol('state'); // Private symbol to avoid pollution
|
||||
*
|
||||
* createHook({
|
||||
* init(asyncId, type, triggerAsyncId, resource) {
|
||||
* const cr = executionAsyncResource();
|
||||
* if (cr) {
|
||||
* resource[sym] = cr[sym];
|
||||
* }
|
||||
* },
|
||||
* }).enable();
|
||||
*
|
||||
* const server = createServer((req, res) => {
|
||||
* executionAsyncResource()[sym] = { state: req.url };
|
||||
* setTimeout(function() {
|
||||
* res.end(JSON.stringify(executionAsyncResource()[sym]));
|
||||
* }, 100);
|
||||
* }).listen(3000);
|
||||
* ```
|
||||
* @since v13.9.0, v12.17.0
|
||||
* @return The resource representing the current execution. Useful to store data within the resource.
|
||||
*/
|
||||
function executionAsyncResource(): object;
|
||||
/**
|
||||
* ```js
|
||||
* const server = net.createServer((conn) => {
|
||||
* // The resource that caused (or triggered) this callback to be called
|
||||
* // was that of the new connection. Thus the return value of triggerAsyncId()
|
||||
* // is the asyncId of "conn".
|
||||
* async_hooks.triggerAsyncId();
|
||||
*
|
||||
* }).listen(port, () => {
|
||||
* // Even though all callbacks passed to .listen() are wrapped in a nextTick()
|
||||
* // the callback itself exists because the call to the server's .listen()
|
||||
* // was made. So the return value would be the ID of the server.
|
||||
* async_hooks.triggerAsyncId();
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* Promise contexts may not get valid `triggerAsyncId`s by default. See
|
||||
* the section on [promise execution tracking](https://nodejs.org/docs/latest-v22.x/api/async_hooks.html#promise-execution-tracking).
|
||||
* @return The ID of the resource responsible for calling the callback that is currently being executed.
|
||||
*/
|
||||
function triggerAsyncId(): number;
|
||||
interface HookCallbacks {
|
||||
/**
|
||||
* Called when a class is constructed that has the possibility to emit an asynchronous event.
|
||||
* @param asyncId A unique ID for the async resource
|
||||
* @param type The type of the async resource
|
||||
* @param triggerAsyncId The unique ID of the async resource in whose execution context this async resource was created
|
||||
* @param resource Reference to the resource representing the async operation, needs to be released during destroy
|
||||
*/
|
||||
init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void;
|
||||
/**
|
||||
* When an asynchronous operation is initiated or completes a callback is called to notify the user.
|
||||
* The before callback is called just before said callback is executed.
|
||||
* @param asyncId the unique identifier assigned to the resource about to execute the callback.
|
||||
*/
|
||||
before?(asyncId: number): void;
|
||||
/**
|
||||
* Called immediately after the callback specified in `before` is completed.
|
||||
*
|
||||
* If an uncaught exception occurs during execution of the callback, then `after` will run after the `'uncaughtException'` event is emitted or a `domain`'s handler runs.
|
||||
* @param asyncId the unique identifier assigned to the resource which has executed the callback.
|
||||
*/
|
||||
after?(asyncId: number): void;
|
||||
/**
|
||||
* Called when a promise has resolve() called. This may not be in the same execution id
|
||||
* as the promise itself.
|
||||
* @param asyncId the unique id for the promise that was resolve()d.
|
||||
*/
|
||||
promiseResolve?(asyncId: number): void;
|
||||
/**
|
||||
* Called after the resource corresponding to asyncId is destroyed
|
||||
* @param asyncId a unique ID for the async resource
|
||||
*/
|
||||
destroy?(asyncId: number): void;
|
||||
}
|
||||
interface AsyncHook {
|
||||
/**
|
||||
* Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop.
|
||||
*/
|
||||
enable(): this;
|
||||
/**
|
||||
* Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled.
|
||||
*/
|
||||
disable(): this;
|
||||
}
|
||||
/**
|
||||
* Registers functions to be called for different lifetime events of each async
|
||||
* operation.
|
||||
*
|
||||
* The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the
|
||||
* respective asynchronous event during a resource's lifetime.
|
||||
*
|
||||
* All callbacks are optional. For example, if only resource cleanup needs to
|
||||
* be tracked, then only the `destroy` callback needs to be passed. The
|
||||
* specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section.
|
||||
*
|
||||
* ```js
|
||||
* import { createHook } from 'node:async_hooks';
|
||||
*
|
||||
* const asyncHook = createHook({
|
||||
* init(asyncId, type, triggerAsyncId, resource) { },
|
||||
* destroy(asyncId) { },
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* The callbacks will be inherited via the prototype chain:
|
||||
*
|
||||
* ```js
|
||||
* class MyAsyncCallbacks {
|
||||
* init(asyncId, type, triggerAsyncId, resource) { }
|
||||
* destroy(asyncId) {}
|
||||
* }
|
||||
*
|
||||
* class MyAddedCallbacks extends MyAsyncCallbacks {
|
||||
* before(asyncId) { }
|
||||
* after(asyncId) { }
|
||||
* }
|
||||
*
|
||||
* const asyncHook = async_hooks.createHook(new MyAddedCallbacks());
|
||||
* ```
|
||||
*
|
||||
* Because promises are asynchronous resources whose lifecycle is tracked
|
||||
* via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises.
|
||||
* @since v8.1.0
|
||||
* @param callbacks The `Hook Callbacks` to register
|
||||
* @return Instance used for disabling and enabling hooks
|
||||
*/
|
||||
function createHook(callbacks: HookCallbacks): AsyncHook;
|
||||
interface AsyncResourceOptions {
|
||||
/**
|
||||
* The ID of the execution context that created this async event.
|
||||
* @default executionAsyncId()
|
||||
*/
|
||||
triggerAsyncId?: number | undefined;
|
||||
/**
|
||||
* Disables automatic `emitDestroy` when the object is garbage collected.
|
||||
* This usually does not need to be set (even if `emitDestroy` is called
|
||||
* manually), unless the resource's `asyncId` is retrieved and the
|
||||
* sensitive API's `emitDestroy` is called with it.
|
||||
* @default false
|
||||
*/
|
||||
requireManualDestroy?: boolean | undefined;
|
||||
}
|
||||
/**
|
||||
* The class `AsyncResource` is designed to be extended by the embedder's async
|
||||
* resources. Using this, users can easily trigger the lifetime events of their
|
||||
* own resources.
|
||||
*
|
||||
* The `init` hook will trigger when an `AsyncResource` is instantiated.
|
||||
*
|
||||
* The following is an overview of the `AsyncResource` API.
|
||||
*
|
||||
* ```js
|
||||
* import { AsyncResource, executionAsyncId } from 'node:async_hooks';
|
||||
*
|
||||
* // AsyncResource() is meant to be extended. Instantiating a
|
||||
* // new AsyncResource() also triggers init. If triggerAsyncId is omitted then
|
||||
* // async_hook.executionAsyncId() is used.
|
||||
* const asyncResource = new AsyncResource(
|
||||
* type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false },
|
||||
* );
|
||||
*
|
||||
* // Run a function in the execution context of the resource. This will
|
||||
* // * establish the context of the resource
|
||||
* // * trigger the AsyncHooks before callbacks
|
||||
* // * call the provided function `fn` with the supplied arguments
|
||||
* // * trigger the AsyncHooks after callbacks
|
||||
* // * restore the original execution context
|
||||
* asyncResource.runInAsyncScope(fn, thisArg, ...args);
|
||||
*
|
||||
* // Call AsyncHooks destroy callbacks.
|
||||
* asyncResource.emitDestroy();
|
||||
*
|
||||
* // Return the unique ID assigned to the AsyncResource instance.
|
||||
* asyncResource.asyncId();
|
||||
*
|
||||
* // Return the trigger ID for the AsyncResource instance.
|
||||
* asyncResource.triggerAsyncId();
|
||||
* ```
|
||||
*/
|
||||
class AsyncResource {
|
||||
/**
|
||||
* AsyncResource() is meant to be extended. Instantiating a
|
||||
* new AsyncResource() also triggers init. If triggerAsyncId is omitted then
|
||||
* async_hook.executionAsyncId() is used.
|
||||
* @param type The type of async event.
|
||||
* @param triggerAsyncId The ID of the execution context that created
|
||||
* this async event (default: `executionAsyncId()`), or an
|
||||
* AsyncResourceOptions object (since v9.3.0)
|
||||
*/
|
||||
constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions);
|
||||
/**
|
||||
* Binds the given function to the current execution context.
|
||||
* @since v14.8.0, v12.19.0
|
||||
* @param fn The function to bind to the current execution context.
|
||||
* @param type An optional name to associate with the underlying `AsyncResource`.
|
||||
*/
|
||||
static bind<Func extends (this: ThisArg, ...args: any[]) => any, ThisArg>(
|
||||
fn: Func,
|
||||
type?: string,
|
||||
thisArg?: ThisArg,
|
||||
): Func;
|
||||
/**
|
||||
* Binds the given function to execute to this `AsyncResource`'s scope.
|
||||
* @since v14.8.0, v12.19.0
|
||||
* @param fn The function to bind to the current `AsyncResource`.
|
||||
*/
|
||||
bind<Func extends (...args: any[]) => any>(fn: Func): Func;
|
||||
/**
|
||||
* Call the provided function with the provided arguments in the execution context
|
||||
* of the async resource. This will establish the context, trigger the AsyncHooks
|
||||
* before callbacks, call the function, trigger the AsyncHooks after callbacks, and
|
||||
* then restore the original execution context.
|
||||
* @since v9.6.0
|
||||
* @param fn The function to call in the execution context of this async resource.
|
||||
* @param thisArg The receiver to be used for the function call.
|
||||
* @param args Optional arguments to pass to the function.
|
||||
*/
|
||||
runInAsyncScope<This, Result>(
|
||||
fn: (this: This, ...args: any[]) => Result,
|
||||
thisArg?: This,
|
||||
...args: any[]
|
||||
): Result;
|
||||
/**
|
||||
* Call all `destroy` hooks. This should only ever be called once. An error will
|
||||
* be thrown if it is called more than once. This **must** be manually called. If
|
||||
* the resource is left to be collected by the GC then the `destroy` hooks will
|
||||
* never be called.
|
||||
* @return A reference to `asyncResource`.
|
||||
*/
|
||||
emitDestroy(): this;
|
||||
/**
|
||||
* @return The unique `asyncId` assigned to the resource.
|
||||
*/
|
||||
asyncId(): number;
|
||||
/**
|
||||
* @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor.
|
||||
*/
|
||||
triggerAsyncId(): number;
|
||||
}
|
||||
/**
|
||||
* This class creates stores that stay coherent through asynchronous operations.
|
||||
*
|
||||
* While you can create your own implementation on top of the `node:async_hooks` module, `AsyncLocalStorage` should be preferred as it is a performant and memory
|
||||
* safe implementation that involves significant optimizations that are non-obvious
|
||||
* to implement.
|
||||
*
|
||||
* The following example uses `AsyncLocalStorage` to build a simple logger
|
||||
* that assigns IDs to incoming HTTP requests and includes them in messages
|
||||
* logged within each request.
|
||||
*
|
||||
* ```js
|
||||
* import http from 'node:http';
|
||||
* import { AsyncLocalStorage } from 'node:async_hooks';
|
||||
*
|
||||
* const asyncLocalStorage = new AsyncLocalStorage();
|
||||
*
|
||||
* function logWithId(msg) {
|
||||
* const id = asyncLocalStorage.getStore();
|
||||
* console.log(`${id !== undefined ? id : '-'}:`, msg);
|
||||
* }
|
||||
*
|
||||
* let idSeq = 0;
|
||||
* http.createServer((req, res) => {
|
||||
* asyncLocalStorage.run(idSeq++, () => {
|
||||
* logWithId('start');
|
||||
* // Imagine any chain of async operations here
|
||||
* setImmediate(() => {
|
||||
* logWithId('finish');
|
||||
* res.end();
|
||||
* });
|
||||
* });
|
||||
* }).listen(8080);
|
||||
*
|
||||
* http.get('http://localhost:8080');
|
||||
* http.get('http://localhost:8080');
|
||||
* // Prints:
|
||||
* // 0: start
|
||||
* // 1: start
|
||||
* // 0: finish
|
||||
* // 1: finish
|
||||
* ```
|
||||
*
|
||||
* Each instance of `AsyncLocalStorage` maintains an independent storage context.
|
||||
* Multiple instances can safely exist simultaneously without risk of interfering
|
||||
* with each other's data.
|
||||
* @since v13.10.0, v12.17.0
|
||||
*/
|
||||
class AsyncLocalStorage<T> {
|
||||
/**
|
||||
* Binds the given function to the current execution context.
|
||||
* @since v19.8.0
|
||||
* @experimental
|
||||
* @param fn The function to bind to the current execution context.
|
||||
* @return A new function that calls `fn` within the captured execution context.
|
||||
*/
|
||||
static bind<Func extends (...args: any[]) => any>(fn: Func): Func;
|
||||
/**
|
||||
* Captures the current execution context and returns a function that accepts a
|
||||
* function as an argument. Whenever the returned function is called, it
|
||||
* calls the function passed to it within the captured context.
|
||||
*
|
||||
* ```js
|
||||
* const asyncLocalStorage = new AsyncLocalStorage();
|
||||
* const runInAsyncScope = asyncLocalStorage.run(123, () => AsyncLocalStorage.snapshot());
|
||||
* const result = asyncLocalStorage.run(321, () => runInAsyncScope(() => asyncLocalStorage.getStore()));
|
||||
* console.log(result); // returns 123
|
||||
* ```
|
||||
*
|
||||
* AsyncLocalStorage.snapshot() can replace the use of AsyncResource for simple
|
||||
* async context tracking purposes, for example:
|
||||
*
|
||||
* ```js
|
||||
* class Foo {
|
||||
* #runInAsyncScope = AsyncLocalStorage.snapshot();
|
||||
*
|
||||
* get() { return this.#runInAsyncScope(() => asyncLocalStorage.getStore()); }
|
||||
* }
|
||||
*
|
||||
* const foo = asyncLocalStorage.run(123, () => new Foo());
|
||||
* console.log(asyncLocalStorage.run(321, () => foo.get())); // returns 123
|
||||
* ```
|
||||
* @since v19.8.0
|
||||
* @experimental
|
||||
* @return A new function with the signature `(fn: (...args) : R, ...args) : R`.
|
||||
*/
|
||||
static snapshot(): <R, TArgs extends any[]>(fn: (...args: TArgs) => R, ...args: TArgs) => R;
|
||||
/**
|
||||
* Disables the instance of `AsyncLocalStorage`. All subsequent calls
|
||||
* to `asyncLocalStorage.getStore()` will return `undefined` until `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again.
|
||||
*
|
||||
* When calling `asyncLocalStorage.disable()`, all current contexts linked to the
|
||||
* instance will be exited.
|
||||
*
|
||||
* Calling `asyncLocalStorage.disable()` is required before the `asyncLocalStorage` can be garbage collected. This does not apply to stores
|
||||
* provided by the `asyncLocalStorage`, as those objects are garbage collected
|
||||
* along with the corresponding async resources.
|
||||
*
|
||||
* Use this method when the `asyncLocalStorage` is not in use anymore
|
||||
* in the current process.
|
||||
* @since v13.10.0, v12.17.0
|
||||
* @experimental
|
||||
*/
|
||||
disable(): void;
|
||||
/**
|
||||
* Returns the current store.
|
||||
* If called outside of an asynchronous context initialized by
|
||||
* calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it
|
||||
* returns `undefined`.
|
||||
* @since v13.10.0, v12.17.0
|
||||
*/
|
||||
getStore(): T | undefined;
|
||||
/**
|
||||
* Runs a function synchronously within a context and returns its
|
||||
* return value. The store is not accessible outside of the callback function.
|
||||
* The store is accessible to any asynchronous operations created within the
|
||||
* callback.
|
||||
*
|
||||
* The optional `args` are passed to the callback function.
|
||||
*
|
||||
* If the callback function throws an error, the error is thrown by `run()` too.
|
||||
* The stacktrace is not impacted by this call and the context is exited.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```js
|
||||
* const store = { id: 2 };
|
||||
* try {
|
||||
* asyncLocalStorage.run(store, () => {
|
||||
* asyncLocalStorage.getStore(); // Returns the store object
|
||||
* setTimeout(() => {
|
||||
* asyncLocalStorage.getStore(); // Returns the store object
|
||||
* }, 200);
|
||||
* throw new Error();
|
||||
* });
|
||||
* } catch (e) {
|
||||
* asyncLocalStorage.getStore(); // Returns undefined
|
||||
* // The error will be caught here
|
||||
* }
|
||||
* ```
|
||||
* @since v13.10.0, v12.17.0
|
||||
*/
|
||||
run<R>(store: T, callback: () => R): R;
|
||||
run<R, TArgs extends any[]>(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R;
|
||||
/**
|
||||
* Runs a function synchronously outside of a context and returns its
|
||||
* return value. The store is not accessible within the callback function or
|
||||
* the asynchronous operations created within the callback. Any `getStore()` call done within the callback function will always return `undefined`.
|
||||
*
|
||||
* The optional `args` are passed to the callback function.
|
||||
*
|
||||
* If the callback function throws an error, the error is thrown by `exit()` too.
|
||||
* The stacktrace is not impacted by this call and the context is re-entered.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```js
|
||||
* // Within a call to run
|
||||
* try {
|
||||
* asyncLocalStorage.getStore(); // Returns the store object or value
|
||||
* asyncLocalStorage.exit(() => {
|
||||
* asyncLocalStorage.getStore(); // Returns undefined
|
||||
* throw new Error();
|
||||
* });
|
||||
* } catch (e) {
|
||||
* asyncLocalStorage.getStore(); // Returns the same object or value
|
||||
* // The error will be caught here
|
||||
* }
|
||||
* ```
|
||||
* @since v13.10.0, v12.17.0
|
||||
* @experimental
|
||||
*/
|
||||
exit<R, TArgs extends any[]>(callback: (...args: TArgs) => R, ...args: TArgs): R;
|
||||
/**
|
||||
* Transitions into the context for the remainder of the current
|
||||
* synchronous execution and then persists the store through any following
|
||||
* asynchronous calls.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```js
|
||||
* const store = { id: 1 };
|
||||
* // Replaces previous store with the given store object
|
||||
* asyncLocalStorage.enterWith(store);
|
||||
* asyncLocalStorage.getStore(); // Returns the store object
|
||||
* someAsyncOperation(() => {
|
||||
* asyncLocalStorage.getStore(); // Returns the same object
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* This transition will continue for the _entire_ synchronous execution.
|
||||
* This means that if, for example, the context is entered within an event
|
||||
* handler subsequent event handlers will also run within that context unless
|
||||
* specifically bound to another context with an `AsyncResource`. That is why `run()` should be preferred over `enterWith()` unless there are strong reasons
|
||||
* to use the latter method.
|
||||
*
|
||||
* ```js
|
||||
* const store = { id: 1 };
|
||||
*
|
||||
* emitter.on('my-event', () => {
|
||||
* asyncLocalStorage.enterWith(store);
|
||||
* });
|
||||
* emitter.on('my-event', () => {
|
||||
* asyncLocalStorage.getStore(); // Returns the same object
|
||||
* });
|
||||
*
|
||||
* asyncLocalStorage.getStore(); // Returns undefined
|
||||
* emitter.emit('my-event');
|
||||
* asyncLocalStorage.getStore(); // Returns the same object
|
||||
* ```
|
||||
* @since v13.11.0, v12.17.0
|
||||
* @experimental
|
||||
*/
|
||||
enterWith(store: T): void;
|
||||
}
|
||||
}
|
||||
declare module "node:async_hooks" {
|
||||
export * from "async_hooks";
|
||||
}
|
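Since the declarations above give `AsyncLocalStorage` a type parameter, here is a short sketch of how that generic is normally used (my example, not part of the vendored .d.ts; the context shape is invented):

```ts
import { AsyncLocalStorage } from "node:async_hooks";

// The type parameter describes the store, so getStore() is typed as RequestContext | undefined
interface RequestContext {
  requestId: string;
}

const requestContext = new AsyncLocalStorage<RequestContext>();

export function withRequest<T>(requestId: string, work: () => T): T {
  // run() establishes the store for the callback and everything async it starts
  return requestContext.run({ requestId }, work);
}

export function currentRequestId(): string {
  // Outside of a run() the store is undefined, hence the fallback
  return requestContext.getStore()?.requestId ?? "unknown";
}
```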
385
node_modules/@types/node/buffer.buffer.d.ts
generated
vendored
Normal file
@ -0,0 +1,385 @@
declare module "buffer" {
|
||||
global {
|
||||
interface BufferConstructor {
|
||||
// see buffer.d.ts for implementation shared with all TypeScript versions
|
||||
|
||||
/**
|
||||
* Allocates a new buffer containing the given {str}.
|
||||
*
|
||||
* @param str String to store in buffer.
|
||||
* @param encoding encoding to use, optional. Default is 'utf8'
|
||||
* @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead.
|
||||
*/
|
||||
new(str: string, encoding?: BufferEncoding): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets.
|
||||
*
|
||||
* @param size count of octets to allocate.
|
||||
* @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`).
|
||||
*/
|
||||
new(size: number): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Allocates a new buffer containing the given {array} of octets.
|
||||
*
|
||||
* @param array The octets to store.
|
||||
* @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
|
||||
*/
|
||||
new(array: Uint8Array): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Produces a Buffer backed by the same allocated memory as
|
||||
* the given {ArrayBuffer}/{SharedArrayBuffer}.
|
||||
*
|
||||
* @param arrayBuffer The ArrayBuffer with which to share memory.
|
||||
* @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead.
|
||||
*/
|
||||
new<TArrayBuffer extends ArrayBufferLike = ArrayBuffer>(arrayBuffer: TArrayBuffer): Buffer<TArrayBuffer>;
|
||||
/**
|
||||
* Allocates a new buffer containing the given {array} of octets.
|
||||
*
|
||||
* @param array The octets to store.
|
||||
* @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
|
||||
*/
|
||||
new(array: readonly any[]): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Copies the passed {buffer} data onto a new {Buffer} instance.
|
||||
*
|
||||
* @param buffer The buffer to copy.
|
||||
* @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead.
|
||||
*/
|
||||
new(buffer: Buffer): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`.
|
||||
* Array entries outside that range will be truncated to fit into it.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'.
|
||||
* const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
|
||||
* ```
|
||||
*
|
||||
* If `array` is an `Array`\-like object (that is, one with a `length` property of
|
||||
* type `number`), it is treated as if it is an array, unless it is a `Buffer` or
|
||||
* a `Uint8Array`. This means all other `TypedArray` variants get treated as an `Array`. To create a `Buffer` from the bytes backing a `TypedArray`, use `Buffer.copyBytesFrom()`.
|
||||
*
|
||||
* A `TypeError` will be thrown if `array` is not an `Array` or another type
|
||||
* appropriate for `Buffer.from()` variants.
|
||||
*
|
||||
* `Buffer.from(array)` and `Buffer.from(string)` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does.
|
||||
* @since v5.10.0
|
||||
*/
|
||||
from<TArrayBuffer extends ArrayBufferLike>(
|
||||
arrayBuffer: WithImplicitCoercion<TArrayBuffer>,
|
||||
byteOffset?: number,
|
||||
length?: number,
|
||||
): Buffer<TArrayBuffer>;
|
||||
/**
|
||||
* Creates a new Buffer using the passed {data}
|
||||
* @param data data to create a new Buffer
|
||||
*/
|
||||
from(data: Uint8Array | readonly number[]): Buffer<ArrayBuffer>;
|
||||
from(data: WithImplicitCoercion<Uint8Array | readonly number[] | string>): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Creates a new Buffer containing the given JavaScript string {str}.
|
||||
* If provided, the {encoding} parameter identifies the character encoding.
|
||||
* If not provided, {encoding} defaults to 'utf8'.
|
||||
*/
|
||||
from(
|
||||
str:
|
||||
| WithImplicitCoercion<string>
|
||||
| {
|
||||
[Symbol.toPrimitive](hint: "string"): string;
|
||||
},
|
||||
encoding?: BufferEncoding,
|
||||
): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Creates a new Buffer using the passed {data}
|
||||
* @param values to create a new Buffer
|
||||
*/
|
||||
of(...items: number[]): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Returns a new `Buffer` which is the result of concatenating all the `Buffer` instances in the `list` together.
|
||||
*
|
||||
* If the list has no items, or if the `totalLength` is 0, then a new zero-length `Buffer` is returned.
|
||||
*
|
||||
* If `totalLength` is not provided, it is calculated from the `Buffer` instances
|
||||
* in `list` by adding their lengths.
|
||||
*
|
||||
* If `totalLength` is provided, it is coerced to an unsigned integer. If the
|
||||
* combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is
|
||||
* truncated to `totalLength`.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* // Create a single `Buffer` from a list of three `Buffer` instances.
|
||||
*
|
||||
* const buf1 = Buffer.alloc(10);
|
||||
* const buf2 = Buffer.alloc(14);
|
||||
* const buf3 = Buffer.alloc(18);
|
||||
* const totalLength = buf1.length + buf2.length + buf3.length;
|
||||
*
|
||||
* console.log(totalLength);
|
||||
* // Prints: 42
|
||||
*
|
||||
* const bufA = Buffer.concat([buf1, buf2, buf3], totalLength);
|
||||
*
|
||||
* console.log(bufA);
|
||||
* // Prints: <Buffer 00 00 00 00 ...>
|
||||
* console.log(bufA.length);
|
||||
* // Prints: 42
|
||||
* ```
|
||||
*
|
||||
* `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does.
|
||||
* @since v0.7.11
|
||||
* @param list List of `Buffer` or {@link Uint8Array} instances to concatenate.
|
||||
* @param totalLength Total length of the `Buffer` instances in `list` when concatenated.
|
||||
*/
|
||||
concat(list: readonly Uint8Array[], totalLength?: number): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Copies the underlying memory of `view` into a new `Buffer`.
|
||||
*
|
||||
* ```js
|
||||
* const u16 = new Uint16Array([0, 0xffff]);
|
||||
* const buf = Buffer.copyBytesFrom(u16, 1, 1);
|
||||
* u16[1] = 0;
|
||||
* console.log(buf.length); // 2
|
||||
* console.log(buf[0]); // 255
|
||||
* console.log(buf[1]); // 255
|
||||
* ```
|
||||
* @since v19.8.0
|
||||
* @param view The {TypedArray} to copy.
|
||||
* @param [offset=0] The starting offset within `view`.
|
||||
* @param [length=view.length - offset] The number of elements from `view` to copy.
|
||||
*/
|
||||
copyBytesFrom(view: NodeJS.TypedArray, offset?: number, length?: number): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the`Buffer` will be zero-filled.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* const buf = Buffer.alloc(5);
|
||||
*
|
||||
* console.log(buf);
|
||||
* // Prints: <Buffer 00 00 00 00 00>
|
||||
* ```
|
||||
*
|
||||
* If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown.
|
||||
*
|
||||
* If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* const buf = Buffer.alloc(5, 'a');
|
||||
*
|
||||
* console.log(buf);
|
||||
* // Prints: <Buffer 61 61 61 61 61>
|
||||
* ```
|
||||
*
|
||||
* If both `fill` and `encoding` are specified, the allocated `Buffer` will be
|
||||
* initialized by calling `buf.fill(fill, encoding)`.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
|
||||
*
|
||||
* console.log(buf);
|
||||
* // Prints: <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
|
||||
* ```
|
||||
*
|
||||
* Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance
|
||||
* contents will never contain sensitive data from previous allocations, including
|
||||
* data that might not have been allocated for `Buffer`s.
|
||||
*
|
||||
* A `TypeError` will be thrown if `size` is not a number.
|
||||
* @since v5.10.0
|
||||
* @param size The desired length of the new `Buffer`.
|
||||
* @param [fill=0] A value to pre-fill the new `Buffer` with.
|
||||
* @param [encoding='utf8'] If `fill` is a string, this is its encoding.
|
||||
*/
|
||||
alloc(size: number, fill?: string | Uint8Array | number, encoding?: BufferEncoding): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown.
|
||||
*
|
||||
* The underlying memory for `Buffer` instances created in this way is _not_
|
||||
* _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize`Buffer` instances with zeroes.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* const buf = Buffer.allocUnsafe(10);
|
||||
*
|
||||
* console.log(buf);
|
||||
* // Prints (contents may vary): <Buffer a0 8b 28 3f 01 00 00 00 50 32>
|
||||
*
|
||||
* buf.fill(0);
|
||||
*
|
||||
* console.log(buf);
|
||||
* // Prints: <Buffer 00 00 00 00 00 00 00 00 00 00>
|
||||
* ```
|
||||
*
|
||||
* A `TypeError` will be thrown if `size` is not a number.
|
||||
*
|
||||
* The `Buffer` module pre-allocates an internal `Buffer` instance of
|
||||
* size `Buffer.poolSize` that is used as a pool for the fast allocation of new `Buffer` instances created using `Buffer.allocUnsafe()`, `Buffer.from(array)`,
|
||||
* and `Buffer.concat()` only when `size` is less than `Buffer.poolSize >>> 1` (floor of `Buffer.poolSize` divided by two).
|
||||
*
|
||||
* Use of this pre-allocated internal memory pool is a key difference between
|
||||
* calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
|
||||
* Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer`pool, while `Buffer.allocUnsafe(size).fill(fill)`_will_ use the internal`Buffer` pool if `size` is less
|
||||
* than or equal to half `Buffer.poolSize`. The
|
||||
* difference is subtle but can be important when an application requires the
|
||||
* additional performance that `Buffer.allocUnsafe()` provides.
|
||||
* @since v5.10.0
|
||||
* @param size The desired length of the new `Buffer`.
|
||||
*/
|
||||
allocUnsafe(size: number): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. A zero-length `Buffer` is created if
|
||||
* `size` is 0.
|
||||
*
|
||||
* The underlying memory for `Buffer` instances created in this way is _not_
|
||||
* _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize
|
||||
* such `Buffer` instances with zeroes.
|
||||
*
|
||||
* When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
|
||||
* allocations under 4 KiB are sliced from a single pre-allocated `Buffer`. This
|
||||
* allows applications to avoid the garbage collection overhead of creating many
|
||||
* individually allocated `Buffer` instances. This approach improves both
|
||||
* performance and memory usage by eliminating the need to track and clean up as
|
||||
* many individual `ArrayBuffer` objects.
|
||||
*
|
||||
* However, in the case where a developer may need to retain a small chunk of
|
||||
* memory from a pool for an indeterminate amount of time, it may be appropriate
|
||||
* to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and
|
||||
* then copying out the relevant bits.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* // Need to keep around a few small chunks of memory.
|
||||
* const store = [];
|
||||
*
|
||||
* socket.on('readable', () => {
|
||||
* let data;
|
||||
* while (null !== (data = readable.read())) {
|
||||
* // Allocate for retained data.
|
||||
* const sb = Buffer.allocUnsafeSlow(10);
|
||||
*
|
||||
* // Copy the data into the new allocation.
|
||||
* data.copy(sb, 0, 0, 10);
|
||||
*
|
||||
* store.push(sb);
|
||||
* }
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* A `TypeError` will be thrown if `size` is not a number.
|
||||
* @since v5.12.0
|
||||
* @param size The desired length of the new `Buffer`.
|
||||
*/
|
||||
allocUnsafeSlow(size: number): Buffer<ArrayBuffer>;
|
||||
}
|
||||
interface Buffer<TArrayBuffer extends ArrayBufferLike = ArrayBufferLike> extends Uint8Array<TArrayBuffer> {
|
||||
// see buffer.d.ts for implementation shared with all TypeScript versions
|
||||
|
||||
/**
|
||||
* Returns a new `Buffer` that references the same memory as the original, but
|
||||
* offset and cropped by the `start` and `end` indices.
|
||||
*
|
||||
* This method is not compatible with the `Uint8Array.prototype.slice()`,
|
||||
* which is a superclass of `Buffer`. To copy the slice, use`Uint8Array.prototype.slice()`.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* const buf = Buffer.from('buffer');
|
||||
*
|
||||
* const copiedBuf = Uint8Array.prototype.slice.call(buf);
|
||||
* copiedBuf[0]++;
|
||||
* console.log(copiedBuf.toString());
|
||||
* // Prints: cuffer
|
||||
*
|
||||
* console.log(buf.toString());
|
||||
* // Prints: buffer
|
||||
*
|
||||
* // With buf.slice(), the original buffer is modified.
|
||||
* const notReallyCopiedBuf = buf.slice();
|
||||
* notReallyCopiedBuf[0]++;
|
||||
* console.log(notReallyCopiedBuf.toString());
|
||||
* // Prints: cuffer
|
||||
* console.log(buf.toString());
|
||||
* // Also prints: cuffer (!)
|
||||
* ```
|
||||
* @since v0.3.0
|
||||
* @deprecated Use `subarray` instead.
|
||||
* @param [start=0] Where the new `Buffer` will start.
|
||||
* @param [end=buf.length] Where the new `Buffer` will end (not inclusive).
|
||||
*/
|
||||
slice(start?: number, end?: number): Buffer<ArrayBuffer>;
|
||||
/**
|
||||
* Returns a new `Buffer` that references the same memory as the original, but
|
||||
* offset and cropped by the `start` and `end` indices.
|
||||
*
|
||||
* Specifying `end` greater than `buf.length` will return the same result as
|
||||
* that of `end` equal to `buf.length`.
|
||||
*
|
||||
* This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray).
|
||||
*
|
||||
* Modifying the new `Buffer` slice will modify the memory in the original `Buffer`because the allocated memory of the two objects overlap.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte
|
||||
* // from the original `Buffer`.
|
||||
*
|
||||
* const buf1 = Buffer.allocUnsafe(26);
|
||||
*
|
||||
* for (let i = 0; i < 26; i++) {
|
||||
* // 97 is the decimal ASCII value for 'a'.
|
||||
* buf1[i] = i + 97;
|
||||
* }
|
||||
*
|
||||
* const buf2 = buf1.subarray(0, 3);
|
||||
*
|
||||
* console.log(buf2.toString('ascii', 0, buf2.length));
|
||||
* // Prints: abc
|
||||
*
|
||||
* buf1[0] = 33;
|
||||
*
|
||||
* console.log(buf2.toString('ascii', 0, buf2.length));
|
||||
* // Prints: !bc
|
||||
* ```
|
||||
*
|
||||
* Specifying negative indexes causes the slice to be generated relative to the
|
||||
* end of `buf` rather than the beginning.
|
||||
*
|
||||
* ```js
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* const buf = Buffer.from('buffer');
|
||||
*
|
||||
* console.log(buf.subarray(-6, -1).toString());
|
||||
* // Prints: buffe
|
||||
* // (Equivalent to buf.subarray(0, 5).)
|
||||
*
|
||||
* console.log(buf.subarray(-6, -2).toString());
|
||||
* // Prints: buff
|
||||
* // (Equivalent to buf.subarray(0, 4).)
|
||||
*
|
||||
* console.log(buf.subarray(-5, -2).toString());
|
||||
* // Prints: uff
|
||||
* // (Equivalent to buf.subarray(1, 4).)
|
||||
* ```
|
||||
* @since v3.0.0
|
||||
* @param [start=0] Where the new `Buffer` will start.
|
||||
* @param [end=buf.length] Where the new `Buffer` will end (not inclusive).
|
||||
*/
|
||||
subarray(start?: number, end?: number): Buffer<TArrayBuffer>;
|
||||
}
|
||||
}
|
||||
}
|
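As a brief illustration of the behaviour the comments in the typings above describe (my sketch, not part of the vendored file): `subarray` (like the deprecated `slice`) shares memory with the source buffer, while `Uint8Array.prototype.slice` copies the bytes.

```ts
import { Buffer } from "node:buffer";

export function demoSharing(): { shared: string; copied: string } {
  const buf = Buffer.from("buffer");

  const view = buf.subarray(0, 3); // shares memory with buf
  const copy = Uint8Array.prototype.slice.call(buf, 0, 3); // copies the bytes

  buf[0] = 0x63; // write 'c' into the original

  return {
    shared: view.toString(), // "cuf" — the view saw the write
    copied: Buffer.from(copy).toString(), // "buf" — the copy did not
  };
}
```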
1933
node_modules/@types/node/buffer.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
1549
node_modules/@types/node/child_process.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
579
node_modules/@types/node/cluster.d.ts
generated
vendored
Normal file
@ -0,0 +1,579 @@
/**
|
||||
* Clusters of Node.js processes can be used to run multiple instances of Node.js
|
||||
* that can distribute workloads among their application threads. When process isolation
|
||||
* is not needed, use the [`worker_threads`](https://nodejs.org/docs/latest-v22.x/api/worker_threads.html)
|
||||
* module instead, which allows running multiple application threads within a single Node.js instance.
|
||||
*
|
||||
* The cluster module allows easy creation of child processes that all share
|
||||
* server ports.
|
||||
*
|
||||
* ```js
|
||||
* import cluster from 'node:cluster';
|
||||
* import http from 'node:http';
|
||||
* import { availableParallelism } from 'node:os';
|
||||
* import process from 'node:process';
|
||||
*
|
||||
* const numCPUs = availableParallelism();
|
||||
*
|
||||
* if (cluster.isPrimary) {
|
||||
* console.log(`Primary ${process.pid} is running`);
|
||||
*
|
||||
* // Fork workers.
|
||||
* for (let i = 0; i < numCPUs; i++) {
|
||||
* cluster.fork();
|
||||
* }
|
||||
*
|
||||
* cluster.on('exit', (worker, code, signal) => {
|
||||
* console.log(`worker ${worker.process.pid} died`);
|
||||
* });
|
||||
* } else {
|
||||
* // Workers can share any TCP connection
|
||||
* // In this case it is an HTTP server
|
||||
* http.createServer((req, res) => {
|
||||
* res.writeHead(200);
|
||||
* res.end('hello world\n');
|
||||
* }).listen(8000);
|
||||
*
|
||||
* console.log(`Worker ${process.pid} started`);
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* Running Node.js will now share port 8000 between the workers:
|
||||
*
|
||||
* ```console
|
||||
* $ node server.js
|
||||
* Primary 3596 is running
|
||||
* Worker 4324 started
|
||||
* Worker 4520 started
|
||||
* Worker 6056 started
|
||||
* Worker 5644 started
|
||||
* ```
|
||||
*
|
||||
* On Windows, it is not yet possible to set up a named pipe server in a worker.
|
||||
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/cluster.js)
|
||||
*/
|
||||
declare module "cluster" {
|
||||
import * as child from "node:child_process";
|
||||
import EventEmitter = require("node:events");
|
||||
import * as net from "node:net";
|
||||
type SerializationType = "json" | "advanced";
|
||||
export interface ClusterSettings {
|
||||
/**
|
||||
* List of string arguments passed to the Node.js executable.
|
||||
* @default process.execArgv
|
||||
*/
|
||||
execArgv?: string[] | undefined;
|
||||
/**
|
||||
* File path to worker file.
|
||||
* @default process.argv[1]
|
||||
*/
|
||||
exec?: string | undefined;
|
||||
/**
|
||||
* String arguments passed to worker.
|
||||
* @default process.argv.slice(2)
|
||||
*/
|
||||
args?: string[] | undefined;
|
||||
/**
|
||||
* Whether or not to send output to parent's stdio.
|
||||
* @default false
|
||||
*/
|
||||
silent?: boolean | undefined;
|
||||
/**
|
||||
* Configures the stdio of forked processes. Because the cluster module relies on IPC to function, this configuration must
|
||||
* contain an `'ipc'` entry. When this option is provided, it overrides `silent`. See [`child_process.spawn()`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#child_processspawncommand-args-options)'s
|
||||
* [`stdio`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#optionsstdio).
|
||||
*/
|
||||
stdio?: any[] | undefined;
|
||||
/**
|
||||
* Sets the user identity of the process. (See [`setuid(2)`](https://man7.org/linux/man-pages/man2/setuid.2.html).)
|
||||
*/
|
||||
uid?: number | undefined;
|
||||
/**
|
||||
* Sets the group identity of the process. (See [`setgid(2)`](https://man7.org/linux/man-pages/man2/setgid.2.html).)
|
||||
*/
|
||||
gid?: number | undefined;
|
||||
/**
|
||||
* Sets inspector port of worker. This can be a number, or a function that takes no arguments and returns a number.
|
||||
* By default each worker gets its own port, incremented from the primary's `process.debugPort`.
|
||||
*/
|
||||
inspectPort?: number | (() => number) | undefined;
|
||||
/**
|
||||
* Specify the kind of serialization used for sending messages between processes. Possible values are `'json'` and `'advanced'`.
|
||||
* See [Advanced serialization for `child_process`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#advanced-serialization) for more details.
|
||||
* @default false
|
||||
*/
|
||||
serialization?: SerializationType | undefined;
|
||||
/**
|
||||
* Current working directory of the worker process.
|
||||
* @default undefined (inherits from parent process)
|
||||
*/
|
||||
cwd?: string | undefined;
|
||||
/**
|
||||
* Hide the forked processes console window that would normally be created on Windows systems.
|
||||
* @default false
|
||||
*/
|
||||
windowsHide?: boolean | undefined;
|
||||
}
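A minimal sketch of how the `ClusterSettings` options above might be passed to `cluster.setupPrimary()`; the `worker.js` entry file and the inspector port range are assumptions made for illustration:

```js
import cluster from 'node:cluster';

let nextInspectPort = 9230; // assumed free port range for worker inspectors
cluster.setupPrimary({
  exec: 'worker.js',                     // assumed worker entry file
  args: ['--use', 'http'],
  serialization: 'advanced',             // structured-clone based IPC messages
  inspectPort: () => nextInspectPort++,  // give each fork its own inspector port
});
cluster.fork();
cluster.fork();
```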
|
||||
export interface Address {
|
||||
address: string;
|
||||
port: number;
|
||||
/**
|
||||
* The `addressType` is one of:
|
||||
*
|
||||
* * `4` (TCPv4)
|
||||
* * `6` (TCPv6)
|
||||
* * `-1` (Unix domain socket)
|
||||
* * `'udp4'` or `'udp6'` (UDPv4 or UDPv6)
|
||||
*/
|
||||
addressType: 4 | 6 | -1 | "udp4" | "udp6";
|
||||
}
|
||||
/**
|
||||
* A `Worker` object contains all public information and method about a worker.
|
||||
* In the primary it can be obtained using `cluster.workers`. In a worker
|
||||
* it can be obtained using `cluster.worker`.
|
||||
* @since v0.7.0
|
||||
*/
|
||||
export class Worker extends EventEmitter {
|
||||
/**
|
||||
* Each new worker is given its own unique id, this id is stored in the `id`.
|
||||
*
|
||||
* While a worker is alive, this is the key that indexes it in `cluster.workers`.
|
||||
* @since v0.8.0
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* All workers are created using [`child_process.fork()`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#child_processforkmodulepath-args-options), the returned object
|
||||
* from this function is stored as `.process`. In a worker, the global `process` is stored.
|
||||
*
|
||||
* See: [Child Process module](https://nodejs.org/docs/latest-v22.x/api/child_process.html#child_processforkmodulepath-args-options).
|
||||
*
|
||||
* Workers will call `process.exit(0)` if the `'disconnect'` event occurs
|
||||
* on `process` and `.exitedAfterDisconnect` is not `true`. This protects against
|
||||
* accidental disconnection.
|
||||
* @since v0.7.0
|
||||
*/
|
||||
process: child.ChildProcess;
|
||||
/**
|
||||
* Send a message to a worker or primary, optionally with a handle.
|
||||
*
|
||||
* In the primary, this sends a message to a specific worker. It is identical to [`ChildProcess.send()`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#subprocesssendmessage-sendhandle-options-callback).
|
||||
*
|
||||
* In a worker, this sends a message to the primary. It is identical to `process.send()`.
|
||||
*
|
||||
* This example will echo back all messages from the primary:
|
||||
*
|
||||
* ```js
|
||||
* if (cluster.isPrimary) {
|
||||
* const worker = cluster.fork();
|
||||
* worker.send('hi there');
|
||||
*
|
||||
* } else if (cluster.isWorker) {
|
||||
* process.on('message', (msg) => {
|
||||
* process.send(msg);
|
||||
* });
|
||||
* }
|
||||
* ```
|
||||
* @since v0.7.0
|
||||
* @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles.
|
||||
*/
|
||||
send(message: child.Serializable, callback?: (error: Error | null) => void): boolean;
|
||||
send(
|
||||
message: child.Serializable,
|
||||
sendHandle: child.SendHandle,
|
||||
callback?: (error: Error | null) => void,
|
||||
): boolean;
|
||||
send(
|
||||
message: child.Serializable,
|
||||
sendHandle: child.SendHandle,
|
||||
options?: child.MessageOptions,
|
||||
callback?: (error: Error | null) => void,
|
||||
): boolean;
|
||||
/**
|
||||
* This function will kill the worker. In the primary worker, it does this by
|
||||
* disconnecting the `worker.process`, and once disconnected, killing with `signal`. In the worker, it does it by killing the process with `signal`.
|
||||
*
|
||||
* The `kill()` function kills the worker process without waiting for a graceful
|
||||
* disconnect, it has the same behavior as `worker.process.kill()`.
|
||||
*
|
||||
* This method is aliased as `worker.destroy()` for backwards compatibility.
|
||||
*
|
||||
* In a worker, `process.kill()` exists, but it is not this function;
|
||||
* it is [`kill()`](https://nodejs.org/docs/latest-v22.x/api/process.html#processkillpid-signal).
|
||||
* @since v0.9.12
|
||||
* @param [signal='SIGTERM'] Name of the kill signal to send to the worker process.
|
||||
*/
|
||||
kill(signal?: string): void;
|
||||
destroy(signal?: string): void;
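A small, hedged sketch of `worker.kill()` from the primary side; the 5-second deadline is an arbitrary illustration:

```js
import cluster from 'node:cluster';

if (cluster.isPrimary) {
  const worker = cluster.fork();
  // Force-kill the worker if it is still alive after 5 seconds.
  setTimeout(() => {
    if (!worker.isDead()) {
      worker.kill('SIGKILL'); // equivalent to worker.process.kill('SIGKILL')
    }
  }, 5000);
}
```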
|
||||
/**
|
||||
* In a worker, this function will close all servers, wait for the `'close'` event
|
||||
* on those servers, and then disconnect the IPC channel.
|
||||
*
|
||||
* In the primary, an internal message is sent to the worker causing it to call `.disconnect()` on itself.
|
||||
*
|
||||
* Causes `.exitedAfterDisconnect` to be set.
|
||||
*
|
||||
* After a server is closed, it will no longer accept new connections,
|
||||
* but connections may be accepted by any other listening worker. Existing
|
||||
* connections will be allowed to close as usual. When no more connections exist,
|
||||
* see `server.close()`, the IPC channel to the worker will close allowing it
|
||||
* to die gracefully.
|
||||
*
|
||||
* The above applies _only_ to server connections, client connections are not
|
||||
* automatically closed by workers, and disconnect does not wait for them to close
|
||||
* before exiting.
|
||||
*
|
||||
* In a worker, `process.disconnect` exists, but it is not this function;
|
||||
* it is `disconnect()`.
|
||||
*
|
||||
* Because long living server connections may block workers from disconnecting, it
|
||||
* may be useful to send a message, so application specific actions may be taken to
|
||||
* close them. It also may be useful to implement a timeout, killing a worker if
|
||||
* the `'disconnect'` event has not been emitted after some time.
|
||||
*
|
||||
* ```js
|
||||
* import net from 'node:net';
|
||||
*
|
||||
* if (cluster.isPrimary) {
|
||||
* const worker = cluster.fork();
|
||||
* let timeout;
|
||||
*
|
||||
* worker.on('listening', (address) => {
|
||||
* worker.send('shutdown');
|
||||
* worker.disconnect();
|
||||
* timeout = setTimeout(() => {
|
||||
* worker.kill();
|
||||
* }, 2000);
|
||||
* });
|
||||
*
|
||||
* worker.on('disconnect', () => {
|
||||
* clearTimeout(timeout);
|
||||
* });
|
||||
*
|
||||
* } else if (cluster.isWorker) {
|
||||
* const server = net.createServer((socket) => {
|
||||
* // Connections never end
|
||||
* });
|
||||
*
|
||||
* server.listen(8000);
|
||||
*
|
||||
* process.on('message', (msg) => {
|
||||
* if (msg === 'shutdown') {
|
||||
* // Initiate graceful close of any connections to server
|
||||
* }
|
||||
* });
|
||||
* }
|
||||
* ```
|
||||
* @since v0.7.7
|
||||
* @return A reference to `worker`.
|
||||
*/
|
||||
disconnect(): void;
|
||||
/**
|
||||
* This function returns `true` if the worker is connected to its primary via its
|
||||
* IPC channel, `false` otherwise. A worker is connected to its primary after it
|
||||
* has been created. It is disconnected after the `'disconnect'` event is emitted.
|
||||
* @since v0.11.14
|
||||
*/
|
||||
isConnected(): boolean;
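For illustration, a minimal check of `isConnected()` before and after a graceful disconnect:

```js
import cluster from 'node:cluster';

if (cluster.isPrimary) {
  const worker = cluster.fork();
  console.log(worker.isConnected()); // true: the IPC channel exists after fork()
  worker.on('disconnect', () => {
    console.log(worker.isConnected()); // false: the IPC channel has closed
  });
  worker.disconnect();
}
```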
|
||||
/**
|
||||
* This function returns `true` if the worker's process has terminated (either
|
||||
* because of exiting or being signaled). Otherwise, it returns `false`.
|
||||
*
|
||||
* ```js
|
||||
* import cluster from 'node:cluster';
|
||||
* import http from 'node:http';
|
||||
* import { availableParallelism } from 'node:os';
|
||||
* import process from 'node:process';
|
||||
*
|
||||
* const numCPUs = availableParallelism();
|
||||
*
|
||||
* if (cluster.isPrimary) {
|
||||
* console.log(`Primary ${process.pid} is running`);
|
||||
*
|
||||
* // Fork workers.
|
||||
* for (let i = 0; i < numCPUs; i++) {
|
||||
* cluster.fork();
|
||||
* }
|
||||
*
|
||||
* cluster.on('fork', (worker) => {
|
||||
* console.log('worker is dead:', worker.isDead());
|
||||
* });
|
||||
*
|
||||
* cluster.on('exit', (worker, code, signal) => {
|
||||
* console.log('worker is dead:', worker.isDead());
|
||||
* });
|
||||
* } else {
|
||||
* // Workers can share any TCP connection. In this case, it is an HTTP server.
|
||||
* http.createServer((req, res) => {
|
||||
* res.writeHead(200);
|
||||
* res.end(`Current process\n ${process.pid}`);
|
||||
* process.kill(process.pid);
|
||||
* }).listen(8000);
|
||||
* }
|
||||
* ```
|
||||
* @since v0.11.14
|
||||
*/
|
||||
isDead(): boolean;
|
||||
/**
|
||||
* This property is `true` if the worker exited due to `.disconnect()`.
|
||||
* If the worker exited any other way, it is `false`. If the
|
||||
* worker has not exited, it is `undefined`.
|
||||
*
|
||||
* The boolean `worker.exitedAfterDisconnect` allows distinguishing between
|
||||
* voluntary and accidental exit, the primary may choose not to respawn a worker
|
||||
* based on this value.
|
||||
*
|
||||
* ```js
|
||||
* cluster.on('exit', (worker, code, signal) => {
|
||||
* if (worker.exitedAfterDisconnect === true) {
|
||||
* console.log('Oh, it was just voluntary – no need to worry');
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* // kill worker
|
||||
* worker.kill();
|
||||
* ```
|
||||
* @since v6.0.0
|
||||
*/
|
||||
exitedAfterDisconnect: boolean;
|
||||
/**
|
||||
* events.EventEmitter
|
||||
* 1. disconnect
|
||||
* 2. error
|
||||
* 3. exit
|
||||
* 4. listening
|
||||
* 5. message
|
||||
* 6. online
|
||||
*/
|
||||
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||
addListener(event: "disconnect", listener: () => void): this;
|
||||
addListener(event: "error", listener: (error: Error) => void): this;
|
||||
addListener(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||
addListener(event: "listening", listener: (address: Address) => void): this;
|
||||
addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
addListener(event: "online", listener: () => void): this;
|
||||
emit(event: string | symbol, ...args: any[]): boolean;
|
||||
emit(event: "disconnect"): boolean;
|
||||
emit(event: "error", error: Error): boolean;
|
||||
emit(event: "exit", code: number, signal: string): boolean;
|
||||
emit(event: "listening", address: Address): boolean;
|
||||
emit(event: "message", message: any, handle: net.Socket | net.Server): boolean;
|
||||
emit(event: "online"): boolean;
|
||||
on(event: string, listener: (...args: any[]) => void): this;
|
||||
on(event: "disconnect", listener: () => void): this;
|
||||
on(event: "error", listener: (error: Error) => void): this;
|
||||
on(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||
on(event: "listening", listener: (address: Address) => void): this;
|
||||
on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
on(event: "online", listener: () => void): this;
|
||||
once(event: string, listener: (...args: any[]) => void): this;
|
||||
once(event: "disconnect", listener: () => void): this;
|
||||
once(event: "error", listener: (error: Error) => void): this;
|
||||
once(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||
once(event: "listening", listener: (address: Address) => void): this;
|
||||
once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
once(event: "online", listener: () => void): this;
|
||||
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependListener(event: "disconnect", listener: () => void): this;
|
||||
prependListener(event: "error", listener: (error: Error) => void): this;
|
||||
prependListener(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||
prependListener(event: "listening", listener: (address: Address) => void): this;
|
||||
prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
prependListener(event: "online", listener: () => void): this;
|
||||
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependOnceListener(event: "disconnect", listener: () => void): this;
|
||||
prependOnceListener(event: "error", listener: (error: Error) => void): this;
|
||||
prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||
prependOnceListener(event: "listening", listener: (address: Address) => void): this;
|
||||
prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
prependOnceListener(event: "online", listener: () => void): this;
|
||||
}
|
||||
export interface Cluster extends EventEmitter {
|
||||
disconnect(callback?: () => void): void;
|
||||
/**
|
||||
* Spawn a new worker process.
|
||||
*
|
||||
* This can only be called from the primary process.
|
||||
* @param env Key/value pairs to add to worker process environment.
|
||||
* @since v0.6.0
|
||||
*/
|
||||
fork(env?: any): Worker;
|
||||
/** @deprecated since v16.0.0 - use isPrimary. */
|
||||
readonly isMaster: boolean;
|
||||
/**
|
||||
* True if the process is a primary. This is determined by the `process.env.NODE_UNIQUE_ID`. If `process.env.NODE_UNIQUE_ID`
|
||||
* is undefined, then `isPrimary` is `true`.
|
||||
* @since v16.0.0
|
||||
*/
|
||||
readonly isPrimary: boolean;
|
||||
/**
|
||||
* True if the process is not a primary (it is the negation of `cluster.isPrimary`).
|
||||
* @since v0.6.0
|
||||
*/
|
||||
readonly isWorker: boolean;
|
||||
/**
|
||||
* The scheduling policy, either `cluster.SCHED_RR` for round-robin or `cluster.SCHED_NONE` to leave it to the operating system. This is a
|
||||
* global setting and effectively frozen once either the first worker is spawned, or [`.setupPrimary()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clustersetupprimarysettings)
|
||||
* is called, whichever comes first.
|
||||
*
|
||||
* `SCHED_RR` is the default on all operating systems except Windows. Windows will change to `SCHED_RR` once libuv is able to effectively distribute
|
||||
* IOCP handles without incurring a large performance hit.
|
||||
*
|
||||
* `cluster.schedulingPolicy` can also be set through the `NODE_CLUSTER_SCHED_POLICY` environment variable. Valid values are `'rr'` and `'none'`.
|
||||
* @since v0.11.2
|
||||
*/
|
||||
schedulingPolicy: number;
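A sketch of selecting the scheduling policy; it must be assigned before the first worker is forked (or `NODE_CLUSTER_SCHED_POLICY=none` can be set in the environment instead):

```js
import cluster from 'node:cluster';

cluster.schedulingPolicy = cluster.SCHED_NONE; // let the operating system distribute connections

if (cluster.isPrimary) {
  cluster.fork();
}
```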
|
||||
/**
|
||||
* After calling [`.setupPrimary()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clustersetupprimarysettings)
|
||||
* (or [`.fork()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clusterforkenv)) this settings object will contain
|
||||
* the settings, including the default values.
|
||||
*
|
||||
* This object is not intended to be changed or set manually.
|
||||
* @since v0.7.1
|
||||
*/
|
||||
readonly settings: ClusterSettings;
|
||||
/** @deprecated since v16.0.0 - use [`.setupPrimary()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clustersetupprimarysettings) instead. */
|
||||
setupMaster(settings?: ClusterSettings): void;
|
||||
/**
|
||||
* `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in `cluster.settings`.
|
||||
*
|
||||
* Any settings changes only affect future calls to [`.fork()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clusterforkenv)
|
||||
* and have no effect on workers that are already running.
|
||||
*
|
||||
* The only attribute of a worker that cannot be set via `.setupPrimary()` is the `env` passed to
|
||||
* [`.fork()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clusterforkenv).
|
||||
*
|
||||
* The defaults above apply to the first call only; the defaults for later calls are the current values at the time
* `cluster.setupPrimary()` is called.
|
||||
*
|
||||
* ```js
|
||||
* import cluster from 'node:cluster';
|
||||
*
|
||||
* cluster.setupPrimary({
|
||||
* exec: 'worker.js',
|
||||
* args: ['--use', 'https'],
|
||||
* silent: true,
|
||||
* });
|
||||
* cluster.fork(); // https worker
|
||||
* cluster.setupPrimary({
|
||||
* exec: 'worker.js',
|
||||
* args: ['--use', 'http'],
|
||||
* });
|
||||
* cluster.fork(); // http worker
|
||||
* ```
|
||||
*
|
||||
* This can only be called from the primary process.
|
||||
* @since v16.0.0
|
||||
*/
|
||||
setupPrimary(settings?: ClusterSettings): void;
|
||||
/**
|
||||
* A reference to the current worker object. Not available in the primary process.
|
||||
*
|
||||
* ```js
|
||||
* import cluster from 'node:cluster';
|
||||
*
|
||||
* if (cluster.isPrimary) {
|
||||
* console.log('I am primary');
|
||||
* cluster.fork();
|
||||
* cluster.fork();
|
||||
* } else if (cluster.isWorker) {
|
||||
* console.log(`I am worker #${cluster.worker.id}`);
|
||||
* }
|
||||
* ```
|
||||
* @since v0.7.0
|
||||
*/
|
||||
readonly worker?: Worker | undefined;
|
||||
/**
|
||||
* A hash that stores the active worker objects, keyed by `id` field. This makes it easy to loop through all the workers. It is only available in the primary process.
|
||||
*
|
||||
* A worker is removed from `cluster.workers` after the worker has disconnected _and_ exited. The order between these two events cannot be determined in advance. However, it
|
||||
* is guaranteed that the removal from the `cluster.workers` list happens before the last `'disconnect'` or `'exit'` event is emitted.
|
||||
*
|
||||
* ```js
|
||||
* import cluster from 'node:cluster';
|
||||
*
|
||||
* for (const worker of Object.values(cluster.workers)) {
|
||||
* worker.send('big announcement to all workers');
|
||||
* }
|
||||
* ```
|
||||
* @since v0.7.0
|
||||
*/
|
||||
readonly workers?: NodeJS.Dict<Worker> | undefined;
|
||||
readonly SCHED_NONE: number;
|
||||
readonly SCHED_RR: number;
|
||||
/**
|
||||
* events.EventEmitter
|
||||
* 1. disconnect
|
||||
* 2. exit
|
||||
* 3. fork
|
||||
* 4. listening
|
||||
* 5. message
|
||||
* 6. online
|
||||
* 7. setup
|
||||
*/
|
||||
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||
addListener(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||
addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||
addListener(event: "fork", listener: (worker: Worker) => void): this;
|
||||
addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||
addListener(
|
||||
event: "message",
|
||||
listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
|
||||
): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
addListener(event: "online", listener: (worker: Worker) => void): this;
|
||||
addListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||
emit(event: string | symbol, ...args: any[]): boolean;
|
||||
emit(event: "disconnect", worker: Worker): boolean;
|
||||
emit(event: "exit", worker: Worker, code: number, signal: string): boolean;
|
||||
emit(event: "fork", worker: Worker): boolean;
|
||||
emit(event: "listening", worker: Worker, address: Address): boolean;
|
||||
emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean;
|
||||
emit(event: "online", worker: Worker): boolean;
|
||||
emit(event: "setup", settings: ClusterSettings): boolean;
|
||||
on(event: string, listener: (...args: any[]) => void): this;
|
||||
on(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||
on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||
on(event: "fork", listener: (worker: Worker) => void): this;
|
||||
on(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||
on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
on(event: "online", listener: (worker: Worker) => void): this;
|
||||
on(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||
once(event: string, listener: (...args: any[]) => void): this;
|
||||
once(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||
once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||
once(event: "fork", listener: (worker: Worker) => void): this;
|
||||
once(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||
once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
once(event: "online", listener: (worker: Worker) => void): this;
|
||||
once(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependListener(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||
prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||
prependListener(event: "fork", listener: (worker: Worker) => void): this;
|
||||
prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||
// the handle is a net.Socket or net.Server object, or undefined.
|
||||
prependListener(
|
||||
event: "message",
|
||||
listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void,
|
||||
): this;
|
||||
prependListener(event: "online", listener: (worker: Worker) => void): this;
|
||||
prependListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this;
|
||||
prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
|
||||
prependOnceListener(event: "fork", listener: (worker: Worker) => void): this;
|
||||
prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
|
||||
// the handle is a net.Socket or net.Server object, or undefined.
|
||||
prependOnceListener(
|
||||
event: "message",
|
||||
listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
|
||||
): this;
|
||||
prependOnceListener(event: "online", listener: (worker: Worker) => void): this;
|
||||
prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
|
||||
}
|
||||
const cluster: Cluster;
|
||||
export default cluster;
|
||||
}
|
||||
declare module "node:cluster" {
|
||||
export * from "cluster";
|
||||
export { default as default } from "cluster";
|
||||
}
|
16
node_modules/@types/node/compatibility/disposable.d.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
// Polyfills for the explicit resource management types added in TypeScript 5.2.
// TODO: remove once this package no longer supports TS 5.1, and replace with a
// <reference> to TypeScript's disposable library in index.d.ts.

interface SymbolConstructor {
    readonly dispose: unique symbol;
    readonly asyncDispose: unique symbol;
}

interface Disposable {
    [Symbol.dispose](): void;
}

interface AsyncDisposable {
    [Symbol.asyncDispose](): PromiseLike<void>;
}
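For illustration, an object satisfying the `Disposable` shape declared above, assuming a runtime where `Symbol.dispose` exists (Node.js 20.4+):

```js
const tempHandle = {
  [Symbol.dispose]() {
    console.log('cleaned up'); // release files, sockets, locks, ...
  },
};

// With TypeScript 5.2+ and explicit resource management, `using` would invoke
// [Symbol.dispose]() automatically at the end of the scope; here it is called manually.
tempHandle[Symbol.dispose]();
```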
9
node_modules/@types/node/compatibility/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,9 @@
// Declaration files in this directory contain types relating to TypeScript library features
// that are not included in all TypeScript versions supported by DefinitelyTyped, but
// which can be made backwards-compatible without needing `typesVersions`.
// If adding declarations to this directory, please specify which versions of TypeScript require them,
// so that they can be removed when no longer needed.

/// <reference path="disposable.d.ts" />
/// <reference path="indexable.d.ts" />
/// <reference path="iterators.d.ts" />
23
node_modules/@types/node/compatibility/indexable.d.ts
generated
vendored
Normal file
@ -0,0 +1,23 @@
// Polyfill for ES2022's .at() method on string/array prototypes, added to TypeScript in 4.6.
// TODO: these methods are not used within @types/node, and should be removed at the next
// major @types/node version; users should include the es2022 TypeScript libraries
// if they need these features.

interface RelativeIndexable<T> {
    at(index: number): T | undefined;
}

interface String extends RelativeIndexable<string> {}
interface Array<T> extends RelativeIndexable<T> {}
interface ReadonlyArray<T> extends RelativeIndexable<T> {}
interface Int8Array extends RelativeIndexable<number> {}
interface Uint8Array extends RelativeIndexable<number> {}
interface Uint8ClampedArray extends RelativeIndexable<number> {}
interface Int16Array extends RelativeIndexable<number> {}
interface Uint16Array extends RelativeIndexable<number> {}
interface Int32Array extends RelativeIndexable<number> {}
interface Uint32Array extends RelativeIndexable<number> {}
interface Float32Array extends RelativeIndexable<number> {}
interface Float64Array extends RelativeIndexable<number> {}
interface BigInt64Array extends RelativeIndexable<bigint> {}
interface BigUint64Array extends RelativeIndexable<bigint> {}
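The `RelativeIndexable` polyfill above mirrors the ES2022 `.at()` method, which accepts negative indexes counted from the end:

```js
const letters = ['a', 'b', 'c'];
console.log(letters.at(-1)); // 'c'
console.log('buffer'.at(0)); // 'b'
console.log(new Uint8Array([1, 2, 3]).at(-2)); // 2
```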
21
node_modules/@types/node/compatibility/iterators.d.ts
generated
vendored
Normal file
@ -0,0 +1,21 @@
// Backwards-compatible iterator interfaces, augmented with iterator helper methods by lib.esnext.iterator in TypeScript 5.6.
// The IterableIterator interface does not contain these methods, which creates assignability issues in places where IteratorObjects
// are expected (e.g. DOM-compatible APIs) if lib.esnext.iterator is loaded.
// Also ensures that iterators returned by the Node API, which inherit from Iterator.prototype, correctly expose the iterator helper methods
// if lib.esnext.iterator is loaded.
// TODO: remove once this package no longer supports TS 5.5, and replace NodeJS.BuiltinIteratorReturn with BuiltinIteratorReturn.

// Placeholders for TS <5.6
interface IteratorObject<T, TReturn, TNext> {}
interface AsyncIteratorObject<T, TReturn, TNext> {}

declare namespace NodeJS {
    // Populate iterator methods for TS <5.6
    interface Iterator<T, TReturn, TNext> extends globalThis.Iterator<T, TReturn, TNext> {}
    interface AsyncIterator<T, TReturn, TNext> extends globalThis.AsyncIterator<T, TReturn, TNext> {}

    // Polyfill for TS 5.6's intrinsic BuiltinIteratorReturn type, required for DOM-compatible iterators
    type BuiltinIteratorReturn = ReturnType<any[][typeof Symbol.iterator]> extends
        globalThis.Iterator<any, infer TReturn> ? TReturn
        : any;
}
452
node_modules/@types/node/console.d.ts
generated
vendored
Normal file
@ -0,0 +1,452 @@
|
||||
/**
|
||||
* The `node:console` module provides a simple debugging console that is similar to
|
||||
* the JavaScript console mechanism provided by web browsers.
|
||||
*
|
||||
* The module exports two specific components:
|
||||
*
|
||||
* * A `Console` class with methods such as `console.log()`, `console.error()`, and `console.warn()` that can be used to write to any Node.js stream.
|
||||
* * A global `console` instance configured to write to [`process.stdout`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstdout) and
|
||||
* [`process.stderr`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstderr). The global `console` can be used without importing the `node:console` module.
|
||||
*
|
||||
* _**Warning**_: The global console object's methods are neither consistently
|
||||
* synchronous like the browser APIs they resemble, nor are they consistently
|
||||
* asynchronous like all other Node.js streams. See the [`note on process I/O`](https://nodejs.org/docs/latest-v22.x/api/process.html#a-note-on-process-io) for
|
||||
* more information.
|
||||
*
|
||||
* Example using the global `console`:
|
||||
*
|
||||
* ```js
|
||||
* console.log('hello world');
|
||||
* // Prints: hello world, to stdout
|
||||
* console.log('hello %s', 'world');
|
||||
* // Prints: hello world, to stdout
|
||||
* console.error(new Error('Whoops, something bad happened'));
|
||||
* // Prints error message and stack trace to stderr:
|
||||
* // Error: Whoops, something bad happened
|
||||
* // at [eval]:5:15
|
||||
* // at Script.runInThisContext (node:vm:132:18)
|
||||
* // at Object.runInThisContext (node:vm:309:38)
|
||||
* // at node:internal/process/execution:77:19
|
||||
* // at [eval]-wrapper:6:22
|
||||
* // at evalScript (node:internal/process/execution:76:60)
|
||||
* // at node:internal/main/eval_string:23:3
|
||||
*
|
||||
* const name = 'Will Robinson';
|
||||
* console.warn(`Danger ${name}! Danger!`);
|
||||
* // Prints: Danger Will Robinson! Danger!, to stderr
|
||||
* ```
|
||||
*
|
||||
* Example using the `Console` class:
|
||||
*
|
||||
* ```js
|
||||
* const out = getStreamSomehow();
|
||||
* const err = getStreamSomehow();
|
||||
* const myConsole = new console.Console(out, err);
|
||||
*
|
||||
* myConsole.log('hello world');
|
||||
* // Prints: hello world, to out
|
||||
* myConsole.log('hello %s', 'world');
|
||||
* // Prints: hello world, to out
|
||||
* myConsole.error(new Error('Whoops, something bad happened'));
|
||||
* // Prints: [Error: Whoops, something bad happened], to err
|
||||
*
|
||||
* const name = 'Will Robinson';
|
||||
* myConsole.warn(`Danger ${name}! Danger!`);
|
||||
* // Prints: Danger Will Robinson! Danger!, to err
|
||||
* ```
|
||||
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/console.js)
|
||||
*/
|
||||
declare module "console" {
|
||||
import console = require("node:console");
|
||||
export = console;
|
||||
}
|
||||
declare module "node:console" {
|
||||
import { InspectOptions } from "node:util";
|
||||
global {
|
||||
// This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build
|
||||
interface Console {
|
||||
Console: console.ConsoleConstructor;
|
||||
/**
|
||||
* `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. It only
|
||||
* writes a message and does not otherwise affect execution. The output always
|
||||
* starts with `"Assertion failed"`. If provided, `message` is formatted using
|
||||
* [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args).
|
||||
*
|
||||
* If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens.
|
||||
*
|
||||
* ```js
|
||||
* console.assert(true, 'does nothing');
|
||||
*
|
||||
* console.assert(false, 'Whoops %s work', 'didn\'t');
|
||||
* // Assertion failed: Whoops didn't work
|
||||
*
|
||||
* console.assert();
|
||||
* // Assertion failed
|
||||
* ```
|
||||
* @since v0.1.101
|
||||
* @param value The value tested for being truthy.
|
||||
* @param message All arguments besides `value` are used as error message.
|
||||
*/
|
||||
assert(value: any, message?: string, ...optionalParams: any[]): void;
|
||||
/**
|
||||
* When `stdout` is a TTY, calling `console.clear()` will attempt to clear the
|
||||
* TTY. When `stdout` is not a TTY, this method does nothing.
|
||||
*
|
||||
* The specific operation of `console.clear()` can vary across operating systems
|
||||
* and terminal types. For most Linux operating systems, `console.clear()` operates similarly to the `clear` shell command. On Windows, `console.clear()` will clear only the output in the
|
||||
* current terminal viewport for the Node.js
|
||||
* binary.
|
||||
* @since v8.3.0
|
||||
*/
|
||||
clear(): void;
|
||||
/**
|
||||
* Maintains an internal counter specific to `label` and outputs to `stdout` the
|
||||
* number of times `console.count()` has been called with the given `label`.
|
||||
*
|
||||
* ```js
|
||||
* > console.count()
|
||||
* default: 1
|
||||
* undefined
|
||||
* > console.count('default')
|
||||
* default: 2
|
||||
* undefined
|
||||
* > console.count('abc')
|
||||
* abc: 1
|
||||
* undefined
|
||||
* > console.count('xyz')
|
||||
* xyz: 1
|
||||
* undefined
|
||||
* > console.count('abc')
|
||||
* abc: 2
|
||||
* undefined
|
||||
* > console.count()
|
||||
* default: 3
|
||||
* undefined
|
||||
* >
|
||||
* ```
|
||||
* @since v8.3.0
|
||||
* @param [label='default'] The display label for the counter.
|
||||
*/
|
||||
count(label?: string): void;
|
||||
/**
|
||||
* Resets the internal counter specific to `label`.
|
||||
*
|
||||
* ```js
|
||||
* > console.count('abc');
|
||||
* abc: 1
|
||||
* undefined
|
||||
* > console.countReset('abc');
|
||||
* undefined
|
||||
* > console.count('abc');
|
||||
* abc: 1
|
||||
* undefined
|
||||
* >
|
||||
* ```
|
||||
* @since v8.3.0
|
||||
* @param [label='default'] The display label for the counter.
|
||||
*/
|
||||
countReset(label?: string): void;
|
||||
/**
|
||||
* The `console.debug()` function is an alias for {@link log}.
|
||||
* @since v8.0.0
|
||||
*/
|
||||
debug(message?: any, ...optionalParams: any[]): void;
|
||||
/**
|
||||
* Uses [`util.inspect()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilinspectobject-options) on `obj` and prints the resulting string to `stdout`.
|
||||
* This function bypasses any custom `inspect()` function defined on `obj`.
|
||||
* @since v0.1.101
|
||||
*/
|
||||
dir(obj: any, options?: InspectOptions): void;
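A small usage sketch; `depth: null` prints the full object tree instead of collapsing nested levels:

```js
const nested = { a: { b: { c: { d: 1 } } } };
console.dir(nested, { depth: null, colors: false });
// Prints the whole structure; with the default depth of 2 the inner object would show as [Object].
```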
|
||||
/**
|
||||
* This method calls `console.log()` passing it the arguments received.
|
||||
* This method does not produce any XML formatting.
|
||||
* @since v8.0.0
|
||||
*/
|
||||
dirxml(...data: any[]): void;
|
||||
/**
|
||||
* Prints to `stderr` with newline. Multiple arguments can be passed, with the
|
||||
* first used as the primary message and all additional used as substitution
|
||||
* values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html)
|
||||
* (the arguments are all passed to [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args)).
|
||||
*
|
||||
* ```js
|
||||
* const code = 5;
|
||||
* console.error('error #%d', code);
|
||||
* // Prints: error #5, to stderr
|
||||
* console.error('error', code);
|
||||
* // Prints: error 5, to stderr
|
||||
* ```
|
||||
*
|
||||
* If formatting elements (e.g. `%d`) are not found in the first string then
|
||||
* [`util.inspect()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilinspectobject-options) is called on each argument and the
|
||||
* resulting string values are concatenated. See [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args)
|
||||
* for more information.
|
||||
* @since v0.1.100
|
||||
*/
|
||||
error(message?: any, ...optionalParams: any[]): void;
|
||||
/**
|
||||
* Increases indentation of subsequent lines by spaces for `groupIndentation` length.
|
||||
*
|
||||
* If one or more `label`s are provided, those are printed first without the
|
||||
* additional indentation.
|
||||
* @since v8.5.0
|
||||
*/
|
||||
group(...label: any[]): void;
|
||||
/**
|
||||
* An alias for {@link group}.
|
||||
* @since v8.5.0
|
||||
*/
|
||||
groupCollapsed(...label: any[]): void;
|
||||
/**
|
||||
* Decreases indentation of subsequent lines by spaces for `groupIndentation` length.
|
||||
* @since v8.5.0
|
||||
*/
|
||||
groupEnd(): void;
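A short sketch of how `group()`, `groupCollapsed()`, and `groupEnd()` change indentation:

```js
console.log('outer');
console.group('details');  // following lines are indented by groupIndentation spaces
console.log('nested message');
console.group();           // groups can be nested; a label is optional
console.log('deeper');
console.groupEnd();
console.groupEnd();        // indentation returns to the outer level
console.log('outer again');
```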
|
||||
/**
|
||||
* The `console.info()` function is an alias for {@link log}.
|
||||
* @since v0.1.100
|
||||
*/
|
||||
info(message?: any, ...optionalParams: any[]): void;
|
||||
/**
|
||||
* Prints to `stdout` with newline. Multiple arguments can be passed, with the
|
||||
* first used as the primary message and all additional used as substitution
|
||||
* values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html)
|
||||
* (the arguments are all passed to [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args)).
|
||||
*
|
||||
* ```js
|
||||
* const count = 5;
|
||||
* console.log('count: %d', count);
|
||||
* // Prints: count: 5, to stdout
|
||||
* console.log('count:', count);
|
||||
* // Prints: count: 5, to stdout
|
||||
* ```
|
||||
*
|
||||
* See [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args) for more information.
|
||||
* @since v0.1.100
|
||||
*/
|
||||
log(message?: any, ...optionalParams: any[]): void;
|
||||
/**
|
||||
* Try to construct a table with the columns of the properties of `tabularData` (or use `properties`) and rows of `tabularData` and log it. Falls back to just
|
||||
* logging the argument if it can't be parsed as tabular.
|
||||
*
|
||||
* ```js
|
||||
* // These can't be parsed as tabular data
|
||||
* console.table(Symbol());
|
||||
* // Symbol()
|
||||
*
|
||||
* console.table(undefined);
|
||||
* // undefined
|
||||
*
|
||||
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]);
|
||||
* // ┌─────────┬─────┬─────┐
|
||||
* // │ (index) │ a │ b │
|
||||
* // ├─────────┼─────┼─────┤
|
||||
* // │ 0 │ 1 │ 'Y' │
|
||||
* // │ 1 │ 'Z' │ 2 │
|
||||
* // └─────────┴─────┴─────┘
|
||||
*
|
||||
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']);
|
||||
* // ┌─────────┬─────┐
|
||||
* // │ (index) │ a │
|
||||
* // ├─────────┼─────┤
|
||||
* // │ 0 │ 1 │
|
||||
* // │ 1 │ 'Z' │
|
||||
* // └─────────┴─────┘
|
||||
* ```
|
||||
* @since v10.0.0
|
||||
* @param properties Alternate properties for constructing the table.
|
||||
*/
|
||||
table(tabularData: any, properties?: readonly string[]): void;
|
||||
/**
|
||||
* Starts a timer that can be used to compute the duration of an operation. Timers
|
||||
* are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in
|
||||
* suitable time units to `stdout`. For example, if the elapsed
|
||||
* time is 3869ms, `console.timeEnd()` displays "3.869s".
|
||||
* @since v0.1.104
|
||||
* @param [label='default']
|
||||
*/
|
||||
time(label?: string): void;
|
||||
/**
|
||||
* Stops a timer that was previously started by calling {@link time} and
|
||||
* prints the result to `stdout`:
|
||||
*
|
||||
* ```js
|
||||
* console.time('bunch-of-stuff');
|
||||
* // Do a bunch of stuff.
|
||||
* console.timeEnd('bunch-of-stuff');
|
||||
* // Prints: bunch-of-stuff: 225.438ms
|
||||
* ```
|
||||
* @since v0.1.104
|
||||
* @param [label='default']
|
||||
*/
|
||||
timeEnd(label?: string): void;
|
||||
/**
|
||||
* For a timer that was previously started by calling {@link time}, prints
|
||||
* the elapsed time and other `data` arguments to `stdout`:
|
||||
*
|
||||
* ```js
|
||||
* console.time('process');
|
||||
* const value = expensiveProcess1(); // Returns 42
|
||||
* console.timeLog('process', value);
|
||||
* // Prints "process: 365.227ms 42".
|
||||
* doExpensiveProcess2(value);
|
||||
* console.timeEnd('process');
|
||||
* ```
|
||||
* @since v10.7.0
|
||||
* @param [label='default']
|
||||
*/
|
||||
timeLog(label?: string, ...data: any[]): void;
|
||||
/**
|
||||
* Prints to `stderr` the string `'Trace: '`, followed by the [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args)
|
||||
* formatted message and stack trace to the current position in the code.
|
||||
*
|
||||
* ```js
|
||||
* console.trace('Show me');
|
||||
* // Prints: (stack trace will vary based on where trace is called)
|
||||
* // Trace: Show me
|
||||
* // at repl:2:9
|
||||
* // at REPLServer.defaultEval (repl.js:248:27)
|
||||
* // at bound (domain.js:287:14)
|
||||
* // at REPLServer.runBound [as eval] (domain.js:300:12)
|
||||
* // at REPLServer.<anonymous> (repl.js:412:12)
|
||||
* // at emitOne (events.js:82:20)
|
||||
* // at REPLServer.emit (events.js:169:7)
|
||||
* // at REPLServer.Interface._onLine (readline.js:210:10)
|
||||
* // at REPLServer.Interface._line (readline.js:549:8)
|
||||
* // at REPLServer.Interface._ttyWrite (readline.js:826:14)
|
||||
* ```
|
||||
* @since v0.1.104
|
||||
*/
|
||||
trace(message?: any, ...optionalParams: any[]): void;
|
||||
/**
|
||||
* The `console.warn()` function is an alias for {@link error}.
|
||||
* @since v0.1.100
|
||||
*/
|
||||
warn(message?: any, ...optionalParams: any[]): void;
|
||||
// --- Inspector mode only ---
|
||||
/**
|
||||
* This method does not display anything unless used in the inspector. The `console.profile()`
|
||||
* method starts a JavaScript CPU profile with an optional label until {@link profileEnd}
|
||||
* is called. The profile is then added to the Profile panel of the inspector.
|
||||
*
|
||||
* ```js
|
||||
* console.profile('MyLabel');
|
||||
* // Some code
|
||||
* console.profileEnd('MyLabel');
|
||||
* // Adds the profile 'MyLabel' to the Profiles panel of the inspector.
|
||||
* ```
|
||||
* @since v8.0.0
|
||||
*/
|
||||
profile(label?: string): void;
|
||||
/**
|
||||
* This method does not display anything unless used in the inspector. Stops the current
|
||||
* JavaScript CPU profiling session if one has been started and prints the report to the
|
||||
* Profiles panel of the inspector. See {@link profile} for an example.
|
||||
*
|
||||
* If this method is called without a label, the most recently started profile is stopped.
|
||||
* @since v8.0.0
|
||||
*/
|
||||
profileEnd(label?: string): void;
|
||||
/**
|
||||
* This method does not display anything unless used in the inspector. The `console.timeStamp()`
|
||||
* method adds an event with the label `'label'` to the Timeline panel of the inspector.
|
||||
* @since v8.0.0
|
||||
*/
|
||||
timeStamp(label?: string): void;
|
||||
}
|
||||
/**
|
||||
* The `console` module provides a simple debugging console that is similar to the
|
||||
* JavaScript console mechanism provided by web browsers.
|
||||
*
|
||||
* The module exports two specific components:
|
||||
*
|
||||
* * A `Console` class with methods such as `console.log()`, `console.error()` and `console.warn()` that can be used to write to any Node.js stream.
|
||||
* * A global `console` instance configured to write to [`process.stdout`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstdout) and
|
||||
* [`process.stderr`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstderr). The global `console` can be used without importing the `node:console` module.
|
||||
*
|
||||
* _**Warning**_: The global console object's methods are neither consistently
|
||||
* synchronous like the browser APIs they resemble, nor are they consistently
|
||||
* asynchronous like all other Node.js streams. See the [`note on process I/O`](https://nodejs.org/docs/latest-v22.x/api/process.html#a-note-on-process-io) for
|
||||
* more information.
|
||||
*
|
||||
* Example using the global `console`:
|
||||
*
|
||||
* ```js
|
||||
* console.log('hello world');
|
||||
* // Prints: hello world, to stdout
|
||||
* console.log('hello %s', 'world');
|
||||
* // Prints: hello world, to stdout
|
||||
* console.error(new Error('Whoops, something bad happened'));
|
||||
* // Prints error message and stack trace to stderr:
|
||||
* // Error: Whoops, something bad happened
|
||||
* // at [eval]:5:15
|
||||
* // at Script.runInThisContext (node:vm:132:18)
|
||||
* // at Object.runInThisContext (node:vm:309:38)
|
||||
* // at node:internal/process/execution:77:19
|
||||
* // at [eval]-wrapper:6:22
|
||||
* // at evalScript (node:internal/process/execution:76:60)
|
||||
* // at node:internal/main/eval_string:23:3
|
||||
*
|
||||
* const name = 'Will Robinson';
|
||||
* console.warn(`Danger ${name}! Danger!`);
|
||||
* // Prints: Danger Will Robinson! Danger!, to stderr
|
||||
* ```
|
||||
*
|
||||
* Example using the `Console` class:
|
||||
*
|
||||
* ```js
|
||||
* const out = getStreamSomehow();
|
||||
* const err = getStreamSomehow();
|
||||
* const myConsole = new console.Console(out, err);
|
||||
*
|
||||
* myConsole.log('hello world');
|
||||
* // Prints: hello world, to out
|
||||
* myConsole.log('hello %s', 'world');
|
||||
* // Prints: hello world, to out
|
||||
* myConsole.error(new Error('Whoops, something bad happened'));
|
||||
* // Prints: [Error: Whoops, something bad happened], to err
|
||||
*
|
||||
* const name = 'Will Robinson';
|
||||
* myConsole.warn(`Danger ${name}! Danger!`);
|
||||
* // Prints: Danger Will Robinson! Danger!, to err
|
||||
* ```
|
||||
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/console.js)
|
||||
*/
|
||||
namespace console {
|
||||
interface ConsoleConstructorOptions {
|
||||
stdout: NodeJS.WritableStream;
|
||||
stderr?: NodeJS.WritableStream | undefined;
|
||||
/**
|
||||
* Ignore errors when writing to the underlying streams.
|
||||
* @default true
|
||||
*/
|
||||
ignoreErrors?: boolean | undefined;
|
||||
/**
|
||||
* Set color support for this `Console` instance. Setting to true enables coloring while inspecting
|
||||
* values. Setting to `false` disables coloring while inspecting values. Setting to `'auto'` makes color
|
||||
* support depend on the value of the `isTTY` property and the value returned by `getColorDepth()` on the
|
||||
* respective stream. This option cannot be used if `inspectOptions.colors` is set as well.
|
||||
* @default auto
|
||||
*/
|
||||
colorMode?: boolean | "auto" | undefined;
|
||||
/**
|
||||
* Specifies options that are passed along to
|
||||
* [`util.inspect()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilinspectobject-options).
|
||||
*/
|
||||
inspectOptions?: InspectOptions | undefined;
|
||||
/**
|
||||
* Set group indentation.
|
||||
* @default 2
|
||||
*/
|
||||
groupIndentation?: number | undefined;
|
||||
}
|
||||
interface ConsoleConstructor {
|
||||
prototype: Console;
|
||||
new(stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console;
|
||||
new(options: ConsoleConstructorOptions): Console;
|
||||
}
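A hedged sketch of the options form of the constructor, writing to log files rather than the terminal; the file names are assumptions:

```js
import { Console } from 'node:console';
import fs from 'node:fs';

const output = fs.createWriteStream('./stdout.log');      // assumed output file
const errorOutput = fs.createWriteStream('./stderr.log');  // assumed error file

const logger = new Console({
  stdout: output,
  stderr: errorOutput,
  colorMode: false,              // never emit ANSI color codes into the files
  groupIndentation: 4,
  inspectOptions: { depth: 2 },
});

logger.log('written to stdout.log');
logger.error('written to stderr.log');
```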
|
||||
}
|
||||
var console: Console;
|
||||
}
|
||||
export = globalThis.console;
|
||||
}
|
19
node_modules/@types/node/constants.d.ts
generated
vendored
Normal file
@ -0,0 +1,19 @@
/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. */
declare module "constants" {
    import { constants as osConstants, SignalConstants } from "node:os";
    import { constants as cryptoConstants } from "node:crypto";
    import { constants as fsConstants } from "node:fs";

    const exp:
        & typeof osConstants.errno
        & typeof osConstants.priority
        & SignalConstants
        & typeof cryptoConstants
        & typeof fsConstants;
    export = exp;
}

declare module "node:constants" {
    import constants = require("constants");
    export = constants;
}
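Since the module is deprecated, the same values are read from the owning modules; a brief sketch:

```js
import { constants as fsConstants } from 'node:fs';
import { constants as osConstants } from 'node:os';

console.log(fsConstants.O_RDONLY);        // file-open flag
console.log(osConstants.signals.SIGTERM); // signal number
```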
4475
node_modules/@types/node/crypto.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
597
node_modules/@types/node/dgram.d.ts
generated
vendored
Normal file
@ -0,0 +1,597 @@
|
||||
/**
|
||||
* The `node:dgram` module provides an implementation of UDP datagram sockets.
|
||||
*
|
||||
* ```js
|
||||
* import dgram from 'node:dgram';
|
||||
*
|
||||
* const server = dgram.createSocket('udp4');
|
||||
*
|
||||
* server.on('error', (err) => {
|
||||
* console.error(`server error:\n${err.stack}`);
|
||||
* server.close();
|
||||
* });
|
||||
*
|
||||
* server.on('message', (msg, rinfo) => {
|
||||
* console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`);
|
||||
* });
|
||||
*
|
||||
* server.on('listening', () => {
|
||||
* const address = server.address();
|
||||
* console.log(`server listening ${address.address}:${address.port}`);
|
||||
* });
|
||||
*
|
||||
* server.bind(41234);
|
||||
* // Prints: server listening 0.0.0.0:41234
|
||||
* ```
|
||||
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/dgram.js)
|
||||
*/
|
||||
declare module "dgram" {
|
||||
import { AddressInfo } from "node:net";
|
||||
import * as dns from "node:dns";
|
||||
import { Abortable, EventEmitter } from "node:events";
|
||||
interface RemoteInfo {
|
||||
address: string;
|
||||
family: "IPv4" | "IPv6";
|
||||
port: number;
|
||||
size: number;
|
||||
}
|
||||
interface BindOptions {
|
||||
port?: number | undefined;
|
||||
address?: string | undefined;
|
||||
exclusive?: boolean | undefined;
|
||||
fd?: number | undefined;
|
||||
}
|
||||
type SocketType = "udp4" | "udp6";
|
||||
interface SocketOptions extends Abortable {
|
||||
type: SocketType;
|
||||
reuseAddr?: boolean | undefined;
|
||||
reusePort?: boolean | undefined;
|
||||
/**
|
||||
* @default false
|
||||
*/
|
||||
ipv6Only?: boolean | undefined;
|
||||
recvBufferSize?: number | undefined;
|
||||
sendBufferSize?: number | undefined;
|
||||
lookup?:
|
||||
| ((
|
||||
hostname: string,
|
||||
options: dns.LookupOneOptions,
|
||||
callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void,
|
||||
) => void)
|
||||
| undefined;
|
||||
}
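A minimal sketch of the options form of `createSocket()` using the fields above; the buffer sizes are arbitrary illustrative values:

```js
import dgram from 'node:dgram';

const socket = dgram.createSocket({
  type: 'udp4',
  reuseAddr: true,              // allow rebinding while another socket holds the address
  recvBufferSize: 64 * 1024,
  sendBufferSize: 64 * 1024,
});

socket.bind(0, () => {          // port 0: let the OS pick a free port
  console.log('bound to', socket.address());
  socket.close();
});
```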
|
||||
/**
|
||||
* Creates a `dgram.Socket` object. Once the socket is created, calling `socket.bind()` will instruct the socket to begin listening for datagram
|
||||
* messages. When `address` and `port` are not passed to `socket.bind()` the
|
||||
* method will bind the socket to the "all interfaces" address on a random port
|
||||
* (it does the right thing for both `udp4` and `udp6` sockets). The bound address
|
||||
* and port can be retrieved using `socket.address().address` and `socket.address().port`.
|
||||
*
|
||||
* If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.close()` on the socket:
|
||||
*
|
||||
* ```js
|
||||
* const controller = new AbortController();
|
||||
* const { signal } = controller;
|
||||
* const server = dgram.createSocket({ type: 'udp4', signal });
|
||||
* server.on('message', (msg, rinfo) => {
|
||||
* console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`);
|
||||
* });
|
||||
* // Later, when you want to close the server.
|
||||
* controller.abort();
|
||||
* ```
|
||||
* @since v0.11.13
|
||||
* @param options Available options are:
|
||||
* @param callback Attached as a listener for `'message'` events. Optional.
|
||||
*/
|
||||
function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket;
|
||||
function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket;
|
||||
/**
|
||||
* Encapsulates the datagram functionality.
|
||||
*
|
||||
* New instances of `dgram.Socket` are created using {@link createSocket}.
|
||||
* The `new` keyword is not to be used to create `dgram.Socket` instances.
|
||||
* @since v0.1.99
|
||||
*/
|
||||
class Socket extends EventEmitter {
|
||||
/**
|
||||
* Tells the kernel to join a multicast group at the given `multicastAddress` and `multicastInterface` using the `IP_ADD_MEMBERSHIP` socket option. If the `multicastInterface` argument is not
|
||||
* specified, the operating system will choose
|
||||
* one interface and will add membership to it. To add membership to every
|
||||
* available interface, call `addMembership` multiple times, once per interface.
|
||||
*
|
||||
* When called on an unbound socket, this method will implicitly bind to a random
|
||||
* port, listening on all interfaces.
|
||||
*
|
||||
* When sharing a UDP socket across multiple `cluster` workers, the `socket.addMembership()` function must be called only once or an `EADDRINUSE` error will occur:
|
||||
*
|
||||
* ```js
|
||||
* import cluster from 'node:cluster';
|
||||
* import dgram from 'node:dgram';
|
||||
*
|
||||
* if (cluster.isPrimary) {
|
||||
* cluster.fork(); // Works ok.
|
||||
* cluster.fork(); // Fails with EADDRINUSE.
|
||||
* } else {
|
||||
* const s = dgram.createSocket('udp4');
|
||||
* s.bind(1234, () => {
|
||||
* s.addMembership('224.0.0.114');
|
||||
* });
|
||||
* }
|
||||
* ```
|
||||
* @since v0.6.9
|
||||
*/
|
||||
addMembership(multicastAddress: string, multicastInterface?: string): void;
|
||||
/**
|
||||
* Returns an object containing the address information for a socket.
|
||||
* For UDP sockets, this object will contain `address`, `family`, and `port` properties.
|
||||
*
|
||||
* This method throws `EBADF` if called on an unbound socket.
|
||||
* @since v0.1.99
|
||||
*/
|
||||
address(): AddressInfo;
|
||||
/**
|
||||
* For UDP sockets, causes the `dgram.Socket` to listen for datagram
|
||||
* messages on a named `port` and optional `address`. If `port` is not
|
||||
* specified or is `0`, the operating system will attempt to bind to a
|
||||
* random port. If `address` is not specified, the operating system will
|
||||
* attempt to listen on all addresses. Once binding is complete, a `'listening'` event is emitted and the optional `callback` function is
|
||||
* called.
|
||||
*
|
||||
* Specifying both a `'listening'` event listener and passing a `callback` to the `socket.bind()` method is not harmful but not very
|
||||
* useful.
|
||||
*
|
||||
* A bound datagram socket keeps the Node.js process running to receive
|
||||
* datagram messages.
|
||||
*
|
||||
* If binding fails, an `'error'` event is generated. In rare cases (e.g.
|
||||
* attempting to bind with a closed socket), an `Error` may be thrown.
|
||||
*
|
||||
* Example of a UDP server listening on port 41234:
|
||||
*
|
||||
* ```js
|
||||
* import dgram from 'node:dgram';
|
||||
*
|
||||
* const server = dgram.createSocket('udp4');
|
||||
*
|
||||
* server.on('error', (err) => {
|
||||
* console.error(`server error:\n${err.stack}`);
|
||||
* server.close();
|
||||
* });
|
||||
*
|
||||
* server.on('message', (msg, rinfo) => {
|
||||
* console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`);
|
||||
* });
|
||||
*
|
||||
* server.on('listening', () => {
|
||||
* const address = server.address();
|
||||
* console.log(`server listening ${address.address}:${address.port}`);
|
||||
* });
|
||||
*
|
||||
* server.bind(41234);
|
||||
* // Prints: server listening 0.0.0.0:41234
|
||||
* ```
|
||||
* @since v0.1.99
|
||||
* @param callback with no parameters. Called when binding is complete.
|
||||
*/
|
||||
bind(port?: number, address?: string, callback?: () => void): this;
|
||||
bind(port?: number, callback?: () => void): this;
|
||||
bind(callback?: () => void): this;
|
||||
bind(options: BindOptions, callback?: () => void): this;
|
||||
/**
|
||||
* Close the underlying socket and stop listening for data on it. If a callback is
|
||||
* provided, it is added as a listener for the `'close'` event.
|
||||
* @since v0.1.99
|
||||
* @param callback Called when the socket has been closed.
|
||||
*/
|
||||
close(callback?: () => void): this;
|
||||
/**
|
||||
* Associates the `dgram.Socket` to a remote address and port. Every
|
||||
* message sent by this handle is automatically sent to that destination. Also,
|
||||
* the socket will only receive messages from that remote peer.
|
||||
* Trying to call `connect()` on an already connected socket will result
|
||||
* in an `ERR_SOCKET_DGRAM_IS_CONNECTED` exception. If `address` is not
|
||||
* provided, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets)
|
||||
* will be used by default. Once the connection is complete, a `'connect'` event
|
||||
* is emitted and the optional `callback` function is called. In case of failure,
|
||||
* the `callback` is called or, failing this, an `'error'` event is emitted.
|
||||
* @since v12.0.0
|
||||
* @param callback Called when the connection is completed or on error.
|
||||
*/
|
||||
connect(port: number, address?: string, callback?: () => void): void;
|
||||
connect(port: number, callback: () => void): void;
|
||||
/**
|
||||
* A synchronous function that disassociates a connected `dgram.Socket` from
|
||||
* its remote address. Trying to call `disconnect()` on an unbound or already
|
||||
* disconnected socket will result in an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception.
|
||||
* @since v12.0.0
|
||||
*/
|
||||
disconnect(): void;
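A short sketch of the connect/disconnect lifecycle described above (the port and payload are placeholders):

```js
import dgram from 'node:dgram';

const client = dgram.createSocket('udp4');
client.connect(41234, 'localhost', () => {
  console.log(client.remoteAddress()); // { address, family, port }
  // On a connected socket, send() takes no destination arguments.
  client.send('hello', (err) => {
    client.disconnect(); // synchronous; the socket is no longer associated
    client.close();
  });
});
```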
|
||||
/**
|
||||
* Instructs the kernel to leave a multicast group at `multicastAddress` using the `IP_DROP_MEMBERSHIP` socket option. This method is automatically called by the
|
||||
* kernel when the socket is closed or the process terminates, so most apps will
|
||||
* never have reason to call this.
|
||||
*
|
||||
* If `multicastInterface` is not specified, the operating system will attempt to
|
||||
* drop membership on all valid interfaces.
|
||||
* @since v0.6.9
|
||||
*/
|
||||
dropMembership(multicastAddress: string, multicastInterface?: string): void;
|
||||
/**
|
||||
* This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
|
||||
* @since v8.7.0
|
||||
* @return the `SO_RCVBUF` socket receive buffer size in bytes.
|
||||
*/
|
||||
getRecvBufferSize(): number;
|
||||
/**
|
||||
* This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
|
||||
* @since v8.7.0
|
||||
* @return the `SO_SNDBUF` socket send buffer size in bytes.
|
||||
*/
|
||||
getSendBufferSize(): number;
|
||||
/**
|
||||
* @since v18.8.0, v16.19.0
|
||||
* @return Number of bytes queued for sending.
|
||||
*/
|
||||
getSendQueueSize(): number;
|
||||
/**
|
||||
* @since v18.8.0, v16.19.0
|
||||
* @return Number of send requests currently in the queue awaiting to be processed.
|
||||
*/
|
||||
getSendQueueCount(): number;
|
||||
/**
|
||||
* By default, binding a socket will cause it to block the Node.js process from
|
||||
* exiting as long as the socket is open. The `socket.unref()` method can be used
|
||||
* to exclude the socket from the reference counting that keeps the Node.js
|
||||
* process active. The `socket.ref()` method adds the socket back to the reference
|
||||
* counting and restores the default behavior.
|
||||
*
|
||||
* Calling `socket.ref()` multiple times will have no additional effect.
|
||||
*
|
||||
* The `socket.ref()` method returns a reference to the socket so calls can be
|
||||
* chained.
|
||||
* @since v0.9.1
|
||||
*/
|
||||
ref(): this;
|
||||
/**
|
||||
* Returns an object containing the `address`, `family`, and `port` of the remote
|
||||
* endpoint. This method throws an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception
|
||||
* if the socket is not connected.
|
||||
* @since v12.0.0
|
||||
*/
|
||||
remoteAddress(): AddressInfo;
|
||||
/**
|
||||
* Broadcasts a datagram on the socket.
|
||||
* For connectionless sockets, the destination `port` and `address` must be
|
||||
* specified. Connected sockets, on the other hand, will use their associated
|
||||
* remote endpoint, so the `port` and `address` arguments must not be set.
|
||||
*
|
||||
* The `msg` argument contains the message to be sent.
|
||||
* Depending on its type, different behavior can apply. If `msg` is a `Buffer`,
|
||||
* any `TypedArray` or a `DataView`,
|
||||
* the `offset` and `length` specify the offset within the `Buffer` where the
|
||||
* message begins and the number of bytes in the message, respectively.
|
||||
* If `msg` is a `String`, then it is automatically converted to a `Buffer` with `'utf8'` encoding. With messages that
|
||||
* contain multi-byte characters, `offset` and `length` will be calculated with
|
||||
* respect to `byte length` and not the character position.
|
||||
* If `msg` is an array, `offset` and `length` must not be specified.
|
||||
*
|
||||
* The `address` argument is a string. If the value of `address` is a host name,
|
||||
* DNS will be used to resolve the address of the host. If `address` is not
|
||||
* provided or otherwise nullish, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets) will be used by default.
|
||||
*
|
||||
* If the socket has not been previously bound with a call to `bind`, the socket
|
||||
* is assigned a random port number and is bound to the "all interfaces" address
|
||||
* (`'0.0.0.0'` for `udp4` sockets, `'::0'` for `udp6` sockets.)
|
||||
*
|
||||
* An optional `callback` function may be specified as a way of reporting
|
||||
* DNS errors or for determining when it is safe to reuse the `buf` object.
|
||||
* DNS lookups delay the time to send for at least one tick of the
|
||||
* Node.js event loop.
|
||||
*
|
||||
* The only way to know for sure that the datagram has been sent is by using a `callback`. If an error occurs and a `callback` is given, the error will be
|
||||
* passed as the first argument to the `callback`. If a `callback` is not given,
|
||||
* the error is emitted as an `'error'` event on the `socket` object.
|
||||
*
|
||||
* Offset and length are optional but both _must_ be set if either are used.
|
||||
* They are supported only when the first argument is a `Buffer`, a `TypedArray`,
|
||||
* or a `DataView`.
|
||||
*
|
||||
* This method throws `ERR_SOCKET_BAD_PORT` if called on an unbound socket.
|
||||
*
|
||||
* Example of sending a UDP packet to a port on `localhost`;
|
||||
*
|
||||
* ```js
|
||||
* import dgram from 'node:dgram';
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* const message = Buffer.from('Some bytes');
|
||||
* const client = dgram.createSocket('udp4');
|
||||
* client.send(message, 41234, 'localhost', (err) => {
|
||||
* client.close();
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* Example of sending a UDP packet composed of multiple buffers to a port on `127.0.0.1`;
|
||||
*
|
||||
* ```js
|
||||
* import dgram from 'node:dgram';
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* const buf1 = Buffer.from('Some ');
|
||||
* const buf2 = Buffer.from('bytes');
|
||||
* const client = dgram.createSocket('udp4');
|
||||
* client.send([buf1, buf2], 41234, (err) => {
|
||||
* client.close();
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* Sending multiple buffers might be faster or slower depending on the
|
||||
* application and operating system. Run benchmarks to
|
||||
* determine the optimal strategy on a case-by-case basis. Generally speaking,
|
||||
* however, sending multiple buffers is faster.
|
||||
*
|
||||
* Example of sending a UDP packet using a socket connected to a port on `localhost`:
|
||||
*
|
||||
* ```js
|
||||
* import dgram from 'node:dgram';
|
||||
* import { Buffer } from 'node:buffer';
|
||||
*
|
||||
* const message = Buffer.from('Some bytes');
|
||||
* const client = dgram.createSocket('udp4');
|
||||
* client.connect(41234, 'localhost', (err) => {
|
||||
* client.send(message, (err) => {
|
||||
* client.close();
|
||||
* });
|
||||
* });
|
||||
* ```
|
||||
* @since v0.1.99
|
||||
* @param msg Message to be sent.
|
||||
* @param offset Offset in the buffer where the message starts.
|
||||
* @param length Number of bytes in the message.
|
||||
* @param port Destination port.
|
||||
* @param address Destination host name or IP address.
|
||||
* @param callback Called when the message has been sent.
|
||||
*/
|
||||
send(
|
||||
msg: string | NodeJS.ArrayBufferView | readonly any[],
|
||||
port?: number,
|
||||
address?: string,
|
||||
callback?: (error: Error | null, bytes: number) => void,
|
||||
): void;
|
||||
send(
|
||||
msg: string | NodeJS.ArrayBufferView | readonly any[],
|
||||
port?: number,
|
||||
callback?: (error: Error | null, bytes: number) => void,
|
||||
): void;
|
||||
send(
|
||||
msg: string | NodeJS.ArrayBufferView | readonly any[],
|
||||
callback?: (error: Error | null, bytes: number) => void,
|
||||
): void;
|
||||
send(
|
||||
msg: string | NodeJS.ArrayBufferView,
|
||||
offset: number,
|
||||
length: number,
|
||||
port?: number,
|
||||
address?: string,
|
||||
callback?: (error: Error | null, bytes: number) => void,
|
||||
): void;
|
||||
send(
|
||||
msg: string | NodeJS.ArrayBufferView,
|
||||
offset: number,
|
||||
length: number,
|
||||
port?: number,
|
||||
callback?: (error: Error | null, bytes: number) => void,
|
||||
): void;
|
||||
send(
|
||||
msg: string | NodeJS.ArrayBufferView,
|
||||
offset: number,
|
||||
length: number,
|
||||
callback?: (error: Error | null, bytes: number) => void,
|
||||
): void;
|
||||
/**
|
||||
* Sets or clears the `SO_BROADCAST` socket option. When set to `true`, UDP
|
||||
* packets may be sent to a local interface's broadcast address.
|
||||
*
|
||||
* This method throws `EBADF` if called on an unbound socket.
|
||||
* @since v0.6.9
|
||||
*/
|
||||
setBroadcast(flag: boolean): void;
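For example, enabling broadcast before sending to a broadcast address (the address and port below are placeholders, and the socket must be bound first):

```js
import dgram from 'node:dgram';

const socket = dgram.createSocket('udp4');
socket.bind(() => {
  socket.setBroadcast(true); // calling this before binding would throw EBADF
  socket.send('discover', 9999, '255.255.255.255');
});
```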
|
||||
/**
|
||||
* _All references to scope in this section are referring to [IPv6 Zone Indices](https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses), which are defined by [RFC
|
||||
* 4007](https://tools.ietf.org/html/rfc4007). In string form, an IP_
|
||||
* _with a scope index is written as `'IP%scope'` where scope is an interface name_
|
||||
* _or interface number._
|
||||
*
|
||||
* Sets the default outgoing multicast interface of the socket to a chosen
|
||||
* interface or back to system interface selection. The `multicastInterface` must
|
||||
* be a valid string representation of an IP from the socket's family.
|
||||
*
|
||||
* For IPv4 sockets, this should be the IP configured for the desired physical
|
||||
* interface. All packets sent to multicast on the socket will be sent on the
|
||||
* interface determined by the most recent successful use of this call.
|
||||
*
|
||||
* For IPv6 sockets, `multicastInterface` should include a scope to indicate the
|
||||
* interface as in the examples that follow. In IPv6, individual `send` calls can
|
||||
* also use explicit scope in addresses, so only packets sent to a multicast
|
||||
* address without specifying an explicit scope are affected by the most recent
|
||||
* successful use of this call.
|
||||
*
|
||||
* This method throws `EBADF` if called on an unbound socket.
|
||||
*
|
||||
* #### Example: IPv6 outgoing multicast interface
|
||||
*
|
||||
* On most systems, where scope format uses the interface name:
|
||||
*
|
||||
* ```js
|
||||
* const socket = dgram.createSocket('udp6');
|
||||
*
|
||||
* socket.bind(1234, () => {
|
||||
* socket.setMulticastInterface('::%eth1');
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* On Windows, where scope format uses an interface number:
|
||||
*
|
||||
* ```js
|
||||
* const socket = dgram.createSocket('udp6');
|
||||
*
|
||||
* socket.bind(1234, () => {
|
||||
* socket.setMulticastInterface('::%2');
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* #### Example: IPv4 outgoing multicast interface
|
||||
*
|
||||
* All systems use an IP of the host on the desired physical interface:
|
||||
*
|
||||
* ```js
|
||||
* const socket = dgram.createSocket('udp4');
|
||||
*
|
||||
* socket.bind(1234, () => {
|
||||
* socket.setMulticastInterface('10.0.0.2');
|
||||
* });
|
||||
* ```
|
||||
* @since v8.6.0
|
||||
*/
|
||||
setMulticastInterface(multicastInterface: string): void;
|
||||
/**
|
||||
* Sets or clears the `IP_MULTICAST_LOOP` socket option. When set to `true`,
|
||||
* multicast packets will also be received on the local interface.
|
||||
*
|
||||
* This method throws `EBADF` if called on an unbound socket.
|
||||
* @since v0.3.8
|
||||
*/
|
||||
setMulticastLoopback(flag: boolean): boolean;
|
||||
/**
|
||||
* Sets the `IP_MULTICAST_TTL` socket option. While TTL generally stands for
|
||||
* "Time to Live", in this context it specifies the number of IP hops that a
|
||||
* packet is allowed to travel through, specifically for multicast traffic. Each
|
||||
* router or gateway that forwards a packet decrements the TTL. If the TTL is
|
||||
* decremented to 0 by a router, it will not be forwarded.
|
||||
*
|
||||
* The `ttl` argument may be between 0 and 255. The default on most systems is `1`.
|
||||
*
|
||||
* This method throws `EBADF` if called on an unbound socket.
|
||||
* @since v0.3.8
|
||||
*/
|
||||
setMulticastTTL(ttl: number): number;
|
||||
/**
|
||||
* Sets the `SO_RCVBUF` socket option. Sets the maximum socket receive buffer
|
||||
* in bytes.
|
||||
*
|
||||
* This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
|
||||
* @since v8.7.0
|
||||
*/
|
||||
setRecvBufferSize(size: number): void;
|
||||
/**
|
||||
* Sets the `SO_SNDBUF` socket option. Sets the maximum socket send buffer
|
||||
* in bytes.
|
||||
*
|
||||
* This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
|
||||
* @since v8.7.0
|
||||
*/
|
||||
setSendBufferSize(size: number): void;
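Both buffer-size setters (and their getters) require a bound socket; a brief sketch, with an arbitrary 1 MiB request:

```js
import dgram from 'node:dgram';

const socket = dgram.createSocket('udp4');
socket.bind(0, () => {
  socket.setRecvBufferSize(1024 * 1024); // request 1 MiB for SO_RCVBUF
  socket.setSendBufferSize(1024 * 1024); // request 1 MiB for SO_SNDBUF
  // The kernel may clamp these values, so read them back to confirm.
  console.log(socket.getRecvBufferSize(), socket.getSendBufferSize());
  socket.close();
});
```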
|
||||
/**
|
||||
* Sets the `IP_TTL` socket option. While TTL generally stands for "Time to Live",
|
||||
* in this context it specifies the number of IP hops that a packet is allowed to
|
||||
* travel through. Each router or gateway that forwards a packet decrements the
|
||||
* TTL. If the TTL is decremented to 0 by a router, it will not be forwarded.
|
||||
* Changing TTL values is typically done for network probes or when multicasting.
|
||||
*
|
||||
* The `ttl` argument may be between 1 and 255. The default on most systems
|
||||
* is 64.
|
||||
*
|
||||
* This method throws `EBADF` if called on an unbound socket.
|
||||
* @since v0.1.101
|
||||
*/
|
||||
setTTL(ttl: number): number;
|
||||
/**
|
||||
* By default, binding a socket will cause it to block the Node.js process from
|
||||
* exiting as long as the socket is open. The `socket.unref()` method can be used
|
||||
* to exclude the socket from the reference counting that keeps the Node.js
|
||||
* process active, allowing the process to exit even if the socket is still
|
||||
* listening.
|
||||
*
|
||||
* Calling `socket.unref()` multiple times will have no additional effect.
|
||||
*
|
||||
* The `socket.unref()` method returns a reference to the socket so calls can be
|
||||
* chained.
|
||||
* @since v0.9.1
|
||||
*/
|
||||
unref(): this;
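For instance, a metrics side-channel can be unref'd so it never keeps the process alive on its own (the StatsD-style address and payload are placeholders):

```js
import dgram from 'node:dgram';

const stats = dgram.createSocket('udp4');
stats.unref(); // this handle alone will not keep the event loop running

function reportMetric(line) {
  // Fire-and-forget; without a callback, errors surface as 'error' events.
  stats.send(line, 8125, '127.0.0.1');
}

reportMetric('requests:1|c');
```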
|
||||
/**
|
||||
* Tells the kernel to join a source-specific multicast channel at the given `sourceAddress` and `groupAddress`, using the `multicastInterface` with the `IP_ADD_SOURCE_MEMBERSHIP` socket
|
||||
* option. If the `multicastInterface` argument
|
||||
* is not specified, the operating system will choose one interface and will add
|
||||
* membership to it. To add membership to every available interface, call `socket.addSourceSpecificMembership()` multiple times, once per interface.
|
||||
*
|
||||
* When called on an unbound socket, this method will implicitly bind to a random
|
||||
* port, listening on all interfaces.
|
||||
* @since v13.1.0, v12.16.0
|
||||
*/
|
||||
addSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void;
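A sketch of a source-specific multicast join after binding (the source address is from the documentation range and the group from the SSM range; both are placeholders):

```js
import dgram from 'node:dgram';

const socket = dgram.createSocket({ type: 'udp4', reuseAddr: true });
socket.bind(1234, () => {
  // Only accept datagrams for group 232.1.1.1 that originate from 192.0.2.1.
  socket.addSourceSpecificMembership('192.0.2.1', '232.1.1.1');
});
```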
|
||||
/**
|
||||
* Instructs the kernel to leave a source-specific multicast channel at the given `sourceAddress` and `groupAddress` using the `IP_DROP_SOURCE_MEMBERSHIP` socket option. This method is
|
||||
* automatically called by the kernel when the
|
||||
* socket is closed or the process terminates, so most apps will never have
|
||||
* reason to call this.
|
||||
*
|
||||
* If `multicastInterface` is not specified, the operating system will attempt to
|
||||
* drop membership on all valid interfaces.
|
||||
* @since v13.1.0, v12.16.0
|
||||
*/
|
||||
dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void;
|
||||
/**
|
||||
* events.EventEmitter
|
||||
* 1. close
|
||||
* 2. connect
|
||||
* 3. error
|
||||
* 4. listening
|
||||
* 5. message
|
||||
*/
|
||||
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||
addListener(event: "close", listener: () => void): this;
|
||||
addListener(event: "connect", listener: () => void): this;
|
||||
addListener(event: "error", listener: (err: Error) => void): this;
|
||||
addListener(event: "listening", listener: () => void): this;
|
||||
addListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
|
||||
emit(event: string | symbol, ...args: any[]): boolean;
|
||||
emit(event: "close"): boolean;
|
||||
emit(event: "connect"): boolean;
|
||||
emit(event: "error", err: Error): boolean;
|
||||
emit(event: "listening"): boolean;
|
||||
emit(event: "message", msg: Buffer, rinfo: RemoteInfo): boolean;
|
||||
on(event: string, listener: (...args: any[]) => void): this;
|
||||
on(event: "close", listener: () => void): this;
|
||||
on(event: "connect", listener: () => void): this;
|
||||
on(event: "error", listener: (err: Error) => void): this;
|
||||
on(event: "listening", listener: () => void): this;
|
||||
on(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
|
||||
once(event: string, listener: (...args: any[]) => void): this;
|
||||
once(event: "close", listener: () => void): this;
|
||||
once(event: "connect", listener: () => void): this;
|
||||
once(event: "error", listener: (err: Error) => void): this;
|
||||
once(event: "listening", listener: () => void): this;
|
||||
once(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
|
||||
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependListener(event: "close", listener: () => void): this;
|
||||
prependListener(event: "connect", listener: () => void): this;
|
||||
prependListener(event: "error", listener: (err: Error) => void): this;
|
||||
prependListener(event: "listening", listener: () => void): this;
|
||||
prependListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
|
||||
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependOnceListener(event: "close", listener: () => void): this;
|
||||
prependOnceListener(event: "connect", listener: () => void): this;
|
||||
prependOnceListener(event: "error", listener: (err: Error) => void): this;
|
||||
prependOnceListener(event: "listening", listener: () => void): this;
|
||||
prependOnceListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
|
||||
/**
|
||||
* Calls `socket.close()` and returns a promise that fulfills when the socket has closed.
|
||||
* @since v20.5.0
|
||||
*/
|
||||
[Symbol.asyncDispose](): Promise<void>;
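The disposer can also be awaited directly, which behaves like a promise-based `close()` (sketch; port `0` picks a random port):

```js
import dgram from 'node:dgram';

const socket = dgram.createSocket('udp4');
socket.bind(0, async () => {
  await socket[Symbol.asyncDispose](); // resolves once the socket has closed
  console.log('socket closed');
});
```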
|
||||
}
|
||||
}
|
||||
declare module "node:dgram" {
|
||||
export * from "dgram";
|
||||
}
|
554
node_modules/@types/node/diagnostics_channel.d.ts
generated
vendored
Normal file
@ -0,0 +1,554 @@
|
||||
/**
|
||||
* The `node:diagnostics_channel` module provides an API to create named channels
|
||||
* to report arbitrary message data for diagnostics purposes.
|
||||
*
|
||||
* It can be accessed using:
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
* ```
|
||||
*
|
||||
* It is intended that a module writer wanting to report diagnostics messages
|
||||
* will create one or many top-level channels to report messages through.
|
||||
* Channels may also be acquired at runtime but it is not encouraged
|
||||
* due to the additional overhead of doing so. Channels may be exported for
|
||||
* convenience, but as long as the name is known it can be acquired anywhere.
|
||||
*
|
||||
* If you intend for your module to produce diagnostics data for others to
|
||||
* consume it is recommended that you include documentation of what named
|
||||
* channels are used along with the shape of the message data. Channel names
|
||||
* should generally include the module name to avoid collisions with data from
|
||||
* other modules.
|
||||
* @since v15.1.0, v14.17.0
|
||||
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/diagnostics_channel.js)
|
||||
*/
|
||||
declare module "diagnostics_channel" {
|
||||
import { AsyncLocalStorage } from "node:async_hooks";
|
||||
/**
|
||||
* Check if there are active subscribers to the named channel. This is helpful if
|
||||
* the message you want to send might be expensive to prepare.
|
||||
*
|
||||
* This API is optional but helpful when trying to publish messages from very
|
||||
* performance-sensitive code.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* if (diagnostics_channel.hasSubscribers('my-channel')) {
|
||||
* // There are subscribers, prepare and publish message
|
||||
* }
|
||||
* ```
|
||||
* @since v15.1.0, v14.17.0
|
||||
* @param name The channel name
|
||||
* @return If there are active subscribers
|
||||
*/
|
||||
function hasSubscribers(name: string | symbol): boolean;
|
||||
/**
|
||||
* This is the primary entry-point for anyone wanting to publish to a named
|
||||
* channel. It produces a channel object which is optimized to reduce overhead at
|
||||
* publish time as much as possible.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channel = diagnostics_channel.channel('my-channel');
|
||||
* ```
|
||||
* @since v15.1.0, v14.17.0
|
||||
* @param name The channel name
|
||||
* @return The named channel object
|
||||
*/
|
||||
function channel(name: string | symbol): Channel;
|
||||
type ChannelListener = (message: unknown, name: string | symbol) => void;
|
||||
/**
|
||||
* Register a message handler to subscribe to this channel. This message handler
|
||||
* will be run synchronously whenever a message is published to the channel. Any
|
||||
* errors thrown in the message handler will trigger an `'uncaughtException'`.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* diagnostics_channel.subscribe('my-channel', (message, name) => {
|
||||
* // Received data
|
||||
* });
|
||||
* ```
|
||||
* @since v18.7.0, v16.17.0
|
||||
* @param name The channel name
|
||||
* @param onMessage The handler to receive channel messages
|
||||
*/
|
||||
function subscribe(name: string | symbol, onMessage: ChannelListener): void;
|
||||
/**
|
||||
* Remove a message handler previously registered to this channel with {@link subscribe}.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* function onMessage(message, name) {
|
||||
* // Received data
|
||||
* }
|
||||
*
|
||||
* diagnostics_channel.subscribe('my-channel', onMessage);
|
||||
*
|
||||
* diagnostics_channel.unsubscribe('my-channel', onMessage);
|
||||
* ```
|
||||
* @since v18.7.0, v16.17.0
|
||||
* @param name The channel name
|
||||
* @param onMessage The previous subscribed handler to remove
|
||||
* @return `true` if the handler was found, `false` otherwise.
|
||||
*/
|
||||
function unsubscribe(name: string | symbol, onMessage: ChannelListener): boolean;
|
||||
/**
|
||||
* Creates a `TracingChannel` wrapper for the given `TracingChannel Channels`. If a name is given, the corresponding tracing
|
||||
* channels will be created in the form of `tracing:${name}:${eventType}` where `eventType` corresponds to the types of `TracingChannel Channels`.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channelsByName = diagnostics_channel.tracingChannel('my-channel');
|
||||
*
|
||||
* // or...
|
||||
*
|
||||
* const channelsByCollection = diagnostics_channel.tracingChannel({
|
||||
* start: diagnostics_channel.channel('tracing:my-channel:start'),
|
||||
* end: diagnostics_channel.channel('tracing:my-channel:end'),
|
||||
* asyncStart: diagnostics_channel.channel('tracing:my-channel:asyncStart'),
|
||||
* asyncEnd: diagnostics_channel.channel('tracing:my-channel:asyncEnd'),
|
||||
* error: diagnostics_channel.channel('tracing:my-channel:error'),
|
||||
* });
|
||||
* ```
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
* @param nameOrChannels Channel name or object containing all the `TracingChannel Channels`
|
||||
* @return Collection of channels to trace with
|
||||
*/
|
||||
function tracingChannel<
|
||||
StoreType = unknown,
|
||||
ContextType extends object = StoreType extends object ? StoreType : object,
|
||||
>(
|
||||
nameOrChannels: string | TracingChannelCollection<StoreType, ContextType>,
|
||||
): TracingChannel<StoreType, ContextType>;
|
||||
/**
|
||||
* The class `Channel` represents an individual named channel within the data
|
||||
* pipeline. It is used to track subscribers and to publish messages when there
|
||||
* are subscribers present. It exists as a separate object to avoid channel
|
||||
* lookups at publish time, enabling very fast publish speeds and allowing
|
||||
* for heavy use while incurring very minimal cost. Channels are created with {@link channel}, constructing a channel directly
|
||||
* with `new Channel(name)` is not supported.
|
||||
* @since v15.1.0, v14.17.0
|
||||
*/
|
||||
class Channel<StoreType = unknown, ContextType = StoreType> {
|
||||
readonly name: string | symbol;
|
||||
/**
|
||||
* Check if there are active subscribers to this channel. This is helpful if
|
||||
* the message you want to send might be expensive to prepare.
|
||||
*
|
||||
* This API is optional but helpful when trying to publish messages from very
|
||||
* performance-sensitive code.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channel = diagnostics_channel.channel('my-channel');
|
||||
*
|
||||
* if (channel.hasSubscribers) {
|
||||
* // There are subscribers, prepare and publish message
|
||||
* }
|
||||
* ```
|
||||
* @since v15.1.0, v14.17.0
|
||||
*/
|
||||
readonly hasSubscribers: boolean;
|
||||
private constructor(name: string | symbol);
|
||||
/**
|
||||
* Publish a message to any subscribers to the channel. This will trigger
|
||||
* message handlers synchronously so they will execute within the same context.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channel = diagnostics_channel.channel('my-channel');
|
||||
*
|
||||
* channel.publish({
|
||||
* some: 'message',
|
||||
* });
|
||||
* ```
|
||||
* @since v15.1.0, v14.17.0
|
||||
* @param message The message to send to the channel subscribers
|
||||
*/
|
||||
publish(message: unknown): void;
|
||||
/**
|
||||
* Register a message handler to subscribe to this channel. This message handler
|
||||
* will be run synchronously whenever a message is published to the channel. Any
|
||||
* errors thrown in the message handler will trigger an `'uncaughtException'`.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channel = diagnostics_channel.channel('my-channel');
|
||||
*
|
||||
* channel.subscribe((message, name) => {
|
||||
* // Received data
|
||||
* });
|
||||
* ```
|
||||
* @since v15.1.0, v14.17.0
|
||||
* @deprecated Since v18.7.0, v16.17.0 - Use {@link subscribe(name, onMessage)}
|
||||
* @param onMessage The handler to receive channel messages
|
||||
*/
|
||||
subscribe(onMessage: ChannelListener): void;
|
||||
/**
|
||||
* Remove a message handler previously registered to this channel with `channel.subscribe(onMessage)`.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channel = diagnostics_channel.channel('my-channel');
|
||||
*
|
||||
* function onMessage(message, name) {
|
||||
* // Received data
|
||||
* }
|
||||
*
|
||||
* channel.subscribe(onMessage);
|
||||
*
|
||||
* channel.unsubscribe(onMessage);
|
||||
* ```
|
||||
* @since v15.1.0, v14.17.0
|
||||
* @deprecated Since v18.7.0, v16.17.0 - Use {@link unsubscribe(name, onMessage)}
|
||||
* @param onMessage The previous subscribed handler to remove
|
||||
* @return `true` if the handler was found, `false` otherwise.
|
||||
*/
|
||||
unsubscribe(onMessage: ChannelListener): void;
|
||||
/**
|
||||
* When `channel.runStores(context, ...)` is called, the given context data
|
||||
* will be applied to any store bound to the channel. If the store has already been
|
||||
* bound the previous `transform` function will be replaced with the new one.
|
||||
* The `transform` function may be omitted to set the given context data as the
|
||||
* context directly.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
* import { AsyncLocalStorage } from 'node:async_hooks';
|
||||
*
|
||||
* const store = new AsyncLocalStorage();
|
||||
*
|
||||
* const channel = diagnostics_channel.channel('my-channel');
|
||||
*
|
||||
* channel.bindStore(store, (data) => {
|
||||
* return { data };
|
||||
* });
|
||||
* ```
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
* @param store The store to which to bind the context data
|
||||
* @param transform Transform context data before setting the store context
|
||||
*/
|
||||
bindStore(store: AsyncLocalStorage<StoreType>, transform?: (context: ContextType) => StoreType): void;
|
||||
/**
|
||||
* Remove a message handler previously registered to this channel with `channel.bindStore(store)`.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
* import { AsyncLocalStorage } from 'node:async_hooks';
|
||||
*
|
||||
* const store = new AsyncLocalStorage();
|
||||
*
|
||||
* const channel = diagnostics_channel.channel('my-channel');
|
||||
*
|
||||
* channel.bindStore(store);
|
||||
* channel.unbindStore(store);
|
||||
* ```
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
* @param store The store to unbind from the channel.
|
||||
* @return `true` if the store was found, `false` otherwise.
|
||||
*/
|
||||
unbindStore(store: any): void;
|
||||
/**
|
||||
* Applies the given data to any AsyncLocalStorage instances bound to the channel
|
||||
* for the duration of the given function, then publishes to the channel within
|
||||
* the scope of that data being applied to the stores.
|
||||
*
|
||||
* If a transform function was given to `channel.bindStore(store)` it will be
|
||||
* applied to transform the message data before it becomes the context value for
|
||||
* the store. The prior storage context is accessible from within the transform
|
||||
* function in cases where context linking is required.
|
||||
*
|
||||
* The context applied to the store should be accessible in any async code which
|
||||
* continues from execution which began during the given function, however
|
||||
* there are some situations in which `context loss` may occur.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
* import { AsyncLocalStorage } from 'node:async_hooks';
|
||||
*
|
||||
* const store = new AsyncLocalStorage();
|
||||
*
|
||||
* const channel = diagnostics_channel.channel('my-channel');
|
||||
*
|
||||
* channel.bindStore(store, (message) => {
|
||||
* const parent = store.getStore();
|
||||
* return new Span(message, parent);
|
||||
* });
|
||||
* channel.runStores({ some: 'message' }, () => {
|
||||
* store.getStore(); // Span({ some: 'message' })
|
||||
* });
|
||||
* ```
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
* @param context Message to send to subscribers and bind to stores
|
||||
* @param fn Handler to run within the entered storage context
|
||||
* @param thisArg The receiver to be used for the function call.
|
||||
* @param args Optional arguments to pass to the function.
|
||||
*/
|
||||
runStores(): void;
|
||||
}
|
||||
interface TracingChannelSubscribers<ContextType extends object> {
|
||||
start: (message: ContextType) => void;
|
||||
end: (
|
||||
message: ContextType & {
|
||||
error?: unknown;
|
||||
result?: unknown;
|
||||
},
|
||||
) => void;
|
||||
asyncStart: (
|
||||
message: ContextType & {
|
||||
error?: unknown;
|
||||
result?: unknown;
|
||||
},
|
||||
) => void;
|
||||
asyncEnd: (
|
||||
message: ContextType & {
|
||||
error?: unknown;
|
||||
result?: unknown;
|
||||
},
|
||||
) => void;
|
||||
error: (
|
||||
message: ContextType & {
|
||||
error: unknown;
|
||||
},
|
||||
) => void;
|
||||
}
|
||||
interface TracingChannelCollection<StoreType = unknown, ContextType = StoreType> {
|
||||
start: Channel<StoreType, ContextType>;
|
||||
end: Channel<StoreType, ContextType>;
|
||||
asyncStart: Channel<StoreType, ContextType>;
|
||||
asyncEnd: Channel<StoreType, ContextType>;
|
||||
error: Channel<StoreType, ContextType>;
|
||||
}
|
||||
/**
|
||||
* The class `TracingChannel` is a collection of `TracingChannel Channels` which
|
||||
* together express a single traceable action. It is used to formalize and
|
||||
* simplify the process of producing events for tracing application flow. {@link tracingChannel} is used to construct a `TracingChannel`. As with `Channel` it is recommended to create and reuse a
|
||||
* single `TracingChannel` at the top-level of the file rather than creating them
|
||||
* dynamically.
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
*/
|
||||
class TracingChannel<StoreType = unknown, ContextType extends object = {}> implements TracingChannelCollection {
|
||||
start: Channel<StoreType, ContextType>;
|
||||
end: Channel<StoreType, ContextType>;
|
||||
asyncStart: Channel<StoreType, ContextType>;
|
||||
asyncEnd: Channel<StoreType, ContextType>;
|
||||
error: Channel<StoreType, ContextType>;
|
||||
/**
|
||||
* Helper to subscribe a collection of functions to the corresponding channels.
|
||||
* This is the same as calling `channel.subscribe(onMessage)` on each channel
|
||||
* individually.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channels = diagnostics_channel.tracingChannel('my-channel');
|
||||
*
|
||||
* channels.subscribe({
|
||||
* start(message) {
|
||||
* // Handle start message
|
||||
* },
|
||||
* end(message) {
|
||||
* // Handle end message
|
||||
* },
|
||||
* asyncStart(message) {
|
||||
* // Handle asyncStart message
|
||||
* },
|
||||
* asyncEnd(message) {
|
||||
* // Handle asyncEnd message
|
||||
* },
|
||||
* error(message) {
|
||||
* // Handle error message
|
||||
* },
|
||||
* });
|
||||
* ```
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
* @param subscribers Set of `TracingChannel Channels` subscribers
|
||||
*/
|
||||
subscribe(subscribers: TracingChannelSubscribers<ContextType>): void;
|
||||
/**
|
||||
* Helper to unsubscribe a collection of functions from the corresponding channels.
|
||||
* This is the same as calling `channel.unsubscribe(onMessage)` on each channel
|
||||
* individually.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channels = diagnostics_channel.tracingChannel('my-channel');
|
||||
*
|
||||
* channels.unsubscribe({
|
||||
* start(message) {
|
||||
* // Handle start message
|
||||
* },
|
||||
* end(message) {
|
||||
* // Handle end message
|
||||
* },
|
||||
* asyncStart(message) {
|
||||
* // Handle asyncStart message
|
||||
* },
|
||||
* asyncEnd(message) {
|
||||
* // Handle asyncEnd message
|
||||
* },
|
||||
* error(message) {
|
||||
* // Handle error message
|
||||
* },
|
||||
* });
|
||||
* ```
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
* @param subscribers Set of `TracingChannel Channels` subscribers
|
||||
* @return `true` if all handlers were successfully unsubscribed, and `false` otherwise.
|
||||
*/
|
||||
unsubscribe(subscribers: TracingChannelSubscribers<ContextType>): void;
|
||||
/**
|
||||
* Trace a synchronous function call. This will always produce a `start event` and `end event` around the execution and may produce an `error event` if the given function throws an error.
|
||||
* This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all
|
||||
* events should have any bound stores set to match this trace context.
|
||||
*
|
||||
* To ensure only correct trace graphs are formed, events will only be published if subscribers are present prior to starting the trace. Subscriptions
|
||||
* which are added after the trace begins will not receive future events from that trace, only future traces will be seen.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channels = diagnostics_channel.tracingChannel('my-channel');
|
||||
*
|
||||
* channels.traceSync(() => {
|
||||
* // Do something
|
||||
* }, {
|
||||
* some: 'thing',
|
||||
* });
|
||||
* ```
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
* @param fn Function to wrap a trace around
|
||||
* @param context Shared object to correlate events through
|
||||
* @param thisArg The receiver to be used for the function call
|
||||
* @param args Optional arguments to pass to the function
|
||||
* @return The return value of the given function
|
||||
*/
|
||||
traceSync<ThisArg = any, Args extends any[] = any[]>(
|
||||
fn: (this: ThisArg, ...args: Args) => any,
|
||||
context?: ContextType,
|
||||
thisArg?: ThisArg,
|
||||
...args: Args
|
||||
): void;
|
||||
/**
|
||||
* Trace a promise-returning function call. This will always produce a `start event` and `end event` around the synchronous portion of the
|
||||
* function execution, and will produce an `asyncStart event` and `asyncEnd event` when a promise continuation is reached. It may also
|
||||
* produce an `error event` if the given function throws an error or the
|
||||
* returned promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all
|
||||
* events should have any bound stores set to match this trace context.
|
||||
*
|
||||
* To ensure only correct trace graphs are formed, events will only be published if subscribers are present prior to starting the trace. Subscriptions
|
||||
* which are added after the trace begins will not receive future events from that trace, only future traces will be seen.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channels = diagnostics_channel.tracingChannel('my-channel');
|
||||
*
|
||||
* channels.tracePromise(async () => {
|
||||
* // Do something
|
||||
* }, {
|
||||
* some: 'thing',
|
||||
* });
|
||||
* ```
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
* @param fn Promise-returning function to wrap a trace around
|
||||
* @param context Shared object to correlate trace events through
|
||||
* @param thisArg The receiver to be used for the function call
|
||||
* @param args Optional arguments to pass to the function
|
||||
* @return Chained from promise returned by the given function
|
||||
*/
|
||||
tracePromise<ThisArg = any, Args extends any[] = any[]>(
|
||||
fn: (this: ThisArg, ...args: Args) => Promise<any>,
|
||||
context?: ContextType,
|
||||
thisArg?: ThisArg,
|
||||
...args: Args
|
||||
): void;
|
||||
/**
|
||||
* Trace a callback-receiving function call. This will always produce a `start event` and `end event` around the synchronous portion of the
|
||||
* function execution, and will produce a `asyncStart event` and `asyncEnd event` around the callback execution. It may also produce an `error event` if the given function throws an error or
|
||||
* the returned
|
||||
* promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all
|
||||
* events should have any bound stores set to match this trace context.
|
||||
*
|
||||
* The `position` will be -1 by default to indicate the final argument should
|
||||
* be used as the callback.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
*
|
||||
* const channels = diagnostics_channel.tracingChannel('my-channel');
|
||||
*
|
||||
* channels.traceCallback((arg1, callback) => {
|
||||
* // Do something
|
||||
* callback(null, 'result');
|
||||
* }, 1, {
|
||||
* some: 'thing',
|
||||
* }, thisArg, arg1, callback);
|
||||
* ```
|
||||
*
|
||||
* The callback will also be run with `channel.runStores(context, ...)` which
|
||||
* enables context loss recovery in some cases.
|
||||
*
|
||||
* To ensure only correct trace graphs are formed, events will only be published if subscribers are present prior to starting the trace. Subscriptions
|
||||
* which are added after the trace begins will not receive future events from that trace, only future traces will be seen.
|
||||
*
|
||||
* ```js
|
||||
* import diagnostics_channel from 'node:diagnostics_channel';
|
||||
* import { AsyncLocalStorage } from 'node:async_hooks';
|
||||
*
|
||||
* const channels = diagnostics_channel.tracingChannel('my-channel');
|
||||
* const myStore = new AsyncLocalStorage();
|
||||
*
|
||||
* // The start channel sets the initial store data to something
|
||||
* // and stores that store data value on the trace context object
|
||||
* channels.start.bindStore(myStore, (data) => {
|
||||
* const span = new Span(data);
|
||||
* data.span = span;
|
||||
* return span;
|
||||
* });
|
||||
*
|
||||
* // Then asyncStart can restore from that data it stored previously
|
||||
* channels.asyncStart.bindStore(myStore, (data) => {
|
||||
* return data.span;
|
||||
* });
|
||||
* ```
|
||||
* @since v19.9.0
|
||||
* @experimental
|
||||
* @param fn Callback-using function to wrap a trace around
|
||||
* @param position Zero-indexed argument position of expected callback
|
||||
* @param context Shared object to correlate trace events through
|
||||
* @param thisArg The receiver to be used for the function call
|
||||
* @param args Optional arguments to pass to the function
|
||||
* @return The return value of the given function
|
||||
*/
|
||||
traceCallback<Fn extends (this: any, ...args: any[]) => any>(
|
||||
fn: Fn,
|
||||
position?: number,
|
||||
context?: ContextType,
|
||||
thisArg?: any,
|
||||
...args: Parameters<Fn>
|
||||
): void;
|
||||
}
|
||||
}
|
||||
declare module "node:diagnostics_channel" {
|
||||
export * from "diagnostics_channel";
|
||||
}
|
865
node_modules/@types/node/dns.d.ts
generated
vendored
Normal file
@ -0,0 +1,865 @@
|
||||
/**
|
||||
* The `node:dns` module enables name resolution. For example, use it to look up IP
|
||||
* addresses of host names.
|
||||
*
|
||||
* Although named for the [Domain Name System (DNS)](https://en.wikipedia.org/wiki/Domain_Name_System), it does not always use the
|
||||
* DNS protocol for lookups. {@link lookup} uses the operating system
|
||||
* facilities to perform name resolution. It may not need to perform any network
|
||||
* communication. To perform name resolution the way other applications on the same
|
||||
* system do, use {@link lookup}.
|
||||
*
|
||||
* ```js
|
||||
* import dns from 'node:dns';
|
||||
*
|
||||
* dns.lookup('example.org', (err, address, family) => {
|
||||
* console.log('address: %j family: IPv%s', address, family);
|
||||
* });
|
||||
* // address: "93.184.216.34" family: IPv4
|
||||
* ```
|
||||
*
|
||||
* All other functions in the `node:dns` module connect to an actual DNS server to
|
||||
* perform name resolution. They will always use the network to perform DNS
|
||||
* queries. These functions do not use the same set of configuration files used by {@link lookup} (e.g. `/etc/hosts`). Use these functions to always perform
|
||||
* DNS queries, bypassing other name-resolution facilities.
|
||||
*
|
||||
* ```js
|
||||
* import dns from 'node:dns';
|
||||
*
|
||||
* dns.resolve4('archive.org', (err, addresses) => {
|
||||
* if (err) throw err;
|
||||
*
|
||||
* console.log(`addresses: ${JSON.stringify(addresses)}`);
|
||||
*
|
||||
* addresses.forEach((a) => {
|
||||
* dns.reverse(a, (err, hostnames) => {
|
||||
* if (err) {
|
||||
* throw err;
|
||||
* }
|
||||
* console.log(`reverse for ${a}: ${JSON.stringify(hostnames)}`);
|
||||
* });
|
||||
* });
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* See the [Implementation considerations section](https://nodejs.org/docs/latest-v22.x/api/dns.html#implementation-considerations) for more information.
|
||||
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/dns.js)
|
||||
*/
|
||||
declare module "dns" {
|
||||
import * as dnsPromises from "node:dns/promises";
|
||||
// Supported getaddrinfo flags.
|
||||
/**
|
||||
* Limits returned address types to the types of non-loopback addresses configured on the system. For example, IPv4 addresses are
|
||||
* only returned if the current system has at least one IPv4 address configured.
|
||||
*/
|
||||
export const ADDRCONFIG: number;
|
||||
/**
|
||||
* If the IPv6 family was specified, but no IPv6 addresses were found, then return IPv4 mapped IPv6 addresses. It is not supported
|
||||
* on some operating systems (e.g. FreeBSD 10.1).
|
||||
*/
|
||||
export const V4MAPPED: number;
|
||||
/**
|
||||
* If `dns.V4MAPPED` is specified, return resolved IPv6 addresses as
|
||||
* well as IPv4 mapped IPv6 addresses.
|
||||
*/
|
||||
export const ALL: number;
|
||||
export interface LookupOptions {
|
||||
/**
|
||||
* The record family. Must be `4`, `6`, or `0`. For backward compatibility reasons, `'IPv4'` and `'IPv6'` are interpreted
|
||||
* as `4` and `6` respectively. The value 0 indicates that either an IPv4 or IPv6 address is returned. If the value `0` is used
|
||||
* with `{ all: true }` (see below), both IPv4 and IPv6 addresses are returned.
|
||||
* @default 0
|
||||
*/
|
||||
family?: number | "IPv4" | "IPv6" | undefined;
|
||||
/**
|
||||
* One or more [supported `getaddrinfo`](https://nodejs.org/docs/latest-v22.x/api/dns.html#supported-getaddrinfo-flags) flags. Multiple flags may be
|
||||
* passed by bitwise `OR`ing their values.
|
||||
*/
|
||||
hints?: number | undefined;
|
||||
/**
|
||||
* When `true`, the callback returns all resolved addresses in an array. Otherwise, returns a single address.
|
||||
* @default false
|
||||
*/
|
||||
all?: boolean | undefined;
|
||||
/**
|
||||
* When `verbatim`, the resolved addresses are returned unsorted. When `ipv4first`, the resolved addresses are sorted
|
||||
* by placing IPv4 addresses before IPv6 addresses. When `ipv6first`, the resolved addresses are sorted by placing IPv6
|
||||
* addresses before IPv4 addresses. Default value is configurable using
|
||||
* {@link setDefaultResultOrder} or [`--dns-result-order`](https://nodejs.org/docs/latest-v22.x/api/cli.html#--dns-result-orderorder).
|
||||
* @default `verbatim` (addresses are not reordered)
|
||||
* @since v22.1.0
|
||||
*/
|
||||
order?: "ipv4first" | "ipv6first" | "verbatim" | undefined;
|
||||
/**
|
||||
* When `true`, the callback receives IPv4 and IPv6 addresses in the order the DNS resolver returned them. When `false`, IPv4
|
||||
* addresses are placed before IPv6 addresses. This option will be deprecated in favor of `order`. When both are specified,
|
||||
* `order` has higher precedence. New code should only use `order`. Default value is configurable using {@link setDefaultResultOrder}
|
||||
* @default true (addresses are not reordered)
|
||||
* @deprecated Please use `order` option
|
||||
*/
|
||||
verbatim?: boolean | undefined;
|
||||
}
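A small sketch of the `all` and `order` options from this interface used together (the hostname is illustrative):

```js
import dns from 'node:dns';

dns.lookup('example.org', { all: true, order: 'ipv4first' }, (err, addresses) => {
  if (err) throw err;
  // addresses is an array of { address, family }, IPv4 entries first.
  console.log(addresses);
});
```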
|
||||
export interface LookupOneOptions extends LookupOptions {
|
||||
all?: false | undefined;
|
||||
}
|
||||
export interface LookupAllOptions extends LookupOptions {
|
||||
all: true;
|
||||
}
|
||||
export interface LookupAddress {
|
||||
/**
|
||||
* A string representation of an IPv4 or IPv6 address.
|
||||
*/
|
||||
address: string;
|
||||
/**
|
||||
* `4` or `6`, denoting the family of `address`, or `0` if the address is not an IPv4 or IPv6 address. `0` is a likely indicator of a
|
||||
* bug in the name resolution service used by the operating system.
|
||||
*/
|
||||
family: number;
|
||||
}
|
||||
/**
|
||||
* Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or
|
||||
* AAAA (IPv6) record. All `option` properties are optional. If `options` is an
|
||||
* integer, then it must be `4` or `6` – if `options` is `0` or not provided, then
|
||||
* IPv4 and IPv6 addresses are both returned if found.
|
||||
*
|
||||
* With the `all` option set to `true`, the arguments for `callback` change to `(err, addresses)`, with `addresses` being an array of objects with the
|
||||
* properties `address` and `family`.
|
||||
*
|
||||
* On error, `err` is an `Error` object, where `err.code` is the error code.
|
||||
* Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when
|
||||
* the host name does not exist but also when the lookup fails in other ways
|
||||
* such as no available file descriptors.
|
||||
*
|
||||
* `dns.lookup()` does not necessarily have anything to do with the DNS protocol.
|
||||
* The implementation uses an operating system facility that can associate names
|
||||
* with addresses and vice versa. This implementation can have subtle but
|
||||
* important consequences on the behavior of any Node.js program. Please take some
|
||||
* time to consult the [Implementation considerations section](https://nodejs.org/docs/latest-v22.x/api/dns.html#implementation-considerations)
|
||||
* before using `dns.lookup()`.
|
||||
*
|
||||
* Example usage:
|
||||
*
|
||||
* ```js
|
||||
* import dns from 'node:dns';
|
||||
* const options = {
|
||||
* family: 6,
|
||||
* hints: dns.ADDRCONFIG | dns.V4MAPPED,
|
||||
* };
|
||||
* dns.lookup('example.com', options, (err, address, family) =>
|
||||
* console.log('address: %j family: IPv%s', address, family));
|
||||
* // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6
|
||||
*
|
||||
* // When options.all is true, the result will be an Array.
|
||||
* options.all = true;
|
||||
* dns.lookup('example.com', options, (err, addresses) =>
|
||||
* console.log('addresses: %j', addresses));
|
||||
* // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}]
|
||||
* ```
|
||||
*
|
||||
* If this method is invoked as its [util.promisify()](https://nodejs.org/docs/latest-v22.x/api/util.html#utilpromisifyoriginal) ed
|
||||
* version, and `all` is not set to `true`, it returns a `Promise` for an `Object` with `address` and `family` properties.
|
||||
* @since v0.1.90
|
||||
*/
|
||||
export function lookup(
|
||||
hostname: string,
|
||||
family: number,
|
||||
callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void,
|
||||
): void;
|
||||
export function lookup(
|
||||
hostname: string,
|
||||
options: LookupOneOptions,
|
||||
callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void,
|
||||
): void;
|
||||
export function lookup(
|
||||
hostname: string,
|
||||
options: LookupAllOptions,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: LookupAddress[]) => void,
|
||||
): void;
|
||||
export function lookup(
|
||||
hostname: string,
|
||||
options: LookupOptions,
|
||||
callback: (err: NodeJS.ErrnoException | null, address: string | LookupAddress[], family: number) => void,
|
||||
): void;
|
||||
export function lookup(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void,
|
||||
): void;
|
||||
export namespace lookup {
|
||||
function __promisify__(hostname: string, options: LookupAllOptions): Promise<LookupAddress[]>;
|
||||
function __promisify__(hostname: string, options?: LookupOneOptions | number): Promise<LookupAddress>;
|
||||
function __promisify__(hostname: string, options: LookupOptions): Promise<LookupAddress | LookupAddress[]>;
|
||||
}
|
||||
/**
|
||||
* Resolves the given `address` and `port` into a host name and service using
|
||||
* the operating system's underlying `getnameinfo` implementation.
|
||||
*
|
||||
* If `address` is not a valid IP address, a `TypeError` will be thrown.
|
||||
* The `port` will be coerced to a number. If it is not a legal port, a `TypeError` will be thrown.
|
||||
*
|
||||
* On an error, `err` is an [`Error`](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-error) object,
|
||||
* where `err.code` is the error code.
|
||||
*
|
||||
* ```js
|
||||
* import dns from 'node:dns';
|
||||
* dns.lookupService('127.0.0.1', 22, (err, hostname, service) => {
|
||||
* console.log(hostname, service);
|
||||
* // Prints: localhost ssh
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* If this method is invoked as its [util.promisify()](https://nodejs.org/docs/latest-v22.x/api/util.html#utilpromisifyoriginal) ed
|
||||
* version, it returns a `Promise` for an `Object` with `hostname` and `service` properties.
|
||||
* @since v0.11.14
|
||||
*/
|
||||
export function lookupService(
|
||||
address: string,
|
||||
port: number,
|
||||
callback: (err: NodeJS.ErrnoException | null, hostname: string, service: string) => void,
|
||||
): void;
|
||||
export namespace lookupService {
|
||||
function __promisify__(
|
||||
address: string,
|
||||
port: number,
|
||||
): Promise<{
|
||||
hostname: string;
|
||||
service: string;
|
||||
}>;
|
||||
}
|
||||
export interface ResolveOptions {
|
||||
ttl: boolean;
|
||||
}
|
||||
export interface ResolveWithTtlOptions extends ResolveOptions {
|
||||
ttl: true;
|
||||
}
|
||||
export interface RecordWithTtl {
|
||||
address: string;
|
||||
ttl: number;
|
||||
}
|
||||
/** @deprecated Use `AnyARecord` or `AnyAaaaRecord` instead. */
|
||||
export type AnyRecordWithTtl = AnyARecord | AnyAaaaRecord;
|
||||
export interface AnyARecord extends RecordWithTtl {
|
||||
type: "A";
|
||||
}
|
||||
export interface AnyAaaaRecord extends RecordWithTtl {
|
||||
type: "AAAA";
|
||||
}
|
||||
export interface CaaRecord {
|
||||
critical: number;
|
||||
issue?: string | undefined;
|
||||
issuewild?: string | undefined;
|
||||
iodef?: string | undefined;
|
||||
contactemail?: string | undefined;
|
||||
contactphone?: string | undefined;
|
||||
}
|
||||
export interface MxRecord {
|
||||
priority: number;
|
||||
exchange: string;
|
||||
}
|
||||
export interface AnyMxRecord extends MxRecord {
|
||||
type: "MX";
|
||||
}
|
||||
export interface NaptrRecord {
|
||||
flags: string;
|
||||
service: string;
|
||||
regexp: string;
|
||||
replacement: string;
|
||||
order: number;
|
||||
preference: number;
|
||||
}
|
||||
export interface AnyNaptrRecord extends NaptrRecord {
|
||||
type: "NAPTR";
|
||||
}
|
||||
export interface SoaRecord {
|
||||
nsname: string;
|
||||
hostmaster: string;
|
||||
serial: number;
|
||||
refresh: number;
|
||||
retry: number;
|
||||
expire: number;
|
||||
minttl: number;
|
||||
}
|
||||
export interface AnySoaRecord extends SoaRecord {
|
||||
type: "SOA";
|
||||
}
|
||||
export interface SrvRecord {
|
||||
priority: number;
|
||||
weight: number;
|
||||
port: number;
|
||||
name: string;
|
||||
}
|
||||
export interface AnySrvRecord extends SrvRecord {
|
||||
type: "SRV";
|
||||
}
|
||||
export interface AnyTxtRecord {
|
||||
type: "TXT";
|
||||
entries: string[];
|
||||
}
|
||||
export interface AnyNsRecord {
|
||||
type: "NS";
|
||||
value: string;
|
||||
}
|
||||
export interface AnyPtrRecord {
|
||||
type: "PTR";
|
||||
value: string;
|
||||
}
|
||||
export interface AnyCnameRecord {
|
||||
type: "CNAME";
|
||||
value: string;
|
||||
}
|
||||
export type AnyRecord =
|
||||
| AnyARecord
|
||||
| AnyAaaaRecord
|
||||
| AnyCnameRecord
|
||||
| AnyMxRecord
|
||||
| AnyNaptrRecord
|
||||
| AnyNsRecord
|
||||
| AnyPtrRecord
|
||||
| AnySoaRecord
|
||||
| AnySrvRecord
|
||||
| AnyTxtRecord;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array
|
||||
* of the resource records. The `callback` function has arguments `(err, records)`. When successful, `records` will be an array of resource
|
||||
* records. The type and structure of individual results varies based on `rrtype`:
|
||||
*
|
||||
* <omitted>
|
||||
*
|
||||
* On error, `err` is an [`Error`](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-error) object,
|
||||
* where `err.code` is one of the `DNS error codes`.
|
||||
* @since v0.1.27
|
||||
* @param hostname Host name to resolve.
|
||||
* @param [rrtype='A'] Resource record type.
|
||||
*/
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "A",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "AAAA",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "ANY",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "CNAME",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "MX",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "NAPTR",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "NS",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "PTR",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "SOA",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: SoaRecord) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "SRV",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: "TXT",
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void,
|
||||
): void;
|
||||
export function resolve(
|
||||
hostname: string,
|
||||
rrtype: string,
|
||||
callback: (
|
||||
err: NodeJS.ErrnoException | null,
|
||||
addresses: string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[],
|
||||
) => void,
|
||||
): void;
|
||||
export namespace resolve {
|
||||
function __promisify__(hostname: string, rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR"): Promise<string[]>;
|
||||
function __promisify__(hostname: string, rrtype: "ANY"): Promise<AnyRecord[]>;
|
||||
function __promisify__(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
|
||||
function __promisify__(hostname: string, rrtype: "NAPTR"): Promise<NaptrRecord[]>;
|
||||
function __promisify__(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
|
||||
function __promisify__(hostname: string, rrtype: "SRV"): Promise<SrvRecord[]>;
|
||||
function __promisify__(hostname: string, rrtype: "TXT"): Promise<string[][]>;
|
||||
function __promisify__(
|
||||
hostname: string,
|
||||
rrtype: string,
|
||||
): Promise<string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[]>;
|
||||
}
|
||||
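A minimal sketch of how the `rrtype` literal selects the callback result type in the overloads above; passing `"MX"` picks the `MxRecord[]` overload. The hostname is illustrative:

```ts
import { resolve } from "node:dns";

// The "MX" literal selects the MxRecord[] overload declared above.
resolve("example.com", "MX", (err, addresses) => {
  if (err) throw err;
  for (const { priority, exchange } of addresses) {
    console.log(priority, exchange);
  }
});
```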
/**
|
||||
* Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the `hostname`. The `addresses` argument passed to the `callback` function
|
||||
* will contain an array of IPv4 addresses (e.g.`['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
|
||||
* @since v0.1.16
|
||||
* @param hostname Host name to resolve.
|
||||
*/
|
||||
export function resolve4(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export function resolve4(
|
||||
hostname: string,
|
||||
options: ResolveWithTtlOptions,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void,
|
||||
): void;
|
||||
export function resolve4(
|
||||
hostname: string,
|
||||
options: ResolveOptions,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void,
|
||||
): void;
|
||||
export namespace resolve4 {
|
||||
function __promisify__(hostname: string): Promise<string[]>;
|
||||
function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>;
|
||||
function __promisify__(hostname: string, options?: ResolveOptions): Promise<string[] | RecordWithTtl[]>;
|
||||
}
|
||||
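A short sketch of the `ttl` behaviour encoded in the overloads above: passing `{ ttl: true }` (a `ResolveWithTtlOptions`) switches the callback result from `string[]` to `RecordWithTtl[]`. The hostname is illustrative:

```ts
import { resolve4 } from "node:dns";

// { ttl: true } matches ResolveWithTtlOptions, so `records` is RecordWithTtl[].
resolve4("example.com", { ttl: true }, (err, records) => {
  if (err) throw err;
  for (const { address, ttl } of records) {
    console.log(address, ttl);
  }
});
```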
/**
|
||||
* Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the `hostname`. The `addresses` argument passed to the `callback` function
|
||||
* will contain an array of IPv6 addresses.
|
||||
* @since v0.1.16
|
||||
* @param hostname Host name to resolve.
|
||||
*/
|
||||
export function resolve6(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export function resolve6(
|
||||
hostname: string,
|
||||
options: ResolveWithTtlOptions,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void,
|
||||
): void;
|
||||
export function resolve6(
|
||||
hostname: string,
|
||||
options: ResolveOptions,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void,
|
||||
): void;
|
||||
export namespace resolve6 {
|
||||
function __promisify__(hostname: string): Promise<string[]>;
|
||||
function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>;
|
||||
function __promisify__(hostname: string, options?: ResolveOptions): Promise<string[] | RecordWithTtl[]>;
|
||||
}
|
||||
/**
|
||||
* Uses the DNS protocol to resolve `CNAME` records for the `hostname`. The `addresses` argument passed to the `callback` function
|
||||
* will contain an array of canonical name records available for the `hostname` (e.g. `['bar.example.com']`).
|
||||
* @since v0.3.2
|
||||
*/
|
||||
export function resolveCname(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export namespace resolveCname {
|
||||
function __promisify__(hostname: string): Promise<string[]>;
|
||||
}
|
||||
/**
|
||||
* Uses the DNS protocol to resolve `CAA` records for the `hostname`. The `addresses` argument passed to the `callback` function
|
||||
* will contain an array of certification authority authorization records
|
||||
* available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`).
|
||||
* @since v15.0.0, v14.17.0
|
||||
*/
|
||||
export function resolveCaa(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, records: CaaRecord[]) => void,
|
||||
): void;
|
||||
export namespace resolveCaa {
|
||||
function __promisify__(hostname: string): Promise<CaaRecord[]>;
|
||||
}
|
||||
/**
|
||||
* Uses the DNS protocol to resolve mail exchange records (`MX` records) for the `hostname`. The `addresses` argument passed to the `callback` function will
|
||||
* contain an array of objects containing both a `priority` and `exchange` property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`).
|
||||
* @since v0.1.27
|
||||
*/
|
||||
export function resolveMx(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void,
|
||||
): void;
|
||||
export namespace resolveMx {
|
||||
function __promisify__(hostname: string): Promise<MxRecord[]>;
|
||||
}
|
||||
/**
|
||||
* Uses the DNS protocol to resolve regular expression-based records (`NAPTR` records) for the `hostname`. The `addresses` argument passed to the `callback` function will contain an array of
|
||||
* objects with the following properties:
|
||||
*
|
||||
* * `flags`
|
||||
* * `service`
|
||||
* * `regexp`
|
||||
* * `replacement`
|
||||
* * `order`
|
||||
* * `preference`
|
||||
*
|
||||
* ```js
|
||||
* {
|
||||
* flags: 's',
|
||||
* service: 'SIP+D2U',
|
||||
* regexp: '',
|
||||
* replacement: '_sip._udp.example.com',
|
||||
* order: 30,
|
||||
* preference: 100
|
||||
* }
|
||||
* ```
|
||||
* @since v0.9.12
|
||||
*/
|
||||
export function resolveNaptr(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void,
|
||||
): void;
|
||||
export namespace resolveNaptr {
|
||||
function __promisify__(hostname: string): Promise<NaptrRecord[]>;
|
||||
}
|
||||
/**
|
||||
* Uses the DNS protocol to resolve name server records (`NS` records) for the `hostname`. The `addresses` argument passed to the `callback` function will
|
||||
* contain an array of name server records available for `hostname` (e.g. `['ns1.example.com', 'ns2.example.com']`).
|
||||
* @since v0.1.90
|
||||
*/
|
||||
export function resolveNs(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export namespace resolveNs {
|
||||
function __promisify__(hostname: string): Promise<string[]>;
|
||||
}
|
||||
/**
|
||||
* Uses the DNS protocol to resolve pointer records (`PTR` records) for the `hostname`. The `addresses` argument passed to the `callback` function will
|
||||
* be an array of strings containing the reply records.
|
||||
* @since v6.0.0
|
||||
*/
|
||||
export function resolvePtr(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void,
|
||||
): void;
|
||||
export namespace resolvePtr {
|
||||
function __promisify__(hostname: string): Promise<string[]>;
|
||||
}
|
||||
/**
|
||||
* Uses the DNS protocol to resolve a start of authority record (`SOA` record) for
|
||||
* the `hostname`. The `address` argument passed to the `callback` function will
|
||||
* be an object with the following properties:
|
||||
*
|
||||
* * `nsname`
|
||||
* * `hostmaster`
|
||||
* * `serial`
|
||||
* * `refresh`
|
||||
* * `retry`
|
||||
* * `expire`
|
||||
* * `minttl`
|
||||
*
|
||||
* ```js
|
||||
* {
|
||||
* nsname: 'ns.example.com',
|
||||
* hostmaster: 'root.example.com',
|
||||
* serial: 2013101809,
|
||||
* refresh: 10000,
|
||||
* retry: 2400,
|
||||
* expire: 604800,
|
||||
* minttl: 3600
|
||||
* }
|
||||
* ```
|
||||
* @since v0.11.10
|
||||
*/
|
||||
export function resolveSoa(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, address: SoaRecord) => void,
|
||||
): void;
|
||||
export namespace resolveSoa {
|
||||
function __promisify__(hostname: string): Promise<SoaRecord>;
|
||||
}
|
||||
/**
|
||||
* Uses the DNS protocol to resolve service records (`SRV` records) for the `hostname`. The `addresses` argument passed to the `callback` function will
|
||||
* be an array of objects with the following properties:
|
||||
*
|
||||
* * `priority`
|
||||
* * `weight`
|
||||
* * `port`
|
||||
* * `name`
|
||||
*
|
||||
* ```js
|
||||
* {
|
||||
* priority: 10,
|
||||
* weight: 5,
|
||||
* port: 21223,
|
||||
* name: 'service.example.com'
|
||||
* }
|
||||
* ```
|
||||
* @since v0.1.27
|
||||
*/
|
||||
export function resolveSrv(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void,
|
||||
): void;
|
||||
export namespace resolveSrv {
|
||||
function __promisify__(hostname: string): Promise<SrvRecord[]>;
|
||||
}
|
||||
/**
|
||||
* Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. The `records` argument passed to the `callback` function is a
|
||||
* two-dimensional array of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of
|
||||
* one record. Depending on the use case, these could be either joined together or
|
||||
* treated separately.
|
||||
* @since v0.1.27
|
||||
*/
|
||||
export function resolveTxt(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void,
|
||||
): void;
|
||||
export namespace resolveTxt {
|
||||
function __promisify__(hostname: string): Promise<string[][]>;
|
||||
}
|
||||
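A minimal sketch of joining TXT chunks per record, as the comment above suggests; the hostname is illustrative:

```ts
import { resolveTxt } from "node:dns";

// Each element of `records` is one TXT record split into chunks.
resolveTxt("example.com", (err, records) => {
  if (err) throw err;
  const texts = records.map((chunks) => chunks.join(""));
  console.log(texts);
});
```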
/**
|
||||
* Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query).
|
||||
* The `ret` argument passed to the `callback` function will be an array containing
|
||||
* various types of records. Each object has a property `type` that indicates the
|
||||
* type of the current record. Depending on the `type`, additional properties
|
||||
* will be present on the object:
|
||||
*
|
||||
* <omitted>
|
||||
*
|
||||
* Here is an example of the `ret` object passed to the callback:
|
||||
*
|
||||
* ```js
|
||||
* [ { type: 'A', address: '127.0.0.1', ttl: 299 },
|
||||
* { type: 'CNAME', value: 'example.com' },
|
||||
* { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 },
|
||||
* { type: 'NS', value: 'ns1.example.com' },
|
||||
* { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] },
|
||||
* { type: 'SOA',
|
||||
* nsname: 'ns1.example.com',
|
||||
* hostmaster: 'admin.example.com',
|
||||
* serial: 156696742,
|
||||
* refresh: 900,
|
||||
* retry: 900,
|
||||
* expire: 1800,
|
||||
* minttl: 60 } ]
|
||||
* ```
|
||||
*
|
||||
* DNS server operators may choose not to respond to `ANY` queries. It may be better to call individual methods like {@link resolve4}, {@link resolveMx}, and so on. For more details, see
|
||||
* [RFC 8482](https://tools.ietf.org/html/rfc8482).
|
||||
*/
|
||||
export function resolveAny(
|
||||
hostname: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void,
|
||||
): void;
|
||||
export namespace resolveAny {
|
||||
function __promisify__(hostname: string): Promise<AnyRecord[]>;
|
||||
}
|
||||
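A sketch showing that `type` acts as the discriminant of the `AnyRecord` union declared earlier, so narrowing on it exposes the record-specific fields; the hostname is illustrative:

```ts
import { resolveAny } from "node:dns";
import type { AnyRecord } from "node:dns";

// Narrowing on `record.type` gives access to the fields of each union member.
function describe(record: AnyRecord): string {
  switch (record.type) {
    case "A":
    case "AAAA":
      return `${record.type} ${record.address} (ttl ${record.ttl})`;
    case "MX":
      return `MX ${record.exchange} (priority ${record.priority})`;
    case "TXT":
      return `TXT ${record.entries.join("")}`;
    default:
      return record.type;
  }
}

resolveAny("example.com", (err, records) => {
  if (err) throw err;
  for (const record of records) console.log(describe(record));
});
```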
/**
|
||||
* Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an
|
||||
* array of host names.
|
||||
*
|
||||
* On error, `err` is an [`Error`](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-error) object, where `err.code` is
|
||||
* one of the [DNS error codes](https://nodejs.org/docs/latest-v22.x/api/dns.html#error-codes).
|
||||
* @since v0.1.16
|
||||
*/
|
||||
export function reverse(
|
||||
ip: string,
|
||||
callback: (err: NodeJS.ErrnoException | null, hostnames: string[]) => void,
|
||||
): void;
|
||||
/**
|
||||
* Get the default value for `order` in {@link lookup} and [`dnsPromises.lookup()`](https://nodejs.org/docs/latest-v22.x/api/dns.html#dnspromiseslookuphostname-options).
|
||||
* The value could be:
|
||||
*
|
||||
* * `ipv4first`: for `order` defaulting to `ipv4first`.
|
||||
* * `ipv6first`: for `order` defaulting to `ipv6first`.
|
||||
* * `verbatim`: for `order` defaulting to `verbatim`.
|
||||
* @since v18.17.0
|
||||
*/
|
||||
export function getDefaultResultOrder(): "ipv4first" | "ipv6first" | "verbatim";
|
||||
/**
|
||||
* Sets the IP address and port of servers to be used when performing DNS
|
||||
* resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted
|
||||
* addresses. If the port is the IANA default DNS port (53) it can be omitted.
|
||||
*
|
||||
* ```js
|
||||
* dns.setServers([
|
||||
* '4.4.4.4',
|
||||
* '[2001:4860:4860::8888]',
|
||||
* '4.4.4.4:1053',
|
||||
* '[2001:4860:4860::8888]:1053',
|
||||
* ]);
|
||||
* ```
|
||||
*
|
||||
* An error will be thrown if an invalid address is provided.
|
||||
*
|
||||
* The `dns.setServers()` method must not be called while a DNS query is in
|
||||
* progress.
|
||||
*
|
||||
* The {@link setServers} method affects only {@link resolve}, `dns.resolve*()` and {@link reverse} (and specifically _not_ {@link lookup}).
|
||||
*
|
||||
* This method works much like [resolv.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html).
|
||||
* That is, if attempting to resolve with the first server provided results in a `NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with
|
||||
* subsequent servers provided. Fallback DNS servers will only be used if the
|
||||
* earlier ones time out or result in some other error.
|
||||
* @since v0.11.3
|
||||
* @param servers array of [RFC 5952](https://datatracker.ietf.org/doc/html/rfc5952#section-6) formatted addresses
|
||||
*/
|
||||
export function setServers(servers: readonly string[]): void;
|
||||
/**
|
||||
* Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6),
|
||||
* that are currently configured for DNS resolution. A string will include a port
|
||||
* section if a custom port is used.
|
||||
*
|
||||
* ```js
|
||||
* [
|
||||
* '4.4.4.4',
|
||||
* '2001:4860:4860::8888',
|
||||
* '4.4.4.4:1053',
|
||||
* '[2001:4860:4860::8888]:1053',
|
||||
* ]
|
||||
* ```
|
||||
* @since v0.11.3
|
||||
*/
|
||||
export function getServers(): string[];
|
||||
/**
|
||||
* Set the default value of `order` in {@link lookup} and [`dnsPromises.lookup()`](https://nodejs.org/docs/latest-v22.x/api/dns.html#dnspromiseslookuphostname-options).
|
||||
* The value could be:
|
||||
*
|
||||
* * `ipv4first`: sets default `order` to `ipv4first`.
|
||||
* * `ipv6first`: sets default `order` to `ipv6first`.
|
||||
* * `verbatim`: sets default `order` to `verbatim`.
|
||||
*
|
||||
* The default is `verbatim` and {@link setDefaultResultOrder} has higher
|
||||
* priority than [`--dns-result-order`](https://nodejs.org/docs/latest-v22.x/api/cli.html#--dns-result-orderorder). When using
|
||||
* [worker threads](https://nodejs.org/docs/latest-v22.x/api/worker_threads.html), {@link setDefaultResultOrder} from the main
|
||||
* thread won't affect the default DNS order in workers.
|
||||
* @since v16.4.0, v14.18.0
|
||||
* @param order must be `'ipv4first'`, `'ipv6first'` or `'verbatim'`.
|
||||
*/
|
||||
export function setDefaultResultOrder(order: "ipv4first" | "ipv6first" | "verbatim"): void;
|
||||
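A trivial sketch of setting and reading the process-wide order using the declarations above; the chosen value is illustrative:

```ts
import dns from "node:dns";

// Prefer IPv4 results from dns.lookup(); the argument must be one of the
// literals in the union above.
dns.setDefaultResultOrder("ipv4first");
console.log(dns.getDefaultResultOrder()); // "ipv4first"
```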
// Error codes
|
||||
export const NODATA: "ENODATA";
|
||||
export const FORMERR: "EFORMERR";
|
||||
export const SERVFAIL: "ESERVFAIL";
|
||||
export const NOTFOUND: "ENOTFOUND";
|
||||
export const NOTIMP: "ENOTIMP";
|
||||
export const REFUSED: "EREFUSED";
|
||||
export const BADQUERY: "EBADQUERY";
|
||||
export const BADNAME: "EBADNAME";
|
||||
export const BADFAMILY: "EBADFAMILY";
|
||||
export const BADRESP: "EBADRESP";
|
||||
export const CONNREFUSED: "ECONNREFUSED";
|
||||
export const TIMEOUT: "ETIMEOUT";
|
||||
export const EOF: "EOF";
|
||||
export const FILE: "EFILE";
|
||||
export const NOMEM: "ENOMEM";
|
||||
export const DESTRUCTION: "EDESTRUCTION";
|
||||
export const BADSTR: "EBADSTR";
|
||||
export const BADFLAGS: "EBADFLAGS";
|
||||
export const NONAME: "ENONAME";
|
||||
export const BADHINTS: "EBADHINTS";
|
||||
export const NOTINITIALIZED: "ENOTINITIALIZED";
|
||||
export const LOADIPHLPAPI: "ELOADIPHLPAPI";
|
||||
export const ADDRGETNETWORKPARAMS: "EADDRGETNETWORKPARAMS";
|
||||
export const CANCELLED: "ECANCELLED";
|
||||
export interface ResolverOptions {
|
||||
/**
|
||||
* Query timeout in milliseconds, or `-1` to use the default timeout.
|
||||
*/
|
||||
timeout?: number | undefined;
|
||||
/**
|
||||
* The number of attempts the resolver will make to contact each name server before giving up.
|
||||
* @default 4
|
||||
*/
|
||||
tries?: number;
|
||||
}
|
||||
/**
|
||||
* An independent resolver for DNS requests.
|
||||
*
|
||||
* Creating a new resolver uses the default server settings. Setting
|
||||
* the servers used for a resolver using [`resolver.setServers()`](https://nodejs.org/docs/latest-v22.x/api/dns.html#dnssetserversservers) does not affect
|
||||
* other resolvers:
|
||||
*
|
||||
* ```js
|
||||
* import { Resolver } from 'node:dns';
|
||||
* const resolver = new Resolver();
|
||||
* resolver.setServers(['4.4.4.4']);
|
||||
*
|
||||
* // This request will use the server at 4.4.4.4, independent of global settings.
|
||||
* resolver.resolve4('example.org', (err, addresses) => {
|
||||
* // ...
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* The following methods from the `node:dns` module are available:
|
||||
*
|
||||
* * `resolver.getServers()`
|
||||
* * `resolver.resolve()`
|
||||
* * `resolver.resolve4()`
|
||||
* * `resolver.resolve6()`
|
||||
* * `resolver.resolveAny()`
|
||||
* * `resolver.resolveCaa()`
|
||||
* * `resolver.resolveCname()`
|
||||
* * `resolver.resolveMx()`
|
||||
* * `resolver.resolveNaptr()`
|
||||
* * `resolver.resolveNs()`
|
||||
* * `resolver.resolvePtr()`
|
||||
* * `resolver.resolveSoa()`
|
||||
* * `resolver.resolveSrv()`
|
||||
* * `resolver.resolveTxt()`
|
||||
* * `resolver.reverse()`
|
||||
* * `resolver.setServers()`
|
||||
* @since v8.3.0
|
||||
*/
|
||||
export class Resolver {
|
||||
constructor(options?: ResolverOptions);
|
||||
/**
|
||||
* Cancel all outstanding DNS queries made by this resolver. The corresponding
|
||||
* callbacks will be called with an error with code `ECANCELLED`.
|
||||
* @since v8.3.0
|
||||
*/
|
||||
cancel(): void;
|
||||
getServers: typeof getServers;
|
||||
resolve: typeof resolve;
|
||||
resolve4: typeof resolve4;
|
||||
resolve6: typeof resolve6;
|
||||
resolveAny: typeof resolveAny;
|
||||
resolveCaa: typeof resolveCaa;
|
||||
resolveCname: typeof resolveCname;
|
||||
resolveMx: typeof resolveMx;
|
||||
resolveNaptr: typeof resolveNaptr;
|
||||
resolveNs: typeof resolveNs;
|
||||
resolvePtr: typeof resolvePtr;
|
||||
resolveSoa: typeof resolveSoa;
|
||||
resolveSrv: typeof resolveSrv;
|
||||
resolveTxt: typeof resolveTxt;
|
||||
reverse: typeof reverse;
|
||||
/**
|
||||
* The resolver instance will send its requests from the specified IP address.
|
||||
* This allows programs to specify outbound interfaces when used on multi-homed
|
||||
* systems.
|
||||
*
|
||||
* If a v4 or v6 address is not specified, it is set to the default and the
|
||||
* operating system will choose a local address automatically.
|
||||
*
|
||||
* The resolver will use the v4 local address when making requests to IPv4 DNS
|
||||
* servers, and the v6 local address when making requests to IPv6 DNS servers.
|
||||
* The `rrtype` of resolution requests has no impact on the local address used.
|
||||
* @since v15.1.0, v14.17.0
|
||||
* @param [ipv4='0.0.0.0'] A string representation of an IPv4 address.
|
||||
* @param [ipv6='::0'] A string representation of an IPv6 address.
|
||||
*/
|
||||
setLocalAddress(ipv4?: string, ipv6?: string): void;
|
||||
setServers: typeof setServers;
|
||||
}
|
||||
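A minimal sketch of constructing a `Resolver` with the `ResolverOptions` declared earlier; the timeout/tries values, server address, and hostname are illustrative:

```ts
import { Resolver } from "node:dns";

// timeout/tries correspond to the ResolverOptions fields above.
const resolver = new Resolver({ timeout: 1000, tries: 2 });
resolver.setServers(["4.4.4.4"]);
resolver.resolve4("example.org", (err, addresses) => {
  if (err) throw err;
  console.log(addresses);
});
// resolver.cancel() would reject queries still in flight with ECANCELLED.
```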
export { dnsPromises as promises };
|
||||
}
|
||||
declare module "node:dns" {
|
||||
export * from "dns";
|
||||
}
|
476
node_modules/@types/node/dns/promises.d.ts
generated
vendored
Normal file
@ -0,0 +1,476 @@
|
||||
/**
|
||||
* The `dns.promises` API provides an alternative set of asynchronous DNS methods
|
||||
* that return `Promise` objects rather than using callbacks. The API is accessible
|
||||
* via `import { promises as dnsPromises } from 'node:dns'` or `import dnsPromises from 'node:dns/promises'`.
|
||||
* @since v10.6.0
|
||||
*/
|
||||
declare module "dns/promises" {
|
||||
import {
|
||||
AnyRecord,
|
||||
CaaRecord,
|
||||
LookupAddress,
|
||||
LookupAllOptions,
|
||||
LookupOneOptions,
|
||||
LookupOptions,
|
||||
MxRecord,
|
||||
NaptrRecord,
|
||||
RecordWithTtl,
|
||||
ResolveOptions,
|
||||
ResolverOptions,
|
||||
ResolveWithTtlOptions,
|
||||
SoaRecord,
|
||||
SrvRecord,
|
||||
} from "node:dns";
|
||||
/**
|
||||
* Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6),
|
||||
* that are currently configured for DNS resolution. A string will include a port
|
||||
* section if a custom port is used.
|
||||
*
|
||||
* ```js
|
||||
* [
|
||||
* '4.4.4.4',
|
||||
* '2001:4860:4860::8888',
|
||||
* '4.4.4.4:1053',
|
||||
* '[2001:4860:4860::8888]:1053',
|
||||
* ]
|
||||
* ```
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function getServers(): string[];
|
||||
/**
|
||||
* Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or
|
||||
* AAAA (IPv6) record. All `option` properties are optional. If `options` is an
|
||||
* integer, then it must be `4` or `6` – if `options` is not provided, then IPv4
|
||||
* and IPv6 addresses are both returned if found.
|
||||
*
|
||||
* With the `all` option set to `true`, the `Promise` is resolved with `addresses` being an array of objects with the properties `address` and `family`.
|
||||
*
|
||||
* On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` is the error code.
|
||||
* Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when
|
||||
* the host name does not exist but also when the lookup fails in other ways
|
||||
* such as no available file descriptors.
|
||||
*
|
||||
* [`dnsPromises.lookup()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromiseslookuphostname-options) does not necessarily have anything to do with the DNS
|
||||
* protocol. The implementation uses an operating system facility that can
|
||||
* associate names with addresses and vice versa. This implementation can have
|
||||
* subtle but important consequences on the behavior of any Node.js program. Please
|
||||
* take some time to consult the [Implementation considerations section](https://nodejs.org/docs/latest-v20.x/api/dns.html#implementation-considerations) before
|
||||
* using `dnsPromises.lookup()`.
|
||||
*
|
||||
* Example usage:
|
||||
*
|
||||
* ```js
|
||||
* import dns from 'node:dns';
|
||||
* const dnsPromises = dns.promises;
|
||||
* const options = {
|
||||
* family: 6,
|
||||
* hints: dns.ADDRCONFIG | dns.V4MAPPED,
|
||||
* };
|
||||
*
|
||||
* dnsPromises.lookup('example.com', options).then((result) => {
|
||||
* console.log('address: %j family: IPv%s', result.address, result.family);
|
||||
* // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6
|
||||
* });
|
||||
*
|
||||
* // When options.all is true, the result will be an Array.
|
||||
* options.all = true;
|
||||
* dnsPromises.lookup('example.com', options).then((result) => {
|
||||
* console.log('addresses: %j', result);
|
||||
* // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}]
|
||||
* });
|
||||
* ```
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function lookup(hostname: string, family: number): Promise<LookupAddress>;
|
||||
function lookup(hostname: string, options: LookupOneOptions): Promise<LookupAddress>;
|
||||
function lookup(hostname: string, options: LookupAllOptions): Promise<LookupAddress[]>;
|
||||
function lookup(hostname: string, options: LookupOptions): Promise<LookupAddress | LookupAddress[]>;
|
||||
function lookup(hostname: string): Promise<LookupAddress>;
|
||||
/**
|
||||
* Resolves the given `address` and `port` into a host name and service using
|
||||
* the operating system's underlying `getnameinfo` implementation.
|
||||
*
|
||||
* If `address` is not a valid IP address, a `TypeError` will be thrown.
|
||||
* The `port` will be coerced to a number. If it is not a legal port, a `TypeError` will be thrown.
|
||||
*
|
||||
* On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` is the error code.
|
||||
*
|
||||
* ```js
|
||||
* import dnsPromises from 'node:dns/promises';
|
||||
* dnsPromises.lookupService('127.0.0.1', 22).then((result) => {
|
||||
* console.log(result.hostname, result.service);
|
||||
* // Prints: localhost ssh
|
||||
* });
|
||||
* ```
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function lookupService(
|
||||
address: string,
|
||||
port: number,
|
||||
): Promise<{
|
||||
hostname: string;
|
||||
service: string;
|
||||
}>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array
|
||||
* of the resource records. When successful, the `Promise` is resolved with an
|
||||
* array of resource records. The type and structure of individual results vary
|
||||
* based on `rrtype`:
|
||||
*
|
||||
* <omitted>
|
||||
*
|
||||
* On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code`
|
||||
* is one of the [DNS error codes](https://nodejs.org/docs/latest-v20.x/api/dns.html#error-codes).
|
||||
* @since v10.6.0
|
||||
* @param hostname Host name to resolve.
|
||||
* @param [rrtype='A'] Resource record type.
|
||||
*/
|
||||
function resolve(hostname: string): Promise<string[]>;
|
||||
function resolve(hostname: string, rrtype: "A"): Promise<string[]>;
|
||||
function resolve(hostname: string, rrtype: "AAAA"): Promise<string[]>;
|
||||
function resolve(hostname: string, rrtype: "ANY"): Promise<AnyRecord[]>;
|
||||
function resolve(hostname: string, rrtype: "CAA"): Promise<CaaRecord[]>;
|
||||
function resolve(hostname: string, rrtype: "CNAME"): Promise<string[]>;
|
||||
function resolve(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
|
||||
function resolve(hostname: string, rrtype: "NAPTR"): Promise<NaptrRecord[]>;
|
||||
function resolve(hostname: string, rrtype: "NS"): Promise<string[]>;
|
||||
function resolve(hostname: string, rrtype: "PTR"): Promise<string[]>;
|
||||
function resolve(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
|
||||
function resolve(hostname: string, rrtype: "SRV"): Promise<SrvRecord[]>;
|
||||
function resolve(hostname: string, rrtype: "TXT"): Promise<string[][]>;
|
||||
function resolve(
|
||||
hostname: string,
|
||||
rrtype: string,
|
||||
): Promise<string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[]>;
|
||||
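A short async/await sketch of the promise-based `resolve()` overloads above; the `"MX"` literal picks the `Promise<MxRecord[]>` overload, and the hostname is illustrative:

```ts
import { resolve } from "node:dns/promises";

// Returns the MX exchanges sorted by priority.
async function mailHosts(): Promise<string[]> {
  const records = await resolve("example.com", "MX");
  return records
    .sort((a, b) => a.priority - b.priority)
    .map((record) => record.exchange);
}
```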
/**
|
||||
* Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the `hostname`. On success, the `Promise` is resolved with an array of IPv4
|
||||
* addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
|
||||
* @since v10.6.0
|
||||
* @param hostname Host name to resolve.
|
||||
*/
|
||||
function resolve4(hostname: string): Promise<string[]>;
|
||||
function resolve4(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>;
|
||||
function resolve4(hostname: string, options: ResolveOptions): Promise<string[] | RecordWithTtl[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the `hostname`. On success, the `Promise` is resolved with an array of IPv6
|
||||
* addresses.
|
||||
* @since v10.6.0
|
||||
* @param hostname Host name to resolve.
|
||||
*/
|
||||
function resolve6(hostname: string): Promise<string[]>;
|
||||
function resolve6(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>;
|
||||
function resolve6(hostname: string, options: ResolveOptions): Promise<string[] | RecordWithTtl[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query).
|
||||
* On success, the `Promise` is resolved with an array containing various types of
|
||||
* records. Each object has a property `type` that indicates the type of the
|
||||
* current record. Depending on the `type`, additional properties will be
|
||||
* present on the object:
|
||||
*
|
||||
* <omitted>
|
||||
*
|
||||
* Here is an example of the result object:
|
||||
*
|
||||
* ```js
|
||||
* [ { type: 'A', address: '127.0.0.1', ttl: 299 },
|
||||
* { type: 'CNAME', value: 'example.com' },
|
||||
* { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 },
|
||||
* { type: 'NS', value: 'ns1.example.com' },
|
||||
* { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] },
|
||||
* { type: 'SOA',
|
||||
* nsname: 'ns1.example.com',
|
||||
* hostmaster: 'admin.example.com',
|
||||
* serial: 156696742,
|
||||
* refresh: 900,
|
||||
* retry: 900,
|
||||
* expire: 1800,
|
||||
* minttl: 60 } ]
|
||||
* ```
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function resolveAny(hostname: string): Promise<AnyRecord[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve `CAA` records for the `hostname`. On success,
|
||||
* the `Promise` is resolved with an array of objects containing available
|
||||
* certification authority authorization records available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'},{critical: 128, issue: 'pki.example.com'}]`).
|
||||
* @since v15.0.0, v14.17.0
|
||||
*/
|
||||
function resolveCaa(hostname: string): Promise<CaaRecord[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success,
|
||||
* the `Promise` is resolved with an array of canonical name records available for
|
||||
* the `hostname` (e.g. `['bar.example.com']`).
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function resolveCname(hostname: string): Promise<string[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve mail exchange records (`MX` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects
|
||||
* containing both a `priority` and `exchange` property (e.g.`[{priority: 10, exchange: 'mx.example.com'}, ...]`).
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function resolveMx(hostname: string): Promise<MxRecord[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve regular expression-based records (`NAPTR` records) for the `hostname`. On success, the `Promise` is resolved with an array
|
||||
* of objects with the following properties:
|
||||
*
|
||||
* * `flags`
|
||||
* * `service`
|
||||
* * `regexp`
|
||||
* * `replacement`
|
||||
* * `order`
|
||||
* * `preference`
|
||||
*
|
||||
* ```js
|
||||
* {
|
||||
* flags: 's',
|
||||
* service: 'SIP+D2U',
|
||||
* regexp: '',
|
||||
* replacement: '_sip._udp.example.com',
|
||||
* order: 30,
|
||||
* preference: 100
|
||||
* }
|
||||
* ```
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function resolveNaptr(hostname: string): Promise<NaptrRecord[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve name server records (`NS` records) for the `hostname`. On success, the `Promise` is resolved with an array of name server
|
||||
* records available for `hostname` (e.g.`['ns1.example.com', 'ns2.example.com']`).
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function resolveNs(hostname: string): Promise<string[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve pointer records (`PTR` records) for the `hostname`. On success, the `Promise` is resolved with an array of strings
|
||||
* containing the reply records.
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function resolvePtr(hostname: string): Promise<string[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve a start of authority record (`SOA` record) for
|
||||
* the `hostname`. On success, the `Promise` is resolved with an object with the
|
||||
* following properties:
|
||||
*
|
||||
* * `nsname`
|
||||
* * `hostmaster`
|
||||
* * `serial`
|
||||
* * `refresh`
|
||||
* * `retry`
|
||||
* * `expire`
|
||||
* * `minttl`
|
||||
*
|
||||
* ```js
|
||||
* {
|
||||
* nsname: 'ns.example.com',
|
||||
* hostmaster: 'root.example.com',
|
||||
* serial: 2013101809,
|
||||
* refresh: 10000,
|
||||
* retry: 2400,
|
||||
* expire: 604800,
|
||||
* minttl: 3600
|
||||
* }
|
||||
* ```
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function resolveSoa(hostname: string): Promise<SoaRecord>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve service records (`SRV` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects with
|
||||
* the following properties:
|
||||
*
|
||||
* * `priority`
|
||||
* * `weight`
|
||||
* * `port`
|
||||
* * `name`
|
||||
*
|
||||
* ```js
|
||||
* {
|
||||
* priority: 10,
|
||||
* weight: 5,
|
||||
* port: 21223,
|
||||
* name: 'service.example.com'
|
||||
* }
|
||||
* ```
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function resolveSrv(hostname: string): Promise<SrvRecord[]>;
|
||||
/**
|
||||
* Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. On success, the `Promise` is resolved with a two-dimensional array
|
||||
* of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of
|
||||
* one record. Depending on the use case, these could be either joined together or
|
||||
* treated separately.
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function resolveTxt(hostname: string): Promise<string[][]>;
|
||||
/**
|
||||
* Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an
|
||||
* array of host names.
|
||||
*
|
||||
* On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code`
|
||||
* is one of the [DNS error codes](https://nodejs.org/docs/latest-v20.x/api/dns.html#error-codes).
|
||||
* @since v10.6.0
|
||||
*/
|
||||
function reverse(ip: string): Promise<string[]>;
|
||||
/**
|
||||
* Get the default value for `verbatim` in {@link lookup} and [dnsPromises.lookup()](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromiseslookuphostname-options).
|
||||
* The value could be:
|
||||
*
|
||||
* * `ipv4first`: for `verbatim` defaulting to `false`.
|
||||
* * `verbatim`: for `verbatim` defaulting to `true`.
|
||||
* @since v20.1.0
|
||||
*/
|
||||
function getDefaultResultOrder(): "ipv4first" | "verbatim";
|
||||
/**
|
||||
* Sets the IP address and port of servers to be used when performing DNS
|
||||
* resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted
|
||||
* addresses. If the port is the IANA default DNS port (53) it can be omitted.
|
||||
*
|
||||
* ```js
|
||||
* dnsPromises.setServers([
|
||||
* '4.4.4.4',
|
||||
* '[2001:4860:4860::8888]',
|
||||
* '4.4.4.4:1053',
|
||||
* '[2001:4860:4860::8888]:1053',
|
||||
* ]);
|
||||
* ```
|
||||
*
|
||||
* An error will be thrown if an invalid address is provided.
|
||||
*
|
||||
* The `dnsPromises.setServers()` method must not be called while a DNS query is in
|
||||
* progress.
|
||||
*
|
||||
* This method works much like [resolv.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html).
|
||||
* That is, if attempting to resolve with the first server provided results in a `NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with
|
||||
* subsequent servers provided. Fallback DNS servers will only be used if the
|
||||
* earlier ones time out or result in some other error.
|
||||
* @since v10.6.0
|
||||
* @param servers array of `RFC 5952` formatted addresses
|
||||
*/
|
||||
function setServers(servers: readonly string[]): void;
|
||||
/**
|
||||
* Set the default value of `order` in `dns.lookup()` and {@link lookup}. The value could be:
|
||||
*
|
||||
* * `ipv4first`: sets default `order` to `ipv4first`.
|
||||
* * `ipv6first`: sets default `order` to `ipv6first`.
|
||||
* * `verbatim`: sets default `order` to `verbatim`.
|
||||
*
|
||||
* The default is `verbatim` and [dnsPromises.setDefaultResultOrder()](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromisessetdefaultresultorderorder)
|
||||
* has higher priority than [`--dns-result-order`](https://nodejs.org/docs/latest-v20.x/api/cli.html#--dns-result-orderorder).
|
||||
* When using [worker threads](https://nodejs.org/docs/latest-v20.x/api/worker_threads.html), [`dnsPromises.setDefaultResultOrder()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromisessetdefaultresultorderorder)
|
||||
* from the main thread won't affect the default DNS order in workers.
|
||||
* @since v16.4.0, v14.18.0
|
||||
* @param order must be `'ipv4first'`, `'ipv6first'` or `'verbatim'`.
|
||||
*/
|
||||
function setDefaultResultOrder(order: "ipv4first" | "ipv6first" | "verbatim"): void;
|
||||
// Error codes
|
||||
const NODATA: "ENODATA";
|
||||
const FORMERR: "EFORMERR";
|
||||
const SERVFAIL: "ESERVFAIL";
|
||||
const NOTFOUND: "ENOTFOUND";
|
||||
const NOTIMP: "ENOTIMP";
|
||||
const REFUSED: "EREFUSED";
|
||||
const BADQUERY: "EBADQUERY";
|
||||
const BADNAME: "EBADNAME";
|
||||
const BADFAMILY: "EBADFAMILY";
|
||||
const BADRESP: "EBADRESP";
|
||||
const CONNREFUSED: "ECONNREFUSED";
|
||||
const TIMEOUT: "ETIMEOUT";
|
||||
const EOF: "EOF";
|
||||
const FILE: "EFILE";
|
||||
const NOMEM: "ENOMEM";
|
||||
const DESTRUCTION: "EDESTRUCTION";
|
||||
const BADSTR: "EBADSTR";
|
||||
const BADFLAGS: "EBADFLAGS";
|
||||
const NONAME: "ENONAME";
|
||||
const BADHINTS: "EBADHINTS";
|
||||
const NOTINITIALIZED: "ENOTINITIALIZED";
|
||||
const LOADIPHLPAPI: "ELOADIPHLPAPI";
|
||||
const ADDRGETNETWORKPARAMS: "EADDRGETNETWORKPARAMS";
|
||||
const CANCELLED: "ECANCELLED";
|
||||
|
||||
/**
|
||||
* An independent resolver for DNS requests.
|
||||
*
|
||||
* Creating a new resolver uses the default server settings. Setting
|
||||
* the servers used for a resolver using [`resolver.setServers()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromisessetserversservers) does not affect
|
||||
* other resolvers:
|
||||
*
|
||||
* ```js
|
||||
* import { promises } from 'node:dns';
|
||||
* const resolver = new promises.Resolver();
|
||||
* resolver.setServers(['4.4.4.4']);
|
||||
*
|
||||
* // This request will use the server at 4.4.4.4, independent of global settings.
|
||||
* resolver.resolve4('example.org').then((addresses) => {
|
||||
* // ...
|
||||
* });
|
||||
*
|
||||
* // Alternatively, the same code can be written using async-await style.
|
||||
* (async function() {
|
||||
* const addresses = await resolver.resolve4('example.org');
|
||||
* })();
|
||||
* ```
|
||||
*
|
||||
* The following methods from the `dnsPromises` API are available:
|
||||
*
|
||||
* * `resolver.getServers()`
|
||||
* * `resolver.resolve()`
|
||||
* * `resolver.resolve4()`
|
||||
* * `resolver.resolve6()`
|
||||
* * `resolver.resolveAny()`
|
||||
* * `resolver.resolveCaa()`
|
||||
* * `resolver.resolveCname()`
|
||||
* * `resolver.resolveMx()`
|
||||
* * `resolver.resolveNaptr()`
|
||||
* * `resolver.resolveNs()`
|
||||
* * `resolver.resolvePtr()`
|
||||
* * `resolver.resolveSoa()`
|
||||
* * `resolver.resolveSrv()`
|
||||
* * `resolver.resolveTxt()`
|
||||
* * `resolver.reverse()`
|
||||
* * `resolver.setServers()`
|
||||
* @since v10.6.0
|
||||
*/
|
||||
class Resolver {
|
||||
constructor(options?: ResolverOptions);
|
||||
/**
|
||||
* Cancel all outstanding DNS queries made by this resolver. The corresponding
|
||||
* callbacks will be called with an error with code `ECANCELLED`.
|
||||
* @since v8.3.0
|
||||
*/
|
||||
cancel(): void;
|
||||
getServers: typeof getServers;
|
||||
resolve: typeof resolve;
|
||||
resolve4: typeof resolve4;
|
||||
resolve6: typeof resolve6;
|
||||
resolveAny: typeof resolveAny;
|
||||
resolveCaa: typeof resolveCaa;
|
||||
resolveCname: typeof resolveCname;
|
||||
resolveMx: typeof resolveMx;
|
||||
resolveNaptr: typeof resolveNaptr;
|
||||
resolveNs: typeof resolveNs;
|
||||
resolvePtr: typeof resolvePtr;
|
||||
resolveSoa: typeof resolveSoa;
|
||||
resolveSrv: typeof resolveSrv;
|
||||
resolveTxt: typeof resolveTxt;
|
||||
reverse: typeof reverse;
|
||||
/**
|
||||
* The resolver instance will send its requests from the specified IP address.
|
||||
* This allows programs to specify outbound interfaces when used on multi-homed
|
||||
* systems.
|
||||
*
|
||||
* If a v4 or v6 address is not specified, it is set to the default and the
|
||||
* operating system will choose a local address automatically.
|
||||
*
|
||||
* The resolver will use the v4 local address when making requests to IPv4 DNS
|
||||
* servers, and the v6 local address when making requests to IPv6 DNS servers.
|
||||
* The `rrtype` of resolution requests has no impact on the local address used.
|
||||
* @since v15.1.0, v14.17.0
|
||||
* @param [ipv4='0.0.0.0'] A string representation of an IPv4 address.
|
||||
* @param [ipv6='::0'] A string representation of an IPv6 address.
|
||||
*/
|
||||
setLocalAddress(ipv4?: string, ipv6?: string): void;
|
||||
setServers: typeof setServers;
|
||||
}
|
||||
}
|
||||
declare module "node:dns/promises" {
|
||||
export * from "dns/promises";
|
||||
}
|
124
node_modules/@types/node/dom-events.d.ts
generated
vendored
Normal file
@ -0,0 +1,124 @@
|
||||
export {}; // Don't export anything!
|
||||
|
||||
//// DOM-like Events
|
||||
// NB: The Event / EventTarget / EventListener implementations below were copied
|
||||
// from lib.dom.d.ts, then edited to reflect Node's documentation at
|
||||
// https://nodejs.org/api/events.html#class-eventtarget.
|
||||
// Please read that link to understand important implementation differences.
|
||||
|
||||
// This conditional type will be the existing global Event in a browser, or
|
||||
// the copy below in a Node environment.
|
||||
type __Event = typeof globalThis extends { onmessage: any; Event: any } ? {}
|
||||
: {
|
||||
/** This is not used in Node.js and is provided purely for completeness. */
|
||||
readonly bubbles: boolean;
|
||||
/** Alias for event.stopPropagation(). This is not used in Node.js and is provided purely for completeness. */
|
||||
cancelBubble: () => void;
|
||||
/** True if the event was created with the cancelable option */
|
||||
readonly cancelable: boolean;
|
||||
/** This is not used in Node.js and is provided purely for completeness. */
|
||||
readonly composed: boolean;
|
||||
/** Returns an array containing the current EventTarget as the only entry or empty if the event is not being dispatched. This is not used in Node.js and is provided purely for completeness. */
|
||||
composedPath(): [EventTarget?];
|
||||
/** Alias for event.target. */
|
||||
readonly currentTarget: EventTarget | null;
|
||||
/** Is true if cancelable is true and event.preventDefault() has been called. */
|
||||
readonly defaultPrevented: boolean;
|
||||
/** This is not used in Node.js and is provided purely for completeness. */
|
||||
readonly eventPhase: 0 | 2;
|
||||
/** The `AbortSignal` "abort" event is emitted with `isTrusted` set to `true`. The value is `false` in all other cases. */
|
||||
readonly isTrusted: boolean;
|
||||
/** Sets the `defaultPrevented` property to `true` if `cancelable` is `true`. */
|
||||
preventDefault(): void;
|
||||
/** This is not used in Node.js and is provided purely for completeness. */
|
||||
returnValue: boolean;
|
||||
/** Alias for event.target. */
|
||||
readonly srcElement: EventTarget | null;
|
||||
/** Stops the invocation of event listeners after the current one completes. */
|
||||
stopImmediatePropagation(): void;
|
||||
/** This is not used in Node.js and is provided purely for completeness. */
|
||||
stopPropagation(): void;
|
||||
/** The `EventTarget` dispatching the event */
|
||||
readonly target: EventTarget | null;
|
||||
/** The millisecond timestamp when the Event object was created. */
|
||||
readonly timeStamp: number;
|
||||
/** Returns the type of event, e.g. "click", "hashchange", or "submit". */
|
||||
readonly type: string;
|
||||
};
|
||||
|
||||
// See comment above explaining conditional type
|
||||
type __EventTarget = typeof globalThis extends { onmessage: any; EventTarget: any } ? {}
|
||||
: {
|
||||
/**
|
||||
* Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value.
|
||||
*
|
||||
* If the `once` option is true, the `listener` is removed after the next time a `type` event is dispatched.
|
||||
*
|
||||
* The `capture` option is not used by Node.js in any functional way other than tracking registered event listeners per the `EventTarget` specification.
|
||||
* Specifically, the `capture` option is used as part of the key when registering a `listener`.
|
||||
* Any individual `listener` may be added once with `capture = false`, and once with `capture = true`.
|
||||
*/
|
||||
addEventListener(
|
||||
type: string,
|
||||
listener: EventListener | EventListenerObject,
|
||||
options?: AddEventListenerOptions | boolean,
|
||||
): void;
|
||||
/** Dispatches a synthetic event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. */
|
||||
dispatchEvent(event: Event): boolean;
|
||||
/** Removes the event listener in target's event listener list with the same type, callback, and options. */
|
||||
removeEventListener(
|
||||
type: string,
|
||||
listener: EventListener | EventListenerObject,
|
||||
options?: EventListenerOptions | boolean,
|
||||
): void;
|
||||
};
|
||||
|
||||
interface EventInit {
|
||||
bubbles?: boolean;
|
||||
cancelable?: boolean;
|
||||
composed?: boolean;
|
||||
}
|
||||
|
||||
interface EventListenerOptions {
|
||||
/** Not directly used by Node.js. Added for API completeness. Default: `false`. */
|
||||
capture?: boolean;
|
||||
}
|
||||
|
||||
interface AddEventListenerOptions extends EventListenerOptions {
|
||||
/** When `true`, the listener is automatically removed when it is first invoked. Default: `false`. */
|
||||
once?: boolean;
|
||||
/** When `true`, serves as a hint that the listener will not call the `Event` object's `preventDefault()` method. Default: false. */
|
||||
passive?: boolean;
|
||||
/** The listener will be removed when the given AbortSignal object's `abort()` method is called. */
|
||||
signal?: AbortSignal;
|
||||
}
|
||||
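A minimal sketch of the `signal` option declared above, which lets an `AbortController` detach a listener; the event name is illustrative:

```ts
// Aborting the controller removes the listener registered with its signal.
const controller = new AbortController();
const target = new EventTarget();
target.addEventListener("data", () => console.log("got data"), {
  signal: controller.signal,
});
controller.abort(); // the listener will no longer fire
```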
|
||||
interface EventListener {
|
||||
(evt: Event): void;
|
||||
}
|
||||
|
||||
interface EventListenerObject {
|
||||
handleEvent(object: Event): void;
|
||||
}
|
||||
|
||||
import {} from "events"; // Make this an ambient declaration
|
||||
declare global {
|
||||
/** An event which takes place in the DOM. */
|
||||
interface Event extends __Event {}
|
||||
var Event: typeof globalThis extends { onmessage: any; Event: infer T } ? T
|
||||
: {
|
||||
prototype: __Event;
|
||||
new(type: string, eventInitDict?: EventInit): __Event;
|
||||
};
|
||||
|
||||
/**
|
||||
* EventTarget is a DOM interface implemented by objects that can
|
||||
* receive events and may have listeners for them.
|
||||
*/
|
||||
interface EventTarget extends __EventTarget {}
|
||||
var EventTarget: typeof globalThis extends { onmessage: any; EventTarget: infer T } ? T
|
||||
: {
|
||||
prototype: __EventTarget;
|
||||
new(): __EventTarget;
|
||||
};
|
||||
}
|
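A minimal sketch exercising the global `EventTarget`/`Event` declarations above in a Node.js environment; the event name is illustrative:

```ts
// `once: true` removes the listener after its first invocation.
const target = new EventTarget();
target.addEventListener(
  "ping",
  (event) => {
    console.log(event.type, event.isTrusted); // "ping" false
  },
  { once: true },
);
target.dispatchEvent(new Event("ping"));
```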