updata
This commit is contained in:
parent
0ec4b59392
commit
b1ff9a68e4
69
App.vue
69
App.vue
|
|
@ -22,6 +22,7 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
import store from '@/store/index';
|
import store from '@/store/index';
|
||||||
|
import { HTTP_REQUEST_URL } from '@/config/app.js'
|
||||||
export default {
|
export default {
|
||||||
globalData: {
|
globalData: {
|
||||||
// FPT上传地址
|
// FPT上传地址
|
||||||
|
|
@ -117,9 +118,9 @@
|
||||||
|
|
||||||
};
|
};
|
||||||
console.log(this['globalData']['parameters'],'测试取参');
|
console.log(this['globalData']['parameters'],'测试取参');
|
||||||
|
this.clearUrlParams();
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
|
||||||
// 小程序进入H5逻辑
|
// 小程序进入H5逻辑
|
||||||
/**
|
/**
|
||||||
* 小程序进入H5会携带从小程序中获取到的部分参数(利用encodeURIComponent函数编码,所以接收是需要decodeURIComponent函数解码)
|
* 小程序进入H5会携带从小程序中获取到的部分参数(利用encodeURIComponent函数编码,所以接收是需要decodeURIComponent函数解码)
|
||||||
|
|
@ -139,13 +140,57 @@
|
||||||
|
|
||||||
|
|
||||||
},
|
},
|
||||||
|
|
||||||
|
methods: {
|
||||||
|
clearUrlParams() {
|
||||||
|
// 存储分享链接
|
||||||
|
// let basurl = uri(decodeURIComponent(window.location.href))
|
||||||
|
// let querys = basurl.query(true);
|
||||||
|
// for(let key in querys){
|
||||||
|
// querys[key] = JSON.parse(querys[key])
|
||||||
|
// };
|
||||||
|
// let parameters = { ...querys['userinfor'], ...querys['query']};
|
||||||
|
// parameters.token = '';
|
||||||
|
|
||||||
|
// const safeDecode = (value) => {
|
||||||
|
// if (value == null) return ''; // 默认空值
|
||||||
|
// try {
|
||||||
|
// return typeof value === 'string' ? decodeURIComponent(value) : value;
|
||||||
|
// } catch {
|
||||||
|
// return value; // 解码失败返回原值
|
||||||
|
// }
|
||||||
|
// };
|
||||||
|
// const userinfor = {
|
||||||
|
// cityInfor: {
|
||||||
|
// lat: safeDecode(parameters.cityInfor.lat),
|
||||||
|
// lng: safeDecode(parameters.cityInfor.lng),
|
||||||
|
// city: safeDecode(parameters.cityInfor.address),
|
||||||
|
// province: safeDecode(parameters.cityInfor.province),
|
||||||
|
// adcode: safeDecode(parameters.cityInfor.adcode),
|
||||||
|
// },
|
||||||
|
// isWechat: true,
|
||||||
|
// };
|
||||||
|
|
||||||
|
// let query = `userinfor=${encodedUserInfor}×tamp=${Date.now()}`;
|
||||||
|
|
||||||
|
// console.log(HTTP_REQUEST_URL+'?'+query, '分享链接');
|
||||||
|
// uni.setStorageSync('SHARELINK', HTTP_REQUEST_URL+'?'+query);
|
||||||
|
|
||||||
|
// 清除
|
||||||
|
const url = new URL(window.location.href);
|
||||||
|
url.search = ''; // 移除所有查询参数
|
||||||
|
window.history.replaceState(null, '', url.href);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
onShow: function() {
|
onShow: function() {
|
||||||
console.log('App Show');
|
console.log('App Show');
|
||||||
// 全局初始化参数中存在isWechat属性则代表就是从小程序跳入
|
// 全局初始化参数中存在isWechat属性则代表就是从小程序跳入
|
||||||
if (this['globalData']['parameters']?.isWechat) {
|
if (this['globalData']['parameters']?.isWechat) {
|
||||||
store.commit('SetcityInfor', this['globalData']['parameters']['cityInfor']);
|
store.commit('SetcityInfor', this['globalData']['parameters']['cityInfor']);
|
||||||
|
if (this['globalData']['parameters']['token']) {
|
||||||
store.commit('SetToken', this['globalData']['parameters']['token']);
|
store.commit('SetToken', this['globalData']['parameters']['token']);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
// 反之则代表从APP进入H5
|
// 反之则代表从APP进入H5
|
||||||
/**
|
/**
|
||||||
|
|
@ -153,15 +198,17 @@
|
||||||
*/
|
*/
|
||||||
// APP是由第三方开发,第三方开发文档中API-getLocation方法获取用户位置
|
// APP是由第三方开发,第三方开发文档中API-getLocation方法获取用户位置
|
||||||
// 因为IOS端会跳进Safari浏览器,而Safari浏览器会提示getLocation方法不存在所以此处进行判断打断执行
|
// 因为IOS端会跳进Safari浏览器,而Safari浏览器会提示getLocation方法不存在所以此处进行判断打断执行
|
||||||
let isSafari = (/Safari/.test(navigator.userAgent) && !/Chrome/.test(navigator.userAgent));
|
|
||||||
console.log(isSafari, 'isSafari');
|
// 注释(不需要)
|
||||||
if (isSafari) return;
|
// let isSafari = (/Safari/.test(navigator.userAgent) && !/Chrome/.test(navigator.userAgent));
|
||||||
if (process.env.NODE_ENV !== 'development' && !this['globalData']['parameters']['isWechat']) {
|
// console.log(isSafari, 'isSafari');
|
||||||
getLocation().then(res => {
|
// if (isSafari) return;
|
||||||
console.log(res, 'res');
|
// if (process.env.NODE_ENV !== 'development' && !this['globalData']['parameters']['isWechat']) {
|
||||||
store.commit('SetcityInfor', res);
|
// getLocation().then(res => {
|
||||||
});
|
// console.log(res, 'res');
|
||||||
}
|
// store.commit('SetcityInfor', res);
|
||||||
|
// });
|
||||||
|
// }
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
onHide: function() {
|
onHide: function() {
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,3 @@
|
||||||
module.exports = {
|
module.exports = {
|
||||||
HTTP_REQUEST_URL: 'https://tpoint.agrimedia.cn',
|
HTTP_REQUEST_URL: 'https://point.agrimedia.cn',
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
../acorn/bin/acorn
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
../browserslist/cli.js
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
../update-browserslist-db/cli.js
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
../nanoid/bin/nanoid.cjs
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
../@babel/parser/bin/babel-parser.js
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
../terser/bin/terser
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
../webpack/bin/webpack.js
|
||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,19 @@
|
||||||
|
Copyright (C) 2012-2014 by various contributors (see AUTHORS)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
# @babel/parser
|
||||||
|
|
||||||
|
> A JavaScript parser
|
||||||
|
|
||||||
|
See our website [@babel/parser](https://babeljs.io/docs/en/babel-parser) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20parser%20(babylon)%22+is%3Aopen) associated with this package.
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
Using npm:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install --save-dev @babel/parser
|
||||||
|
```
|
||||||
|
|
||||||
|
or using yarn:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
yarn add @babel/parser --dev
|
||||||
|
```
|
||||||
|
|
@ -0,0 +1,15 @@
|
||||||
|
#!/usr/bin/env node
|
||||||
|
/* eslint no-var: 0 */
|
||||||
|
|
||||||
|
var parser = require("..");
|
||||||
|
var fs = require("fs");
|
||||||
|
|
||||||
|
var filename = process.argv[2];
|
||||||
|
if (!filename) {
|
||||||
|
console.error("no filename specified");
|
||||||
|
} else {
|
||||||
|
var file = fs.readFileSync(filename, "utf8");
|
||||||
|
var ast = parser.parse(file);
|
||||||
|
|
||||||
|
console.log(JSON.stringify(ast, null, " "));
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,5 @@
|
||||||
|
try {
|
||||||
|
module.exports = require("./lib/index.cjs");
|
||||||
|
} catch {
|
||||||
|
module.exports = require("./lib/index.js");
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,46 @@
|
||||||
|
{
|
||||||
|
"name": "@babel/parser",
|
||||||
|
"version": "7.19.0",
|
||||||
|
"description": "A JavaScript parser",
|
||||||
|
"author": "The Babel Team (https://babel.dev/team)",
|
||||||
|
"homepage": "https://babel.dev/docs/en/next/babel-parser",
|
||||||
|
"bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A+parser+%28babylon%29%22+is%3Aopen",
|
||||||
|
"license": "MIT",
|
||||||
|
"publishConfig": {
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"babel",
|
||||||
|
"javascript",
|
||||||
|
"parser",
|
||||||
|
"tc39",
|
||||||
|
"ecmascript",
|
||||||
|
"@babel/parser"
|
||||||
|
],
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/babel/babel.git",
|
||||||
|
"directory": "packages/babel-parser"
|
||||||
|
},
|
||||||
|
"main": "./lib/index.js",
|
||||||
|
"types": "./typings/babel-parser.d.ts",
|
||||||
|
"files": [
|
||||||
|
"bin",
|
||||||
|
"lib",
|
||||||
|
"typings",
|
||||||
|
"index.cjs"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@babel/code-frame": "^7.18.6",
|
||||||
|
"@babel/helper-check-duplicate-nodes": "^7.18.6",
|
||||||
|
"@babel/helper-fixtures": "^7.18.6",
|
||||||
|
"@babel/helper-string-parser": "^7.18.10",
|
||||||
|
"@babel/helper-validator-identifier": "^7.18.6",
|
||||||
|
"charcodes": "^0.2.0"
|
||||||
|
},
|
||||||
|
"bin": "./bin/babel-parser.js",
|
||||||
|
"type": "commonjs"
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,215 @@
|
||||||
|
// Type definitions for @babel/parser
|
||||||
|
// Project: https://github.com/babel/babel/tree/main/packages/babel-parser
|
||||||
|
// Definitions by: Troy Gerwien <https://github.com/yortus>
|
||||||
|
// Marvin Hagemeister <https://github.com/marvinhagemeister>
|
||||||
|
// Avi Vahl <https://github.com/AviVahl>
|
||||||
|
// TypeScript Version: 2.9
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse the provided code as an entire ECMAScript program.
|
||||||
|
*/
|
||||||
|
export function parse(
|
||||||
|
input: string,
|
||||||
|
options?: ParserOptions
|
||||||
|
): ParseResult<import("@babel/types").File>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse the provided code as a single expression.
|
||||||
|
*/
|
||||||
|
export function parseExpression(
|
||||||
|
input: string,
|
||||||
|
options?: ParserOptions
|
||||||
|
): ParseResult<import("@babel/types").Expression>;
|
||||||
|
|
||||||
|
export interface ParserOptions {
|
||||||
|
/**
|
||||||
|
* By default, import and export declarations can only appear at a program's top level.
|
||||||
|
* Setting this option to true allows them anywhere where a statement is allowed.
|
||||||
|
*/
|
||||||
|
allowImportExportEverywhere?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* By default, await use is not allowed outside of an async function.
|
||||||
|
* Set this to true to accept such code.
|
||||||
|
*/
|
||||||
|
allowAwaitOutsideFunction?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* By default, a return statement at the top level raises an error.
|
||||||
|
* Set this to true to accept such code.
|
||||||
|
*/
|
||||||
|
allowReturnOutsideFunction?: boolean;
|
||||||
|
|
||||||
|
allowSuperOutsideMethod?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* By default, exported identifiers must refer to a declared variable.
|
||||||
|
* Set this to true to allow export statements to reference undeclared variables.
|
||||||
|
*/
|
||||||
|
allowUndeclaredExports?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* By default, Babel attaches comments to adjacent AST nodes.
|
||||||
|
* When this option is set to false, comments are not attached.
|
||||||
|
* It can provide up to 30% performance improvement when the input code has many comments.
|
||||||
|
* @babel/eslint-parser will set it for you.
|
||||||
|
* It is not recommended to use attachComment: false with Babel transform,
|
||||||
|
* as doing so removes all the comments in output code, and renders annotations such as
|
||||||
|
* /* istanbul ignore next *\/ nonfunctional.
|
||||||
|
*/
|
||||||
|
attachComment?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* By default, Babel always throws an error when it finds some invalid code.
|
||||||
|
* When this option is set to true, it will store the parsing error and
|
||||||
|
* try to continue parsing the invalid input file.
|
||||||
|
*/
|
||||||
|
errorRecovery?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Indicate the mode the code should be parsed in.
|
||||||
|
* Can be one of "script", "module", or "unambiguous". Defaults to "script".
|
||||||
|
* "unambiguous" will make @babel/parser attempt to guess, based on the presence
|
||||||
|
* of ES6 import or export statements.
|
||||||
|
* Files with ES6 imports and exports are considered "module" and are otherwise "script".
|
||||||
|
*/
|
||||||
|
sourceType?: "script" | "module" | "unambiguous";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Correlate output AST nodes with their source filename.
|
||||||
|
* Useful when generating code and source maps from the ASTs of multiple input files.
|
||||||
|
*/
|
||||||
|
sourceFilename?: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* By default, the first line of code parsed is treated as line 1.
|
||||||
|
* You can provide a line number to alternatively start with.
|
||||||
|
* Useful for integration with other source tools.
|
||||||
|
*/
|
||||||
|
startLine?: number;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* By default, the parsed code is treated as if it starts from line 1, column 0.
|
||||||
|
* You can provide a column number to alternatively start with.
|
||||||
|
* Useful for integration with other source tools.
|
||||||
|
*/
|
||||||
|
startColumn?: number;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Array containing the plugins that you want to enable.
|
||||||
|
*/
|
||||||
|
plugins?: ParserPlugin[];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Should the parser work in strict mode.
|
||||||
|
* Defaults to true if sourceType === 'module'. Otherwise, false.
|
||||||
|
*/
|
||||||
|
strictMode?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a ranges property to each node: [node.start, node.end]
|
||||||
|
*/
|
||||||
|
ranges?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds all parsed tokens to a tokens property on the File node.
|
||||||
|
*/
|
||||||
|
tokens?: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* By default, the parser adds information about parentheses by setting
|
||||||
|
* `extra.parenthesized` to `true` as needed.
|
||||||
|
* When this option is `true` the parser creates `ParenthesizedExpression`
|
||||||
|
* AST nodes instead of using the `extra` property.
|
||||||
|
*/
|
||||||
|
createParenthesizedExpressions?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ParserPlugin =
|
||||||
|
| "asyncDoExpressions"
|
||||||
|
| "asyncGenerators"
|
||||||
|
| "bigInt"
|
||||||
|
| "classPrivateMethods"
|
||||||
|
| "classPrivateProperties"
|
||||||
|
| "classProperties"
|
||||||
|
| "classStaticBlock" // Enabled by default
|
||||||
|
| "decimal"
|
||||||
|
| "decorators"
|
||||||
|
| "decorators-legacy"
|
||||||
|
| "decoratorAutoAccessors"
|
||||||
|
| "destructuringPrivate"
|
||||||
|
| "doExpressions"
|
||||||
|
| "dynamicImport"
|
||||||
|
| "estree"
|
||||||
|
| "exportDefaultFrom"
|
||||||
|
| "exportNamespaceFrom" // deprecated
|
||||||
|
| "flow"
|
||||||
|
| "flowComments"
|
||||||
|
| "functionBind"
|
||||||
|
| "functionSent"
|
||||||
|
| "importMeta"
|
||||||
|
| "jsx"
|
||||||
|
| "logicalAssignment"
|
||||||
|
| "importAssertions"
|
||||||
|
| "moduleBlocks"
|
||||||
|
| "moduleStringNames"
|
||||||
|
| "nullishCoalescingOperator"
|
||||||
|
| "numericSeparator"
|
||||||
|
| "objectRestSpread"
|
||||||
|
| "optionalCatchBinding"
|
||||||
|
| "optionalChaining"
|
||||||
|
| "partialApplication"
|
||||||
|
| "pipelineOperator"
|
||||||
|
| "placeholders"
|
||||||
|
| "privateIn" // Enabled by default
|
||||||
|
| "recordAndTuple"
|
||||||
|
| "regexpUnicodeSets"
|
||||||
|
| "throwExpressions"
|
||||||
|
| "topLevelAwait"
|
||||||
|
| "typescript"
|
||||||
|
| "v8intrinsic"
|
||||||
|
| ParserPluginWithOptions;
|
||||||
|
|
||||||
|
export type ParserPluginWithOptions =
|
||||||
|
| ["decorators", DecoratorsPluginOptions]
|
||||||
|
| ["pipelineOperator", PipelineOperatorPluginOptions]
|
||||||
|
| ["recordAndTuple", RecordAndTuplePluginOptions]
|
||||||
|
| ["flow", FlowPluginOptions]
|
||||||
|
| ["typescript", TypeScriptPluginOptions];
|
||||||
|
|
||||||
|
export interface DecoratorsPluginOptions {
|
||||||
|
decoratorsBeforeExport?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PipelineOperatorPluginOptions {
|
||||||
|
proposal: "minimal" | "fsharp" | "hack" | "smart";
|
||||||
|
topicToken?: "%" | "#" | "@@" | "^^" | "^";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface RecordAndTuplePluginOptions {
|
||||||
|
syntaxType?: "bar" | "hash";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FlowPluginOptions {
|
||||||
|
all?: boolean;
|
||||||
|
enums?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TypeScriptPluginOptions {
|
||||||
|
dts?: boolean;
|
||||||
|
disallowAmbiguousJSXLike?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const tokTypes: {
|
||||||
|
// todo(flow->ts) real token type
|
||||||
|
[name: string]: any;
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface ParseError {
|
||||||
|
code: string;
|
||||||
|
reasonCode: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
type ParseResult<Result> = Result & {
|
||||||
|
errors: ParseError[];
|
||||||
|
};
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
@ -0,0 +1,227 @@
|
||||||
|
# @jridgewell/gen-mapping
|
||||||
|
|
||||||
|
> Generate source maps
|
||||||
|
|
||||||
|
`gen-mapping` allows you to generate a source map during transpilation or minification.
|
||||||
|
With a source map, you're able to trace the original location in the source file, either in Chrome's
|
||||||
|
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].
|
||||||
|
|
||||||
|
You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This
|
||||||
|
provides the same `addMapping` and `setSourceContent` API.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/gen-mapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
const map = new GenMapping({
|
||||||
|
file: 'output.js',
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
});
|
||||||
|
|
||||||
|
setSourceContent(map, 'input.js', `function foo() {}`);
|
||||||
|
|
||||||
|
addMapping(map, {
|
||||||
|
// Lines start at line 1, columns at column 0.
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
addMapping(map, {
|
||||||
|
generated: { line: 1, column: 9 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 9 },
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(toDecodedMap(map), {
|
||||||
|
version: 3,
|
||||||
|
file: 'output.js',
|
||||||
|
names: ['foo'],
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['function foo() {}'],
|
||||||
|
mappings: [
|
||||||
|
[ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(toEncodedMap(map), {
|
||||||
|
version: 3,
|
||||||
|
file: 'output.js',
|
||||||
|
names: ['foo'],
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['function foo() {}'],
|
||||||
|
mappings: 'AAAA,SAASA',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Smaller Sourcemaps
|
||||||
|
|
||||||
|
Not everything needs to be added to a sourcemap, and needless markings can cause signficantly
|
||||||
|
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
|
||||||
|
intelligently determine if this marking adds useful information. If not, the marking will be
|
||||||
|
skipped.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { maybeAddMapping } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
const map = new GenMapping();
|
||||||
|
|
||||||
|
// Adding a sourceless marking at the beginning of a line isn't useful.
|
||||||
|
maybeAddMapping(map, {
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Adding a new source marking is useful.
|
||||||
|
maybeAddMapping(map, {
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
// But adding another marking pointing to the exact same original location isn't, even if the
|
||||||
|
// generated column changed.
|
||||||
|
maybeAddMapping(map, {
|
||||||
|
generated: { line: 1, column: 9 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(toEncodedMap(map), {
|
||||||
|
version: 3,
|
||||||
|
names: [],
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: [null],
|
||||||
|
mappings: 'AAAA',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 5852872 bytes
|
||||||
|
gen-mapping: addMapping 7716042 bytes
|
||||||
|
source-map-js 6143250 bytes
|
||||||
|
source-map-0.6.1 6124102 bytes
|
||||||
|
source-map-0.8.0 6121173 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled)
|
||||||
|
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled)
|
||||||
|
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
|
||||||
|
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled)
|
||||||
|
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 37578063 bytes
|
||||||
|
gen-mapping: addMapping 37212897 bytes
|
||||||
|
source-map-js 47638527 bytes
|
||||||
|
source-map-0.6.1 47690503 bytes
|
||||||
|
source-map-0.8.0 47470188 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addMapping
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
|
||||||
|
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
|
||||||
|
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
|
||||||
|
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
|
||||||
|
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 416247 bytes
|
||||||
|
gen-mapping: addMapping 419824 bytes
|
||||||
|
source-map-js 1024619 bytes
|
||||||
|
source-map-0.6.1 1146004 bytes
|
||||||
|
source-map-0.8.0 1113250 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
|
||||||
|
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
|
||||||
|
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
|
||||||
|
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
|
||||||
|
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map
|
||||||
|
Memory Usage:
|
||||||
|
gen-mapping: addSegment 975096 bytes
|
||||||
|
gen-mapping: addMapping 1102981 bytes
|
||||||
|
source-map-js 2918836 bytes
|
||||||
|
source-map-0.6.1 2885435 bytes
|
||||||
|
source-map-0.8.0 2874336 bytes
|
||||||
|
Smallest memory usage is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Adding speed:
|
||||||
|
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
|
||||||
|
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
|
||||||
|
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
|
||||||
|
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
|
||||||
|
Fastest is gen-mapping: addSegment
|
||||||
|
|
||||||
|
Generate speed:
|
||||||
|
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
|
||||||
|
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
|
||||||
|
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
|
||||||
|
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
|
||||||
|
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
|
||||||
|
Fastest is gen-mapping: decoded output
|
||||||
|
```
|
||||||
|
|
||||||
|
[source-map]: https://www.npmjs.com/package/source-map
|
||||||
|
[trace-mapping]: https://github.com/jridgewell/trace-mapping
|
||||||
|
|
@ -0,0 +1,230 @@
|
||||||
|
import { SetArray, put } from '@jridgewell/set-array';
|
||||||
|
import { encode } from '@jridgewell/sourcemap-codec';
|
||||||
|
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
|
||||||
|
const NO_NAME = -1;
|
||||||
|
/**
|
||||||
|
* A low-level API to associate a generated position with an original source position. Line and
|
||||||
|
* column here are 0-based, unlike `addMapping`.
|
||||||
|
*/
|
||||||
|
let addSegment;
|
||||||
|
/**
|
||||||
|
* A high-level API to associate a generated position with an original source position. Line is
|
||||||
|
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
|
||||||
|
*/
|
||||||
|
let addMapping;
|
||||||
|
/**
|
||||||
|
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||||
|
* not add a segment with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
let maybeAddSegment;
|
||||||
|
/**
|
||||||
|
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||||
|
* not add a mapping with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
let maybeAddMapping;
|
||||||
|
/**
|
||||||
|
* Adds/removes the content of the source file to the source map.
|
||||||
|
*/
|
||||||
|
let setSourceContent;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
let toDecodedMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
let toEncodedMap;
|
||||||
|
/**
|
||||||
|
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||||
|
*/
|
||||||
|
let fromMap;
|
||||||
|
/**
|
||||||
|
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||||
|
* passed to the `source-map` library.
|
||||||
|
*/
|
||||||
|
let allMappings;
|
||||||
|
// This split declaration is only so that terser can elminiate the static initialization block.
|
||||||
|
let addSegmentInternal;
|
||||||
|
/**
|
||||||
|
* Provides the state to generate a sourcemap.
|
||||||
|
*/
|
||||||
|
class GenMapping {
|
||||||
|
constructor({ file, sourceRoot } = {}) {
|
||||||
|
this._names = new SetArray();
|
||||||
|
this._sources = new SetArray();
|
||||||
|
this._sourcesContent = [];
|
||||||
|
this._mappings = [];
|
||||||
|
this.file = file;
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(() => {
|
||||||
|
addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||||
|
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
};
|
||||||
|
maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||||
|
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
};
|
||||||
|
addMapping = (map, mapping) => {
|
||||||
|
return addMappingInternal(false, map, mapping);
|
||||||
|
};
|
||||||
|
maybeAddMapping = (map, mapping) => {
|
||||||
|
return addMappingInternal(true, map, mapping);
|
||||||
|
};
|
||||||
|
setSourceContent = (map, source, content) => {
|
||||||
|
const { _sources: sources, _sourcesContent: sourcesContent } = map;
|
||||||
|
sourcesContent[put(sources, source)] = content;
|
||||||
|
};
|
||||||
|
toDecodedMap = (map) => {
|
||||||
|
const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
|
||||||
|
removeEmptyFinalLines(mappings);
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: file || undefined,
|
||||||
|
names: names.array,
|
||||||
|
sourceRoot: sourceRoot || undefined,
|
||||||
|
sources: sources.array,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
toEncodedMap = (map) => {
|
||||||
|
const decoded = toDecodedMap(map);
|
||||||
|
return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
|
||||||
|
};
|
||||||
|
allMappings = (map) => {
|
||||||
|
const out = [];
|
||||||
|
const { _mappings: mappings, _sources: sources, _names: names } = map;
|
||||||
|
for (let i = 0; i < mappings.length; i++) {
|
||||||
|
const line = mappings[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||||
|
let source = undefined;
|
||||||
|
let original = undefined;
|
||||||
|
let name = undefined;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = sources.array[seg[SOURCES_INDEX]];
|
||||||
|
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names.array[seg[NAMES_INDEX]];
|
||||||
|
}
|
||||||
|
out.push({ generated, source, original, name });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
};
|
||||||
|
fromMap = (input) => {
|
||||||
|
const map = new TraceMap(input);
|
||||||
|
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||||
|
putAll(gen._names, map.names);
|
||||||
|
putAll(gen._sources, map.sources);
|
||||||
|
gen._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||||
|
gen._mappings = decodedMappings(map);
|
||||||
|
return gen;
|
||||||
|
};
|
||||||
|
// Internal helpers
|
||||||
|
addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||||
|
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
|
||||||
|
const line = getLine(mappings, genLine);
|
||||||
|
const index = getColumnIndex(line, genColumn);
|
||||||
|
if (!source) {
|
||||||
|
if (skipable && skipSourceless(line, index))
|
||||||
|
return;
|
||||||
|
return insert(line, index, [genColumn]);
|
||||||
|
}
|
||||||
|
const sourcesIndex = put(sources, source);
|
||||||
|
const namesIndex = name ? put(names, name) : NO_NAME;
|
||||||
|
if (sourcesIndex === sourcesContent.length)
|
||||||
|
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
|
||||||
|
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return insert(line, index, name
|
||||||
|
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
function getLine(mappings, index) {
|
||||||
|
for (let i = mappings.length; i <= index; i++) {
|
||||||
|
mappings[i] = [];
|
||||||
|
}
|
||||||
|
return mappings[index];
|
||||||
|
}
|
||||||
|
function getColumnIndex(line, genColumn) {
|
||||||
|
let index = line.length;
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
const current = line[i];
|
||||||
|
if (genColumn >= current[COLUMN])
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
function removeEmptyFinalLines(mappings) {
|
||||||
|
const { length } = mappings;
|
||||||
|
let len = length;
|
||||||
|
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||||
|
if (mappings[i].length > 0)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (len < length)
|
||||||
|
mappings.length = len;
|
||||||
|
}
|
||||||
|
function putAll(strarr, array) {
|
||||||
|
for (let i = 0; i < array.length; i++)
|
||||||
|
put(strarr, array[i]);
|
||||||
|
}
|
||||||
|
function skipSourceless(line, index) {
|
||||||
|
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||||
|
// doesn't generate any useful information.
|
||||||
|
if (index === 0)
|
||||||
|
return true;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||||
|
// genrate any new information. Else, this segment will end the source/named segment and point to
|
||||||
|
// a sourceless position, which is useful.
|
||||||
|
return prev.length === 1;
|
||||||
|
}
|
||||||
|
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||||
|
// A source/named segment at the start of a line gives position at that genColumn
|
||||||
|
if (index === 0)
|
||||||
|
return false;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is sourceless, then we're transitioning to a source.
|
||||||
|
if (prev.length === 1)
|
||||||
|
return false;
|
||||||
|
// If the previous segment maps to the exact same source position, then this segment doesn't
|
||||||
|
// provide any new position information.
|
||||||
|
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
||||||
|
sourceLine === prev[SOURCE_LINE] &&
|
||||||
|
sourceColumn === prev[SOURCE_COLUMN] &&
|
||||||
|
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
||||||
|
}
|
||||||
|
function addMappingInternal(skipable, map, mapping) {
|
||||||
|
const { generated, source, original, name, content } = mapping;
|
||||||
|
if (!source) {
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
|
||||||
|
}
|
||||||
|
const s = source;
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name, content);
|
||||||
|
}
|
||||||
|
|
||||||
|
export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setSourceContent, toDecodedMap, toEncodedMap };
|
||||||
|
//# sourceMappingURL=gen-mapping.mjs.map
|
||||||
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,236 @@
|
||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping));
|
||||||
|
})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict';
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
|
||||||
|
const NO_NAME = -1;
|
||||||
|
/**
|
||||||
|
* A low-level API to associate a generated position with an original source position. Line and
|
||||||
|
* column here are 0-based, unlike `addMapping`.
|
||||||
|
*/
|
||||||
|
exports.addSegment = void 0;
|
||||||
|
/**
|
||||||
|
* A high-level API to associate a generated position with an original source position. Line is
|
||||||
|
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
|
||||||
|
*/
|
||||||
|
exports.addMapping = void 0;
|
||||||
|
/**
|
||||||
|
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||||
|
* not add a segment with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
exports.maybeAddSegment = void 0;
|
||||||
|
/**
|
||||||
|
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||||
|
* not add a mapping with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
exports.maybeAddMapping = void 0;
|
||||||
|
/**
|
||||||
|
* Adds/removes the content of the source file to the source map.
|
||||||
|
*/
|
||||||
|
exports.setSourceContent = void 0;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
exports.toDecodedMap = void 0;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
exports.toEncodedMap = void 0;
|
||||||
|
/**
|
||||||
|
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||||
|
*/
|
||||||
|
exports.fromMap = void 0;
|
||||||
|
/**
|
||||||
|
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||||
|
* passed to the `source-map` library.
|
||||||
|
*/
|
||||||
|
exports.allMappings = void 0;
|
||||||
|
// This split declaration is only so that terser can elminiate the static initialization block.
|
||||||
|
let addSegmentInternal;
|
||||||
|
/**
|
||||||
|
* Provides the state to generate a sourcemap.
|
||||||
|
*/
|
||||||
|
class GenMapping {
|
||||||
|
constructor({ file, sourceRoot } = {}) {
|
||||||
|
this._names = new setArray.SetArray();
|
||||||
|
this._sources = new setArray.SetArray();
|
||||||
|
this._sourcesContent = [];
|
||||||
|
this._mappings = [];
|
||||||
|
this.file = file;
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(() => {
|
||||||
|
exports.addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||||
|
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
};
|
||||||
|
exports.maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||||
|
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||||
|
};
|
||||||
|
exports.addMapping = (map, mapping) => {
|
||||||
|
return addMappingInternal(false, map, mapping);
|
||||||
|
};
|
||||||
|
exports.maybeAddMapping = (map, mapping) => {
|
||||||
|
return addMappingInternal(true, map, mapping);
|
||||||
|
};
|
||||||
|
exports.setSourceContent = (map, source, content) => {
|
||||||
|
const { _sources: sources, _sourcesContent: sourcesContent } = map;
|
||||||
|
sourcesContent[setArray.put(sources, source)] = content;
|
||||||
|
};
|
||||||
|
exports.toDecodedMap = (map) => {
|
||||||
|
const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
|
||||||
|
removeEmptyFinalLines(mappings);
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: file || undefined,
|
||||||
|
names: names.array,
|
||||||
|
sourceRoot: sourceRoot || undefined,
|
||||||
|
sources: sources.array,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
exports.toEncodedMap = (map) => {
|
||||||
|
const decoded = exports.toDecodedMap(map);
|
||||||
|
return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) });
|
||||||
|
};
|
||||||
|
exports.allMappings = (map) => {
|
||||||
|
const out = [];
|
||||||
|
const { _mappings: mappings, _sources: sources, _names: names } = map;
|
||||||
|
for (let i = 0; i < mappings.length; i++) {
|
||||||
|
const line = mappings[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||||
|
let source = undefined;
|
||||||
|
let original = undefined;
|
||||||
|
let name = undefined;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = sources.array[seg[SOURCES_INDEX]];
|
||||||
|
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names.array[seg[NAMES_INDEX]];
|
||||||
|
}
|
||||||
|
out.push({ generated, source, original, name });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
};
|
||||||
|
exports.fromMap = (input) => {
|
||||||
|
const map = new traceMapping.TraceMap(input);
|
||||||
|
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||||
|
putAll(gen._names, map.names);
|
||||||
|
putAll(gen._sources, map.sources);
|
||||||
|
gen._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||||
|
gen._mappings = traceMapping.decodedMappings(map);
|
||||||
|
return gen;
|
||||||
|
};
|
||||||
|
// Internal helpers
|
||||||
|
addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||||
|
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
|
||||||
|
const line = getLine(mappings, genLine);
|
||||||
|
const index = getColumnIndex(line, genColumn);
|
||||||
|
if (!source) {
|
||||||
|
if (skipable && skipSourceless(line, index))
|
||||||
|
return;
|
||||||
|
return insert(line, index, [genColumn]);
|
||||||
|
}
|
||||||
|
const sourcesIndex = setArray.put(sources, source);
|
||||||
|
const namesIndex = name ? setArray.put(names, name) : NO_NAME;
|
||||||
|
if (sourcesIndex === sourcesContent.length)
|
||||||
|
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
|
||||||
|
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return insert(line, index, name
|
||||||
|
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
function getLine(mappings, index) {
|
||||||
|
for (let i = mappings.length; i <= index; i++) {
|
||||||
|
mappings[i] = [];
|
||||||
|
}
|
||||||
|
return mappings[index];
|
||||||
|
}
|
||||||
|
function getColumnIndex(line, genColumn) {
|
||||||
|
let index = line.length;
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
const current = line[i];
|
||||||
|
if (genColumn >= current[COLUMN])
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
function removeEmptyFinalLines(mappings) {
|
||||||
|
const { length } = mappings;
|
||||||
|
let len = length;
|
||||||
|
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||||
|
if (mappings[i].length > 0)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (len < length)
|
||||||
|
mappings.length = len;
|
||||||
|
}
|
||||||
|
function putAll(strarr, array) {
|
||||||
|
for (let i = 0; i < array.length; i++)
|
||||||
|
setArray.put(strarr, array[i]);
|
||||||
|
}
|
||||||
|
function skipSourceless(line, index) {
|
||||||
|
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||||
|
// doesn't generate any useful information.
|
||||||
|
if (index === 0)
|
||||||
|
return true;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||||
|
// genrate any new information. Else, this segment will end the source/named segment and point to
|
||||||
|
// a sourceless position, which is useful.
|
||||||
|
return prev.length === 1;
|
||||||
|
}
|
||||||
|
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||||
|
// A source/named segment at the start of a line gives position at that genColumn
|
||||||
|
if (index === 0)
|
||||||
|
return false;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is sourceless, then we're transitioning to a source.
|
||||||
|
if (prev.length === 1)
|
||||||
|
return false;
|
||||||
|
// If the previous segment maps to the exact same source position, then this segment doesn't
|
||||||
|
// provide any new position information.
|
||||||
|
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
||||||
|
sourceLine === prev[SOURCE_LINE] &&
|
||||||
|
sourceColumn === prev[SOURCE_COLUMN] &&
|
||||||
|
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
||||||
|
}
|
||||||
|
function addMappingInternal(skipable, map, mapping) {
|
||||||
|
const { generated, source, original, name, content } = mapping;
|
||||||
|
if (!source) {
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
|
||||||
|
}
|
||||||
|
const s = source;
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name, content);
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.GenMapping = GenMapping;
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=gen-mapping.umd.js.map
|
||||||
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,90 @@
|
||||||
|
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
||||||
|
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
|
||||||
|
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
|
||||||
|
export declare type Options = {
|
||||||
|
file?: string | null;
|
||||||
|
sourceRoot?: string | null;
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* A low-level API to associate a generated position with an original source position. Line and
|
||||||
|
* column here are 0-based, unlike `addMapping`.
|
||||||
|
*/
|
||||||
|
export declare let addSegment: {
|
||||||
|
(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
|
||||||
|
(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
|
||||||
|
(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* A high-level API to associate a generated position with an original source position. Line is
|
||||||
|
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
|
||||||
|
*/
|
||||||
|
export declare let addMapping: {
|
||||||
|
(map: GenMapping, mapping: {
|
||||||
|
generated: Pos;
|
||||||
|
source?: null;
|
||||||
|
original?: null;
|
||||||
|
name?: null;
|
||||||
|
content?: null;
|
||||||
|
}): void;
|
||||||
|
(map: GenMapping, mapping: {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name?: null;
|
||||||
|
content?: string | null;
|
||||||
|
}): void;
|
||||||
|
(map: GenMapping, mapping: {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name: string;
|
||||||
|
content?: string | null;
|
||||||
|
}): void;
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||||
|
* not add a segment with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
export declare let maybeAddSegment: typeof addSegment;
|
||||||
|
/**
|
||||||
|
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||||
|
* not add a mapping with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
export declare let maybeAddMapping: typeof addMapping;
|
||||||
|
/**
|
||||||
|
* Adds/removes the content of the source file to the source map.
|
||||||
|
*/
|
||||||
|
export declare let setSourceContent: (map: GenMapping, source: string, content: string | null) => void;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare let toDecodedMap: (map: GenMapping) => DecodedSourceMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare let toEncodedMap: (map: GenMapping) => EncodedSourceMap;
|
||||||
|
/**
|
||||||
|
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||||
|
*/
|
||||||
|
export declare let fromMap: (input: SourceMapInput) => GenMapping;
|
||||||
|
/**
|
||||||
|
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||||
|
* passed to the `source-map` library.
|
||||||
|
*/
|
||||||
|
export declare let allMappings: (map: GenMapping) => Mapping[];
|
||||||
|
/**
|
||||||
|
* Provides the state to generate a sourcemap.
|
||||||
|
*/
|
||||||
|
export declare class GenMapping {
|
||||||
|
private _names;
|
||||||
|
private _sources;
|
||||||
|
private _sourcesContent;
|
||||||
|
private _mappings;
|
||||||
|
file: string | null | undefined;
|
||||||
|
sourceRoot: string | null | undefined;
|
||||||
|
constructor({ file, sourceRoot }?: Options);
|
||||||
|
}
|
||||||
12
node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
12
node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,12 @@
|
||||||
|
declare type GeneratedColumn = number;
|
||||||
|
declare type SourcesIndex = number;
|
||||||
|
declare type SourceLine = number;
|
||||||
|
declare type SourceColumn = number;
|
||||||
|
declare type NamesIndex = number;
|
||||||
|
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
|
||||||
|
export declare const COLUMN = 0;
|
||||||
|
export declare const SOURCES_INDEX = 1;
|
||||||
|
export declare const SOURCE_LINE = 2;
|
||||||
|
export declare const SOURCE_COLUMN = 3;
|
||||||
|
export declare const NAMES_INDEX = 4;
|
||||||
|
export {};
|
||||||
|
|
@ -0,0 +1,35 @@
|
||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
export interface SourceMapV3 {
|
||||||
|
file?: string | null;
|
||||||
|
names: readonly string[];
|
||||||
|
sourceRoot?: string;
|
||||||
|
sources: readonly (string | null)[];
|
||||||
|
sourcesContent?: readonly (string | null)[];
|
||||||
|
version: 3;
|
||||||
|
}
|
||||||
|
export interface EncodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: string;
|
||||||
|
}
|
||||||
|
export interface DecodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: readonly SourceMapSegment[][];
|
||||||
|
}
|
||||||
|
export interface Pos {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
}
|
||||||
|
export declare type Mapping = {
|
||||||
|
generated: Pos;
|
||||||
|
source: undefined;
|
||||||
|
original: undefined;
|
||||||
|
name: undefined;
|
||||||
|
} | {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name: string;
|
||||||
|
} | {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name: undefined;
|
||||||
|
};
|
||||||
|
|
@ -0,0 +1,78 @@
|
||||||
|
{
|
||||||
|
"name": "@jridgewell/gen-mapping",
|
||||||
|
"version": "0.3.2",
|
||||||
|
"description": "Generate source maps",
|
||||||
|
"keywords": [
|
||||||
|
"source",
|
||||||
|
"map"
|
||||||
|
],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/gen-mapping",
|
||||||
|
"main": "dist/gen-mapping.umd.js",
|
||||||
|
"module": "dist/gen-mapping.mjs",
|
||||||
|
"typings": "dist/types/gen-mapping.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/gen-mapping.d.ts",
|
||||||
|
"browser": "./dist/gen-mapping.umd.js",
|
||||||
|
"require": "./dist/gen-mapping.umd.js",
|
||||||
|
"import": "./dist/gen-mapping.mjs"
|
||||||
|
},
|
||||||
|
"./dist/gen-mapping.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist",
|
||||||
|
"src"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"benchmark": "run-s build:rollup benchmark:*",
|
||||||
|
"benchmark:install": "cd benchmark && npm install",
|
||||||
|
"benchmark:only": "node benchmark/index.mjs",
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"pretest": "run-s build:rollup",
|
||||||
|
"test": "run-s -n test:lint test:coverage",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "mocha",
|
||||||
|
"test:coverage": "c8 mocha",
|
||||||
|
"test:watch": "run-p 'build:rollup -- --watch' 'test:only -- --watch'",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.2",
|
||||||
|
"@types/mocha": "9.1.1",
|
||||||
|
"@types/node": "17.0.29",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.21.0",
|
||||||
|
"@typescript-eslint/parser": "5.21.0",
|
||||||
|
"benchmark": "2.1.4",
|
||||||
|
"c8": "7.11.2",
|
||||||
|
"eslint": "8.14.0",
|
||||||
|
"eslint-config-prettier": "8.5.0",
|
||||||
|
"mocha": "9.2.2",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.6.2",
|
||||||
|
"rollup": "2.70.2",
|
||||||
|
"typescript": "4.6.3"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/set-array": "^1.0.1",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.9"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,458 @@
|
||||||
|
import { SetArray, put } from '@jridgewell/set-array';
|
||||||
|
import { encode } from '@jridgewell/sourcemap-codec';
|
||||||
|
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
import {
|
||||||
|
COLUMN,
|
||||||
|
SOURCES_INDEX,
|
||||||
|
SOURCE_LINE,
|
||||||
|
SOURCE_COLUMN,
|
||||||
|
NAMES_INDEX,
|
||||||
|
} from './sourcemap-segment';
|
||||||
|
|
||||||
|
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
|
||||||
|
|
||||||
|
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
|
||||||
|
|
||||||
|
export type Options = {
|
||||||
|
file?: string | null;
|
||||||
|
sourceRoot?: string | null;
|
||||||
|
};
|
||||||
|
|
||||||
|
const NO_NAME = -1;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A low-level API to associate a generated position with an original source position. Line and
|
||||||
|
* column here are 0-based, unlike `addMapping`.
|
||||||
|
*/
|
||||||
|
export let addSegment: {
|
||||||
|
(
|
||||||
|
map: GenMapping,
|
||||||
|
genLine: number,
|
||||||
|
genColumn: number,
|
||||||
|
source?: null,
|
||||||
|
sourceLine?: null,
|
||||||
|
sourceColumn?: null,
|
||||||
|
name?: null,
|
||||||
|
content?: null,
|
||||||
|
): void;
|
||||||
|
(
|
||||||
|
map: GenMapping,
|
||||||
|
genLine: number,
|
||||||
|
genColumn: number,
|
||||||
|
source: string,
|
||||||
|
sourceLine: number,
|
||||||
|
sourceColumn: number,
|
||||||
|
name?: null,
|
||||||
|
content?: string | null,
|
||||||
|
): void;
|
||||||
|
(
|
||||||
|
map: GenMapping,
|
||||||
|
genLine: number,
|
||||||
|
genColumn: number,
|
||||||
|
source: string,
|
||||||
|
sourceLine: number,
|
||||||
|
sourceColumn: number,
|
||||||
|
name: string,
|
||||||
|
content?: string | null,
|
||||||
|
): void;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A high-level API to associate a generated position with an original source position. Line is
|
||||||
|
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
|
||||||
|
*/
|
||||||
|
export let addMapping: {
|
||||||
|
(
|
||||||
|
map: GenMapping,
|
||||||
|
mapping: {
|
||||||
|
generated: Pos;
|
||||||
|
source?: null;
|
||||||
|
original?: null;
|
||||||
|
name?: null;
|
||||||
|
content?: null;
|
||||||
|
},
|
||||||
|
): void;
|
||||||
|
(
|
||||||
|
map: GenMapping,
|
||||||
|
mapping: {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name?: null;
|
||||||
|
content?: string | null;
|
||||||
|
},
|
||||||
|
): void;
|
||||||
|
(
|
||||||
|
map: GenMapping,
|
||||||
|
mapping: {
|
||||||
|
generated: Pos;
|
||||||
|
source: string;
|
||||||
|
original: Pos;
|
||||||
|
name: string;
|
||||||
|
content?: string | null;
|
||||||
|
},
|
||||||
|
): void;
|
||||||
|
};
|
||||||
|
|
||||||
|
// NOTE: the implementations of all of these are assigned inside GenMapping's `static {}`
// block below, so that they can reach the class's private fields.

/**
 * Same as `addSegment`, but will only add the segment if it generates useful information in the
 * resulting map. This only works correctly if segments are added **in order**, meaning you should
 * not add a segment with a lower generated line/column than one that came before.
 */
export let maybeAddSegment: typeof addSegment;

/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
export let maybeAddMapping: typeof addMapping;

/**
 * Adds/removes the content of the source file to the source map.
 */
export let setSourceContent: (map: GenMapping, source: string, content: string | null) => void;

/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export let toDecodedMap: (map: GenMapping) => DecodedSourceMap;

/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export let toEncodedMap: (map: GenMapping) => EncodedSourceMap;

/**
 * Constructs a new GenMapping, using the already present mappings of the input.
 */
export let fromMap: (input: SourceMapInput) => GenMapping;

/**
 * Returns an array of high-level mapping objects for every recorded segment, which could then be
 * passed to the `source-map` library.
 */
export let allMappings: (map: GenMapping) => Mapping[];
|
||||||
|
|
||||||
|
// This split declaration is only so that terser can eliminate the static initialization block.
// Shared implementation behind addSegment/maybeAddSegment; `skipable` selects the
// "only add if it carries new information" behavior.
let addSegmentInternal: <S extends string | null | undefined>(
  skipable: boolean,
  map: GenMapping,
  genLine: number,
  genColumn: number,
  source: S,
  sourceLine: S extends string ? number : null | undefined,
  sourceColumn: S extends string ? number : null | undefined,
  name: S extends string ? string | null | undefined : null | undefined,
  content: S extends string ? string | null | undefined : null | undefined,
) => void;
|
||||||
|
|
||||||
|
/**
 * Provides the state to generate a sourcemap.
 */
export class GenMapping {
  // Deduplicated name strings; a segment stores an index into this array.
  private _names = new SetArray();
  // Deduplicated source filenames; a segment stores an index into this array.
  private _sources = new SetArray();
  // Source file contents, parallel to `_sources` (null when unknown).
  private _sourcesContent: (string | null)[] = [];
  // _mappings[genLine] is the list of segments on that generated line, kept sorted by
  // generated column (see getColumnIndex/insert).
  private _mappings: SourceMapSegment[][] = [];
  declare file: string | null | undefined;
  declare sourceRoot: string | null | undefined;

  constructor({ file, sourceRoot }: Options = {}) {
    this.file = file;
    this.sourceRoot = sourceRoot;
  }

  // The exported functions are assigned here (instead of being methods) so that they can
  // access the private fields above.
  static {
    addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
      return addSegmentInternal(
        false,
        map,
        genLine,
        genColumn,
        source,
        sourceLine,
        sourceColumn,
        name,
        content,
      );
    };

    maybeAddSegment = (
      map,
      genLine,
      genColumn,
      source,
      sourceLine,
      sourceColumn,
      name,
      content,
    ) => {
      return addSegmentInternal(
        true,
        map,
        genLine,
        genColumn,
        source,
        sourceLine,
        sourceColumn,
        name,
        content,
      );
    };

    addMapping = (map, mapping) => {
      return addMappingInternal(false, map, mapping as Parameters<typeof addMappingInternal>[2]);
    };

    maybeAddMapping = (map, mapping) => {
      return addMappingInternal(true, map, mapping as Parameters<typeof addMappingInternal>[2]);
    };

    setSourceContent = (map, source, content) => {
      const { _sources: sources, _sourcesContent: sourcesContent } = map;
      // `put` returns the index of `source` in the sources table; the content array is
      // kept parallel to it.
      sourcesContent[put(sources, source)] = content;
    };

    toDecodedMap = (map) => {
      const {
        file,
        sourceRoot,
        _mappings: mappings,
        _sources: sources,
        _sourcesContent: sourcesContent,
        _names: names,
      } = map;
      // Trailing lines with no segments carry no information; drop them before emitting.
      removeEmptyFinalLines(mappings);

      return {
        version: 3,
        file: file || undefined,
        names: names.array,
        sourceRoot: sourceRoot || undefined,
        sources: sources.array,
        sourcesContent,
        mappings,
      };
    };

    toEncodedMap = (map) => {
      const decoded = toDecodedMap(map);
      return {
        ...decoded,
        // Serialize the decoded segments into the v3 `mappings` string.
        mappings: encode(decoded.mappings as SourceMapSegment[][]),
      };
    };

    allMappings = (map) => {
      const out: Mapping[] = [];
      const { _mappings: mappings, _sources: sources, _names: names } = map;

      for (let i = 0; i < mappings.length; i++) {
        const line = mappings[i];
        for (let j = 0; j < line.length; j++) {
          const seg = line[j];

          // Generated lines are stored 0-based but exposed 1-based.
          const generated = { line: i + 1, column: seg[COLUMN] };
          let source: string | undefined = undefined;
          let original: Pos | undefined = undefined;
          let name: string | undefined = undefined;

          // Segment length encodes its shape: 1 = sourceless, 4 = source-mapped,
          // 5 = source-mapped with a name.
          if (seg.length !== 1) {
            source = sources.array[seg[SOURCES_INDEX]];
            original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };

            if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
          }

          out.push({ generated, source, original, name } as Mapping);
        }
      }

      return out;
    };

    fromMap = (input) => {
      const map = new TraceMap(input);
      const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });

      // Copy the name/source tables and decoded mappings straight from the traced input.
      putAll(gen._names, map.names);
      putAll(gen._sources, map.sources as string[]);
      gen._sourcesContent = map.sourcesContent || map.sources.map(() => null);
      gen._mappings = decodedMappings(map) as GenMapping['_mappings'];

      return gen;
    };

    // Internal helpers
    addSegmentInternal = (
      skipable,
      map,
      genLine,
      genColumn,
      source,
      sourceLine,
      sourceColumn,
      name,
      content,
    ) => {
      const {
        _mappings: mappings,
        _sources: sources,
        _sourcesContent: sourcesContent,
        _names: names,
      } = map;
      const line = getLine(mappings, genLine);
      const index = getColumnIndex(line, genColumn);

      if (!source) {
        // Sourceless segment; may be skipped entirely when it adds no information.
        if (skipable && skipSourceless(line, index)) return;
        return insert(line, index, [genColumn]);
      }

      // Sigh, TypeScript can't figure out sourceLine and sourceColumn aren't nullish if source
      // isn't nullish.
      assert<number>(sourceLine);
      assert<number>(sourceColumn);

      const sourcesIndex = put(sources, source);
      const namesIndex = name ? put(names, name) : NO_NAME;
      // Only record content the first time this source is seen (a new source lands at the
      // end of the table, so its index equals the current content length).
      if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content ?? null;

      if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
        return;
      }

      return insert(
        line,
        index,
        name
          ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
          : [genColumn, sourcesIndex, sourceLine, sourceColumn],
      );
    };
  }
}
|
||||||
|
|
||||||
|
// No-op assertion: exists purely to narrow `_val`'s TypeScript type to `T` at the call
// site (see addSegmentInternal/addMappingInternal); it performs no runtime check.
function assert<T>(_val: unknown): asserts _val is T {
  // noop.
}
|
||||||
|
|
||||||
|
function getLine(mappings: SourceMapSegment[][], index: number): SourceMapSegment[] {
|
||||||
|
for (let i = mappings.length; i <= index; i++) {
|
||||||
|
mappings[i] = [];
|
||||||
|
}
|
||||||
|
return mappings[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
function getColumnIndex(line: SourceMapSegment[], genColumn: number): number {
|
||||||
|
let index = line.length;
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
const current = line[i];
|
||||||
|
if (genColumn >= current[COLUMN]) break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Inserts `value` into `array` at `index`, shifting every later element one slot right.
 * In this file, `index` comes from getColumnIndex and is always <= array.length.
 */
function insert<T>(array: T[], index: number, value: T) {
  for (let i = array.length; i > index; i--) {
    array[i] = array[i - 1];
  }
  array[index] = value;
}
|
||||||
|
|
||||||
|
function removeEmptyFinalLines(mappings: SourceMapSegment[][]) {
|
||||||
|
const { length } = mappings;
|
||||||
|
let len = length;
|
||||||
|
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||||
|
if (mappings[i].length > 0) break;
|
||||||
|
}
|
||||||
|
if (len < length) mappings.length = len;
|
||||||
|
}
|
||||||
|
|
||||||
|
function putAll(strarr: SetArray, array: string[]) {
|
||||||
|
for (let i = 0; i < array.length; i++) put(strarr, array[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
function skipSourceless(line: SourceMapSegment[], index: number): boolean {
|
||||||
|
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||||
|
// doesn't generate any useful information.
|
||||||
|
if (index === 0) return true;
|
||||||
|
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||||
|
// genrate any new information. Else, this segment will end the source/named segment and point to
|
||||||
|
// a sourceless position, which is useful.
|
||||||
|
return prev.length === 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * True when inserting a source-mapped segment at `index` would add no useful information:
 * the previous segment on the line already maps to the exact same source position and name.
 */
function skipSource(
  line: SourceMapSegment[],
  index: number,
  sourcesIndex: number,
  sourceLine: number,
  sourceColumn: number,
  namesIndex: number,
): boolean {
  // A source/named segment at the start of a line gives position at that genColumn
  if (index === 0) return false;

  const prev = line[index - 1];

  // If the previous segment is sourceless, then we're transitioning to a source.
  if (prev.length === 1) return false;

  // If the previous segment maps to the exact same source position, then this segment doesn't
  // provide any new position information.
  return (
    sourcesIndex === prev[SOURCES_INDEX] &&
    sourceLine === prev[SOURCE_LINE] &&
    sourceColumn === prev[SOURCE_COLUMN] &&
    namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)
  );
}
|
||||||
|
|
||||||
|
/**
 * Shared implementation behind addMapping/maybeAddMapping: unpacks the high-level mapping
 * object and forwards to addSegmentInternal, converting 1-based lines to 0-based.
 */
function addMappingInternal<S extends string | null | undefined>(
  skipable: boolean,
  map: GenMapping,
  mapping: {
    generated: Pos;
    source: S;
    original: S extends string ? Pos : null | undefined;
    name: S extends string ? string | null | undefined : null | undefined;
    content: S extends string ? string | null | undefined : null | undefined;
  },
) {
  const { generated, source, original, name, content } = mapping;
  if (!source) {
    // Sourceless mapping: only the generated position is recorded.
    return addSegmentInternal(
      skipable,
      map,
      generated.line - 1,
      generated.column,
      null,
      null,
      null,
      null,
      null,
    );
  }
  // Re-type `source` as string: the falsy check above has ruled out null/undefined,
  // which TypeScript cannot infer through the generic S.
  const s: string = source;
  assert<Pos>(original);
  return addSegmentInternal(
    skipable,
    map,
    generated.line - 1,
    generated.column,
    s,
    original.line - 1,
    original.column,
    name,
    content,
  );
}
|
||||||
|
|
@ -0,0 +1,16 @@
|
||||||
|
// Named aliases for the positional fields of a SourceMapSegment tuple, purely for
// readability of the union below.
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;

// A segment is a tuple of 1, 4, or 5 numbers: sourceless, source-mapped, or
// source-mapped with an associated name.
export type SourceMapSegment =
  | [GeneratedColumn]
  | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]
  | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];

// Tuple-field offsets, so call sites can index segments by name instead of magic numbers.
export const COLUMN = 0;
export const SOURCES_INDEX = 1;
export const SOURCE_LINE = 2;
export const SOURCE_COLUMN = 3;
export const NAMES_INDEX = 4;
|
||||||
|
|
@ -0,0 +1,43 @@
|
||||||
|
import type { SourceMapSegment } from './sourcemap-segment';

/** Fields common to every version-3 source map object. */
export interface SourceMapV3 {
  file?: string | null;
  names: readonly string[];
  sourceRoot?: string;
  sources: readonly (string | null)[];
  sourcesContent?: readonly (string | null)[];
  version: 3;
}

/** A v3 map whose `mappings` have been serialized into a single string. */
export interface EncodedSourceMap extends SourceMapV3 {
  mappings: string;
}

/** A v3 map whose `mappings` are kept as decoded segment tuples. */
export interface DecodedSourceMap extends SourceMapV3 {
  mappings: readonly SourceMapSegment[][];
}

/** A line/column position; `line` is 1-based at this package's public API (see addMapping). */
export interface Pos {
  line: number;
  column: number;
}

/**
 * High-level mapping record as produced by `allMappings`: fully sourceless,
 * source-mapped with a name, or source-mapped without one.
 */
export type Mapping =
  | {
      generated: Pos;
      source: undefined;
      original: undefined;
      name: undefined;
    }
  | {
      generated: Pos;
      source: string;
      original: Pos;
      name: string;
    }
  | {
      generated: Pos;
      source: string;
      original: Pos;
      name: undefined;
    };
|
@ -0,0 +1,19 @@
|
||||||
|
Copyright 2019 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
@ -0,0 +1,40 @@
|
||||||
|
# @jridgewell/resolve-uri
|
||||||
|
|
||||||
|
> Resolve a URI relative to an optional base URI
|
||||||
|
|
||||||
|
Resolve any combination of absolute URIs, protocol-relative URIs, absolute paths, or relative paths.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/resolve-uri
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
function resolve(input: string, base?: string): string;
|
||||||
|
```
|
||||||
|
|
||||||
|
```js
|
||||||
|
import resolve from '@jridgewell/resolve-uri';
|
||||||
|
|
||||||
|
resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
|
||||||
|
```
|
||||||
|
|
||||||
|
| Input | Base | Resolution | Explanation |
|
||||||
|
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
|
||||||
|
| `https://example.com` | _any_ | `https://example.com/` | Input is normalized only |
|
||||||
|
| `//example.com` | `https://base.com/` | `https://example.com/` | Input inherits the base's protocol |
|
||||||
|
| `//example.com` | _rest_ | `//example.com/` | Input is normalized only |
|
||||||
|
| `/example` | `https://base.com/` | `https://base.com/example` | Input inherits the base's origin |
|
||||||
|
| `/example` | `//base.com/` | `//base.com/example` | Input inherits the base's host and remains protocol relative |
|
||||||
|
| `/example` | _rest_ | `/example` | Input is normalized only |
|
||||||
|
| `example` | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base |
|
||||||
|
| `example` | `https://base.com/file` | `https://base.com/example` | Input is joined with the base without its file |
|
||||||
|
| `example` | `//base.com/dir/` | `//base.com/dir/example` | Input is joined with the base's last directory |
|
||||||
|
| `example` | `//base.com/file` | `//base.com/example` | Input is joined with the base without its file |
|
||||||
|
| `example` | `/base/dir/` | `/base/dir/example` | Input is joined with the base's last directory |
|
||||||
|
| `example` | `/base/file` | `/base/example` | Input is joined with the base without its file |
|
||||||
|
| `example` | `base/dir/` | `base/dir/example` | Input is joined with the base's last directory |
|
||||||
|
| `example` | `base/file` | `base/example` | Input is joined with the base without its file |
|
||||||
|
|
@ -0,0 +1,242 @@
|
||||||
|
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
 * Matches the parts of a URL:
 * 1. Scheme, including ":", guaranteed.
 * 2. User/password, including "@", optional.
 * 3. Host, guaranteed.
 * 4. Port, including ":", optional.
 * 5. Path, including "/", optional.
 * 6. Query, including "?", optional.
 * 7. Hash, including "#", optional.
 */
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
 * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
 *
 * 1. Host, optional.
 * 2. Path, which may include "/", guaranteed.
 * 3. Query, including "?", optional.
 * 4. Hash, including "#", optional.
 */
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
// Compiled TypeScript-enum-style lookup object. The numeric values are ordered from
// least specific (Empty) to most specific (Absolute); `resolve` and `normalizePath`
// rely on that ordering via numeric comparisons.
var UrlType;
(function (UrlType) {
    UrlType[UrlType["Empty"] = 1] = "Empty";
    UrlType[UrlType["Hash"] = 2] = "Hash";
    UrlType[UrlType["Query"] = 3] = "Query";
    UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
    UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
    UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
    UrlType[UrlType["Absolute"] = 7] = "Absolute";
})(UrlType || (UrlType = {}));
|
||||||
|
/**
 * True when `input` carries an explicit scheme, eg "https://example.com".
 */
function isAbsoluteUrl(input) {
    // Same pattern as the module-level `schemeRegex`.
    return /^[\w+.-]+:\/\//.test(input);
}
|
||||||
|
/**
 * True for protocol-relative URLs, eg "//example.com/foo".
 */
function isSchemeRelativeUrl(input) {
    return input.slice(0, 2) === '//';
}
|
||||||
|
/**
 * True for absolute paths, eg "/foo/bar".
 */
function isAbsolutePath(input) {
    return input.charAt(0) === '/';
}
|
||||||
|
/**
 * True for "file:" URLs (with or without the "//" authority part).
 */
function isFileUrl(input) {
    return input.slice(0, 5) === 'file:';
}
|
||||||
|
/**
 * True when `input` is relative: a relative path ("."-prefixed), a bare query, or a bare hash.
 */
function isRelative(input) {
    const first = input.charAt(0);
    return first === '.' || first === '?' || first === '#';
}
|
||||||
|
/**
 * Parses an input known to match `urlRegex` into a URL record, normalizing an empty
 * path to "/". Note: `match` is not null-checked — callers guarantee a match.
 */
function parseAbsoluteUrl(input) {
    const match = urlRegex.exec(input);
    return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
|
||||||
|
/**
 * Parses a "file:" URL. A path without a leading "/" is given one, so downstream code
 * can treat every parsed path as absolute.
 */
function parseFileUrl(input) {
    const match = fileRegex.exec(input);
    const path = match[2];
    return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
|
||||||
|
/**
 * Builds the internal URL record. `type` defaults to Absolute; callers that parse
 * non-absolute inputs overwrite it (and blank out scheme/host) afterwards — see parseUrl.
 */
function makeUrl(scheme, user, host, port, path, query, hash) {
    return {
        scheme,
        user,
        host,
        port,
        path,
        query,
        hash,
        type: UrlType.Absolute,
    };
}
|
||||||
|
/**
 * Parses any supported input — absolute / protocol-relative / file URL, absolute or
 * relative path, bare query, bare hash, or empty string — into a URL record tagged
 * with its UrlType.
 *
 * Non-absolute inputs are handled by prefixing a dummy origin ("http://foo.com") so the
 * single absolute-URL regex can be reused, then stripping the dummy parts back out.
 */
function parseUrl(input) {
    if (isSchemeRelativeUrl(input)) {
        const url = parseAbsoluteUrl('http:' + input);
        url.scheme = '';
        url.type = UrlType.SchemeRelative;
        return url;
    }
    if (isAbsolutePath(input)) {
        const url = parseAbsoluteUrl('http://foo.com' + input);
        url.scheme = '';
        url.host = '';
        url.type = UrlType.AbsolutePath;
        return url;
    }
    if (isFileUrl(input))
        return parseFileUrl(input);
    if (isAbsoluteUrl(input))
        return parseAbsoluteUrl(input);
    // Anything left is relative (path, query, or hash) or empty.
    const url = parseAbsoluteUrl('http://foo.com/' + input);
    url.scheme = '';
    url.host = '';
    url.type = input
        ? input.startsWith('?')
            ? UrlType.Query
            : input.startsWith('#')
                ? UrlType.Hash
                : UrlType.RelativePath
        : UrlType.Empty;
    return url;
}
|
||||||
|
/**
 * Drops the final path component (the "file"), keeping the trailing "/".
 * A path ending in "/.." is returned untouched: it is a relative path with excess
 * parent segments, so there is no file to strip.
 */
function stripPathFilename(path) {
    if (path.endsWith('/..')) return path;
    const lastSlash = path.lastIndexOf('/');
    return path.substring(0, lastSlash + 1);
}
|
||||||
|
/**
 * Rewrites `url.path` (a relative path, already given a leading "/" by parseUrl) so it
 * is rooted at `base`'s directory. Normalizes `base.path` in place first so the "/"
 * comparison below is reliable.
 */
function mergePaths(url, base) {
    normalizePath(base, base.type);
    // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
    // path).
    if (url.path === '/') {
        url.path = base.path;
    }
    else {
        // Resolution happens relative to the base path's directory, not the file.
        url.path = stripPathFilename(base.path) + url.path;
    }
}
|
||||||
|
/**
 * The path can have empty directories "//", unneeded parents "foo/..", or current directory
 * "foo/.". We need to normalize to a standard representation.
 *
 * Mutates `url.path` in place. `type` decides whether excess ".." segments are kept
 * (relative inputs, type <= RelativePath) or dropped (absolute ones).
 */
function normalizePath(url, type) {
    const rel = type <= UrlType.RelativePath;
    const pieces = url.path.split('/');
    // We need to preserve the first piece always, so that we output a leading slash. The item at
    // pieces[0] is an empty string.
    let pointer = 1;
    // Positive is the number of real directories we've output, used for popping a parent directory.
    // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
    let positive = 0;
    // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
    // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
    // real directory, we won't need to append, unless the other conditions happen again.
    let addTrailingSlash = false;
    for (let i = 1; i < pieces.length; i++) {
        const piece = pieces[i];
        // An empty directory, could be a trailing slash, or just a double "//" in the path.
        if (!piece) {
            addTrailingSlash = true;
            continue;
        }
        // If we encounter a real directory, then we don't need to append anymore.
        addTrailingSlash = false;
        // A current directory, which we can always drop.
        if (piece === '.')
            continue;
        // A parent directory, we need to see if there are any real directories we can pop. Else, we
        // have an excess of parents, and we'll need to keep the "..".
        if (piece === '..') {
            if (positive) {
                addTrailingSlash = true;
                positive--;
                pointer--;
            }
            else if (rel) {
                // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
                // URL, protocol relative URL, or an absolute path, we don't need to keep excess.
                pieces[pointer++] = piece;
            }
            continue;
        }
        // We've encountered a real directory. Move it to the next insertion pointer, which accounts for
        // any popped or dropped directories.
        pieces[pointer++] = piece;
        positive++;
    }
    // Rebuild the path from the surviving pieces (pieces[1..pointer)), each with a leading "/".
    let path = '';
    for (let i = 1; i < pointer; i++) {
        path += '/' + pieces[i];
    }
    if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
        path += '/';
    }
    url.path = path;
}
|
||||||
|
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 * Returns '' when both are empty. The first switch intentionally falls through: starting
 * at the input's (less specific) type, each case copies one more layer of the base —
 * hash, query, path, authority, scheme — into the result.
 */
function resolve(input, base) {
    if (!input && !base)
        return '';
    const url = parseUrl(input);
    let inputType = url.type;
    if (base && inputType !== UrlType.Absolute) {
        const baseUrl = parseUrl(base);
        const baseType = baseUrl.type;
        switch (inputType) {
            case UrlType.Empty:
                url.hash = baseUrl.hash;
            // fall through
            case UrlType.Hash:
                url.query = baseUrl.query;
            // fall through
            case UrlType.Query:
            case UrlType.RelativePath:
                mergePaths(url, baseUrl);
            // fall through
            case UrlType.AbsolutePath:
                // The host, user, and port are joined, you can't copy one without the others.
                url.user = baseUrl.user;
                url.host = baseUrl.host;
                url.port = baseUrl.port;
            // fall through
            case UrlType.SchemeRelative:
                // The input doesn't have a schema at least, so we need to copy at least that over.
                url.scheme = baseUrl.scheme;
        }
        // The result is at least as specific as the more specific of input and base.
        if (baseType > inputType)
            inputType = baseType;
    }
    normalizePath(url, inputType);
    const queryHash = url.query + url.hash;
    switch (inputType) {
        // This is impossible, because of the empty checks at the start of the function.
        // case UrlType.Empty:
        case UrlType.Hash:
        case UrlType.Query:
            return queryHash;
        case UrlType.RelativePath: {
            // The first char is always a "/", and we need it to be relative.
            const path = url.path.slice(1);
            if (!path)
                return queryHash || '.';
            if (isRelative(base || input) && !isRelative(path)) {
                // If base started with a leading ".", or there is no base and input started with a ".",
                // then we need to ensure that the relative path starts with a ".". We don't know if
                // relative starts with a "..", though, so check before prepending.
                return './' + path + queryHash;
            }
            return path + queryHash;
        }
        case UrlType.AbsolutePath:
            return url.path + queryHash;
        default:
            return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
    }
}
|
||||||
|
|
||||||
|
export { resolve as default };
|
||||||
|
//# sourceMappingURL=resolve-uri.mjs.map
|
||||||
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,250 @@
|
||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||||
|
typeof define === 'function' && define.amd ? define(factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
|
||||||
|
})(this, (function () { 'use strict';
|
||||||
|
|
||||||
|
// Matches the scheme of a URL, eg "http://"
|
||||||
|
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||||
|
/**
|
||||||
|
* Matches the parts of a URL:
|
||||||
|
* 1. Scheme, including ":", guaranteed.
|
||||||
|
* 2. User/password, including "@", optional.
|
||||||
|
* 3. Host, guaranteed.
|
||||||
|
* 4. Port, including ":", optional.
|
||||||
|
* 5. Path, including "/", optional.
|
||||||
|
* 6. Query, including "?", optional.
|
||||||
|
* 7. Hash, including "#", optional.
|
||||||
|
*/
|
||||||
|
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
||||||
|
/**
|
||||||
|
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
|
||||||
|
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
||||||
|
*
|
||||||
|
* 1. Host, optional.
|
||||||
|
* 2. Path, which may include "/", guaranteed.
|
||||||
|
* 3. Query, including "?", optional.
|
||||||
|
* 4. Hash, including "#", optional.
|
||||||
|
*/
|
||||||
|
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
||||||
|
var UrlType;
|
||||||
|
(function (UrlType) {
|
||||||
|
UrlType[UrlType["Empty"] = 1] = "Empty";
|
||||||
|
UrlType[UrlType["Hash"] = 2] = "Hash";
|
||||||
|
UrlType[UrlType["Query"] = 3] = "Query";
|
||||||
|
UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
|
||||||
|
UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
|
||||||
|
UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
|
||||||
|
UrlType[UrlType["Absolute"] = 7] = "Absolute";
|
||||||
|
})(UrlType || (UrlType = {}));
|
||||||
|
function isAbsoluteUrl(input) {
|
||||||
|
return schemeRegex.test(input);
|
||||||
|
}
|
||||||
|
function isSchemeRelativeUrl(input) {
|
||||||
|
return input.startsWith('//');
|
||||||
|
}
|
||||||
|
function isAbsolutePath(input) {
|
||||||
|
return input.startsWith('/');
|
||||||
|
}
|
||||||
|
function isFileUrl(input) {
|
||||||
|
return input.startsWith('file:');
|
||||||
|
}
|
||||||
|
function isRelative(input) {
|
||||||
|
return /^[.?#]/.test(input);
|
||||||
|
}
|
||||||
|
function parseAbsoluteUrl(input) {
|
||||||
|
const match = urlRegex.exec(input);
|
||||||
|
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
||||||
|
}
|
||||||
|
function parseFileUrl(input) {
|
||||||
|
const match = fileRegex.exec(input);
|
||||||
|
const path = match[2];
|
||||||
|
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
||||||
|
}
|
||||||
|
function makeUrl(scheme, user, host, port, path, query, hash) {
|
||||||
|
return {
|
||||||
|
scheme,
|
||||||
|
user,
|
||||||
|
host,
|
||||||
|
port,
|
||||||
|
path,
|
||||||
|
query,
|
||||||
|
hash,
|
||||||
|
type: UrlType.Absolute,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function parseUrl(input) {
|
||||||
|
if (isSchemeRelativeUrl(input)) {
|
||||||
|
const url = parseAbsoluteUrl('http:' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.type = UrlType.SchemeRelative;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
if (isAbsolutePath(input)) {
|
||||||
|
const url = parseAbsoluteUrl('http://foo.com' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.host = '';
|
||||||
|
url.type = UrlType.AbsolutePath;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
if (isFileUrl(input))
|
||||||
|
return parseFileUrl(input);
|
||||||
|
if (isAbsoluteUrl(input))
|
||||||
|
return parseAbsoluteUrl(input);
|
||||||
|
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.host = '';
|
||||||
|
url.type = input
|
||||||
|
? input.startsWith('?')
|
||||||
|
? UrlType.Query
|
||||||
|
: input.startsWith('#')
|
||||||
|
? UrlType.Hash
|
||||||
|
: UrlType.RelativePath
|
||||||
|
: UrlType.Empty;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
function stripPathFilename(path) {
|
||||||
|
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
||||||
|
// paths. It's not a file, so we can't strip it.
|
||||||
|
if (path.endsWith('/..'))
|
||||||
|
return path;
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
function mergePaths(url, base) {
|
||||||
|
normalizePath(base, base.type);
|
||||||
|
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
||||||
|
// path).
|
||||||
|
if (url.path === '/') {
|
||||||
|
url.path = base.path;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Resolution happens relative to the base path's directory, not the file.
|
||||||
|
url.path = stripPathFilename(base.path) + url.path;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
||||||
|
* "foo/.". We need to normalize to a standard representation.
|
||||||
|
*/
|
||||||
|
function normalizePath(url, type) {
|
||||||
|
const rel = type <= UrlType.RelativePath;
|
||||||
|
const pieces = url.path.split('/');
|
||||||
|
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
||||||
|
// pieces[0] is an empty string.
|
||||||
|
let pointer = 1;
|
||||||
|
// Positive is the number of real directories we've output, used for popping a parent directory.
|
||||||
|
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
||||||
|
let positive = 0;
|
||||||
|
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
||||||
|
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
||||||
|
// real directory, we won't need to append, unless the other conditions happen again.
|
||||||
|
let addTrailingSlash = false;
|
||||||
|
for (let i = 1; i < pieces.length; i++) {
|
||||||
|
const piece = pieces[i];
|
||||||
|
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
||||||
|
if (!piece) {
|
||||||
|
addTrailingSlash = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// If we encounter a real directory, then we don't need to append anymore.
|
||||||
|
addTrailingSlash = false;
|
||||||
|
// A current directory, which we can always drop.
|
||||||
|
if (piece === '.')
|
||||||
|
continue;
|
||||||
|
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
||||||
|
// have an excess of parents, and we'll need to keep the "..".
|
||||||
|
if (piece === '..') {
|
||||||
|
if (positive) {
|
||||||
|
addTrailingSlash = true;
|
||||||
|
positive--;
|
||||||
|
pointer--;
|
||||||
|
}
|
||||||
|
else if (rel) {
|
||||||
|
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
||||||
|
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
||||||
|
pieces[pointer++] = piece;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
||||||
|
// any popped or dropped directories.
|
||||||
|
pieces[pointer++] = piece;
|
||||||
|
positive++;
|
||||||
|
}
|
||||||
|
let path = '';
|
||||||
|
for (let i = 1; i < pointer; i++) {
|
||||||
|
path += '/' + pieces[i];
|
||||||
|
}
|
||||||
|
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
||||||
|
path += '/';
|
||||||
|
}
|
||||||
|
url.path = path;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Attempts to resolve `input` URL/path relative to `base`.
|
||||||
|
*/
|
||||||
|
function resolve(input, base) {
|
||||||
|
if (!input && !base)
|
||||||
|
return '';
|
||||||
|
const url = parseUrl(input);
|
||||||
|
let inputType = url.type;
|
||||||
|
if (base && inputType !== UrlType.Absolute) {
|
||||||
|
const baseUrl = parseUrl(base);
|
||||||
|
const baseType = baseUrl.type;
|
||||||
|
switch (inputType) {
|
||||||
|
case UrlType.Empty:
|
||||||
|
url.hash = baseUrl.hash;
|
||||||
|
// fall through
|
||||||
|
case UrlType.Hash:
|
||||||
|
url.query = baseUrl.query;
|
||||||
|
// fall through
|
||||||
|
case UrlType.Query:
|
||||||
|
case UrlType.RelativePath:
|
||||||
|
mergePaths(url, baseUrl);
|
||||||
|
// fall through
|
||||||
|
case UrlType.AbsolutePath:
|
||||||
|
// The host, user, and port are joined, you can't copy one without the others.
|
||||||
|
url.user = baseUrl.user;
|
||||||
|
url.host = baseUrl.host;
|
||||||
|
url.port = baseUrl.port;
|
||||||
|
// fall through
|
||||||
|
case UrlType.SchemeRelative:
|
||||||
|
// The input doesn't have a schema at least, so we need to copy at least that over.
|
||||||
|
url.scheme = baseUrl.scheme;
|
||||||
|
}
|
||||||
|
if (baseType > inputType)
|
||||||
|
inputType = baseType;
|
||||||
|
}
|
||||||
|
normalizePath(url, inputType);
|
||||||
|
const queryHash = url.query + url.hash;
|
||||||
|
switch (inputType) {
|
||||||
|
// This is impossible, because of the empty checks at the start of the function.
|
||||||
|
// case UrlType.Empty:
|
||||||
|
case UrlType.Hash:
|
||||||
|
case UrlType.Query:
|
||||||
|
return queryHash;
|
||||||
|
case UrlType.RelativePath: {
|
||||||
|
// The first char is always a "/", and we need it to be relative.
|
||||||
|
const path = url.path.slice(1);
|
||||||
|
if (!path)
|
||||||
|
return queryHash || '.';
|
||||||
|
if (isRelative(base || input) && !isRelative(path)) {
|
||||||
|
// If base started with a leading ".", or there is no base and input started with a ".",
|
||||||
|
// then we need to ensure that the relative path starts with a ".". We don't know if
|
||||||
|
// relative starts with a "..", though, so check before prepending.
|
||||||
|
return './' + path + queryHash;
|
||||||
|
}
|
||||||
|
return path + queryHash;
|
||||||
|
}
|
||||||
|
case UrlType.AbsolutePath:
|
||||||
|
return url.path + queryHash;
|
||||||
|
default:
|
||||||
|
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolve;
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=resolve-uri.umd.js.map
|
||||||
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,4 @@
|
||||||
|
/**
|
||||||
|
* Attempts to resolve `input` URL/path relative to `base`.
|
||||||
|
*/
|
||||||
|
export default function resolve(input: string, base: string | undefined): string;
|
||||||
|
|
@ -0,0 +1,69 @@
|
||||||
|
{
|
||||||
|
"name": "@jridgewell/resolve-uri",
|
||||||
|
"version": "3.1.0",
|
||||||
|
"description": "Resolve a URI relative to an optional base URI",
|
||||||
|
"keywords": [
|
||||||
|
"resolve",
|
||||||
|
"uri",
|
||||||
|
"url",
|
||||||
|
"path"
|
||||||
|
],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/resolve-uri",
|
||||||
|
"main": "dist/resolve-uri.umd.js",
|
||||||
|
"module": "dist/resolve-uri.mjs",
|
||||||
|
"typings": "dist/types/resolve-uri.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/resolve-uri.d.ts",
|
||||||
|
"browser": "./dist/resolve-uri.umd.js",
|
||||||
|
"require": "./dist/resolve-uri.umd.js",
|
||||||
|
"import": "./dist/resolve-uri.mjs"
|
||||||
|
},
|
||||||
|
"./dist/resolve-uri.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"pretest": "run-s build:rollup",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "mocha",
|
||||||
|
"test:coverage": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"c8": "7.11.0",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"mocha": "9.2.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.66.0",
|
||||||
|
"typescript": "4.5.5"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
@ -0,0 +1,37 @@
|
||||||
|
# @jridgewell/set-array
|
||||||
|
|
||||||
|
> Like a Set, but provides the index of the `key` in the backing array
|
||||||
|
|
||||||
|
This is designed to allow synchronizing a second array with the contents of the backing array, like
|
||||||
|
how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there
|
||||||
|
are never duplicates.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/set-array
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { SetArray, get, put, pop } from '@jridgewell/set-array';
|
||||||
|
|
||||||
|
const sa = new SetArray();
|
||||||
|
|
||||||
|
let index = put(sa, 'first');
|
||||||
|
assert.strictEqual(index, 0);
|
||||||
|
|
||||||
|
index = put(sa, 'second');
|
||||||
|
assert.strictEqual(index, 1);
|
||||||
|
|
||||||
|
assert.deepEqual(sa.array, [ 'first', 'second' ]);
|
||||||
|
|
||||||
|
index = get(sa, 'first');
|
||||||
|
assert.strictEqual(index, 0);
|
||||||
|
|
||||||
|
pop(sa);
|
||||||
|
index = get(sa, 'second');
|
||||||
|
assert.strictEqual(index, undefined);
|
||||||
|
assert.deepEqual(sa.array, [ 'first' ]);
|
||||||
|
```
|
||||||
|
|
@ -0,0 +1,48 @@
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
let get;
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
let put;
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
let pop;
|
||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
class SetArray {
|
||||||
|
constructor() {
|
||||||
|
this._indexes = { __proto__: null };
|
||||||
|
this.array = [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(() => {
|
||||||
|
get = (strarr, key) => strarr._indexes[key];
|
||||||
|
put = (strarr, key) => {
|
||||||
|
// The key may or may not be present. If it is present, it's a number.
|
||||||
|
const index = get(strarr, key);
|
||||||
|
if (index !== undefined)
|
||||||
|
return index;
|
||||||
|
const { array, _indexes: indexes } = strarr;
|
||||||
|
return (indexes[key] = array.push(key) - 1);
|
||||||
|
};
|
||||||
|
pop = (strarr) => {
|
||||||
|
const { array, _indexes: indexes } = strarr;
|
||||||
|
if (array.length === 0)
|
||||||
|
return;
|
||||||
|
const last = array.pop();
|
||||||
|
indexes[last] = undefined;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
|
||||||
|
export { SetArray, get, pop, put };
|
||||||
|
//# sourceMappingURL=set-array.mjs.map
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport let get: (strarr: SetArray, key: string) => number | undefined;\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport let put: (strarr: SetArray, key: string) => number;\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport let pop: (strarr: SetArray) => void;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray {\n private declare _indexes: { [key: string]: number | undefined };\n declare array: readonly string[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n\n static {\n get = (strarr, key) => strarr._indexes[key];\n\n put = (strarr, key) => {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(strarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = strarr;\n\n return (indexes[key] = (array as string[]).push(key) - 1);\n };\n\n pop = (strarr) => {\n const { array, _indexes: indexes } = strarr;\n if (array.length === 0) return;\n\n const last = (array as string[]).pop()!;\n indexes[last] = undefined;\n };\n }\n}\n"],"names":[],"mappings":"AAAA;;;IAGW,IAA2D;AAEtE;;;;IAIW,IAA+C;AAE1D;;;IAGW,IAAgC;AAE3C;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CAuBF;AArBC;IACE,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IAE5C,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG;;QAEhB,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;QAE5C,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAI,KAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;KAC3D,CAAC;IAEF,GAAG,GAAG,CAAC,MAAM;QACX,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;QAC5C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAI,KAAkB,CAAC,GAAG,EAAG,CAAC;QACxC,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;KAC3B,CAAC;AACJ,CAAC,GAAA;;;;"}
|
||||||
|
|
@ -0,0 +1,58 @@
|
||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {}));
|
||||||
|
})(this, (function (exports) { 'use strict';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
exports.get = void 0;
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
exports.put = void 0;
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
exports.pop = void 0;
|
||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
class SetArray {
|
||||||
|
constructor() {
|
||||||
|
this._indexes = { __proto__: null };
|
||||||
|
this.array = [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(() => {
|
||||||
|
exports.get = (strarr, key) => strarr._indexes[key];
|
||||||
|
exports.put = (strarr, key) => {
|
||||||
|
// The key may or may not be present. If it is present, it's a number.
|
||||||
|
const index = exports.get(strarr, key);
|
||||||
|
if (index !== undefined)
|
||||||
|
return index;
|
||||||
|
const { array, _indexes: indexes } = strarr;
|
||||||
|
return (indexes[key] = array.push(key) - 1);
|
||||||
|
};
|
||||||
|
exports.pop = (strarr) => {
|
||||||
|
const { array, _indexes: indexes } = strarr;
|
||||||
|
if (array.length === 0)
|
||||||
|
return;
|
||||||
|
const last = array.pop();
|
||||||
|
indexes[last] = undefined;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
|
||||||
|
exports.SetArray = SetArray;
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=set-array.umd.js.map
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport let get: (strarr: SetArray, key: string) => number | undefined;\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport let put: (strarr: SetArray, key: string) => number;\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport let pop: (strarr: SetArray) => void;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray {\n private declare _indexes: { [key: string]: number | undefined };\n declare array: readonly string[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n\n static {\n get = (strarr, key) => strarr._indexes[key];\n\n put = (strarr, key) => {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(strarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = strarr;\n\n return (indexes[key] = (array as string[]).push(key) - 1);\n };\n\n pop = (strarr) => {\n const { array, _indexes: indexes } = strarr;\n if (array.length === 0) return;\n\n const last = (array as string[]).pop()!;\n indexes[last] = undefined;\n };\n }\n}\n"],"names":["get","put","pop"],"mappings":";;;;;;IAAA;;;AAGWA,yBAA2D;IAEtE;;;;AAIWC,yBAA+C;IAE1D;;;AAGWC,yBAAgC;IAE3C;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KAuBF;IArBC;QACEF,WAAG,GAAG,CAAC,MAAM,EAAE,GAAG,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;QAE5CC,WAAG,GAAG,CAAC,MAAM,EAAE,GAAG;;YAEhB,MAAM,KAAK,GAAGD,WAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;YAC/B,IAAI,KAAK,KAAK,SAAS;gBAAE,OAAO,KAAK,CAAC;YAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;YAE5C,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAI,KAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;SAC3D,CAAC;QAEFE,WAAG,GAAG,CAAC,MAAM;YACX,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;YAC5C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;gBAAE,OAAO;YAE/B,MAAM,IAAI,GAAI,KAAkB,CAAC,GAAG,EAAG,CAAC;YACxC,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;SAC3B,CAAC;IACJ,CAAC,GAAA;;;;;;;;;;"}
|
||||||
|
|
@ -0,0 +1,26 @@
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
export declare let get: (strarr: SetArray, key: string) => number | undefined;
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
export declare let put: (strarr: SetArray, key: string) => number;
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
export declare let pop: (strarr: SetArray) => void;
|
||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
export declare class SetArray {
|
||||||
|
private _indexes;
|
||||||
|
array: readonly string[];
|
||||||
|
constructor();
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,66 @@
|
||||||
|
{
|
||||||
|
"name": "@jridgewell/set-array",
|
||||||
|
"version": "1.1.2",
|
||||||
|
"description": "Like a Set, but provides the index of the `key` in the backing array",
|
||||||
|
"keywords": [],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/set-array",
|
||||||
|
"main": "dist/set-array.umd.js",
|
||||||
|
"module": "dist/set-array.mjs",
|
||||||
|
"typings": "dist/types/set-array.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/set-array.d.ts",
|
||||||
|
"browser": "./dist/set-array.umd.js",
|
||||||
|
"require": "./dist/set-array.umd.js",
|
||||||
|
"import": "./dist/set-array.mjs"
|
||||||
|
},
|
||||||
|
"./dist/set-array.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist",
|
||||||
|
"src"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"pretest": "run-s build:rollup",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "mocha",
|
||||||
|
"test:coverage": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@types/mocha": "9.1.1",
|
||||||
|
"@types/node": "17.0.29",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"c8": "7.11.0",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"mocha": "9.2.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.66.0",
|
||||||
|
"typescript": "4.5.5"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,55 @@
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
export let get: (strarr: SetArray, key: string) => number | undefined;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
export let put: (strarr: SetArray, key: string) => number;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pops the last added item out of the SetArray.
|
||||||
|
*/
|
||||||
|
export let pop: (strarr: SetArray) => void;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
export class SetArray {
|
||||||
|
private declare _indexes: { [key: string]: number | undefined };
|
||||||
|
declare array: readonly string[];
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this._indexes = { __proto__: null } as any;
|
||||||
|
this.array = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
static {
|
||||||
|
get = (strarr, key) => strarr._indexes[key];
|
||||||
|
|
||||||
|
put = (strarr, key) => {
|
||||||
|
// The key may or may not be present. If it is present, it's a number.
|
||||||
|
const index = get(strarr, key);
|
||||||
|
if (index !== undefined) return index;
|
||||||
|
|
||||||
|
const { array, _indexes: indexes } = strarr;
|
||||||
|
|
||||||
|
return (indexes[key] = (array as string[]).push(key) - 1);
|
||||||
|
};
|
||||||
|
|
||||||
|
pop = (strarr) => {
|
||||||
|
const { array, _indexes: indexes } = strarr;
|
||||||
|
if (array.length === 0) return;
|
||||||
|
|
||||||
|
const last = (array as string[]).pop()!;
|
||||||
|
indexes[last] = undefined;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
Copyright 2019 Justin Ridgewell <jridgewell@google.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
@ -0,0 +1,82 @@
|
||||||
|
# @jridgewell/source-map
|
||||||
|
|
||||||
|
> Packages `@jridgewell/trace-mapping` and `@jridgewell/gen-mapping` into the familiar source-map API
|
||||||
|
|
||||||
|
This isn't the full API, but it's the core functionality. This wraps
|
||||||
|
[@jridgewell/trace-mapping][trace-mapping] and [@jridgewell/gen-mapping][gen-mapping]
|
||||||
|
implementations.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/source-map
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
||||||
|
### SourceMapConsumer
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { SourceMapConsumer } from '@jridgewell/source-map';
|
||||||
|
const smc = new SourceMapConsumer({
|
||||||
|
version: 3,
|
||||||
|
names: ['foo'],
|
||||||
|
sources: ['input.js'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smc = new SourceMapConsumer(map);
|
||||||
|
smc.originalPositionFor({ line: 1, column: 0 });
|
||||||
|
```
|
||||||
|
|
||||||
|
### SourceMapGenerator
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { SourceMapGenerator } from '@jridgewell/source-map';
|
||||||
|
const smg = new SourceMapGenerator({
|
||||||
|
file: 'output.js',
|
||||||
|
sourceRoot: 'https://example.com/',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.addMapping(mapping)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.addMapping({
|
||||||
|
generated: { line: 1, column: 0 },
|
||||||
|
source: 'input.js',
|
||||||
|
original: { line: 1, column: 0 },
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.setSourceContent('input.js', 'foobar');
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.toJSON()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.toJSON(); // { version: 3, names: [], sources: [], mappings: '' }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.toDecodedMap()
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const smg = new SourceMapGenerator();
|
||||||
|
smg.toDecodedMap(); // { version: 3, names: [], sources: [], mappings: [] }
|
||||||
|
```
|
||||||
|
|
||||||
|
[trace-mapping]: https://github.com/jridgewell/trace-mapping/
|
||||||
|
[gen-mapping]: https://github.com/jridgewell/gen-mapping/
|
||||||
|
|
@ -0,0 +1,928 @@
|
||||||
|
const comma = ','.charCodeAt(0);
|
||||||
|
const semicolon = ';'.charCodeAt(0);
|
||||||
|
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||||
|
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||||
|
const charToInteger = new Uint8Array(128); // z is 122 in ASCII
|
||||||
|
for (let i = 0; i < chars.length; i++) {
|
||||||
|
const c = chars.charCodeAt(i);
|
||||||
|
charToInteger[c] = i;
|
||||||
|
intToChar[i] = c;
|
||||||
|
}
|
||||||
|
// Provide a fallback for older environments.
// Preference order: native TextDecoder (modern browsers/Node) -> Node Buffer ->
// a last-resort char-by-char decoder. The fallback is only correct for single-byte
// output, which is all the base64 VLQ alphabet produces.
const td = typeof TextDecoder !== 'undefined'
    ? new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                // Wrap without copying, then decode (Buffer defaults to utf8).
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
/**
 * Decodes a base64 VLQ `mappings` string into an array of lines, each line an
 * array of segments: [genColumn] or [genColumn, sourcesIndex, sourceLine,
 * sourceColumn] or the 5-tuple with a trailing namesIndex. Segments within a
 * line are sorted by generated column on output.
 */
function decode(mappings) {
    // Running VLQ deltas: [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
    const state = new Int32Array(5);
    const decoded = [];
    let line = [];
    let sorted = true;
    let lastCol = 0;
    for (let i = 0; i < mappings.length;) {
        const c = mappings.charCodeAt(i);
        if (c === comma) {
            i++;
        }
        else if (c === semicolon) {
            // End of a generated line: the column delta resets; sort if any
            // segment came out of order, then start a fresh line.
            state[0] = lastCol = 0;
            if (!sorted)
                sort(line);
            sorted = true;
            decoded.push(line);
            line = [];
            i++;
        }
        else {
            i = decodeInteger(mappings, i, state, 0); // generatedCodeColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            if (!hasMoreSegments(mappings, i)) {
                line.push([col]);
                continue;
            }
            i = decodeInteger(mappings, i, state, 1); // sourceFileIndex
            i = decodeInteger(mappings, i, state, 2); // sourceCodeLine
            i = decodeInteger(mappings, i, state, 3); // sourceCodeColumn
            if (!hasMoreSegments(mappings, i)) {
                line.push([col, state[1], state[2], state[3]]);
                continue;
            }
            i = decodeInteger(mappings, i, state, 4); // nameIndex
            line.push([col, state[1], state[2], state[3], state[4]]);
        }
    }
    // Flush the final (semicolon-unterminated) line.
    if (!sorted)
        sort(line);
    decoded.push(line);
    return decoded;
}
/**
 * Reads one base64 VLQ integer from `mappings` starting at `pos`, adds the
 * decoded delta into `state[j]`, and returns the position just past it.
 */
function decodeInteger(mappings, pos, state, j) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = mappings.charCodeAt(pos++);
        integer = charToInteger[c];
        value |= (integer & 31) << shift; // low 5 bits carry data
        shift += 5;
    } while (integer & 32); // bit 6 is the continuation flag
    const shouldNegate = value & 1; // bit 0 is the sign flag (zig-zag)
    value >>>= 1;
    if (shouldNegate) {
        // OR with INT32_MIN keeps -2147483648 representable after negation.
        value = -0x80000000 | -value;
    }
    state[j] += value;
    return pos;
}
/**
 * True when the char at `i` begins another VLQ field of the current segment
 * (i.e. it is neither a separator nor the end of the string).
 */
function hasMoreSegments(mappings, i) {
    if (i >= mappings.length)
        return false;
    const c = mappings.charCodeAt(i);
    return c !== comma && c !== semicolon;
}
/** Sorts a line's segments in place by generated column. */
function sort(line) {
    line.sort(sortComparator$1);
}
/** Ascending comparator on a segment's first element (the generated column). */
function sortComparator$1(a, b) {
    return a[0] - b[0];
}
/**
 * Encodes decoded mappings (array of lines of segments) back into a base64 VLQ
 * `mappings` string. Inverse of `decode`.
 */
function encode(decoded) {
    // Running deltas for the 5 VLQ fields.
    const state = new Int32Array(5);
    // Output is built as bytes and converted to a string once at the end.
    let buf = new Uint8Array(1024);
    let pos = 0;
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            buf = reserve(buf, pos, 1);
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        // The generated-column delta resets on every line.
        state[0] = 0;
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            buf = reserve(buf, pos, 36);
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // generatedCodeColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourceFileIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceCodeLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceCodeColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // nameIndex
        }
    }
    return td.decode(buf.subarray(0, pos));
}
/**
 * Ensures `buf` has room for `count` more bytes past `pos`, returning either
 * the original buffer or a grown copy of it.
 *
 * Fix: the original doubled the capacity exactly once, which is only
 * sufficient when `count` is small relative to the buffer. Grow geometrically
 * until the requested region actually fits, so the helper is safe for any
 * `count` while behaving identically for the small reservations `encode` makes.
 */
function reserve(buf, pos, count) {
    if (buf.length > pos + count)
        return buf;
    // Double until pos + count fits strictly inside the new capacity.
    let length = buf.length * 2;
    while (length <= pos + count)
        length *= 2;
    const swap = new Uint8Array(length);
    swap.set(buf);
    return swap;
}
/**
 * Writes `segment[j]` as a base64 VLQ delta (relative to `state[j]`) into
 * `buf` at `pos`. Updates `state[j]` and returns the new write position.
 */
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j];
    state[j] = next;
    // Zig-zag encode: the sign moves into bit 0.
    num = num < 0 ? (-num << 1) | 1 : num << 1;
    do {
        let clamped = num & 0b011111; // low 5 data bits
        num >>>= 5;
        if (num > 0)
            clamped |= 0b100000; // continuation bit
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
 * Matches the parts of a URL:
 * 1. Scheme, including ":", guaranteed.
 * 2. User/password, including "@", optional.
 * 3. Host, guaranteed.
 * 4. Port, including ":", optional.
 * 5. Path, including "/", optional.
 */
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?/;
/**
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
 * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
 *
 * 1. Host, optional.
 * 2. Path, which may include "/", guaranteed.
 */
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/]*)?)?(\/?.*)/i;
/** True when `input` begins with a scheme such as "http://". */
function isAbsoluteUrl(input) {
    return schemeRegex.test(input);
}
/** True for protocol-relative inputs like "//host/path". */
function isSchemeRelativeUrl(input) {
    return input.slice(0, 2) === '//';
}
/** True for absolute paths like "/foo/bar". */
function isAbsolutePath(input) {
    return input.slice(0, 1) === '/';
}
/** True for "file:" scheme URLs (case-sensitive, matching startsWith). */
function isFileUrl(input) {
    return input.slice(0, 5) === 'file:';
}
/**
 * Parses a URL that is known to carry a scheme; optional groups fall back to
 * '' (and '/' for the path).
 */
function parseAbsoluteUrl(input) {
    const parts = urlRegex.exec(input);
    return makeUrl(parts[1], parts[2] || '', parts[3], parts[4] || '', parts[5] || '/');
}
/**
 * Parses a "file:" URL into a URL record, forcing the path to be absolute.
 */
function parseFileUrl(input) {
    const match = fileRegex.exec(input);
    const path = match[2];
    // Host may be missing; the path is made absolute if it isn't already.
    return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path);
}
/**
 * Builds the plain record used to represent a parsed URL. `relativePath`
 * starts false; parseUrl flips it for relative inputs.
 */
function makeUrl(scheme, user, host, port, path) {
    const url = { scheme, user, host, port, path, relativePath: false };
    return url;
}
/**
 * Parses any URL/path input into a URL record. Non-absolute inputs are parsed
 * against a "http://foo.com" sentinel whose scheme/host are then cleared, so
 * only the path (and relativePath flag) survive.
 */
function parseUrl(input) {
    if (isSchemeRelativeUrl(input)) {
        const url = parseAbsoluteUrl('http:' + input);
        url.scheme = '';
        return url;
    }
    if (isAbsolutePath(input)) {
        const url = parseAbsoluteUrl('http://foo.com' + input);
        url.scheme = '';
        url.host = '';
        return url;
    }
    if (isFileUrl(input))
        return parseFileUrl(input);
    if (isAbsoluteUrl(input))
        return parseAbsoluteUrl(input);
    // Anything else is treated as a relative path.
    const url = parseAbsoluteUrl('http://foo.com/' + input);
    url.scheme = '';
    url.host = '';
    url.relativePath = true;
    return url;
}
/**
 * Removes everything after the final "/" (the filename), keeping the slash.
 */
function stripPathFilename(path) {
    // A path ending in ".." is all parent traversal — it names no file, so
    // there is nothing to strip.
    if (path.endsWith('/..'))
        return path;
    return path.slice(0, path.lastIndexOf('/') + 1);
}
/**
 * Merges a relative `url`'s path onto `base`'s directory, in place. No-op for
 * absolute urls.
 */
function mergePaths(url, base) {
    // If we're not a relative path, then we're an absolute path, and it doesn't matter what base is.
    if (!url.relativePath)
        return;
    normalizePath(base);
    // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
    // path).
    if (url.path === '/') {
        url.path = base.path;
    }
    else {
        // Resolution happens relative to the base path's directory, not the file.
        url.path = stripPathFilename(base.path) + url.path;
    }
    // If the base path is absolute, then our path is now absolute too.
    url.relativePath = base.relativePath;
}
/**
 * The path can have empty directories "//", unneeded parents "foo/..", or current directory
 * "foo/.". We need to normalize to a standard representation. Mutates `url.path`
 * in place, compacting pieces within the split array itself.
 */
function normalizePath(url) {
    const { relativePath } = url;
    const pieces = url.path.split('/');
    // We need to preserve the first piece always, so that we output a leading slash. The item at
    // pieces[0] is an empty string.
    let pointer = 1;
    // Positive is the number of real directories we've output, used for popping a parent directory.
    // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
    let positive = 0;
    // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
    // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
    // real directory, we won't need to append, unless the other conditions happen again.
    let addTrailingSlash = false;
    for (let i = 1; i < pieces.length; i++) {
        const piece = pieces[i];
        // An empty directory, could be a trailing slash, or just a double "//" in the path.
        if (!piece) {
            addTrailingSlash = true;
            continue;
        }
        // If we encounter a real directory, then we don't need to append anymore.
        addTrailingSlash = false;
        // A current directory, which we can always drop.
        if (piece === '.')
            continue;
        // A parent directory, we need to see if there are any real directories we can pop. Else, we
        // have an excess of parents, and we'll need to keep the "..".
        if (piece === '..') {
            if (positive) {
                addTrailingSlash = true;
                positive--;
                pointer--;
            }
            else if (relativePath) {
                // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
                // URL, protocol relative URL, or an absolute path, we don't need to keep excess.
                pieces[pointer++] = piece;
            }
            continue;
        }
        // We've encountered a real directory. Move it to the next insertion pointer, which accounts for
        // any popped or dropped directories.
        pieces[pointer++] = piece;
        positive++;
    }
    // Rebuild the path from the compacted pieces.
    let path = '';
    for (let i = 1; i < pointer; i++) {
        path += '/' + pieces[i];
    }
    if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
        path += '/';
    }
    url.path = path;
}
/**
 * Attempts to resolve `input` URL/path relative to `base`. Returns a string:
 * either an absolute URL, a protocol-relative URL, an absolute path, or a
 * relative path (when both input and base are relative).
 */
function resolve$1(input, base) {
    if (!input && !base)
        return '';
    const url = parseUrl(input);
    // If we have a base, and the input isn't already an absolute URL, then we need to merge.
    if (base && !url.scheme) {
        const baseUrl = parseUrl(base);
        url.scheme = baseUrl.scheme;
        // If there's no host, then we were just a path.
        if (!url.host) {
            // The host, user, and port are joined, you can't copy one without the others.
            url.user = baseUrl.user;
            url.host = baseUrl.host;
            url.port = baseUrl.port;
        }
        mergePaths(url, baseUrl);
    }
    normalizePath(url);
    // If the input (and base, if there was one) are both relative, then we need to output a relative.
    if (url.relativePath) {
        // The first char is always a "/".
        const path = url.path.slice(1);
        if (!path)
            return '.';
        // If base started with a leading ".", or there is no base and input started with a ".", then we
        // need to ensure that the relative path starts with a ".". We don't know if relative starts
        // with a "..", though, so check before prepending.
        const keepRelative = (base || input).startsWith('.');
        return !keepRelative || path.startsWith('.') ? path : './' + path;
    }
    // If there's no host (and no scheme/user/port), then we need to output an absolute path.
    if (!url.scheme && !url.host)
        return url.path;
    // We're outputting either an absolute URL, or a protocol relative one.
    return `${url.scheme}//${url.user}${url.host}${url.port}${url.path}`;
}
/**
 * Resolves `input` against `base`, treating a non-empty `base` as a directory.
 */
function resolve(input, base) {
    // The base is always treated as a directory, if it's not empty.
    // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
    // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
    if (base && !base.endsWith('/')) {
        base += '/';
    }
    return resolve$1(input, base);
}
/**
 * Removes everything after the last "/", but leaves the slash. Empty/nullish
 * input yields ''.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const cut = path.lastIndexOf('/') + 1;
    return path.slice(0, cut);
}
// Indexes into a decoded segment tuple:
// [COLUMN, SOURCES_INDEX, SOURCE_LINE, SOURCE_COLUMN, NAMES_INDEX]
const COLUMN$1 = 0;
const SOURCES_INDEX$1 = 1;
const SOURCE_LINE$1 = 2;
const SOURCE_COLUMN$1 = 3;
const NAMES_INDEX$1 = 4;
/**
 * Ensures every line of `mappings` is sorted by generated column, copying
 * (rather than mutating) when the array is not owned by us.
 */
function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    // Fast path: everything was already sorted.
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
/**
 * Index of the first unsorted line at or after `start`, or `mappings.length`
 * when every remaining line is already sorted.
 */
function nextUnsortedSegmentLine(mappings, start) {
    let i = start;
    while (i < mappings.length && isSorted(mappings[i]))
        i++;
    return i;
}
/**
 * True when the line's segments are in non-decreasing generated-column order.
 */
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        const prev = line[j - 1];
        const curr = line[j];
        if (curr[COLUMN$1] < prev[COLUMN$1])
            return false;
    }
    return true;
}
/**
 * Sorts a line's segments by generated column, copying first unless we own
 * the array.
 */
function sortSegments(line, owned) {
    const target = owned ? line : line.slice();
    return target.sort(sortComparator);
}
/** Ascending comparator on a segment's generated column. */
function sortComparator(a, b) {
    return a[COLUMN$1] - b[COLUMN$1];
}
// Side-channel flag set by binarySearch/memoizedBinarySearch: true when the
// last search hit an exact column match.
let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 *
 * Also sets the module-level `found` flag: true only on an exact match.
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        // Overflow-safe midpoint.
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN$1] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    return low - 1;
}
/**
 * Advances `index` forward to the last consecutive segment whose column equals
 * `needle`.
 */
function upperBound(haystack, needle, index) {
    let i = index;
    while (i + 1 < haystack.length && haystack[i + 1][COLUMN$1] === needle)
        i++;
    return i;
}
/**
 * Walks `index` backward to the first consecutive segment whose column equals
 * `needle`.
 */
function lowerBound(haystack, needle, index) {
    let i = index;
    while (i > 0 && haystack[i - 1][COLUMN$1] === needle)
        i--;
    return i;
}
/**
 * Fresh memo record for memoizedBinarySearch; -1 means "nothing cached yet".
 */
function memoizedState() {
    const state = {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
    return state;
}
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 * Updates `state` and the module-level `found` flag.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    if (key === lastKey) {
        if (needle === lastNeedle) {
            // Exact repeat of the last query: recompute `found` and reuse the index.
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
/**
 * Builds a TraceMap from either a plain sourcemap or an indexed ("sections")
 * sourcemap, flattening all sections into one combined map.
 */
const AnyMap = function (map, mapUrl) {
    const parsed = typeof map === 'string' ? JSON.parse(map) : map;
    if (!('sections' in parsed))
        return new TraceMap(parsed, mapUrl);
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    const { sections } = parsed;
    let i = 0;
    // Each section is clamped by the offset of the section that follows it.
    for (; i < sections.length - 1; i++) {
        const no = sections[i + 1].offset;
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
    }
    // The last section is unbounded.
    if (sections.length > 0) {
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
    }
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
    };
    // Sections were appended in order, so the joined mappings are presorted.
    return presortedDecodedMap(joined);
};
/**
 * Flattens one section of an indexed sourcemap into the joined output arrays,
 * shifting by the section's (line, column) offset and stopping where the next
 * section begins (stopLine, stopColumn).
 */
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
    const map = AnyMap(section.map, mapUrl);
    const { line: lineOffset, column: columnOffset } = section.offset;
    // Indexes in this section are rebased onto the joined arrays.
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources } = map;
    append(sources, resolvedSources);
    append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
    append(names, map.names);
    // If this section jumps forwards several lines, we need to add lines to the output mappings catch up.
    for (let i = mappings.length; i <= lineOffset; i++)
        mappings.push([]);
    // We can only add so many lines before we step into the range that the next section's map
    // controls. When we get to the last line, then we'll start checking the segments to see if
    // they've crossed into the column range.
    const stopI = stopLine - lineOffset;
    const len = Math.min(decoded.length, stopI + 1);
    for (let i = 0; i < len; i++) {
        const line = decoded[i];
        // On the 0th loop, the line will already exist due to a previous section, or the line catch up
        // loop above.
        const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN$1];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (i === stopI && column >= stopColumn)
                break;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX$1];
            const sourceLine = seg[SOURCE_LINE$1];
            const sourceColumn = seg[SOURCE_COLUMN$1];
            if (seg.length === 4) {
                out.push([column, sourcesIndex, sourceLine, sourceColumn]);
                continue;
            }
            out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX$1]]);
        }
    }
}
/**
 * Appends every element of `other` to `arr` one at a time (avoids the
 * argument-count limits of `push(...other)` on huge arrays).
 */
function append(arr, other) {
    for (const item of other) {
        arr.push(item);
    }
}
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
// sourcemap would desynchronize the sources/contents.
function fillSourcesContent(len) {
    return new Array(len).fill(null);
}
// Shared frozen sentinel returned when a generated position maps to nothing.
const INVALID_ORIGINAL_MAPPING = Object.freeze({
    source: null,
    line: null,
    column: null,
    name: null,
});
// NOTE(review): the result of this freeze is discarded — presumably leftover
// from the full build's generated-position sentinel; verify before removing.
Object.freeze({
    line: null,
    column: null,
});
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
// Bias values for position lookups (matching the `source-map` package's API).
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
// The following accessors are assigned in the initializer IIFE below.
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
let decodedMappings;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
let originalPositionFor;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
let presortedDecodedMap;
/**
 * Sourcemap tracer. Accepts a sourcemap object or JSON string; an encoded
 * mappings string is kept as-is and decoded lazily (see `decodedMappings`).
 */
class TraceMap {
    constructor(map, mapUrl) {
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
        const isString = typeof map === 'string';
        // Re-wrapping an existing TraceMap returns it unchanged.
        if (!isString && map.constructor === TraceMap)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        // Resolve each source against sourceRoot and/or the map's own URL.
        if (sourceRoot || mapUrl) {
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
        }
        else {
            this.resolvedSources = sources.map((s) => s || '');
        }
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            // Decoding is deferred until first needed.
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            // When we parsed the JSON ourselves we own the array and may sort in place.
            this._decoded = maybeSort(mappings, isString);
        }
    }
}
// Static-block stand-in: assigns the module-level accessors that need access
// to TraceMap internals.
(() => {
    decodedMappings = (map) => {
        // Decode lazily and cache on the instance.
        return (map._decoded || (map._decoded = decode(map._encoded)));
    };
    originalPositionFor = (map, { line, column, bias }) => {
        line--; // incoming line is 1-based; internal lines are 0-based
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return INVALID_ORIGINAL_MAPPING;
        const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return INVALID_ORIGINAL_MAPPING;
        // A 1-tuple segment maps the generated column to no original position.
        if (segment.length == 1)
            return INVALID_ORIGINAL_MAPPING;
        const { names, resolvedSources } = map;
        return {
            source: resolvedSources[segment[SOURCES_INDEX$1]],
            line: segment[SOURCE_LINE$1] + 1, // back to 1-based
            column: segment[SOURCE_COLUMN$1],
            name: segment.length === 5 ? names[segment[NAMES_INDEX$1]] : null,
        };
    };
    presortedDecodedMap = (map, mapUrl) => {
        // Clone with empty mappings so the constructor skips sorting, then splice
        // the caller's (already sorted) mappings in directly.
        const clone = Object.assign({}, map);
        clone.mappings = [];
        const tracer = new TraceMap(clone, mapUrl);
        tracer._decoded = map.mappings;
        return tracer;
    };
})();
/**
 * Finds the segment in `segments` matching `column`, honoring `bias` on exact
 * matches (LEAST_UPPER_BOUND walks right, the default walks left). Relies on
 * the module-level `found` flag set by memoizedBinarySearch. Returns null when
 * no segment applies.
 */
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++;
    // Out of range on either side means no mapping.
    if (index === -1 || index === segments.length)
        return null;
    return segments[index];
}
// Assigned in the initializer IIFE below.
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
let get;
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
let put;
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
class SetArray {
    constructor() {
        // Null-prototype dictionary: key lookups can never collide with
        // Object.prototype properties.
        this._indexes = Object.create(null);
        this.array = [];
    }
}
// Static-block stand-in: assigns SetArray's accessors.
(() => {
    get = (strarr, key) => strarr._indexes[key];
    put = (strarr, key) => {
        // The key may or may not be present. If it is present, it's a number.
        const index = get(strarr, key);
        if (index !== undefined)
            return index;
        // Not present: append the key and record its index.
        const { array, _indexes: indexes } = strarr;
        return (indexes[key] = array.push(key) - 1);
    };
})();
// Indexes into a generated segment tuple (gen-mapping side):
// [COLUMN, SOURCES_INDEX, SOURCE_LINE, SOURCE_COLUMN, NAMES_INDEX]
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;

// Sentinel names index meaning "segment has no associated name".
const NO_NAME = -1;
// The following accessors are assigned in the initializer IIFE below.
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
let maybeAddMapping;
/**
 * Adds/removes the content of the source file to the source map.
 */
let setSourceContent;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let toDecodedMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let toEncodedMap;
// This split declaration is only so that terser can eliminate the static initialization block.
let addSegmentInternal;
/**
 * Provides the state to generate a sourcemap.
 */
class GenMapping {
    constructor({ file, sourceRoot } = {}) {
        // Deduplicated names/sources; _sourcesContent stays index-aligned with _sources.
        this._names = new SetArray();
        this._sources = new SetArray();
        this._sourcesContent = [];
        // Array of lines, each an array of segments kept sorted by generated column.
        this._mappings = [];
        this.file = file;
        this.sourceRoot = sourceRoot;
    }
}
// Static-block stand-in: assigns the GenMapping accessors.
(() => {
    maybeAddMapping = (map, mapping) => {
        // skipable=true: redundant mappings are dropped.
        return addMappingInternal(true, map, mapping);
    };
    setSourceContent = (map, source, content) => {
        const { _sources: sources, _sourcesContent: sourcesContent } = map;
        // put() keeps sources/sourcesContent index-aligned.
        sourcesContent[put(sources, source)] = content;
    };
    toDecodedMap = (map) => {
        const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
        removeEmptyFinalLines(mappings);
        return {
            version: 3,
            file: file || undefined,
            names: names.array,
            sourceRoot: sourceRoot || undefined,
            sources: sources.array,
            sourcesContent,
            mappings,
        };
    };
    toEncodedMap = (map) => {
        const decoded = toDecodedMap(map);
        // Same map, with mappings VLQ-encoded to a string.
        return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
    };
    // Internal helpers
    addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name) => {
        const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
        const line = getLine(mappings, genLine);
        const index = getColumnIndex(line, genColumn);
        if (!source) {
            // Sourceless segment: only the generated column is recorded.
            if (skipable && skipSourceless(line, index))
                return;
            return insert(line, index, [genColumn]);
        }
        const sourcesIndex = put(sources, source);
        const namesIndex = name ? put(names, name) : NO_NAME;
        // Keep sourcesContent the same length as sources.
        if (sourcesIndex === sourcesContent.length)
            sourcesContent[sourcesIndex] = null;
        if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
            return;
        }
        return insert(line, index, name
            ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
            : [genColumn, sourcesIndex, sourceLine, sourceColumn]);
    };
})();
/**
 * Returns the segment array for generated line `index`, growing `mappings`
 * with empty lines as needed so the index always exists.
 */
function getLine(mappings, index) {
    while (mappings.length <= index) {
        mappings.push([]);
    }
    return mappings[index];
}
|
||||||
|
// Returns the insertion index for a segment at genColumn, keeping `line` sorted by
// column. Scans backwards because appends (monotonically increasing columns) are the
// common case; stops at the first existing segment with column <= genColumn.
function getColumnIndex(line, genColumn) {
    let index = line.length;
    for (let i = index - 1; i >= 0; index = i--) {
        const current = line[i];
        if (genColumn >= current[COLUMN])
            break;
    }
    return index;
}
|
||||||
|
/**
 * Inserts `value` into `array` at `index`, shifting later elements right.
 */
function insert(array, index, value) {
    array.splice(index, 0, value);
}
|
||||||
|
/**
 * Truncates trailing empty line arrays from `mappings` in place; interior
 * empty lines are preserved.
 */
function removeEmptyFinalLines(mappings) {
    let keep = mappings.length;
    while (keep > 0 && mappings[keep - 1].length === 0) {
        keep--;
    }
    if (keep < mappings.length) {
        mappings.length = keep;
    }
}
|
||||||
|
/**
 * Decides whether a sourceless segment inserted at `index` would be redundant.
 * The start of a line is already sourceless, so a leading sourceless segment adds
 * nothing; likewise a sourceless segment directly after another sourceless one
 * (length-1 segment) generates no new information. Otherwise it usefully
 * terminates the preceding source/named segment.
 */
function skipSourceless(line, index) {
    return index === 0 || line[index - 1].length === 1;
}
|
||||||
|
// Decides whether a sourced segment inserted at `index` would be redundant.
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
    // A source/named segment at the start of a line gives position at that genColumn
    if (index === 0)
        return false;
    const prev = line[index - 1];
    // If the previous segment is sourceless, then we're transitioning to a source.
    if (prev.length === 1)
        return false;
    // If the previous segment maps to the exact same source position (including the
    // name, or lack of one), then this segment doesn't provide any new information.
    return (sourcesIndex === prev[SOURCES_INDEX] &&
        sourceLine === prev[SOURCE_LINE] &&
        sourceColumn === prev[SOURCE_COLUMN] &&
        namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
}
|
||||||
|
/**
 * Translates a source-map-library style mapping object (1-based generated/original
 * lines) into an addSegmentInternal call (0-based lines).
 */
function addMappingInternal(skipable, map, mapping) {
    const { generated, source, original, name } = mapping;
    if (source) {
        return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name);
    }
    return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null);
}
|
||||||
|
|
||||||
|
/**
 * `source-map`-compatible consumer facade over a TraceMap (built via AnyMap, so
 * both plain and sectioned/index maps are accepted).
 */
class SourceMapConsumer {
    constructor(map, mapUrl) {
        const trace = (this._map = new AnyMap(map, mapUrl));
        this.file = trace.file;
        this.names = trace.names;
        this.sourceRoot = trace.sourceRoot;
        this.sources = trace.resolvedSources;
        this.sourcesContent = trace.sourcesContent;
    }
    // Delegates to the module-level originalPositionFor (the method name shadows it
    // only inside this class body).
    originalPositionFor(needle) {
        return originalPositionFor(this._map, needle);
    }
    destroy() {
        // noop. Kept for API compatibility with the original `source-map` consumer.
    }
}
|
||||||
|
/**
 * `source-map`-compatible generator facade over a GenMapping.
 */
class SourceMapGenerator {
    constructor(opts) {
        this._map = new GenMapping(opts);
    }
    // Adds a mapping; redundant segments are silently skipped.
    addMapping(mapping) {
        maybeAddMapping(this._map, mapping);
    }
    setSourceContent(source, content) {
        setSourceContent(this._map, source, content);
    }
    // Called implicitly by JSON.stringify; returns the encoded (string mappings) map.
    toJSON() {
        return toEncodedMap(this._map);
    }
    toDecodedMap() {
        return toDecodedMap(this._map);
    }
}
|
||||||
|
|
||||||
|
export { SourceMapConsumer, SourceMapGenerator };
|
||||||
|
//# sourceMappingURL=source-map.mjs.map
|
||||||
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,939 @@
|
||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourceMap = {}));
|
||||||
|
})(this, (function (exports) { 'use strict';
|
||||||
|
|
||||||
|
// Base64 VLQ alphabet plus the two structural delimiters used in `mappings` strings.
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
// Bidirectional lookup tables between sextet values (0-63) and ASCII char codes.
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInteger = new Uint8Array(128); // z is 122 in ASCII
[...chars].forEach((char, sextet) => {
    const code = char.charCodeAt(0);
    charToInteger[code] = sextet;
    intToChar[sextet] = code;
});
|
||||||
|
// Provide a fallback for older environments.
// Text decoder used by encode() to turn the ASCII output buffer into a string:
// prefer the platform TextDecoder, then Node's Buffer, then a char-by-char loop
// (safe here because the buffer only ever holds ASCII base64-VLQ characters).
const td = typeof TextDecoder !== 'undefined'
    ? new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                // Wrap the underlying ArrayBuffer without copying, respecting the view's bounds.
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
|
||||||
|
// Decodes a VLQ `mappings` string into an array of lines of segments.
// `state` holds the 5 running deltas (genColumn, sourcesIndex, sourceLine,
// sourceColumn, namesIndex); all but genColumn carry across lines, genColumn
// resets at each semicolon.
function decode(mappings) {
    const state = new Int32Array(5);
    const decoded = [];
    let line = [];
    let sorted = true;
    let lastCol = 0;
    for (let i = 0; i < mappings.length;) {
        const c = mappings.charCodeAt(i);
        if (c === comma) {
            i++;
        }
        else if (c === semicolon) {
            // End of a generated line: reset the column delta, sort if any segment
            // went backwards, and start a new line.
            state[0] = lastCol = 0;
            if (!sorted)
                sort(line);
            sorted = true;
            decoded.push(line);
            line = [];
            i++;
        }
        else {
            i = decodeInteger(mappings, i, state, 0); // generatedCodeColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            // Segments are 1, 4, or 5 fields long; stop at the next delimiter.
            if (!hasMoreSegments(mappings, i)) {
                line.push([col]);
                continue;
            }
            i = decodeInteger(mappings, i, state, 1); // sourceFileIndex
            i = decodeInteger(mappings, i, state, 2); // sourceCodeLine
            i = decodeInteger(mappings, i, state, 3); // sourceCodeColumn
            if (!hasMoreSegments(mappings, i)) {
                line.push([col, state[1], state[2], state[3]]);
                continue;
            }
            i = decodeInteger(mappings, i, state, 4); // nameIndex
            line.push([col, state[1], state[2], state[3], state[4]]);
        }
    }
    // Flush the final line (there is no trailing semicolon).
    if (!sorted)
        sort(line);
    decoded.push(line);
    return decoded;
}
|
||||||
|
// Decodes one base64 VLQ value starting at `pos`, adds the (signed, relative)
// result into state[j], and returns the position just past the value.
function decodeInteger(mappings, pos, state, j) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = mappings.charCodeAt(pos++);
        integer = charToInteger[c];
        // Low 5 bits are payload; bit 6 (32) is the continuation flag.
        value |= (integer & 31) << shift;
        shift += 5;
    } while (integer & 32);
    // Lowest payload bit is the sign flag.
    const shouldNegate = value & 1;
    value >>>= 1;
    if (shouldNegate) {
        // -0x80000000 | -value keeps -0x80000000 representable (plain negation of 0
        // would also lose the distinction); matches upstream codec behavior.
        value = -0x80000000 | -value;
    }
    state[j] += value;
    return pos;
}
|
||||||
|
/**
 * Returns true when position `i` continues the current segment (i.e. it is not
 * the end of the string, a comma, or a semicolon).
 */
function hasMoreSegments(mappings, i) {
    if (i >= mappings.length)
        return false;
    const c = mappings.charCodeAt(i);
    return c !== comma && c !== semicolon;
}
|
||||||
|
// Sorts a line's segments in place by generated column.
function sort(line) {
    line.sort(sortComparator$1);
}
|
||||||
|
/**
 * Orders segments by generated column (field 0).
 */
function sortComparator$1(a, b) {
    const [aColumn] = a;
    const [bColumn] = b;
    return aColumn - bColumn;
}
|
||||||
|
// Encodes decoded mappings (array of lines of segments) into a VLQ string,
// writing ASCII bytes into a growable Uint8Array and decoding it at the end.
function encode(decoded) {
    // Running deltas for the 5 VLQ fields; genColumn (state[0]) resets per line.
    const state = new Int32Array(5);
    let buf = new Uint8Array(1024);
    let pos = 0;
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            buf = reserve(buf, pos, 1);
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        state[0] = 0;
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            buf = reserve(buf, pos, 36);
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // generatedCodeColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourceFileIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceCodeLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceCodeColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // nameIndex
        }
    }
    return td.decode(buf.subarray(0, pos));
}
|
||||||
|
/**
 * Ensures `buf` can hold `count` more bytes past `pos`; returns `buf` unchanged
 * when it already fits, otherwise a doubled copy.
 */
function reserve(buf, pos, count) {
    const needed = pos + count;
    if (needed < buf.length)
        return buf;
    const doubled = new Uint8Array(buf.length * 2);
    doubled.set(buf);
    return doubled;
}
|
||||||
|
// Encodes segment[j] as a base64 VLQ relative to the running state, writes the
// chars into `buf` at `pos`, updates state[j], and returns the new position.
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j];
    state[j] = next;
    // Zig-zag: low bit is the sign, remaining bits the magnitude.
    num = num < 0 ? (-num << 1) | 1 : num << 1;
    do {
        let clamped = num & 0b011111;
        num >>>= 5;
        // Bit 6 marks "more sextets follow".
        if (num > 0)
            clamped |= 0b100000;
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}
|
||||||
|
|
||||||
|
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
 * Matches the parts of a URL:
 * 1. Scheme, including ":", guaranteed.
 * 2. User/password, including "@", optional.
 * 3. Host, guaranteed.
 * 4. Port, including ":", optional.
 * 5. Path, including "/", optional.
 */
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?/;
/**
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
 * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
 *
 * 1. Host, optional.
 * 2. Path, which may include "/", guaranteed.
 */
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/]*)?)?(\/?.*)/i;
|
||||||
|
// True when `input` starts with a scheme like "http://".
function isAbsoluteUrl(input) {
    return schemeRegex.test(input);
}
|
||||||
|
/**
 * True for protocol-relative URLs like "//example.com/x".
 */
function isSchemeRelativeUrl(input) {
    return input.slice(0, 2) === '//';
}
|
||||||
|
/**
 * True for root-relative paths like "/foo/bar".
 */
function isAbsolutePath(input) {
    return input.charAt(0) === '/';
}
|
||||||
|
/**
 * True for "file:" scheme URLs (which may omit the "//").
 */
function isFileUrl(input) {
    return input.slice(0, 5) === 'file:';
}
|
||||||
|
// Splits an absolute URL into its parts; missing optional groups become '',
// and a missing path defaults to '/'.
function parseAbsoluteUrl(input) {
    const match = urlRegex.exec(input);
    return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/');
}
|
||||||
|
// Splits a "file:" URL; the path is forced to start with "/" (Windows-drive paths
// like "file:c:/x" come through without one).
function parseFileUrl(input) {
    const match = fileRegex.exec(input);
    const path = match[2];
    return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path);
}
|
||||||
|
/**
 * Builds the internal URL record used throughout resolution. `relativePath`
 * starts false; parseUrl flips it for relative inputs.
 */
function makeUrl(scheme, user, host, port, path) {
    return { scheme, user, host, port, path, relativePath: false };
}
|
||||||
|
// Parses any input (absolute URL, file URL, protocol-relative URL, absolute or
// relative path) into a URL record. Relative inputs are parsed by grafting them
// onto a dummy "http://foo.com" base, then blanking the borrowed parts. Branch
// order matters: "//x" would also match the absolute-path test.
function parseUrl(input) {
    if (isSchemeRelativeUrl(input)) {
        const url = parseAbsoluteUrl('http:' + input);
        url.scheme = '';
        return url;
    }
    if (isAbsolutePath(input)) {
        const url = parseAbsoluteUrl('http://foo.com' + input);
        url.scheme = '';
        url.host = '';
        return url;
    }
    if (isFileUrl(input))
        return parseFileUrl(input);
    if (isAbsoluteUrl(input))
        return parseAbsoluteUrl(input);
    // Anything else is a relative path.
    const url = parseAbsoluteUrl('http://foo.com/' + input);
    url.scheme = '';
    url.host = '';
    url.relativePath = true;
    return url;
}
|
||||||
|
/**
 * Drops the filename component of `path`, keeping the trailing slash.
 * A path ending in a parent directory ".." is a relative path with excess
 * parents — not a file — so it is returned unchanged.
 */
function stripPathFilename(path) {
    if (path.endsWith('/..'))
        return path;
    const lastSlash = path.lastIndexOf('/');
    return path.slice(0, lastSlash + 1);
}
|
||||||
|
// Merges a relative url's path onto `base` in place (no-op for absolute paths).
// Note: `base` is normalized in place as a side effect.
function mergePaths(url, base) {
    // If we're not a relative path, then we're an absolute path, and it doesn't matter what base is.
    if (!url.relativePath)
        return;
    normalizePath(base);
    // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
    // path).
    if (url.path === '/') {
        url.path = base.path;
    }
    else {
        // Resolution happens relative to the base path's directory, not the file.
        url.path = stripPathFilename(base.path) + url.path;
    }
    // If the base path is absolute, then our path is now absolute too.
    url.relativePath = base.relativePath;
}
|
||||||
|
/**
 * The path can have empty directories "//", unneeded parents "foo/..", or current directory
 * "foo/.". We need to normalize to a standard representation. Works in place on url.path,
 * compacting kept pieces toward the front of the split array via `pointer`.
 */
function normalizePath(url) {
    const { relativePath } = url;
    const pieces = url.path.split('/');
    // We need to preserve the first piece always, so that we output a leading slash. The item at
    // pieces[0] is an empty string.
    let pointer = 1;
    // Positive is the number of real directories we've output, used for popping a parent directory.
    // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
    let positive = 0;
    // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
    // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
    // real directory, we won't need to append, unless the other conditions happen again.
    let addTrailingSlash = false;
    for (let i = 1; i < pieces.length; i++) {
        const piece = pieces[i];
        // An empty directory, could be a trailing slash, or just a double "//" in the path.
        if (!piece) {
            addTrailingSlash = true;
            continue;
        }
        // If we encounter a real directory, then we don't need to append anymore.
        addTrailingSlash = false;
        // A current directory, which we can always drop.
        if (piece === '.')
            continue;
        // A parent directory, we need to see if there are any real directories we can pop. Else, we
        // have an excess of parents, and we'll need to keep the "..".
        if (piece === '..') {
            if (positive) {
                addTrailingSlash = true;
                positive--;
                pointer--;
            }
            else if (relativePath) {
                // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
                // URL, protocol relative URL, or an absolute path, we don't need to keep excess.
                pieces[pointer++] = piece;
            }
            continue;
        }
        // We've encountered a real directory. Move it to the next insertion pointer, which accounts for
        // any popped or dropped directories.
        pieces[pointer++] = piece;
        positive++;
    }
    // Rebuild the path from the kept pieces; always starts with "/".
    let path = '';
    for (let i = 1; i < pointer; i++) {
        path += '/' + pieces[i];
    }
    if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
        path += '/';
    }
    url.path = path;
}
|
||||||
|
/**
 * Attempts to resolve `input` URL/path relative to `base`, returning a string in
 * the same "flavor" as the inputs (relative path, absolute path, protocol-relative
 * URL, or absolute URL).
 */
function resolve$1(input, base) {
    if (!input && !base)
        return '';
    const url = parseUrl(input);
    // If we have a base, and the input isn't already an absolute URL, then we need to merge.
    if (base && !url.scheme) {
        const baseUrl = parseUrl(base);
        url.scheme = baseUrl.scheme;
        // If there's no host, then we were just a path.
        if (!url.host) {
            // The host, user, and port are joined, you can't copy one without the others.
            url.user = baseUrl.user;
            url.host = baseUrl.host;
            url.port = baseUrl.port;
        }
        mergePaths(url, baseUrl);
    }
    normalizePath(url);
    // If the input (and base, if there was one) are both relative, then we need to output a relative.
    if (url.relativePath) {
        // The first char is always a "/".
        const path = url.path.slice(1);
        if (!path)
            return '.';
        // If base started with a leading ".", or there is no base and input started with a ".", then we
        // need to ensure that the relative path starts with a ".". We don't know if relative starts
        // with a "..", though, so check before prepending.
        const keepRelative = (base || input).startsWith('.');
        return !keepRelative || path.startsWith('.') ? path : './' + path;
    }
    // If there's no host (and no scheme/user/port), then we need to output an absolute path.
    if (!url.scheme && !url.host)
        return url.path;
    // We're outputting either an absolute URL, or a protocol relative one.
    return `${url.scheme}//${url.user}${url.host}${url.port}${url.path}`;
}
|
||||||
|
|
||||||
|
// Sourcemap-flavored resolve: the base is always treated as a directory, if it's not empty.
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
function resolve(input, base) {
    if (base && !base.endsWith('/'))
        base += '/';
    return resolve$1(input, base);
}
|
||||||
|
|
||||||
|
/**
 * Removes everything after the last "/", but leaves the slash. Nullish or empty
 * input yields ''.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const lastSlash = path.lastIndexOf('/');
    return path.slice(0, lastSlash + 1);
}
|
||||||
|
|
||||||
|
// Field offsets within a decoded mapping segment tuple:
// [genColumn] | [genColumn, sourcesIndex, sourceLine, sourceColumn] |
// [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
const COLUMN$1 = 0;
const SOURCES_INDEX$1 = 1;
const SOURCE_LINE$1 = 2;
const SOURCE_COLUMN$1 = 3;
const NAMES_INDEX$1 = 4;
|
||||||
|
|
||||||
|
// Ensures every line's segments are sorted by generated column, sorting lazily
// and only copying when the input isn't owned.
function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    // Fast path: already fully sorted.
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
|
||||||
|
/**
 * Returns the index of the first unsorted line at or after `start`, or
 * `mappings.length` when all remaining lines are sorted.
 */
function nextUnsortedSegmentLine(mappings, start) {
    let i = start;
    while (i < mappings.length && isSorted(mappings[i])) {
        i++;
    }
    return i;
}
|
||||||
|
// True when the line's segments are non-decreasing by generated column.
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        if (line[j][COLUMN$1] < line[j - 1][COLUMN$1]) {
            return false;
        }
    }
    return true;
}
|
||||||
|
// Sorts one line's segments by column, copying first when the line isn't owned.
function sortSegments(line, owned) {
    if (!owned)
        line = line.slice();
    return line.sort(sortComparator);
}
|
||||||
|
// Orders segments by generated column.
function sortComparator(a, b) {
    return a[COLUMN$1] - b[COLUMN$1];
}
|
||||||
|
|
||||||
|
// Out-of-band flag set by binarySearch/memoizedBinarySearch: true iff the last
// search landed on an exact column match. Read immediately after searching.
let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN$1] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    // May be -1 when the needle sorts before every element.
    return low - 1;
}
|
||||||
|
// Advances `index` to the LAST segment whose column equals `needle` (duplicates
// of the same column can exist).
function upperBound(haystack, needle, index) {
    for (let i = index + 1; i < haystack.length; i++, index++) {
        if (haystack[i][COLUMN$1] !== needle)
            break;
    }
    return index;
}
|
||||||
|
// Rewinds `index` to the FIRST segment whose column equals `needle`.
function lowerBound(haystack, needle, index) {
    for (let i = index - 1; i >= 0; i--, index--) {
        if (haystack[i][COLUMN$1] !== needle)
            break;
    }
    return index;
}
|
||||||
|
/**
 * Creates a fresh memo record for memoizedBinarySearch; -1 means "no previous
 * search" for each field.
 */
function memoizedState() {
    const unset = -1;
    return { lastKey: unset, lastNeedle: unset, lastIndex: unset };
}
|
||||||
|
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 * `key` identifies the haystack (the generated line); the memo only applies when
 * searching the same line again.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    if (key === lastKey) {
        if (needle === lastNeedle) {
            // Exact repeat: recompute the `found` flag and return the cached index.
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
|
||||||
|
|
||||||
|
// Builds a TraceMap from either a plain sourcemap or a sectioned ("index") map.
// Sections are flattened into one decoded map, with each section clipped at the
// next section's offset.
const AnyMap = function (map, mapUrl) {
    const parsed = typeof map === 'string' ? JSON.parse(map) : map;
    if (!('sections' in parsed))
        return new TraceMap(parsed, mapUrl);
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    const { sections } = parsed;
    let i = 0;
    // Each section (except the last) stops where the next section begins.
    for (; i < sections.length - 1; i++) {
        const no = sections[i + 1].offset;
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
    }
    // The final section extends to the end of the generated file.
    if (sections.length > 0) {
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
    }
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
    };
    // Sections are processed in order, so the joined mappings are already sorted.
    return presortedDecodedMap(joined);
};
|
||||||
|
// Flattens one section of an index map into the joined output arrays, offsetting
// generated positions by the section's offset and clipping at stopLine/stopColumn.
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
    // Sections can themselves be index maps; recurse through AnyMap.
    const map = AnyMap(section.map, mapUrl);
    const { line: lineOffset, column: columnOffset } = section.offset;
    // Segment indices into sources/names must be shifted by what's already joined.
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources } = map;
    append(sources, resolvedSources);
    append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
    append(names, map.names);
    // If this section jumps forwards several lines, we need to add lines to the output mappings catch up.
    for (let i = mappings.length; i <= lineOffset; i++)
        mappings.push([]);
    // We can only add so many lines before we step into the range that the next section's map
    // controls. When we get to the last line, then we'll start checking the segments to see if
    // they've crossed into the column range.
    const stopI = stopLine - lineOffset;
    const len = Math.min(decoded.length, stopI + 1);
    for (let i = 0; i < len; i++) {
        const line = decoded[i];
        // On the 0th loop, the line will already exist due to a previous section, or the line catch up
        // loop above.
        const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN$1];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (i === stopI && column >= stopColumn)
                break;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX$1];
            const sourceLine = seg[SOURCE_LINE$1];
            const sourceColumn = seg[SOURCE_COLUMN$1];
            if (seg.length === 4) {
                out.push([column, sourcesIndex, sourceLine, sourceColumn]);
                continue;
            }
            out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX$1]]);
        }
    }
}
|
||||||
|
/**
 * Appends every element of `other` onto `arr` in order (element-by-element, so
 * very large `other` arrays can't overflow the argument stack like a spread would).
 */
function append(arr, other) {
    for (const item of other) {
        arr.push(item);
    }
}
|
||||||
|
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
// sourcemap would desynchronize the sources/contents.
function fillSourcesContent(len) {
    return new Array(len).fill(null);
}
|
||||||
|
|
||||||
|
// Shared frozen sentinel returned by originalPositionFor when no mapping exists.
const INVALID_ORIGINAL_MAPPING = Object.freeze({
    source: null,
    line: null,
    column: null,
    name: null,
});
// NOTE(review): this freeze's result is discarded — presumably a tree-shaking
// leftover of an unused generated-position sentinel; confirm against upstream.
Object.freeze({
    line: null,
    column: null,
});
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
// Bias values matching the original `source-map` library's constants.
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
let decodedMappings;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
let originalPositionFor;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
let presortedDecodedMap;
// The implementations are assigned in the IIFE after TraceMap so they can access
// its underscore-prefixed fields while staying tree-shakeable.
|
||||||
|
/**
 * Read-oriented wrapper around a (plain, non-sectioned) sourcemap. Mappings are
 * decoded lazily: string mappings stay encoded until first use.
 */
class TraceMap {
    constructor(map, mapUrl) {
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
        const isString = typeof map === 'string';
        // Returning an object from a constructor overrides `this`: passing an
        // existing TraceMap reuses it instead of re-parsing.
        if (!isString && map.constructor === TraceMap)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        if (sourceRoot || mapUrl) {
            // Resolve each source against sourceRoot, itself resolved against the
            // directory of the map's URL.
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
        }
        else {
            this.resolvedSources = sources.map((s) => s || '');
        }
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            // Decode lazily (see decodedMappings).
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            // Only JSON-parsed (string) input is owned and may be sorted in place.
            this._decoded = maybeSort(mappings, isString);
        }
    }
}
|
||||||
|
(() => {
|
||||||
|
decodedMappings = (map) => {
|
||||||
|
return (map._decoded || (map._decoded = decode(map._encoded)));
|
||||||
|
};
|
||||||
|
originalPositionFor = (map, { line, column, bias }) => {
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return INVALID_ORIGINAL_MAPPING;
|
||||||
|
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (segment == null)
|
||||||
|
return INVALID_ORIGINAL_MAPPING;
|
||||||
|
if (segment.length == 1)
|
||||||
|
return INVALID_ORIGINAL_MAPPING;
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return {
|
||||||
|
source: resolvedSources[segment[SOURCES_INDEX$1]],
|
||||||
|
line: segment[SOURCE_LINE$1] + 1,
|
||||||
|
column: segment[SOURCE_COLUMN$1],
|
||||||
|
name: segment.length === 5 ? names[segment[NAMES_INDEX$1]] : null,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
presortedDecodedMap = (map, mapUrl) => {
|
||||||
|
const clone = Object.assign({}, map);
|
||||||
|
clone.mappings = [];
|
||||||
|
const tracer = new TraceMap(clone, mapUrl);
|
||||||
|
tracer._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
|
if (found) {
|
||||||
|
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||||
|
}
|
||||||
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
|
index++;
|
||||||
|
if (index === -1 || index === segments.length)
|
||||||
|
return null;
|
||||||
|
return segments[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||||
|
*/
|
||||||
|
let get;
|
||||||
|
/**
|
||||||
|
* Puts `key` into the backing array, if it is not already present. Returns
|
||||||
|
* the index of the `key` in the backing array.
|
||||||
|
*/
|
||||||
|
let put;
|
||||||
|
/**
|
||||||
|
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||||
|
* index of the `key` in the backing array.
|
||||||
|
*
|
||||||
|
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||||
|
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||||
|
* and there are never duplicates.
|
||||||
|
*/
|
||||||
|
class SetArray {
|
||||||
|
constructor() {
|
||||||
|
this._indexes = { __proto__: null };
|
||||||
|
this.array = [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(() => {
|
||||||
|
get = (strarr, key) => strarr._indexes[key];
|
||||||
|
put = (strarr, key) => {
|
||||||
|
// The key may or may not be present. If it is present, it's a number.
|
||||||
|
const index = get(strarr, key);
|
||||||
|
if (index !== undefined)
|
||||||
|
return index;
|
||||||
|
const { array, _indexes: indexes } = strarr;
|
||||||
|
return (indexes[key] = array.push(key) - 1);
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
|
||||||
|
const NO_NAME = -1;
|
||||||
|
/**
|
||||||
|
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||||
|
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||||
|
* not add a mapping with a lower generated line/column than one that came before.
|
||||||
|
*/
|
||||||
|
let maybeAddMapping;
|
||||||
|
/**
|
||||||
|
* Adds/removes the content of the source file to the source map.
|
||||||
|
*/
|
||||||
|
let setSourceContent;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
let toDecodedMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
let toEncodedMap;
|
||||||
|
// This split declaration is only so that terser can elminiate the static initialization block.
|
||||||
|
let addSegmentInternal;
|
||||||
|
/**
|
||||||
|
* Provides the state to generate a sourcemap.
|
||||||
|
*/
|
||||||
|
class GenMapping {
|
||||||
|
constructor({ file, sourceRoot } = {}) {
|
||||||
|
this._names = new SetArray();
|
||||||
|
this._sources = new SetArray();
|
||||||
|
this._sourcesContent = [];
|
||||||
|
this._mappings = [];
|
||||||
|
this.file = file;
|
||||||
|
this.sourceRoot = sourceRoot;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(() => {
|
||||||
|
maybeAddMapping = (map, mapping) => {
|
||||||
|
return addMappingInternal(true, map, mapping);
|
||||||
|
};
|
||||||
|
setSourceContent = (map, source, content) => {
|
||||||
|
const { _sources: sources, _sourcesContent: sourcesContent } = map;
|
||||||
|
sourcesContent[put(sources, source)] = content;
|
||||||
|
};
|
||||||
|
toDecodedMap = (map) => {
|
||||||
|
const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
|
||||||
|
removeEmptyFinalLines(mappings);
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: file || undefined,
|
||||||
|
names: names.array,
|
||||||
|
sourceRoot: sourceRoot || undefined,
|
||||||
|
sources: sources.array,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
toEncodedMap = (map) => {
|
||||||
|
const decoded = toDecodedMap(map);
|
||||||
|
return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
|
||||||
|
};
|
||||||
|
// Internal helpers
|
||||||
|
addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name) => {
|
||||||
|
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
|
||||||
|
const line = getLine(mappings, genLine);
|
||||||
|
const index = getColumnIndex(line, genColumn);
|
||||||
|
if (!source) {
|
||||||
|
if (skipable && skipSourceless(line, index))
|
||||||
|
return;
|
||||||
|
return insert(line, index, [genColumn]);
|
||||||
|
}
|
||||||
|
const sourcesIndex = put(sources, source);
|
||||||
|
const namesIndex = name ? put(names, name) : NO_NAME;
|
||||||
|
if (sourcesIndex === sourcesContent.length)
|
||||||
|
sourcesContent[sourcesIndex] = null;
|
||||||
|
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return insert(line, index, name
|
||||||
|
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
function getLine(mappings, index) {
|
||||||
|
for (let i = mappings.length; i <= index; i++) {
|
||||||
|
mappings[i] = [];
|
||||||
|
}
|
||||||
|
return mappings[index];
|
||||||
|
}
|
||||||
|
function getColumnIndex(line, genColumn) {
|
||||||
|
let index = line.length;
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
const current = line[i];
|
||||||
|
if (genColumn >= current[COLUMN])
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
function removeEmptyFinalLines(mappings) {
|
||||||
|
const { length } = mappings;
|
||||||
|
let len = length;
|
||||||
|
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||||
|
if (mappings[i].length > 0)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (len < length)
|
||||||
|
mappings.length = len;
|
||||||
|
}
|
||||||
|
function skipSourceless(line, index) {
|
||||||
|
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||||
|
// doesn't generate any useful information.
|
||||||
|
if (index === 0)
|
||||||
|
return true;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||||
|
// genrate any new information. Else, this segment will end the source/named segment and point to
|
||||||
|
// a sourceless position, which is useful.
|
||||||
|
return prev.length === 1;
|
||||||
|
}
|
||||||
|
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||||
|
// A source/named segment at the start of a line gives position at that genColumn
|
||||||
|
if (index === 0)
|
||||||
|
return false;
|
||||||
|
const prev = line[index - 1];
|
||||||
|
// If the previous segment is sourceless, then we're transitioning to a source.
|
||||||
|
if (prev.length === 1)
|
||||||
|
return false;
|
||||||
|
// If the previous segment maps to the exact same source position, then this segment doesn't
|
||||||
|
// provide any new position information.
|
||||||
|
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
||||||
|
sourceLine === prev[SOURCE_LINE] &&
|
||||||
|
sourceColumn === prev[SOURCE_COLUMN] &&
|
||||||
|
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
||||||
|
}
|
||||||
|
function addMappingInternal(skipable, map, mapping) {
|
||||||
|
const { generated, source, original, name } = mapping;
|
||||||
|
if (!source) {
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null);
|
||||||
|
}
|
||||||
|
const s = source;
|
||||||
|
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
class SourceMapConsumer {
|
||||||
|
constructor(map, mapUrl) {
|
||||||
|
const trace = (this._map = new AnyMap(map, mapUrl));
|
||||||
|
this.file = trace.file;
|
||||||
|
this.names = trace.names;
|
||||||
|
this.sourceRoot = trace.sourceRoot;
|
||||||
|
this.sources = trace.resolvedSources;
|
||||||
|
this.sourcesContent = trace.sourcesContent;
|
||||||
|
}
|
||||||
|
originalPositionFor(needle) {
|
||||||
|
return originalPositionFor(this._map, needle);
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
// noop.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
class SourceMapGenerator {
|
||||||
|
constructor(opts) {
|
||||||
|
this._map = new GenMapping(opts);
|
||||||
|
}
|
||||||
|
addMapping(mapping) {
|
||||||
|
maybeAddMapping(this._map, mapping);
|
||||||
|
}
|
||||||
|
setSourceContent(source, content) {
|
||||||
|
setSourceContent(this._map, source, content);
|
||||||
|
}
|
||||||
|
toJSON() {
|
||||||
|
return toEncodedMap(this._map);
|
||||||
|
}
|
||||||
|
toDecodedMap() {
|
||||||
|
return toDecodedMap(this._map);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.SourceMapConsumer = SourceMapConsumer;
|
||||||
|
exports.SourceMapGenerator = SourceMapGenerator;
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=source-map.umd.js.map
|
||||||
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,25 @@
|
||||||
|
import { AnyMap, originalPositionFor } from '@jridgewell/trace-mapping';
|
||||||
|
import { GenMapping, maybeAddMapping, toDecodedMap, toEncodedMap, setSourceContent } from '@jridgewell/gen-mapping';
|
||||||
|
import type { TraceMap, SectionedSourceMapInput } from '@jridgewell/trace-mapping';
|
||||||
|
export type { TraceMap, SectionedSourceMapInput };
|
||||||
|
import type { Mapping, EncodedSourceMap, DecodedSourceMap } from '@jridgewell/gen-mapping';
|
||||||
|
export type { Mapping, EncodedSourceMap, DecodedSourceMap };
|
||||||
|
export declare class SourceMapConsumer {
|
||||||
|
private _map;
|
||||||
|
file: TraceMap['file'];
|
||||||
|
names: TraceMap['names'];
|
||||||
|
sourceRoot: TraceMap['sourceRoot'];
|
||||||
|
sources: TraceMap['sources'];
|
||||||
|
sourcesContent: TraceMap['sourcesContent'];
|
||||||
|
constructor(map: ConstructorParameters<typeof AnyMap>[0], mapUrl: Parameters<typeof AnyMap>[1]);
|
||||||
|
originalPositionFor(needle: Parameters<typeof originalPositionFor>[1]): ReturnType<typeof originalPositionFor>;
|
||||||
|
destroy(): void;
|
||||||
|
}
|
||||||
|
export declare class SourceMapGenerator {
|
||||||
|
private _map;
|
||||||
|
constructor(opts: ConstructorParameters<typeof GenMapping>[0]);
|
||||||
|
addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping>;
|
||||||
|
setSourceContent(source: Parameters<typeof setSourceContent>[1], content: Parameters<typeof setSourceContent>[2]): ReturnType<typeof setSourceContent>;
|
||||||
|
toJSON(): ReturnType<typeof toEncodedMap>;
|
||||||
|
toDecodedMap(): ReturnType<typeof toDecodedMap>;
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,67 @@
|
||||||
|
{
|
||||||
|
"name": "@jridgewell/source-map",
|
||||||
|
"version": "0.3.2",
|
||||||
|
"description": "Packages @jridgewell/trace-mapping and @jridgewell/gen-mapping into the familiar source-map API",
|
||||||
|
"keywords": [
|
||||||
|
"sourcemap",
|
||||||
|
"source",
|
||||||
|
"map"
|
||||||
|
],
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "https://github.com/jridgewell/source-map",
|
||||||
|
"main": "dist/source-map.umd.js",
|
||||||
|
"module": "dist/source-map.mjs",
|
||||||
|
"typings": "dist/types/source-map.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"browser": "./dist/source-map.umd.js",
|
||||||
|
"require": "./dist/source-map.umd.js",
|
||||||
|
"import": "./dist/source-map.mjs"
|
||||||
|
},
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"scripts": {
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"pretest": "run-s build:rollup",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "mocha",
|
||||||
|
"test:coverage": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-node-resolve": "13.2.1",
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@types/mocha": "9.1.1",
|
||||||
|
"@types/node": "17.0.30",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"c8": "7.11.0",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"mocha": "9.2.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.66.0",
|
||||||
|
"typescript": "4.5.5"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/gen-mapping": "^0.3.0",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.9"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,21 @@
|
||||||
|
The MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2015 Rich Harris
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
|
@ -0,0 +1,200 @@
|
||||||
|
# sourcemap-codec
|
||||||
|
|
||||||
|
Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).
|
||||||
|
|
||||||
|
|
||||||
|
## Why?
|
||||||
|
|
||||||
|
Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.
|
||||||
|
|
||||||
|
This package makes the process slightly easier.
|
||||||
|
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install sourcemap-codec
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { encode, decode } from 'sourcemap-codec';
|
||||||
|
|
||||||
|
var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||||
|
|
||||||
|
assert.deepEqual( decoded, [
|
||||||
|
// the first line (of the generated code) has no mappings,
|
||||||
|
// as shown by the starting semi-colon (which separates lines)
|
||||||
|
[],
|
||||||
|
|
||||||
|
// the second line contains four (comma-separated) segments
|
||||||
|
[
|
||||||
|
// segments are encoded as you'd expect:
|
||||||
|
// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]
|
||||||
|
|
||||||
|
// i.e. the first segment begins at column 2, and maps back to the second column
|
||||||
|
// of the second line (both zero-based) of the 0th source, and uses the 0th
|
||||||
|
// name in the `map.names` array
|
||||||
|
[ 2, 0, 2, 2, 0 ],
|
||||||
|
|
||||||
|
// the remaining segments are 4-length rather than 5-length,
|
||||||
|
// because they don't map a name
|
||||||
|
[ 4, 0, 2, 4 ],
|
||||||
|
[ 6, 0, 2, 5 ],
|
||||||
|
[ 7, 0, 2, 7 ]
|
||||||
|
],
|
||||||
|
|
||||||
|
// the final line contains two segments
|
||||||
|
[
|
||||||
|
[ 2, 1, 10, 19 ],
|
||||||
|
[ 12, 1, 11, 20 ]
|
||||||
|
]
|
||||||
|
]);
|
||||||
|
|
||||||
|
var encoded = encode( decoded );
|
||||||
|
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map - 45120 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 5479160 bytes
|
||||||
|
sourcemap-codec 5659336 bytes
|
||||||
|
source-map-0.6.1 17144440 bytes
|
||||||
|
source-map-0.8.0 6867424 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 502 ops/sec ±1.03% (90 runs sampled)
|
||||||
|
decode: sourcemap-codec x 445 ops/sec ±0.97% (92 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 36.01 ops/sec ±1.64% (49 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 367 ops/sec ±0.04% (95 runs sampled)
|
||||||
|
Fastest is decode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 1261620 bytes
|
||||||
|
sourcemap-codec 9119248 bytes
|
||||||
|
source-map-0.6.1 8968560 bytes
|
||||||
|
source-map-0.8.0 8952952 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 738 ops/sec ±0.42% (98 runs sampled)
|
||||||
|
encode: sourcemap-codec x 238 ops/sec ±0.73% (88 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 162 ops/sec ±0.43% (84 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 191 ops/sec ±0.34% (90 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map - 347793 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 35338184 bytes
|
||||||
|
sourcemap-codec 35922736 bytes
|
||||||
|
source-map-0.6.1 62366360 bytes
|
||||||
|
source-map-0.8.0 44337416 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 40.35 ops/sec ±4.47% (54 runs sampled)
|
||||||
|
decode: sourcemap-codec x 36.76 ops/sec ±3.67% (51 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 4.44 ops/sec ±2.15% (16 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 59.35 ops/sec ±0.05% (78 runs sampled)
|
||||||
|
Fastest is decode: source-map-0.8.0
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 7212604 bytes
|
||||||
|
sourcemap-codec 21421456 bytes
|
||||||
|
source-map-0.6.1 25286888 bytes
|
||||||
|
source-map-0.8.0 25498744 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 112 ops/sec ±0.13% (84 runs sampled)
|
||||||
|
encode: sourcemap-codec x 30.23 ops/sec ±2.76% (53 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 19.43 ops/sec ±3.70% (37 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 19.40 ops/sec ±3.26% (37 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map - 1992 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 500272 bytes
|
||||||
|
sourcemap-codec 516864 bytes
|
||||||
|
source-map-0.6.1 1596672 bytes
|
||||||
|
source-map-0.8.0 517272 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 16,137 ops/sec ±0.17% (99 runs sampled)
|
||||||
|
decode: sourcemap-codec x 12,139 ops/sec ±0.13% (99 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 1,264 ops/sec ±0.12% (100 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 9,894 ops/sec ±0.08% (101 runs sampled)
|
||||||
|
Fastest is decode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 321026 bytes
|
||||||
|
sourcemap-codec 830832 bytes
|
||||||
|
source-map-0.6.1 586608 bytes
|
||||||
|
source-map-0.8.0 586680 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 19,876 ops/sec ±0.78% (95 runs sampled)
|
||||||
|
encode: sourcemap-codec x 6,983 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 5,070 ops/sec ±0.12% (102 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 5,641 ops/sec ±0.17% (100 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map - 5726 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 734848 bytes
|
||||||
|
sourcemap-codec 954200 bytes
|
||||||
|
source-map-0.6.1 2276432 bytes
|
||||||
|
source-map-0.8.0 955488 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 5,723 ops/sec ±0.12% (98 runs sampled)
|
||||||
|
decode: sourcemap-codec x 4,555 ops/sec ±0.09% (101 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 437 ops/sec ±0.11% (93 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 3,441 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
Fastest is decode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 638672 bytes
|
||||||
|
sourcemap-codec 1109840 bytes
|
||||||
|
source-map-0.6.1 1321224 bytes
|
||||||
|
source-map-0.8.0 1324448 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 6,801 ops/sec ±0.48% (98 runs sampled)
|
||||||
|
encode: sourcemap-codec x 2,533 ops/sec ±0.13% (101 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 2,248 ops/sec ±0.08% (100 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 2,303 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
```
|
||||||
|
|
||||||
|
# License
|
||||||
|
|
||||||
|
MIT
|
||||||
164
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
Normal file
164
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
Normal file
|
|
@ -0,0 +1,164 @@
|
||||||
|
const comma = ','.charCodeAt(0);
|
||||||
|
const semicolon = ';'.charCodeAt(0);
|
||||||
|
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||||
|
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||||
|
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||||
|
for (let i = 0; i < chars.length; i++) {
|
||||||
|
const c = chars.charCodeAt(i);
|
||||||
|
intToChar[i] = c;
|
||||||
|
charToInt[c] = i;
|
||||||
|
}
|
||||||
|
// Provide a fallback for older environments.
|
||||||
|
const td = typeof TextDecoder !== 'undefined'
|
||||||
|
? /* #__PURE__ */ new TextDecoder()
|
||||||
|
: typeof Buffer !== 'undefined'
|
||||||
|
? {
|
||||||
|
decode(buf) {
|
||||||
|
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||||
|
return out.toString();
|
||||||
|
},
|
||||||
|
}
|
||||||
|
: {
|
||||||
|
decode(buf) {
|
||||||
|
let out = '';
|
||||||
|
for (let i = 0; i < buf.length; i++) {
|
||||||
|
out += String.fromCharCode(buf[i]);
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
function decode(mappings) {
|
||||||
|
const state = new Int32Array(5);
|
||||||
|
const decoded = [];
|
||||||
|
let index = 0;
|
||||||
|
do {
|
||||||
|
const semi = indexOf(mappings, index);
|
||||||
|
const line = [];
|
||||||
|
let sorted = true;
|
||||||
|
let lastCol = 0;
|
||||||
|
state[0] = 0;
|
||||||
|
for (let i = index; i < semi; i++) {
|
||||||
|
let seg;
|
||||||
|
i = decodeInteger(mappings, i, state, 0); // genColumn
|
||||||
|
const col = state[0];
|
||||||
|
if (col < lastCol)
|
||||||
|
sorted = false;
|
||||||
|
lastCol = col;
|
||||||
|
if (hasMoreVlq(mappings, i, semi)) {
|
||||||
|
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
|
||||||
|
i = decodeInteger(mappings, i, state, 2); // sourceLine
|
||||||
|
i = decodeInteger(mappings, i, state, 3); // sourceColumn
|
||||||
|
if (hasMoreVlq(mappings, i, semi)) {
|
||||||
|
i = decodeInteger(mappings, i, state, 4); // namesIndex
|
||||||
|
seg = [col, state[1], state[2], state[3], state[4]];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
seg = [col, state[1], state[2], state[3]];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
seg = [col];
|
||||||
|
}
|
||||||
|
line.push(seg);
|
||||||
|
}
|
||||||
|
if (!sorted)
|
||||||
|
sort(line);
|
||||||
|
decoded.push(line);
|
||||||
|
index = semi + 1;
|
||||||
|
} while (index <= mappings.length);
|
||||||
|
return decoded;
|
||||||
|
}
|
||||||
|
function indexOf(mappings, index) {
|
||||||
|
const idx = mappings.indexOf(';', index);
|
||||||
|
return idx === -1 ? mappings.length : idx;
|
||||||
|
}
|
||||||
|
function decodeInteger(mappings, pos, state, j) {
|
||||||
|
let value = 0;
|
||||||
|
let shift = 0;
|
||||||
|
let integer = 0;
|
||||||
|
do {
|
||||||
|
const c = mappings.charCodeAt(pos++);
|
||||||
|
integer = charToInt[c];
|
||||||
|
value |= (integer & 31) << shift;
|
||||||
|
shift += 5;
|
||||||
|
} while (integer & 32);
|
||||||
|
const shouldNegate = value & 1;
|
||||||
|
value >>>= 1;
|
||||||
|
if (shouldNegate) {
|
||||||
|
value = -0x80000000 | -value;
|
||||||
|
}
|
||||||
|
state[j] += value;
|
||||||
|
return pos;
|
||||||
|
}
|
||||||
|
function hasMoreVlq(mappings, i, length) {
|
||||||
|
if (i >= length)
|
||||||
|
return false;
|
||||||
|
return mappings.charCodeAt(i) !== comma;
|
||||||
|
}
|
||||||
|
function sort(line) {
|
||||||
|
line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[0] - b[0];
|
||||||
|
}
|
||||||
|
function encode(decoded) {
|
||||||
|
const state = new Int32Array(5);
|
||||||
|
const bufLength = 1024 * 16;
|
||||||
|
const subLength = bufLength - 36;
|
||||||
|
const buf = new Uint8Array(bufLength);
|
||||||
|
const sub = buf.subarray(0, subLength);
|
||||||
|
let pos = 0;
|
||||||
|
let out = '';
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
if (i > 0) {
|
||||||
|
if (pos === bufLength) {
|
||||||
|
out += td.decode(buf);
|
||||||
|
pos = 0;
|
||||||
|
}
|
||||||
|
buf[pos++] = semicolon;
|
||||||
|
}
|
||||||
|
if (line.length === 0)
|
||||||
|
continue;
|
||||||
|
state[0] = 0;
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const segment = line[j];
|
||||||
|
// We can push up to 5 ints, each int can take at most 7 chars, and we
|
||||||
|
// may push a comma.
|
||||||
|
if (pos > subLength) {
|
||||||
|
out += td.decode(sub);
|
||||||
|
buf.copyWithin(0, subLength, pos);
|
||||||
|
pos -= subLength;
|
||||||
|
}
|
||||||
|
if (j > 0)
|
||||||
|
buf[pos++] = comma;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
|
||||||
|
if (segment.length === 1)
|
||||||
|
continue;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
|
||||||
|
if (segment.length === 4)
|
||||||
|
continue;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out + td.decode(buf.subarray(0, pos));
|
||||||
|
}
|
||||||
|
/**
 * Writes one Base64 VLQ value into `buf` at `pos` and returns the new write
 * position. The value written is the delta between `segment[j]` and the
 * running `state[j]`; `state[j]` is then updated to `segment[j]`.
 */
function encodeInteger(buf, pos, state, segment, j) {
    const current = segment[j];
    const delta = current - state[j];
    state[j] = current;
    // Zig-zag encode: the sign moves into the lowest bit.
    let remaining = delta < 0 ? (-delta << 1) | 1 : delta << 1;
    while (true) {
        let digit = remaining & 0b011111;
        remaining >>>= 5;
        // Set the continuation bit when more 5-bit groups follow.
        if (remaining > 0) {
            digit |= 0b100000;
        }
        buf[pos++] = intToChar[digit];
        if (remaining === 0) {
            break;
        }
    }
    return pos;
}
|
||||||
|
|
||||||
|
export { decode, encode };
|
||||||
|
//# sourceMappingURL=sourcemap-codec.mjs.map
|
||||||
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
175
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
175
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
|
|
@ -0,0 +1,175 @@
|
||||||
|
// UMD wrapper: CommonJS (exports), AMD (define), or a `sourcemapCodec`
// property on globalThis/self for plain browser scripts.
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
    typeof define === 'function' && define.amd ? define(['exports'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
})(this, (function (exports) { 'use strict';

    const comma = ','.charCodeAt(0);
    const semicolon = ';'.charCodeAt(0);
    // Base64 alphabet used by sourcemap VLQs.
    const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
    const intToChar = new Uint8Array(64); // 64 possible chars.
    const charToInt = new Uint8Array(128); // z is 122 in ASCII
    // Populate both lookup tables from the alphabet.
    for (let i = 0; i < chars.length; i++) {
        const c = chars.charCodeAt(i);
        intToChar[i] = c;
        charToInt[c] = i;
    }
    // Provide a fallback for older environments.
    const td = typeof TextDecoder !== 'undefined'
        ? /* #__PURE__ */ new TextDecoder()
        : typeof Buffer !== 'undefined'
            ? {
                decode(buf) {
                    const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                    return out.toString();
                },
            }
            : {
                // Last-resort decoder: byte-by-byte string building.
                decode(buf) {
                    let out = '';
                    for (let i = 0; i < buf.length; i++) {
                        out += String.fromCharCode(buf[i]);
                    }
                    return out;
                },
            };
    // Decodes a VLQ 'mappings' string into per-line arrays of segments.
    function decode(mappings) {
        // Running deltas: [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
        const state = new Int32Array(5);
        const decoded = [];
        let index = 0;
        do {
            const semi = indexOf(mappings, index);
            const line = [];
            let sorted = true;
            let lastCol = 0;
            // genColumn deltas reset on every line.
            state[0] = 0;
            for (let i = index; i < semi; i++) {
                let seg;
                i = decodeInteger(mappings, i, state, 0); // genColumn
                const col = state[0];
                // Track whether this line's segments arrive out of order.
                if (col < lastCol)
                    sorted = false;
                lastCol = col;
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                    i = decodeInteger(mappings, i, state, 2); // sourceLine
                    i = decodeInteger(mappings, i, state, 3); // sourceColumn
                    if (hasMoreVlq(mappings, i, semi)) {
                        i = decodeInteger(mappings, i, state, 4); // namesIndex
                        seg = [col, state[1], state[2], state[3], state[4]];
                    }
                    else {
                        seg = [col, state[1], state[2], state[3]];
                    }
                }
                else {
                    seg = [col];
                }
                line.push(seg);
            }
            if (!sorted)
                sort(line);
            decoded.push(line);
            index = semi + 1;
        } while (index <= mappings.length);
        return decoded;
    }
    // Next ';' at/after index, or the string length when none remains.
    function indexOf(mappings, index) {
        const idx = mappings.indexOf(';', index);
        return idx === -1 ? mappings.length : idx;
    }
    // Reads one Base64 VLQ value starting at `pos`, adds the decoded delta
    // into `state[j]`, and returns the position just past the value.
    function decodeInteger(mappings, pos, state, j) {
        let value = 0;
        let shift = 0;
        let integer = 0;
        do {
            const c = mappings.charCodeAt(pos++);
            integer = charToInt[c];
            value |= (integer & 31) << shift; // low 5 bits carry data
            shift += 5;
        } while (integer & 32); // bit 32 is the continuation flag
        const shouldNegate = value & 1; // zig-zag: the low bit is the sign
        value >>>= 1;
        if (shouldNegate) {
            // NOTE(review): OR-ing with -0x80000000 forces the sign bit —
            // presumably so the most-negative 32-bit delta round-trips; confirm.
            value = -0x80000000 | -value;
        }
        state[j] += value;
        return pos;
    }
    // True when another VLQ follows (not at end and next char is not ',').
    function hasMoreVlq(mappings, i, length) {
        if (i >= length)
            return false;
        return mappings.charCodeAt(i) !== comma;
    }
    // In-place sort of a line's segments by generated column.
    function sort(line) {
        line.sort(sortComparator);
    }
    // Orders segments by their generated column (element 0), ascending.
    function sortComparator(a, b) {
        return a[0] - b[0];
    }
    // Encodes decoded mappings back into a VLQ 'mappings' string, staging
    // bytes in a Uint8Array and flushing through td.decode in chunks.
    function encode(decoded) {
        // Running VLQ deltas, mirrored against decode() above.
        const state = new Int32Array(5);
        const bufLength = 1024 * 16;
        // 36 bytes of headroom fits one full segment (5 ints * 7 chars + comma).
        const subLength = bufLength - 36;
        const buf = new Uint8Array(bufLength);
        const sub = buf.subarray(0, subLength);
        let pos = 0;
        let out = '';
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            if (i > 0) {
                // Flush if completely full, then emit the ';' line separator.
                if (pos === bufLength) {
                    out += td.decode(buf);
                    pos = 0;
                }
                buf[pos++] = semicolon;
            }
            if (line.length === 0)
                continue;
            // genColumn deltas reset at the start of each line.
            state[0] = 0;
            for (let j = 0; j < line.length; j++) {
                const segment = line[j];
                // We can push up to 5 ints, each int can take at most 7 chars, and we
                // may push a comma.
                if (pos > subLength) {
                    // Flush the filled prefix and slide overflow bytes down.
                    out += td.decode(sub);
                    buf.copyWithin(0, subLength, pos);
                    pos -= subLength;
                }
                if (j > 0)
                    buf[pos++] = comma;
                pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
                if (segment.length === 1)
                    continue;
                pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
                pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
                pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
                if (segment.length === 4)
                    continue;
                pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
            }
        }
        // Flush whatever remains buffered.
        return out + td.decode(buf.subarray(0, pos));
    }
    // Writes one zig-zag Base64 VLQ delta (segment[j] - state[j]) into buf,
    // updating state[j], and returns the new write position.
    function encodeInteger(buf, pos, state, segment, j) {
        const next = segment[j];
        let num = next - state[j];
        state[j] = next;
        // Zig-zag encode: the sign moves into the lowest bit.
        num = num < 0 ? (-num << 1) | 1 : num << 1;
        do {
            let clamped = num & 0b011111; // low 5 data bits
            num >>>= 5;
            if (num > 0)
                clamped |= 0b100000; // continuation flag
            buf[pos++] = intToChar[clamped];
        } while (num > 0);
        return pos;
    }

    exports.decode = decode;
    exports.encode = encode;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
|
||||||
|
//# sourceMappingURL=sourcemap-codec.umd.js.map
|
||||||
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
6
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
Normal file
6
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
// A decoded segment: [genColumn], [genColumn, sourcesIndex, sourceLine,
// sourceColumn], or the same plus a trailing namesIndex.
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
// All segments on one generated line.
export declare type SourceMapLine = SourceMapSegment[];
// All lines of a map, in generated-line order.
export declare type SourceMapMappings = SourceMapLine[];
// Decodes a VLQ 'mappings' string into arrays of numbers.
export declare function decode(mappings: string): SourceMapMappings;
// Encodes decoded mappings back into a VLQ 'mappings' string.
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
|
||||||
|
|
@ -0,0 +1,75 @@
|
||||||
|
{
|
||||||
|
"name": "@jridgewell/sourcemap-codec",
|
||||||
|
"version": "1.4.14",
|
||||||
|
"description": "Encode/decode sourcemap mappings",
|
||||||
|
"keywords": [
|
||||||
|
"sourcemap",
|
||||||
|
"vlq"
|
||||||
|
],
|
||||||
|
"main": "dist/sourcemap-codec.umd.js",
|
||||||
|
"module": "dist/sourcemap-codec.mjs",
|
||||||
|
"typings": "dist/types/sourcemap-codec.d.ts",
|
||||||
|
"files": [
|
||||||
|
"dist",
|
||||||
|
"src"
|
||||||
|
],
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/sourcemap-codec.d.ts",
|
||||||
|
"browser": "./dist/sourcemap-codec.umd.js",
|
||||||
|
"import": "./dist/sourcemap-codec.mjs",
|
||||||
|
"require": "./dist/sourcemap-codec.umd.js"
|
||||||
|
},
|
||||||
|
"./dist/sourcemap-codec.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"benchmark": "run-s build:rollup benchmark:*",
|
||||||
|
"benchmark:install": "cd benchmark && npm install",
|
||||||
|
"benchmark:only": "node --expose-gc benchmark/index.js",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build",
|
||||||
|
"pretest": "run-s build:rollup",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "mocha --inspect-brk",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "mocha",
|
||||||
|
"test:coverage": "c8 mocha",
|
||||||
|
"test:watch": "mocha --watch"
|
||||||
|
},
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jridgewell/sourcemap-codec.git"
|
||||||
|
},
|
||||||
|
"author": "Rich Harris",
|
||||||
|
"license": "MIT",
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@types/node": "17.0.15",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"benchmark": "2.1.4",
|
||||||
|
"c8": "7.11.2",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"mocha": "9.2.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.64.0",
|
||||||
|
"source-map": "0.6.1",
|
||||||
|
"source-map-js": "1.0.2",
|
||||||
|
"sourcemap-codec": "1.4.8",
|
||||||
|
"typescript": "4.5.4"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,198 @@
|
||||||
|
// A decoded segment: [genColumn], [genColumn, sourcesIndex, sourceLine,
// sourceColumn], or the same plus a trailing namesIndex.
export type SourceMapSegment =
  | [number]
  | [number, number, number, number]
  | [number, number, number, number, number];
// All segments on one generated line.
export type SourceMapLine = SourceMapSegment[];
// All lines of a map, in generated-line order.
export type SourceMapMappings = SourceMapLine[];

const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
// Base64 alphabet used by sourcemap VLQs.
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII

// Populate both lookup tables from the alphabet.
for (let i = 0; i < chars.length; i++) {
  const c = chars.charCodeAt(i);
  intToChar[i] = c;
  charToInt[c] = i;
}

// Provide a fallback for older environments.
const td =
  typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
    ? {
        decode(buf: Uint8Array) {
          const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
          return out.toString();
        },
      }
    : {
        // Last-resort decoder: byte-by-byte string building.
        decode(buf: Uint8Array) {
          let out = '';
          for (let i = 0; i < buf.length; i++) {
            out += String.fromCharCode(buf[i]);
          }
          return out;
        },
      };
|
||||||
|
|
||||||
|
/**
 * Decodes a VLQ 'mappings' string into per-line arrays of segments.
 * Lines whose segments arrive out of column order are sorted.
 */
export function decode(mappings: string): SourceMapMappings {
  // Running deltas: [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
  const state: [number, number, number, number, number] = new Int32Array(5) as any;
  const decoded: SourceMapMappings = [];

  let index = 0;
  do {
    const semi = indexOf(mappings, index);
    const line: SourceMapLine = [];
    let sorted = true;
    let lastCol = 0;
    // genColumn deltas reset on every line.
    state[0] = 0;

    for (let i = index; i < semi; i++) {
      let seg: SourceMapSegment;

      i = decodeInteger(mappings, i, state, 0); // genColumn
      const col = state[0];
      // Track whether this line's segments arrive out of order.
      if (col < lastCol) sorted = false;
      lastCol = col;

      if (hasMoreVlq(mappings, i, semi)) {
        i = decodeInteger(mappings, i, state, 1); // sourcesIndex
        i = decodeInteger(mappings, i, state, 2); // sourceLine
        i = decodeInteger(mappings, i, state, 3); // sourceColumn

        if (hasMoreVlq(mappings, i, semi)) {
          i = decodeInteger(mappings, i, state, 4); // namesIndex
          seg = [col, state[1], state[2], state[3], state[4]];
        } else {
          seg = [col, state[1], state[2], state[3]];
        }
      } else {
        seg = [col];
      }

      line.push(seg);
    }

    if (!sorted) sort(line);
    decoded.push(line);
    index = semi + 1;
  } while (index <= mappings.length);

  return decoded;
}
|
||||||
|
|
||||||
|
function indexOf(mappings: string, index: number): number {
|
||||||
|
const idx = mappings.indexOf(';', index);
|
||||||
|
return idx === -1 ? mappings.length : idx;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Reads one Base64 VLQ value starting at `pos`, adds the decoded delta
 * into `state[j]`, and returns the position just past the value.
 */
function decodeInteger(mappings: string, pos: number, state: SourceMapSegment, j: number): number {
  let value = 0;
  let shift = 0;
  let integer = 0;

  do {
    const c = mappings.charCodeAt(pos++);
    integer = charToInt[c];
    value |= (integer & 31) << shift; // low 5 bits carry data
    shift += 5;
  } while (integer & 32); // bit 32 is the continuation flag

  const shouldNegate = value & 1; // zig-zag: the low bit is the sign
  value >>>= 1;

  if (shouldNegate) {
    // NOTE(review): OR-ing with -0x80000000 forces the sign bit — presumably
    // so the most-negative 32-bit delta round-trips; confirm.
    value = -0x80000000 | -value;
  }

  state[j] += value;
  return pos;
}
|
||||||
|
|
||||||
|
function hasMoreVlq(mappings: string, i: number, length: number): boolean {
|
||||||
|
if (i >= length) return false;
|
||||||
|
return mappings.charCodeAt(i) !== comma;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sorts a line's segments in place by their generated column (element 0).
function sort(line: SourceMapSegment[]) {
  line.sort(sortComparator);
}
|
||||||
|
|
||||||
|
function sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {
|
||||||
|
return a[0] - b[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
export function encode(decoded: SourceMapMappings): string;
export function encode(decoded: Readonly<SourceMapMappings>): string;
/**
 * Encodes decoded mappings back into a VLQ 'mappings' string. Output bytes
 * are staged in a fixed-size Uint8Array and flushed through `td.decode`
 * in chunks, avoiding per-character string concatenation.
 */
export function encode(decoded: Readonly<SourceMapMappings>): string {
  // Running VLQ deltas: [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
  const state: [number, number, number, number, number] = new Int32Array(5) as any;
  const bufLength = 1024 * 16;
  // Flush threshold: 36 bytes of headroom fits one full segment
  // (5 VLQ ints * up to 7 chars each, plus a comma — see loop comment below).
  const subLength = bufLength - 36;
  const buf = new Uint8Array(bufLength);
  const sub = buf.subarray(0, subLength);
  let pos = 0;
  let out = '';

  for (let i = 0; i < decoded.length; i++) {
    const line = decoded[i];
    if (i > 0) {
      // Flush if completely full, then emit the ';' line separator.
      if (pos === bufLength) {
        out += td.decode(buf);
        pos = 0;
      }
      buf[pos++] = semicolon;
    }
    if (line.length === 0) continue;

    // genColumn deltas reset at the start of each line.
    state[0] = 0;

    for (let j = 0; j < line.length; j++) {
      const segment = line[j];
      // We can push up to 5 ints, each int can take at most 7 chars, and we
      // may push a comma.
      if (pos > subLength) {
        // Flush the filled prefix and slide overflow bytes to the front.
        out += td.decode(sub);
        buf.copyWithin(0, subLength, pos);
        pos -= subLength;
      }
      if (j > 0) buf[pos++] = comma;

      pos = encodeInteger(buf, pos, state, segment, 0); // genColumn

      if (segment.length === 1) continue;
      pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
      pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
      pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn

      if (segment.length === 4) continue;
      pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
    }
  }

  // Flush whatever remains buffered.
  return out + td.decode(buf.subarray(0, pos));
}
|
||||||
|
|
||||||
|
/**
 * Writes one zig-zag Base64 VLQ delta (`segment[j] - state[j]`) into `buf`
 * at `pos`, updates `state[j]`, and returns the new write position.
 */
function encodeInteger(
  buf: Uint8Array,
  pos: number,
  state: SourceMapSegment,
  segment: SourceMapSegment,
  j: number,
): number {
  const next = segment[j];
  let num = next - state[j];
  state[j] = next;

  // Zig-zag encode: the sign moves into the lowest bit.
  num = num < 0 ? (-num << 1) | 1 : num << 1;
  do {
    let clamped = num & 0b011111; // low 5 data bits
    num >>>= 5;
    if (num > 0) clamped |= 0b100000; // continuation flag
    buf[pos++] = intToChar[clamped];
  } while (num > 0);

  return pos;
}
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
@ -0,0 +1,252 @@
|
||||||
|
# @jridgewell/trace-mapping
|
||||||
|
|
||||||
|
> Trace the original position through a source map
|
||||||
|
|
||||||
|
`trace-mapping` allows you to take the line and column of an output file and trace it to the
|
||||||
|
original location in the source file through a source map.
|
||||||
|
|
||||||
|
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
|
||||||
|
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @jridgewell/trace-mapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
TraceMap,
|
||||||
|
originalPositionFor,
|
||||||
|
generatedPositionFor,
|
||||||
|
sourceContentFor,
|
||||||
|
} from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
const tracer = new TraceMap({
|
||||||
|
version: 3,
|
||||||
|
sources: ['input.js'],
|
||||||
|
sourcesContent: ['content of input.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'KAyCIA',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Lines start at line 1, columns at column 0.
|
||||||
|
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
name: 'foo',
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = sourceContentFor(tracer, traced.source);
|
||||||
|
assert.strictEqual(content, 'content of input.js');
|
||||||
|
|
||||||
|
const generated = generatedPositionFor(tracer, {
|
||||||
|
source: 'input.js',
|
||||||
|
line: 42,
|
||||||
|
column: 4,
|
||||||
|
});
|
||||||
|
assert.deepEqual(generated, {
|
||||||
|
line: 1,
|
||||||
|
column: 5,
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
|
||||||
|
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { traceSegment } from '@jridgewell/trace-mapping';
|
||||||
|
|
||||||
|
// line is 0-base.
|
||||||
|
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);
|
||||||
|
|
||||||
|
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||||
|
// Again, line is 0-base and so is sourceLine
|
||||||
|
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
|
||||||
|
```
|
||||||
|
|
||||||
|
### SectionedSourceMaps
|
||||||
|
|
||||||
|
The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
|
||||||
|
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
|
||||||
|
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
|
||||||
|
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
|
||||||
|
`TraceMap` instance:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { AnyMap } from '@jridgewell/trace-mapping';
|
||||||
|
const fooOutput = 'foo';
|
||||||
|
const barOutput = 'bar';
|
||||||
|
const output = [fooOutput, barOutput].join('\n');
|
||||||
|
|
||||||
|
const sectioned = new AnyMap({
|
||||||
|
version: 3,
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
// 0-base line and column
|
||||||
|
offset: { line: 0, column: 0 },
|
||||||
|
// fooOutput's sourcemap
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['foo.js'],
|
||||||
|
names: ['foo'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
// barOutput's sourcemap will not affect the first line, only the second
|
||||||
|
offset: { line: 1, column: 0 },
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: ['bar.js'],
|
||||||
|
names: ['bar'],
|
||||||
|
mappings: 'AAAAA',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const traced = originalPositionFor(sectioned, {
|
||||||
|
line: 2,
|
||||||
|
column: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(traced, {
|
||||||
|
source: 'bar.js',
|
||||||
|
line: 1,
|
||||||
|
column: 0,
|
||||||
|
name: 'bar',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map - 45120 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 562400 bytes
|
||||||
|
trace-mapping encoded 5706544 bytes
|
||||||
|
source-map-js 10717664 bytes
|
||||||
|
source-map-0.6.1 17446384 bytes
|
||||||
|
source-map-0.8.0 9701757 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map - 347793 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 89832 bytes
|
||||||
|
trace-mapping encoded 35474640 bytes
|
||||||
|
source-map-js 51257176 bytes
|
||||||
|
source-map-0.6.1 63515664 bytes
|
||||||
|
source-map-0.8.0 42933752 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map - 1992 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 37128 bytes
|
||||||
|
trace-mapping encoded 247280 bytes
|
||||||
|
source-map-js 1143536 bytes
|
||||||
|
source-map-0.6.1 1290992 bytes
|
||||||
|
source-map-0.8.0 96544 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map - 5726 segments
|
||||||
|
|
||||||
|
Memory Usage:
|
||||||
|
trace-mapping decoded 16176 bytes
|
||||||
|
trace-mapping encoded 681552 bytes
|
||||||
|
source-map-js 2418352 bytes
|
||||||
|
source-map-0.6.1 2443672 bytes
|
||||||
|
source-map-0.8.0 111768 bytes
|
||||||
|
Smallest memory usage is trace-mapping decoded
|
||||||
|
|
||||||
|
Init speed:
|
||||||
|
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
|
||||||
|
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
|
||||||
|
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
|
||||||
|
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
|
||||||
|
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
|
Trace speed:
|
||||||
|
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
|
||||||
|
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
|
||||||
|
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
|
||||||
|
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
|
||||||
|
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
|
||||||
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
```
|
||||||
|
|
||||||
|
[source-map]: https://www.npmjs.com/package/source-map
|
||||||
|
|
@ -0,0 +1,511 @@
|
||||||
|
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||||
|
import resolveUri from '@jridgewell/resolve-uri';
|
||||||
|
|
||||||
|
// Resolves `input` against `base` via @jridgewell/resolve-uri, after
// normalizing `base` so it is always interpreted as a directory.
function resolve(input, base) {
    // The base is always treated as a directory, if it's not empty.
    // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
    // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
    if (base && !base.endsWith('/'))
        base += '/';
    return resolveUri(input, base);
}
|
||||||
|
|
||||||
|
/**
 * Removes everything after the last "/", but leaves the slash.
 * Falsy input (empty string, null, undefined) yields ''.
 */
function stripFilename(path) {
    if (!path) {
        return '';
    }
    const cut = path.lastIndexOf('/') + 1;
    return path.slice(0, cut);
}
|
||||||
|
|
||||||
|
// Indices into a decoded (forward) mapping segment.
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
// Indices into a reverse-lookup segment (generated position).
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
// Returns `mappings` with every line sorted by generated column. Already
// sorted input is returned untouched; otherwise a copy is made first unless
// `owned` indicates we may mutate the array in place.
function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    // Only re-sort the lines that are actually out of order.
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
|
||||||
|
/**
 * Returns the index of the first unsorted line at or after `start`, or
 * `mappings.length` when every remaining line is sorted.
 */
function nextUnsortedSegmentLine(mappings, start) {
    let i = start;
    while (i < mappings.length && isSorted(mappings[i]))
        i++;
    return i;
}
|
||||||
|
/**
 * Checks whether a line's segments are in non-decreasing generated-column
 * order.
 */
function isSorted(line) {
    return line.every((seg, j) => j === 0 || line[j - 1][COLUMN] <= seg[COLUMN]);
}
|
||||||
|
/**
 * Sorts a line's segments by generated column, copying first when the caller
 * does not own the array.
 */
function sortSegments(line, owned) {
    const target = owned ? line : line.slice();
    return target.sort(sortComparator);
}
|
||||||
|
// Orders two segments by generated column, ascending.
function sortComparator(a, b) {
    return a[COLUMN] - b[COLUMN];
}
|
||||||
|
|
||||||
|
// Module-level side channel: set by binarySearch (and memoizedBinarySearch)
// to report whether the most recent search found an exact column match, or
// only the closest left-index.
let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        // Overflow-safe midpoint.
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    // No exact match: report via the side channel and return the left-index.
    found = false;
    return low - 1;
}
|
||||||
|
/**
 * Starting at a matched `index`, walks forward over any subsequent segments
 * that share the same column, returning the last such index.
 */
function upperBound(haystack, needle, index) {
    let last = index;
    for (let i = index + 1; i < haystack.length; i++) {
        if (haystack[i][COLUMN] !== needle)
            break;
        last = i;
    }
    return last;
}
|
||||||
|
/**
 * Starting at a matched `index`, walks backward over any preceding segments
 * that share the same column, returning the first such index.
 */
function lowerBound(haystack, needle, index) {
    let first = index;
    for (let i = index - 1; i >= 0; i--) {
        if (haystack[i][COLUMN] !== needle)
            break;
        first = i;
    }
    return first;
}
|
||||||
|
/**
 * Creates a fresh memo record for memoizedBinarySearch; -1 sentinels mean
 * "no previous search".
 */
function memoizedState() {
    return { lastKey: -1, lastNeedle: -1, lastIndex: -1 };
}
|
||||||
|
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    // Only reuse the memo when searching within the same line (key).
    if (key === lastKey) {
        if (needle === lastNeedle) {
            // Identical query: replay the cached answer, restoring the
            // module-level `found` flag to match.
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // Monotonically increasing needle: narrow the search window from below.
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
    // One null-prototype "array" per source file, indexed by source line.
    const sources = memos.map(buildNullArray);
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            // Length-1 segments carry no original position; nothing to index.
            if (seg.length === 1)
                continue;
            const sourceIndex = seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            const originalSource = sources[sourceIndex];
            const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
            const memo = memos[sourceIndex];
            // The binary search either found a match, or it found the left-index just before where the
            // segment should go. Either way, we want to insert after that. And there may be multiple
            // generated segments associated with an original location, so there may need to move several
            // indexes before we find where we need to insert.
            const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
            insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
        }
    }
    return sources;
}
|
||||||
|
/**
 * Inserts `value` at `index`, shifting everything from `index` onward one
 * slot to the right.
 */
function insert(array, index, value) {
    let i = array.length;
    while (i > index) {
        array[i] = array[i - 1];
        i--;
    }
    array[index] = value;
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
    return Object.create(null);
}
|
||||||
|
|
||||||
|
/**
 * Accepts either a plain or a sectioned ("indexed") source map, returning a
 * TraceMap. Sectioned maps are flattened into a single joined map first.
 */
const AnyMap = function (map, mapUrl) {
    const parsed = typeof map === 'string' ? JSON.parse(map) : map;
    // Plain maps trace directly; no flattening needed.
    if (!('sections' in parsed))
        return new TraceMap(parsed, mapUrl);
    // Accumulators for the joined map, filled in by recurse/addSection.
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);
    // The flattening above emits mappings already in generated order, so the
    // presorted constructor path can skip sorting.
    return presortedDecodedMap({
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
    });
};
|
||||||
|
// Walks a sectioned map's sections, adding each one's mappings into the
// shared accumulators, capped so no section writes into the region owned by
// the section that follows it.
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
    const { sections } = input;
    for (let i = 0; i < sections.length; i++) {
        const { map, offset } = sections[i];
        // Default stop is whatever our caller imposed on us.
        let sl = stopLine;
        let sc = stopColumn;
        if (i + 1 < sections.length) {
            const nextOffset = sections[i + 1].offset;
            // Tighten the stop to where the next section begins.
            sl = Math.min(stopLine, lineOffset + nextOffset.line);
            if (sl === stopLine) {
                sc = Math.min(stopColumn, columnOffset + nextOffset.column);
            }
            else if (sl < stopLine) {
                // Next section starts on an earlier line, so its column offset
                // alone defines the cap.
                sc = columnOffset + nextOffset.column;
            }
        }
        addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
    }
}
|
||||||
|
// Adds a single section's map into the joined accumulators, offsetting its
// generated positions and source/name indices, and stopping at the
// stopLine/stopColumn boundary owned by the next section.
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
    // Nested sectioned maps recurse with the exact same arguments.
    if ('sections' in input)
        return recurse(...arguments);
    const map = new TraceMap(input, mapUrl);
    // This section's sources/names append after everything added so far.
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources, sourcesContent: contents } = map;
    append(sources, resolvedSources);
    append(names, map.names);
    if (contents)
        append(sourcesContent, contents);
    else
        // Keep sourcesContent aligned with sources even when content is absent.
        for (let i = 0; i < resolvedSources.length; i++)
            sourcesContent.push(null);
    for (let i = 0; i < decoded.length; i++) {
        const lineI = lineOffset + i;
        // We can only add so many lines before we step into the range that the next section's map
        // controls. When we get to the last line, then we'll start checking the segments to see if
        // they've crossed into the column range. But it may not have any columns that overstep, so we
        // still need to check that we don't overstep lines, too.
        if (lineI > stopLine)
            return;
        // The out line may already exist in mappings (if we're continuing the line started by a
        // previous section). Or, we may have jumped ahead several lines to start this section.
        const out = getLine(mappings, lineI);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (lineI === stopLine && column >= stopColumn)
                return;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            // Shift this segment's indices into the joined sources/names arrays.
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            out.push(seg.length === 4
                ? [column, sourcesIndex, sourceLine, sourceColumn]
                : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
|
||||||
|
/**
 * Appends every element of `other` onto `arr` in place (one at a time, so
 * large inputs never hit spread-argument limits).
 */
function append(arr, other) {
    for (const item of other)
        arr.push(item);
}
|
||||||
|
/**
 * Returns the line array at `index`, creating empty line arrays for every
 * missing slot up to and including `index`.
 */
function getLine(arr, index) {
    while (arr.length <= index)
        arr.push([]);
    return arr[index];
}
|
||||||
|
|
||||||
|
// Validation messages: lines are 1-based, columns 0-based (source-map legacy).
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
// Bias values for traceSegmentInternal: prefer the matching segment at/after
// (LEAST_UPPER_BOUND) or at/before (GREATEST_LOWER_BOUND) the needle.
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
// The public API functions below are declared here and assigned inside the
// IIFE further down, so they can share private helpers via closure.
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
let encodedMappings;
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
let decodedMappings;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
let traceSegment;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
let originalPositionFor;
/**
 * Finds the generated line/column position for the provided source, line, and column of an
 * original source position. Line is 1-based, column 0-based, matching `originalPositionFor`.
 */
let generatedPositionFor;
/**
 * Iterates each mapping in generated position order.
 */
let eachMapping;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
let sourceContentFor;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
let presortedDecodedMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let decodedMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let encodedMap;
|
||||||
|
/**
 * Parsed representation of a source map. Mappings are stored in whichever
 * form (encoded string / decoded arrays) they arrived in and lazily converted
 * on demand; search state is memoized per map.
 */
class TraceMap {
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        // An object that already carries _decodedMemo is an existing TraceMap;
        // return it as-is rather than re-parsing.
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        // Resolve each source against sourceRoot, itself resolved against the
        // directory of the map's own URL.
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            // VLQ-encoded mappings: decoding is deferred until first needed.
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            // Pre-decoded mappings: sort if necessary. We may mutate in place
            // only when we own the array (i.e. we parsed it from JSON).
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
        // Memoized search state; _bySources/_bySourceMemos are built lazily by
        // generatedPositionFor.
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
|
||||||
|
// Assigns the public API inside a closure so every function shares the
// module-private helpers and the `found` side channel declared above.
(() => {
    encodedMappings = (map) => {
        var _a;
        // Lazily encode from the decoded form and cache on the map.
        return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = encode(map._decoded)));
    };
    decodedMappings = (map) => {
        // Lazily decode from the encoded form and cache on the map.
        return (map._decoded || (map._decoded = decode(map._encoded)));
    };
    traceSegment = (map, line, column) => {
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return null;
        return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    };
    originalPositionFor = (map, { line, column, bias }) => {
        // Convert the public 1-based line to the internal 0-based form.
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return OMapping(null, null, null, null);
        const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return OMapping(null, null, null, null);
        // Length-1 segments carry no original position.
        if (segment.length == 1)
            return OMapping(null, null, null, null);
        const { names, resolvedSources } = map;
        return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
    };
    generatedPositionFor = (map, { source, line, column, bias }) => {
        // Convert the public 1-based line to the internal 0-based form.
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        // Accept either the raw or the resolved source name.
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return GMapping(null, null);
        // Build (once, lazily) the by-source reverse index used for lookups.
        const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
        const memos = map._bySourceMemos;
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return GMapping(null, null);
        const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return GMapping(null, null);
        return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
    };
    eachMapping = (map, cb) => {
        const decoded = decodedMappings(map);
        const { names, resolvedSources } = map;
        // Walk every segment in generated order, converting to the public
        // 1-based-line representation for the callback.
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generatedLine = i + 1;
                const generatedColumn = seg[0];
                let source = null;
                let originalLine = null;
                let originalColumn = null;
                let name = null;
                if (seg.length !== 1) {
                    source = resolvedSources[seg[1]];
                    originalLine = seg[2] + 1;
                    originalColumn = seg[3];
                }
                if (seg.length === 5)
                    name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    };
    sourceContentFor = (map, source) => {
        const { sources, resolvedSources, sourcesContent } = map;
        if (sourcesContent == null)
            return null;
        // Accept either the raw or the resolved source name.
        let index = sources.indexOf(source);
        if (index === -1)
            index = resolvedSources.indexOf(source);
        return index === -1 ? null : sourcesContent[index];
    };
    presortedDecodedMap = (map, mapUrl) => {
        // Construct with an empty mappings array so the constructor's sort is
        // skipped, then attach the caller's already-sorted decoded mappings.
        const tracer = new TraceMap(clone(map, []), mapUrl);
        tracer._decoded = map.mappings;
        return tracer;
    };
    decodedMap = (map) => {
        return clone(map, decodedMappings(map));
    };
    encodedMap = (map) => {
        return clone(map, encodedMappings(map));
    };
})();
|
||||||
|
/**
 * Shallow-copies the standard source-map fields from `map`, substituting the
 * provided `mappings`. Non-standard properties are deliberately dropped.
 */
function clone(map, mappings) {
    const { version, file, names, sourceRoot, sources, sourcesContent } = map;
    return { version, file, names, sourceRoot, sources, sourcesContent, mappings };
}
|
||||||
|
// Builds an original-position result; all four fields are null when unmapped.
function OMapping(source, line, column, name) {
    return { source, line, column, name };
}
|
||||||
|
// Builds a generated-position result; both fields are null when unmapped.
function GMapping(line, column) {
    return { line, column };
}
|
||||||
|
// Finds the segment for `column` within a sorted line of segments, honoring
// the requested bias. Relies on the module-level `found` flag set by
// memoizedBinarySearch to know whether the column matched exactly.
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        // Exact match: among equal columns, pick the last (LEAST_UPPER_BOUND)
        // or the first (GREATEST_LOWER_BOUND).
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        // No exact match: the search returned the left-index, so step forward
        // to the next segment for least-upper-bound semantics.
        index++;
    // Fell off either end of the line: no matching segment.
    if (index === -1 || index === segments.length)
        return null;
    return segments[index];
}
|
||||||
|
|
||||||
|
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
||||||
|
//# sourceMappingURL=trace-mapping.mjs.map
|
||||||
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,525 @@
|
||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||||
|
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||||
|
|
||||||
|
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||||
|
|
||||||
|
var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri);
|
||||||
|
|
||||||
|
function resolve(input, base) {
|
||||||
|
// The base is always treated as a directory, if it's not empty.
|
||||||
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
|
if (base && !base.endsWith('/'))
|
||||||
|
base += '/';
|
||||||
|
return resolveUri__default["default"](input, base);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
function stripFilename(path) {
|
||||||
|
if (!path)
|
||||||
|
return '';
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
const REV_GENERATED_LINE = 1;
|
||||||
|
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
function maybeSort(mappings, owned) {
|
||||||
|
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||||
|
if (unsortedIndex === mappings.length)
|
||||||
|
return mappings;
|
||||||
|
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||||
|
// not, we do not want to modify the consumer's input array.
|
||||||
|
if (!owned)
|
||||||
|
mappings = mappings.slice();
|
||||||
|
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||||
|
mappings[i] = sortSegments(mappings[i], owned);
|
||||||
|
}
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
function nextUnsortedSegmentLine(mappings, start) {
|
||||||
|
for (let i = start; i < mappings.length; i++) {
|
||||||
|
if (!isSorted(mappings[i]))
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
return mappings.length;
|
||||||
|
}
|
||||||
|
function isSorted(line) {
|
||||||
|
for (let j = 1; j < line.length; j++) {
|
||||||
|
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function sortSegments(line, owned) {
|
||||||
|
if (!owned)
|
||||||
|
line = line.slice();
|
||||||
|
return line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[COLUMN] - b[COLUMN];
|
||||||
|
}
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
function binarySearch(haystack, needle, low, high) {
|
||||||
|
while (low <= high) {
|
||||||
|
const mid = low + ((high - low) >> 1);
|
||||||
|
const cmp = haystack[mid][COLUMN] - needle;
|
||||||
|
if (cmp === 0) {
|
||||||
|
found = true;
|
||||||
|
return mid;
|
||||||
|
}
|
||||||
|
if (cmp < 0) {
|
||||||
|
low = mid + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = mid - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
found = false;
|
||||||
|
return low - 1;
|
||||||
|
}
|
||||||
|
function upperBound(haystack, needle, index) {
|
||||||
|
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function lowerBound(haystack, needle, index) {
|
||||||
|
for (let i = index - 1; i >= 0; index = i--) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function memoizedState() {
|
||||||
|
return {
|
||||||
|
lastKey: -1,
|
||||||
|
lastNeedle: -1,
|
||||||
|
lastIndex: -1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||||
|
const { lastKey, lastNeedle, lastIndex } = state;
|
||||||
|
let low = 0;
|
||||||
|
let high = haystack.length - 1;
|
||||||
|
if (key === lastKey) {
|
||||||
|
if (needle === lastNeedle) {
|
||||||
|
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||||
|
return lastIndex;
|
||||||
|
}
|
||||||
|
if (needle >= lastNeedle) {
|
||||||
|
// lastIndex may be -1 if the previous needle was not found.
|
||||||
|
low = lastIndex === -1 ? 0 : lastIndex;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = lastIndex;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.lastKey = key;
|
||||||
|
state.lastNeedle = needle;
|
||||||
|
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||||
|
// of generated line/column.
|
||||||
|
function buildBySources(decoded, memos) {
|
||||||
|
const sources = memos.map(buildNullArray);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
if (seg.length === 1)
|
||||||
|
continue;
|
||||||
|
const sourceIndex = seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
const originalSource = sources[sourceIndex];
|
||||||
|
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||||
|
const memo = memos[sourceIndex];
|
||||||
|
// The binary search either found a match, or it found the left-index just before where the
|
||||||
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
|
// generated segments associated with an original location, so there may need to move several
|
||||||
|
// indexes before we find where we need to insert.
|
||||||
|
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
|
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sources;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||||
|
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||||
|
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||||
|
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||||
|
// order when iterating with for-in.
|
||||||
|
function buildNullArray() {
|
||||||
|
return { __proto__: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
const AnyMap = function (map, mapUrl) {
|
||||||
|
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
|
if (!('sections' in parsed))
|
||||||
|
return new TraceMap(parsed, mapUrl);
|
||||||
|
const mappings = [];
|
||||||
|
const sources = [];
|
||||||
|
const sourcesContent = [];
|
||||||
|
const names = [];
|
||||||
|
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);
|
||||||
|
const joined = {
|
||||||
|
version: 3,
|
||||||
|
file: parsed.file,
|
||||||
|
names,
|
||||||
|
sources,
|
||||||
|
sourcesContent,
|
||||||
|
mappings,
|
||||||
|
};
|
||||||
|
return exports.presortedDecodedMap(joined);
|
||||||
|
};
|
||||||
|
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||||
|
const { sections } = input;
|
||||||
|
for (let i = 0; i < sections.length; i++) {
|
||||||
|
const { map, offset } = sections[i];
|
||||||
|
let sl = stopLine;
|
||||||
|
let sc = stopColumn;
|
||||||
|
if (i + 1 < sections.length) {
|
||||||
|
const nextOffset = sections[i + 1].offset;
|
||||||
|
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||||
|
if (sl === stopLine) {
|
||||||
|
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||||
|
}
|
||||||
|
else if (sl < stopLine) {
|
||||||
|
sc = columnOffset + nextOffset.column;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Appends one section's map into the accumulator arrays, rebasing its
// segment indices (sources/names) and generated positions (line/column) to
// the flattened output. A nested sectioned map is handled via recurse().
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
    if ('sections' in input)
        return recurse(...arguments);
    const map = new TraceMap(input, mapUrl);
    // Remember where this section's sources/names begin in the shared arrays,
    // so segment indices can be rebased below.
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = exports.decodedMappings(map);
    const { resolvedSources, sourcesContent: contents } = map;
    append(sources, resolvedSources);
    append(names, map.names);
    if (contents)
        append(sourcesContent, contents);
    else
        // Keep sourcesContent index-aligned with sources even when this
        // section provides no content.
        for (let i = 0; i < resolvedSources.length; i++)
            sourcesContent.push(null);
    for (let i = 0; i < decoded.length; i++) {
        const lineI = lineOffset + i;
        // We can only add so many lines before we step into the range that the next section's map
        // controls. When we get to the last line, then we'll start checking the segments to see if
        // they've crossed into the column range. But it may not have any columns that overstep, so we
        // still need to check that we don't overstep lines, too.
        if (lineI > stopLine)
            return;
        // The out line may already exist in mappings (if we're continuing the line started by a
        // previous section). Or, we may have jumped ahead several lines to start this section.
        const out = getLine(mappings, lineI);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (lineI === stopLine && column >= stopColumn)
                return;
            if (seg.length === 1) {
                // Column-only segment: nothing to rebase besides the column.
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            out.push(seg.length === 4
                ? [column, sourcesIndex, sourceLine, sourceColumn]
                : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
|
||||||
|
/** Pushes every element of `other` onto the end of `arr`, in order. */
function append(arr, other) {
    for (const item of other) {
        arr.push(item);
    }
}
|
||||||
|
/**
 * Returns the line array at `index`, first growing `arr` with fresh empty
 * line arrays until that index exists.
 */
function getLine(arr, index) {
    while (arr.length <= index) {
        arr.push([]);
    }
    return arr[index];
}
|
||||||
|
|
||||||
|
// Error messages for the 1-based line / 0-based column input validation.
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
// Search biases for tie-breaking around a missed column: LEAST_UPPER_BOUND
// favors the segment after the needle, GREATEST_LOWER_BOUND the one before.
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
// The exports below are declared here and assigned their implementations in
// the IIFE further down, which closes over TraceMap's internal fields.
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
exports.encodedMappings = void 0;
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
exports.decodedMappings = void 0;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
exports.traceSegment = void 0;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
exports.originalPositionFor = void 0;
/**
 * Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
 * the found mapping is from the same source and line as the originalPositionFor mapping.
 *
 * Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
 * using the same needle that would return `id` when calling `originalPositionFor`.
 */
exports.generatedPositionFor = void 0;
/**
 * Iterates each mapping in generated position order.
 */
exports.eachMapping = void 0;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
exports.sourceContentFor = void 0;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
exports.presortedDecodedMap = void 0;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
exports.decodedMap = void 0;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
exports.encodedMap = void 0;
|
||||||
|
// Parses a source map input (JSON string, plain map object, or an existing
// TraceMap) into a traceable form. Mapping decode/sort work is deferred to
// the accessor functions assigned in the IIFE below.
class TraceMap {
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        // An already-constructed TraceMap (identified by its _decodedMemo
        // field) is returned as-is rather than re-parsed.
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        // Each source resolves against sourceRoot joined with the map's own
        // URL (filename stripped off).
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            // Keep the VLQ string; decoding happens lazily in decodedMappings().
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            // A JSON.parse'd array (isString) is owned by us, so maybeSort may
            // sort it in place instead of copying.
            this._decoded = maybeSort(mappings, isString);
        }
        // Memo state for forward (generated -> original) searches.
        this._decodedMemo = memoizedState();
        // Reverse (source -> generated) index, built lazily by generatedPositionFor.
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
|
||||||
|
// Implementations for the exported accessors declared above. They live in an
// IIFE so they can reach TraceMap's internal fields (_encoded, _decoded,
// _decodedMemo, _bySources, _bySourceMemos) without exposing them.
(() => {
    exports.encodedMappings = (map) => {
        var _a;
        // Lazily encode and cache the VLQ string from the decoded segments.
        return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = sourcemapCodec.encode(map._decoded)));
    };
    exports.decodedMappings = (map) => {
        // Lazily decode and cache the segment arrays from the VLQ string.
        return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded)));
    };
    exports.traceSegment = (map, line, column) => {
        const decoded = exports.decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return null;
        return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    };
    exports.originalPositionFor = (map, { line, column, bias }) => {
        line--; // incoming line is 1-based; internal storage is 0-based
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = exports.decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return OMapping(null, null, null, null);
        const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return OMapping(null, null, null, null);
        // A 1-length segment carries only a generated column: no source info.
        if (segment.length == 1)
            return OMapping(null, null, null, null);
        const { names, resolvedSources } = map;
        return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
    };
    exports.generatedPositionFor = (map, { source, line, column, bias }) => {
        line--; // incoming line is 1-based; internal storage is 0-based
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        // Accept either the raw source path or its resolved URL.
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return GMapping(null, null);
        // Build (once, lazily) the reverse index: original position -> generated.
        const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
        const memos = map._bySourceMemos;
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return GMapping(null, null);
        const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return GMapping(null, null);
        return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
    };
    exports.eachMapping = (map, cb) => {
        const decoded = exports.decodedMappings(map);
        const { names, resolvedSources } = map;
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                // Lines are reported 1-based; columns stay 0-based.
                const generatedLine = i + 1;
                const generatedColumn = seg[0];
                let source = null;
                let originalLine = null;
                let originalColumn = null;
                let name = null;
                if (seg.length !== 1) {
                    source = resolvedSources[seg[1]];
                    originalLine = seg[2] + 1;
                    originalColumn = seg[3];
                }
                if (seg.length === 5)
                    name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    };
    exports.sourceContentFor = (map, source) => {
        const { sources, resolvedSources, sourcesContent } = map;
        if (sourcesContent == null)
            return null;
        // Accept either the raw source path or its resolved URL.
        let index = sources.indexOf(source);
        if (index === -1)
            index = resolvedSources.indexOf(source);
        return index === -1 ? null : sourcesContent[index];
    };
    exports.presortedDecodedMap = (map, mapUrl) => {
        // Construct with empty mappings, then install the caller's (already
        // sorted) mappings directly — skipping TraceMap's maybeSort pass.
        const tracer = new TraceMap(clone(map, []), mapUrl);
        tracer._decoded = map.mappings;
        return tracer;
    };
    exports.decodedMap = (map) => {
        return clone(map, exports.decodedMappings(map));
    };
    exports.encodedMap = (map) => {
        return clone(map, exports.encodedMappings(map));
    };
})();
|
||||||
|
/**
 * Builds a plain SourceMap-shaped object that shares `map`'s metadata fields,
 * with the supplied `mappings` substituted in. Only the standard v3 fields
 * are copied; any extra properties on `map` are deliberately dropped.
 */
function clone(map, mappings) {
    const { version, file, names, sourceRoot, sources, sourcesContent } = map;
    return {
        version,
        file,
        names,
        sourceRoot,
        sources,
        sourcesContent,
        mappings,
    };
}
|
||||||
|
/**
 * Shapes an originalPositionFor() result record. All four fields may be
 * null when no original position could be found.
 */
function OMapping(source, line, column, name) {
    return {
        source: source,
        line: line,
        column: column,
        name: name,
    };
}
|
||||||
|
/**
 * Shapes a generatedPositionFor() result record; both fields are null when
 * no generated position could be found.
 */
function GMapping(line, column) {
    return { line: line, column: column };
}
|
||||||
|
// Binary-searches `segments` for the segment at (or biased around) `column`,
// memoizing per-line search state in `memo`.
// NOTE(review): reads the module-level `found` flag that memoizedBinarySearch
// sets as a side effect — presumably true on an exact column match; confirm
// against the binary-search module.
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        // Exact column hit: several segments may share the column, so slide to
        // the first/last of the run per the requested bias.
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        // Miss: the search left us at the greatest lower bound, so step
        // forward one to get the least upper bound instead.
        index++;
    if (index === -1 || index === segments.length)
        return null;
    return segments[index];
}
|
||||||
|
|
||||||
|
// Public UMD surface. The function-valued accessors (traceSegment,
// originalPositionFor, ...) were already assigned onto `exports` above.
exports.AnyMap = AnyMap;
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
exports.TraceMap = TraceMap;

// Mark the namespace as an ES-module interop target for bundlers.
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=trace-mapping.umd.js.map
|
||||||
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,8 @@
|
||||||
|
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
// AnyMap is both constructable and directly callable: either form accepts a
// plain, sectioned, or already-parsed source map input and yields a TraceMap.
declare type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};
|
||||||
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
// Memo of the last (key, needle, index) probe, letting monotonically
// increasing queries resume near the previous result instead of restarting.
export declare type MemoState = {
    lastKey: number;
    lastNeedle: number;
    lastIndex: number;
};
// Side-channel flag reporting whether the last search hit an exact match
// (read by traceSegmentInternal after memoizedBinarySearch returns).
export declare let found: boolean;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
|
||||||
|
|
@ -0,0 +1,7 @@
|
||||||
|
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
// Per-source reverse lookup table: arrays of reverse segments keyed by
// original line number. The null prototype avoids Object.prototype key
// collisions when lines are used as property names.
export declare type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
// Builds the source-position -> generated-position index consumed by
// generatedPositionFor.
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
// Resolves `input` relative to `base` (used to resolve source paths against
// sourceRoot and the map's URL).
export default function resolve(input: string, base: string | undefined): string;
|
||||||
|
|
@ -0,0 +1,2 @@
|
||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
// Returns `mappings` in sorted order; `owned` signals the array may be
// mutated in place rather than copied — presumably when we parsed it
// ourselves (see TraceMap constructor); confirm in sort.ts.
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
|
||||||
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
declare type GeneratedLine = number;
// A decoded mapping segment: 1 field (generated column only), 4 fields
// (adds the original source position), or 5 fields (adds a names index).
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
// Field offsets into SourceMapSegment tuples.
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
// Field offsets into ReverseSegment tuples.
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
|
||||||
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
/**
 * Removes everything after the last "/", but leaves the slash.
 * (Used by TraceMap to turn the map's URL into a base for resolving sources.)
 */
export default function stripFilename(path: string | undefined | null): string;
|
||||||
74
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
74
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,74 @@
|
||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
export type { SourceMapSegment } from './sourcemap-segment';
export type { SourceMapInput, SectionedSourceMapInput, DecodedSourceMap, EncodedSourceMap, SectionedSourceMap, InvalidOriginalMapping, OriginalMapping as Mapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, EachMapping, } from './types';
// Search biases for tie-breaking when a column has no exact mapping.
export declare const LEAST_UPPER_BOUND = -1;
export declare const GREATEST_LOWER_BOUND = 1;
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
export declare let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
export declare let decodedMappings: (map: TraceMap) => Readonly<DecodedSourceMap['mappings']>;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
export declare let traceSegment: (map: TraceMap, line: number, column: number) => Readonly<SourceMapSegment> | null;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
export declare let originalPositionFor: (map: TraceMap, needle: Needle) => OriginalMapping | InvalidOriginalMapping;
/**
 * Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
 * the found mapping is from the same source and line as the originalPositionFor mapping.
 *
 * Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
 * using the same needle that would return `id` when calling `originalPositionFor`.
 */
export declare let generatedPositionFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping | InvalidGeneratedMapping;
/**
 * Iterates each mapping in generated position order.
 */
export declare let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
export declare let sourceContentFor: (map: TraceMap, source: string) => string | null;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
export declare let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare let decodedMap: (map: TraceMap) => Omit<DecodedSourceMap, 'mappings'> & {
    mappings: readonly SourceMapSegment[][];
};
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare let encodedMap: (map: TraceMap) => EncodedSourceMap;
export { AnyMap } from './any-map';
// Parsed, traceable form of a source map; the `private` fields back the
// lazy accessors declared above.
export declare class TraceMap implements SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: string[];
    private _encoded;
    private _decoded;
    private _decodedMemo;
    private _bySources;
    private _bySourceMemos;
    constructor(map: SourceMapInput, mapUrl?: string | null);
}
|
||||||
|
|
@ -0,0 +1,85 @@
|
||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
import type { TraceMap } from './trace-mapping';
// Shared fields of the Source Map v3 format.
export interface SourceMapV3 {
    file?: string | null;
    names: string[];
    sourceRoot?: string;
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: SourceMapSegment[][];
}
// One entry of a sectioned ("indexed") map: a child map plus the generated
// position at which it starts.
export interface Section {
    offset: {
        line: number;
        column: number;
    };
    map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
    file?: string | null;
    sections: Section[];
    version: 3;
}
export declare type OriginalMapping = {
    source: string | null;
    line: number;
    column: number;
    name: string | null;
};
export declare type InvalidOriginalMapping = {
    source: null;
    line: null;
    column: null;
    name: null;
};
export declare type GeneratedMapping = {
    line: number;
    column: number;
};
export declare type InvalidGeneratedMapping = {
    line: null;
    column: null;
};
export declare type SourceMapInput = string | EncodedSourceMap | DecodedSourceMap | TraceMap;
export declare type SectionedSourceMapInput = SourceMapInput | SectionedSourceMap;
// Query position for originalPositionFor (line is 1-based, column 0-based).
export declare type Needle = {
    line: number;
    column: number;
    bias?: 1 | -1;
};
// Query position for generatedPositionFor; `source` may be the raw path or
// its resolved URL.
export declare type SourceNeedle = {
    source: string;
    line: number;
    column: number;
    bias?: 1 | -1;
};
// Callback payload for eachMapping: either an unmapped segment (all-null
// original fields) or a fully mapped one.
export declare type EachMapping = {
    generatedLine: number;
    generatedColumn: number;
    source: null;
    originalLine: null;
    originalColumn: null;
    name: null;
} | {
    generatedLine: number;
    generatedColumn: number;
    source: string | null;
    originalLine: number;
    originalColumn: number;
    name: string | null;
};
export declare abstract class SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: SourceMapV3['sources'];
}
|
||||||
|
|
@ -0,0 +1,75 @@
|
||||||
|
{
|
||||||
|
"name": "@jridgewell/trace-mapping",
|
||||||
|
"version": "0.3.15",
|
||||||
|
"description": "Trace the original position through a source map",
|
||||||
|
"keywords": [
|
||||||
|
"source",
|
||||||
|
"map"
|
||||||
|
],
|
||||||
|
"main": "dist/trace-mapping.umd.js",
|
||||||
|
"module": "dist/trace-mapping.mjs",
|
||||||
|
"typings": "dist/types/trace-mapping.d.ts",
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"exports": {
|
||||||
|
".": [
|
||||||
|
{
|
||||||
|
"types": "./dist/types/trace-mapping.d.ts",
|
||||||
|
"browser": "./dist/trace-mapping.umd.js",
|
||||||
|
"require": "./dist/trace-mapping.umd.js",
|
||||||
|
"import": "./dist/trace-mapping.mjs"
|
||||||
|
},
|
||||||
|
"./dist/trace-mapping.umd.js"
|
||||||
|
],
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jridgewell/trace-mapping.git"
|
||||||
|
},
|
||||||
|
"license": "MIT",
|
||||||
|
"scripts": {
|
||||||
|
"benchmark": "run-s build:rollup benchmark:*",
|
||||||
|
"benchmark:install": "cd benchmark && npm install",
|
||||||
|
"benchmark:only": "node --expose-gc benchmark/index.mjs",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "ava debug",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "c8 ava",
|
||||||
|
"test:watch": "ava --watch"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.2",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.23.0",
|
||||||
|
"@typescript-eslint/parser": "5.23.0",
|
||||||
|
"ava": "4.2.0",
|
||||||
|
"benchmark": "2.1.4",
|
||||||
|
"c8": "7.11.2",
|
||||||
|
"esbuild": "0.14.38",
|
||||||
|
"esbuild-node-loader": "0.8.0",
|
||||||
|
"eslint": "8.15.0",
|
||||||
|
"eslint-config-prettier": "8.5.0",
|
||||||
|
"eslint-plugin-no-only-tests": "2.6.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.6.2",
|
||||||
|
"rollup": "2.72.1",
|
||||||
|
"typescript": "4.6.4"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/resolve-uri": "^3.0.3",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,21 @@
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
@ -0,0 +1,85 @@
|
||||||
|
# Installation
|
||||||
|
> `npm install --save @types/eslint-scope`
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
This package contains type definitions for eslint-scope (https://github.com/eslint/eslint-scope).
|
||||||
|
|
||||||
|
# Details
|
||||||
|
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/eslint-scope.
|
||||||
|
## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/eslint-scope/index.d.ts)
|
||||||
|
````ts
|
||||||
|
// Type definitions for eslint-scope 3.7
|
||||||
|
// Project: https://github.com/eslint/eslint-scope
|
||||||
|
// Definitions by: Toru Nagashima <https://github.com/mysticatea>
|
||||||
|
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||||
|
// TypeScript Version: 3.8
|
||||||
|
import * as eslint from "eslint";
|
||||||
|
import * as estree from "estree";
|
||||||
|
|
||||||
|
export const version: string;
|
||||||
|
|
||||||
|
export class ScopeManager implements eslint.Scope.ScopeManager {
|
||||||
|
scopes: Scope[];
|
||||||
|
globalScope: Scope;
|
||||||
|
acquire(node: {}, inner?: boolean): Scope | null;
|
||||||
|
getDeclaredVariables(node: {}): Variable[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Scope implements eslint.Scope.Scope {
|
||||||
|
type: "block" | "catch" | "class" | "for" | "function" | "function-expression-name" | "global" | "module" | "switch" | "with" | "TDZ";
|
||||||
|
isStrict: boolean;
|
||||||
|
upper: Scope | null;
|
||||||
|
childScopes: Scope[];
|
||||||
|
variableScope: Scope;
|
||||||
|
block: estree.Node;
|
||||||
|
variables: Variable[];
|
||||||
|
set: Map<string, Variable>;
|
||||||
|
references: Reference[];
|
||||||
|
through: Reference[];
|
||||||
|
functionExpressionScope: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Variable implements eslint.Scope.Variable {
|
||||||
|
name: string;
|
||||||
|
scope: Scope;
|
||||||
|
identifiers: estree.Identifier[];
|
||||||
|
references: Reference[];
|
||||||
|
defs: eslint.Scope.Definition[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Reference implements eslint.Scope.Reference {
|
||||||
|
identifier: estree.Identifier;
|
||||||
|
from: Scope;
|
||||||
|
resolved: Variable | null;
|
||||||
|
writeExpr: estree.Node | null;
|
||||||
|
init: boolean;
|
||||||
|
|
||||||
|
isWrite(): boolean;
|
||||||
|
isRead(): boolean;
|
||||||
|
isWriteOnly(): boolean;
|
||||||
|
isReadOnly(): boolean;
|
||||||
|
isReadWrite(): boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AnalysisOptions {
|
||||||
|
optimistic?: boolean;
|
||||||
|
directive?: boolean;
|
||||||
|
ignoreEval?: boolean;
|
||||||
|
nodejsScope?: boolean;
|
||||||
|
impliedStrict?: boolean;
|
||||||
|
fallback?: string | ((node: {}) => string[]);
|
||||||
|
sourceType?: "script" | "module";
|
||||||
|
ecmaVersion?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function analyze(ast: {}, options?: AnalysisOptions): ScopeManager;
|
||||||
|
|
||||||
|
````
|
||||||
|
|
||||||
|
### Additional Details
|
||||||
|
* Last updated: Thu, 30 Jun 2022 19:02:28 GMT
|
||||||
|
* Dependencies: [@types/eslint](https://npmjs.com/package/@types/eslint), [@types/estree](https://npmjs.com/package/@types/estree)
|
||||||
|
* Global values: none
|
||||||
|
|
||||||
|
# Credits
|
||||||
|
These definitions were written by [Toru Nagashima](https://github.com/mysticatea).
|
||||||
|
|
@ -0,0 +1,65 @@
|
||||||
|
// Type definitions for eslint-scope 3.7
|
||||||
|
// Project: https://github.com/eslint/eslint-scope
|
||||||
|
// Definitions by: Toru Nagashima <https://github.com/mysticatea>
|
||||||
|
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||||
|
// TypeScript Version: 3.8
|
||||||
|
import * as eslint from "eslint";
|
||||||
|
import * as estree from "estree";
|
||||||
|
|
||||||
|
export const version: string;
|
||||||
|
|
||||||
|
export class ScopeManager implements eslint.Scope.ScopeManager {
|
||||||
|
scopes: Scope[];
|
||||||
|
globalScope: Scope;
|
||||||
|
acquire(node: {}, inner?: boolean): Scope | null;
|
||||||
|
getDeclaredVariables(node: {}): Variable[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Scope implements eslint.Scope.Scope {
|
||||||
|
type: "block" | "catch" | "class" | "for" | "function" | "function-expression-name" | "global" | "module" | "switch" | "with" | "TDZ";
|
||||||
|
isStrict: boolean;
|
||||||
|
upper: Scope | null;
|
||||||
|
childScopes: Scope[];
|
||||||
|
variableScope: Scope;
|
||||||
|
block: estree.Node;
|
||||||
|
variables: Variable[];
|
||||||
|
set: Map<string, Variable>;
|
||||||
|
references: Reference[];
|
||||||
|
through: Reference[];
|
||||||
|
functionExpressionScope: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Variable implements eslint.Scope.Variable {
|
||||||
|
name: string;
|
||||||
|
scope: Scope;
|
||||||
|
identifiers: estree.Identifier[];
|
||||||
|
references: Reference[];
|
||||||
|
defs: eslint.Scope.Definition[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Reference implements eslint.Scope.Reference {
|
||||||
|
identifier: estree.Identifier;
|
||||||
|
from: Scope;
|
||||||
|
resolved: Variable | null;
|
||||||
|
writeExpr: estree.Node | null;
|
||||||
|
init: boolean;
|
||||||
|
|
||||||
|
isWrite(): boolean;
|
||||||
|
isRead(): boolean;
|
||||||
|
isWriteOnly(): boolean;
|
||||||
|
isReadOnly(): boolean;
|
||||||
|
isReadWrite(): boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AnalysisOptions {
|
||||||
|
optimistic?: boolean;
|
||||||
|
directive?: boolean;
|
||||||
|
ignoreEval?: boolean;
|
||||||
|
nodejsScope?: boolean;
|
||||||
|
impliedStrict?: boolean;
|
||||||
|
fallback?: string | ((node: {}) => string[]);
|
||||||
|
sourceType?: "script" | "module";
|
||||||
|
ecmaVersion?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function analyze(ast: {}, options?: AnalysisOptions): ScopeManager;
|
||||||
|
|
@ -0,0 +1,28 @@
|
||||||
|
{
|
||||||
|
"name": "@types/eslint-scope",
|
||||||
|
"version": "3.7.4",
|
||||||
|
"description": "TypeScript definitions for eslint-scope",
|
||||||
|
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/eslint-scope",
|
||||||
|
"license": "MIT",
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "Toru Nagashima",
|
||||||
|
"url": "https://github.com/mysticatea",
|
||||||
|
"githubUsername": "mysticatea"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"main": "",
|
||||||
|
"types": "index.d.ts",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
|
||||||
|
"directory": "types/eslint-scope"
|
||||||
|
},
|
||||||
|
"scripts": {},
|
||||||
|
"dependencies": {
|
||||||
|
"@types/eslint": "*",
|
||||||
|
"@types/estree": "*"
|
||||||
|
},
|
||||||
|
"typesPublisherContentHash": "81c8e26e146b6b132a88bc06480ec59c5006561f35388cbc65756710cd486f05",
|
||||||
|
"typeScriptVersion": "4.0"
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,21 @@
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE
|
||||||
|
|
@ -0,0 +1,16 @@
|
||||||
|
# Installation
|
||||||
|
> `npm install --save @types/eslint`
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
This package contains type definitions for eslint (https://eslint.org).
|
||||||
|
|
||||||
|
# Details
|
||||||
|
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/eslint.
|
||||||
|
|
||||||
|
### Additional Details
|
||||||
|
* Last updated: Fri, 19 Aug 2022 02:32:31 GMT
|
||||||
|
* Dependencies: [@types/estree](https://npmjs.com/package/@types/estree), [@types/json-schema](https://npmjs.com/package/@types/json-schema)
|
||||||
|
* Global values: none
|
||||||
|
|
||||||
|
# Credits
|
||||||
|
These definitions were written by [Pierre-Marie Dartus](https://github.com/pmdartus), [Jed Fox](https://github.com/j-f1), [Saad Quadri](https://github.com/saadq), [Jason Kwok](https://github.com/JasonHK), [Brad Zacher](https://github.com/bradzacher), and [JounQin](https://github.com/JounQin).
|
||||||
|
|
@ -0,0 +1,3 @@
|
||||||
|
type Prepend<Tuple extends any[], Addend> = ((_: Addend, ..._1: Tuple) => any) extends (..._: infer Result) => any
|
||||||
|
? Result
|
||||||
|
: never;
|
||||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,65 @@
|
||||||
|
{
|
||||||
|
"name": "@types/eslint",
|
||||||
|
"version": "8.4.6",
|
||||||
|
"description": "TypeScript definitions for eslint",
|
||||||
|
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/eslint",
|
||||||
|
"license": "MIT",
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "Pierre-Marie Dartus",
|
||||||
|
"url": "https://github.com/pmdartus",
|
||||||
|
"githubUsername": "pmdartus"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Jed Fox",
|
||||||
|
"url": "https://github.com/j-f1",
|
||||||
|
"githubUsername": "j-f1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Saad Quadri",
|
||||||
|
"url": "https://github.com/saadq",
|
||||||
|
"githubUsername": "saadq"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Jason Kwok",
|
||||||
|
"url": "https://github.com/JasonHK",
|
||||||
|
"githubUsername": "JasonHK"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Brad Zacher",
|
||||||
|
"url": "https://github.com/bradzacher",
|
||||||
|
"githubUsername": "bradzacher"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "JounQin",
|
||||||
|
"url": "https://github.com/JounQin",
|
||||||
|
"githubUsername": "JounQin"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"main": "",
|
||||||
|
"types": "index.d.ts",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
|
||||||
|
"directory": "types/eslint"
|
||||||
|
},
|
||||||
|
"scripts": {},
|
||||||
|
"dependencies": {
|
||||||
|
"@types/estree": "*",
|
||||||
|
"@types/json-schema": "*"
|
||||||
|
},
|
||||||
|
"typesPublisherContentHash": "ea7a3930aee1a8f352a631e32f29e147c28e2916edc41c5459f608b82f73e211",
|
||||||
|
"typeScriptVersion": "4.0",
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"types": "./index.d.ts"
|
||||||
|
},
|
||||||
|
"./use-at-your-own-risk": {
|
||||||
|
"types": "./use-at-your-own-risk.d.ts"
|
||||||
|
},
|
||||||
|
"./rules": {
|
||||||
|
"types": "./rules/index.d.ts"
|
||||||
|
},
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,931 @@
|
||||||
|
import { Linter } from "../index";
|
||||||
|
|
||||||
|
export interface BestPractices extends Linter.RulesRecord {
|
||||||
|
/**
|
||||||
|
* Rule to enforce getter and setter pairs in objects.
|
||||||
|
*
|
||||||
|
* @since 0.22.0
|
||||||
|
* @see https://eslint.org/docs/rules/accessor-pairs
|
||||||
|
*/
|
||||||
|
"accessor-pairs": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
setWithoutGet: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
getWithoutSet: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce `return` statements in callbacks of array methods.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-alpha-1
|
||||||
|
* @see https://eslint.org/docs/rules/array-callback-return
|
||||||
|
*/
|
||||||
|
"array-callback-return": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowImplicit: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce the use of variables within the scope they are defined.
|
||||||
|
*
|
||||||
|
* @since 0.1.0
|
||||||
|
* @see https://eslint.org/docs/rules/block-scoped-var
|
||||||
|
*/
|
||||||
|
"block-scoped-var": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce that class methods utilize `this`.
|
||||||
|
*
|
||||||
|
* @since 3.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/class-methods-use-this
|
||||||
|
*/
|
||||||
|
"class-methods-use-this": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
exceptMethods: string[];
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce a maximum cyclomatic complexity allowed in a program.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/complexity
|
||||||
|
*/
|
||||||
|
complexity: Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
| Partial<{
|
||||||
|
/**
|
||||||
|
* @default 20
|
||||||
|
*/
|
||||||
|
max: number;
|
||||||
|
/**
|
||||||
|
* @deprecated
|
||||||
|
* @default 20
|
||||||
|
*/
|
||||||
|
maximum: number;
|
||||||
|
}>
|
||||||
|
| number,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require `return` statements to either always or never specify values.
|
||||||
|
*
|
||||||
|
* @since 0.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/consistent-return
|
||||||
|
*/
|
||||||
|
"consistent-return": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
treatUndefinedAsUnspecified: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce consistent brace style for all control statements.
|
||||||
|
*
|
||||||
|
* @since 0.0.2
|
||||||
|
* @see https://eslint.org/docs/rules/curly
|
||||||
|
*/
|
||||||
|
curly: Linter.RuleEntry<["all" | "multi" | "multi-line" | "multi-or-nest" | "consistent"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require `default` cases in `switch` statements.
|
||||||
|
*
|
||||||
|
* @since 0.6.0
|
||||||
|
* @see https://eslint.org/docs/rules/default-case
|
||||||
|
*/
|
||||||
|
"default-case": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default '^no default$'
|
||||||
|
*/
|
||||||
|
commentPattern: string;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce consistent newlines before and after dots.
|
||||||
|
*
|
||||||
|
* @since 0.21.0
|
||||||
|
* @see https://eslint.org/docs/rules/dot-location
|
||||||
|
*/
|
||||||
|
"dot-location": Linter.RuleEntry<["object" | "property"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce dot notation whenever possible.
|
||||||
|
*
|
||||||
|
* @since 0.0.7
|
||||||
|
* @see https://eslint.org/docs/rules/dot-notation
|
||||||
|
*/
|
||||||
|
"dot-notation": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
allowKeywords: boolean;
|
||||||
|
allowPattern: string;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require the use of `===` and `!==`.
|
||||||
|
*
|
||||||
|
* @since 0.0.2
|
||||||
|
* @see https://eslint.org/docs/rules/eqeqeq
|
||||||
|
*/
|
||||||
|
eqeqeq:
|
||||||
|
| Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
"always",
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default 'always'
|
||||||
|
*/
|
||||||
|
null: "always" | "never" | "ignore";
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>
|
||||||
|
| Linter.RuleEntry<["smart" | "allow-null"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require `for-in` loops to include an `if` statement.
|
||||||
|
*
|
||||||
|
* @since 0.0.6
|
||||||
|
* @see https://eslint.org/docs/rules/guard-for-in
|
||||||
|
*/
|
||||||
|
"guard-for-in": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce a maximum number of classes per file.
|
||||||
|
*
|
||||||
|
* @since 5.0.0-alpha.3
|
||||||
|
* @see https://eslint.org/docs/rules/max-classes-per-file
|
||||||
|
*/
|
||||||
|
"max-classes-per-file": Linter.RuleEntry<[number]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of `alert`, `confirm`, and `prompt`.
|
||||||
|
*
|
||||||
|
* @since 0.0.5
|
||||||
|
* @see https://eslint.org/docs/rules/no-alert
|
||||||
|
*/
|
||||||
|
"no-alert": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of `arguments.caller` or `arguments.callee`.
|
||||||
|
*
|
||||||
|
* @since 0.0.6
|
||||||
|
* @see https://eslint.org/docs/rules/no-caller
|
||||||
|
*/
|
||||||
|
"no-caller": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow lexical declarations in case clauses.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 1.9.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-case-declarations
|
||||||
|
*/
|
||||||
|
"no-case-declarations": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow division operators explicitly at the beginning of regular expressions.
|
||||||
|
*
|
||||||
|
* @since 0.1.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-div-regex
|
||||||
|
*/
|
||||||
|
"no-div-regex": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `else` blocks after `return` statements in `if` statements.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-else-return
|
||||||
|
*/
|
||||||
|
"no-else-return": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
allowElseIf: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow empty functions.
|
||||||
|
*
|
||||||
|
* @since 2.0.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-empty-function
|
||||||
|
*/
|
||||||
|
"no-empty-function": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default []
|
||||||
|
*/
|
||||||
|
allow: Array<
|
||||||
|
| "functions"
|
||||||
|
| "arrowFunctions"
|
||||||
|
| "generatorFunctions"
|
||||||
|
| "methods"
|
||||||
|
| "generatorMethods"
|
||||||
|
| "getters"
|
||||||
|
| "setters"
|
||||||
|
| "constructors"
|
||||||
|
>;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow empty destructuring patterns.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 1.7.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-empty-pattern
|
||||||
|
*/
|
||||||
|
"no-empty-pattern": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `null` comparisons without type-checking operators.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-eq-null
|
||||||
|
*/
|
||||||
|
"no-eq-null": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of `eval()`.
|
||||||
|
*
|
||||||
|
* @since 0.0.2
|
||||||
|
* @see https://eslint.org/docs/rules/no-eval
|
||||||
|
*/
|
||||||
|
"no-eval": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowIndirect: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow extending native types.
|
||||||
|
*
|
||||||
|
* @since 0.1.4
|
||||||
|
* @see https://eslint.org/docs/rules/no-extend-native
|
||||||
|
*/
|
||||||
|
"no-extend-native": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
exceptions: string[];
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary calls to `.bind()`.
|
||||||
|
*
|
||||||
|
* @since 0.8.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-extra-bind
|
||||||
|
*/
|
||||||
|
"no-extra-bind": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary labels.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-rc.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-extra-label
|
||||||
|
*/
|
||||||
|
"no-extra-label": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow fallthrough of `case` statements.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.7
|
||||||
|
* @see https://eslint.org/docs/rules/no-fallthrough
|
||||||
|
*/
|
||||||
|
"no-fallthrough": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default 'falls?\s?through'
|
||||||
|
*/
|
||||||
|
commentPattern: string;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow leading or trailing decimal points in numeric literals.
|
||||||
|
*
|
||||||
|
* @since 0.0.6
|
||||||
|
* @see https://eslint.org/docs/rules/no-floating-decimal
|
||||||
|
*/
|
||||||
|
"no-floating-decimal": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow assignments to native objects or read-only global variables.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 3.3.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-global-assign
|
||||||
|
*/
|
||||||
|
"no-global-assign": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
exceptions: string[];
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow shorthand type conversions.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-2
|
||||||
|
* @see https://eslint.org/docs/rules/no-implicit-coercion
|
||||||
|
*/
|
||||||
|
"no-implicit-coercion": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
boolean: boolean;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
number: boolean;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
string: boolean;
|
||||||
|
/**
|
||||||
|
* @default []
|
||||||
|
*/
|
||||||
|
allow: Array<"~" | "!!" | "+" | "*">;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow variable and `function` declarations in the global scope.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-alpha-1
|
||||||
|
* @see https://eslint.org/docs/rules/no-implicit-globals
|
||||||
|
*/
|
||||||
|
"no-implicit-globals": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of `eval()`-like methods.
|
||||||
|
*
|
||||||
|
* @since 0.0.7
|
||||||
|
* @see https://eslint.org/docs/rules/no-implied-eval
|
||||||
|
*/
|
||||||
|
"no-implied-eval": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `this` keywords outside of classes or class-like objects.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-2
|
||||||
|
* @see https://eslint.org/docs/rules/no-invalid-this
|
||||||
|
*/
|
||||||
|
"no-invalid-this": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of the `__iterator__` property.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-iterator
|
||||||
|
*/
|
||||||
|
"no-iterator": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow labeled statements.
|
||||||
|
*
|
||||||
|
* @since 0.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-labels
|
||||||
|
*/
|
||||||
|
"no-labels": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowLoop: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowSwitch: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary nested blocks.
|
||||||
|
*
|
||||||
|
* @since 0.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-lone-blocks
|
||||||
|
*/
|
||||||
|
"no-lone-blocks": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow function declarations that contain unsafe references inside loop statements.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-loop-func
|
||||||
|
*/
|
||||||
|
"no-loop-func": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow magic numbers.
|
||||||
|
*
|
||||||
|
* @since 1.7.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-magic-numbers
|
||||||
|
*/
|
||||||
|
"no-magic-numbers": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default []
|
||||||
|
*/
|
||||||
|
ignore: number[];
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreArrayIndexes: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
enforceConst: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
detectObjects: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow multiple spaces.
|
||||||
|
*
|
||||||
|
* @since 0.9.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-multi-spaces
|
||||||
|
*/
|
||||||
|
"no-multi-spaces": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreEOLComments: boolean;
|
||||||
|
/**
|
||||||
|
* @default { Property: true }
|
||||||
|
*/
|
||||||
|
exceptions: Record<string, boolean>;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow multiline strings.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-multi-str
|
||||||
|
*/
|
||||||
|
"no-multi-str": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `new` operators outside of assignments or comparisons.
|
||||||
|
*
|
||||||
|
* @since 0.0.7
|
||||||
|
* @see https://eslint.org/docs/rules/no-new
|
||||||
|
*/
|
||||||
|
"no-new": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `new` operators with the `Function` object.
|
||||||
|
*
|
||||||
|
* @since 0.0.7
|
||||||
|
* @see https://eslint.org/docs/rules/no-new-func
|
||||||
|
*/
|
||||||
|
"no-new-func": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `new` operators with the `String`, `Number`, and `Boolean` objects.
|
||||||
|
*
|
||||||
|
* @since 0.0.6
|
||||||
|
* @see https://eslint.org/docs/rules/no-new-wrappers
|
||||||
|
*/
|
||||||
|
"no-new-wrappers": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow octal literals.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.6
|
||||||
|
* @see https://eslint.org/docs/rules/no-octal
|
||||||
|
*/
|
||||||
|
"no-octal": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow octal escape sequences in string literals.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-octal-escape
|
||||||
|
*/
|
||||||
|
"no-octal-escape": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow reassigning `function` parameters.
|
||||||
|
*
|
||||||
|
* @since 0.18.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-param-reassign
|
||||||
|
*/
|
||||||
|
"no-param-reassign": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
props: boolean;
|
||||||
|
/**
|
||||||
|
* @default []
|
||||||
|
*/
|
||||||
|
ignorePropertyModificationsFor: string[];
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of the `__proto__` property.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-proto
|
||||||
|
*/
|
||||||
|
"no-proto": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow variable redeclaration.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-redeclare
|
||||||
|
*/
|
||||||
|
"no-redeclare": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
builtinGlobals: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow certain properties on certain objects.
|
||||||
|
*
|
||||||
|
* @since 3.5.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-restricted-properties
|
||||||
|
*/
|
||||||
|
"no-restricted-properties": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
...Array<
|
||||||
|
| {
|
||||||
|
object: string;
|
||||||
|
property?: string | undefined;
|
||||||
|
message?: string | undefined;
|
||||||
|
}
|
||||||
|
| {
|
||||||
|
property: string;
|
||||||
|
message?: string | undefined;
|
||||||
|
}
|
||||||
|
>
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow assignment operators in `return` statements.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-return-assign
|
||||||
|
*/
|
||||||
|
"no-return-assign": Linter.RuleEntry<["except-parens" | "always"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary `return await`.
|
||||||
|
*
|
||||||
|
* @since 3.10.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-return-await
|
||||||
|
*/
|
||||||
|
"no-return-await": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `javascript:` urls.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-script-url
|
||||||
|
*/
|
||||||
|
"no-script-url": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow assignments where both sides are exactly the same.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-rc.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-self-assign
|
||||||
|
*/
|
||||||
|
"no-self-assign": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow comparisons where both sides are exactly the same.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-self-compare
|
||||||
|
*/
|
||||||
|
"no-self-compare": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow comma operators.
|
||||||
|
*
|
||||||
|
* @since 0.5.1
|
||||||
|
* @see https://eslint.org/docs/rules/no-sequences
|
||||||
|
*/
|
||||||
|
"no-sequences": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow throwing literals as exceptions.
|
||||||
|
*
|
||||||
|
* @since 0.15.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-throw-literal
|
||||||
|
*/
|
||||||
|
"no-throw-literal": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unmodified loop conditions.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-alpha-2
|
||||||
|
* @see https://eslint.org/docs/rules/no-unmodified-loop-condition
|
||||||
|
*/
|
||||||
|
"no-unmodified-loop-condition": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unused expressions.
|
||||||
|
*
|
||||||
|
* @since 0.1.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-unused-expressions
|
||||||
|
*/
|
||||||
|
"no-unused-expressions": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowShortCircuit: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowTernary: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowTaggedTemplates: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unused labels.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-rc.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-unused-labels
|
||||||
|
*/
|
||||||
|
"no-unused-labels": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary calls to `.call()` and `.apply()`.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-1
|
||||||
|
* @see https://eslint.org/docs/rules/no-useless-call
|
||||||
|
*/
|
||||||
|
"no-useless-call": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary `catch` clauses.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 5.11.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-useless-catch
|
||||||
|
*/
|
||||||
|
"no-useless-catch": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary concatenation of literals or template literals.
|
||||||
|
*
|
||||||
|
* @since 1.3.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-useless-concat
|
||||||
|
*/
|
||||||
|
"no-useless-concat": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary escape characters.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 2.5.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-useless-escape
|
||||||
|
*/
|
||||||
|
"no-useless-escape": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow redundant return statements.
|
||||||
|
*
|
||||||
|
* @since 3.9.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-useless-return
|
||||||
|
*/
|
||||||
|
"no-useless-return": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `void` operators.
|
||||||
|
*
|
||||||
|
* @since 0.8.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-void
|
||||||
|
*/
|
||||||
|
"no-void": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow specified warning terms in comments.
|
||||||
|
*
|
||||||
|
* @since 0.4.4
|
||||||
|
* @see https://eslint.org/docs/rules/no-warning-comments
|
||||||
|
*/
|
||||||
|
"no-warning-comments": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
{
|
||||||
|
/**
|
||||||
|
* @default ["todo", "fixme", "xxx"]
|
||||||
|
*/
|
||||||
|
terms: string[];
|
||||||
|
/**
|
||||||
|
* @default 'start'
|
||||||
|
*/
|
||||||
|
location: "start" | "anywhere";
|
||||||
|
},
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `with` statements.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.2
|
||||||
|
* @see https://eslint.org/docs/rules/no-with
|
||||||
|
*/
|
||||||
|
"no-with": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce using named capture group in regular expression.
|
||||||
|
*
|
||||||
|
* @since 5.15.0
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-named-capture-group
|
||||||
|
*/
|
||||||
|
"prefer-named-capture-group": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require using Error objects as Promise rejection reasons.
|
||||||
|
*
|
||||||
|
* @since 3.14.0
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-promise-reject-errors
|
||||||
|
*/
|
||||||
|
"prefer-promise-reject-errors": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowEmptyReject: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce the consistent use of the radix argument when using `parseInt()`.
|
||||||
|
*
|
||||||
|
* @since 0.0.7
|
||||||
|
* @see https://eslint.org/docs/rules/radix
|
||||||
|
*/
|
||||||
|
radix: Linter.RuleEntry<["always" | "as-needed"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow async functions which have no `await` expression.
|
||||||
|
*
|
||||||
|
* @since 3.11.0
|
||||||
|
* @see https://eslint.org/docs/rules/require-await
|
||||||
|
*/
|
||||||
|
"require-await": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce the use of `u` flag on RegExp.
|
||||||
|
*
|
||||||
|
* @since 5.3.0
|
||||||
|
* @see https://eslint.org/docs/rules/require-unicode-regexp
|
||||||
|
*/
|
||||||
|
"require-unicode-regexp": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require `var` declarations be placed at the top of their containing scope.
|
||||||
|
*
|
||||||
|
* @since 0.8.0
|
||||||
|
* @see https://eslint.org/docs/rules/vars-on-top
|
||||||
|
*/
|
||||||
|
"vars-on-top": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require parentheses around immediate `function` invocations.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/wrap-iife
|
||||||
|
*/
|
||||||
|
"wrap-iife": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
"outside" | "inside" | "any",
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
functionPrototypeMethods: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require or disallow “Yoda” conditions.
|
||||||
|
*
|
||||||
|
* @since 0.7.1
|
||||||
|
* @see https://eslint.org/docs/rules/yoda
|
||||||
|
*/
|
||||||
|
yoda:
|
||||||
|
| Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
"never",
|
||||||
|
Partial<{
|
||||||
|
exceptRange: boolean;
|
||||||
|
onlyEquality: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>
|
||||||
|
| Linter.RuleEntry<["always"]>;
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,267 @@
|
||||||
|
import { Linter } from "../index";
|
||||||
|
|
||||||
|
export interface Deprecated extends Linter.RulesRecord {
|
||||||
|
/**
|
||||||
|
* Rule to enforce consistent indentation.
|
||||||
|
*
|
||||||
|
* @since 4.0.0-alpha.0
|
||||||
|
* @deprecated since 4.0.0, use [`indent`](https://eslint.org/docs/rules/indent) instead.
|
||||||
|
* @see https://eslint.org/docs/rules/indent-legacy
|
||||||
|
*/
|
||||||
|
"indent-legacy": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
number | "tab",
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default 0
|
||||||
|
*/
|
||||||
|
SwitchCase: number;
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
VariableDeclarator:
|
||||||
|
| Partial<{
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
var: number | "first";
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
let: number | "first";
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
const: number | "first";
|
||||||
|
}>
|
||||||
|
| number
|
||||||
|
| "first";
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
outerIIFEBody: number;
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
MemberExpression: number | "off";
|
||||||
|
/**
|
||||||
|
* @default { parameters: 1, body: 1 }
|
||||||
|
*/
|
||||||
|
FunctionDeclaration: Partial<{
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
parameters: number | "first" | "off";
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
body: number;
|
||||||
|
}>;
|
||||||
|
/**
|
||||||
|
* @default { parameters: 1, body: 1 }
|
||||||
|
*/
|
||||||
|
FunctionExpression: Partial<{
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
parameters: number | "first" | "off";
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
body: number;
|
||||||
|
}>;
|
||||||
|
/**
|
||||||
|
* @default { arguments: 1 }
|
||||||
|
*/
|
||||||
|
CallExpression: Partial<{
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
arguments: number | "first" | "off";
|
||||||
|
}>;
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
ArrayExpression: number | "first" | "off";
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
ObjectExpression: number | "first" | "off";
|
||||||
|
/**
|
||||||
|
* @default 1
|
||||||
|
*/
|
||||||
|
ImportDeclaration: number | "first" | "off";
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
flatTernaryExpressions: boolean;
|
||||||
|
ignoredNodes: string[];
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreComments: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require or disallow newlines around directives.
|
||||||
|
*
|
||||||
|
* @since 3.5.0
|
||||||
|
* @deprecated since 4.0.0, use [`padding-line-between-statements`](https://eslint.org/docs/rules/padding-line-between-statements) instead.
|
||||||
|
* @see https://eslint.org/docs/rules/lines-around-directive
|
||||||
|
*/
|
||||||
|
"lines-around-directive": Linter.RuleEntry<["always" | "never"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require or disallow an empty line after variable declarations.
|
||||||
|
*
|
||||||
|
* @since 0.18.0
|
||||||
|
* @deprecated since 4.0.0, use [`padding-line-between-statements`](https://eslint.org/docs/rules/padding-line-between-statements) instead.
|
||||||
|
* @see https://eslint.org/docs/rules/newline-after-var
|
||||||
|
*/
|
||||||
|
"newline-after-var": Linter.RuleEntry<["always" | "never"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require an empty line before `return` statements.
|
||||||
|
*
|
||||||
|
* @since 2.3.0
|
||||||
|
* @deprecated since 4.0.0, use [`padding-line-between-statements`](https://eslint.org/docs/rules/padding-line-between-statements) instead.
|
||||||
|
* @see https://eslint.org/docs/rules/newline-before-return
|
||||||
|
*/
|
||||||
|
"newline-before-return": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow shadowing of variables inside of `catch`.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @deprecated since 5.1.0, use [`no-shadow`](https://eslint.org/docs/rules/no-shadow) instead.
|
||||||
|
* @see https://eslint.org/docs/rules/no-catch-shadow
|
||||||
|
*/
|
||||||
|
"no-catch-shadow": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow reassignment of native objects.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @deprecated since 3.3.0, use [`no-global-assign`](https://eslint.org/docs/rules/no-global-assign) instead.
|
||||||
|
* @see https://eslint.org/docs/rules/no-native-reassign
|
||||||
|
*/
|
||||||
|
"no-native-reassign": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
exceptions: string[];
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow negating the left operand in `in` expressions.
|
||||||
|
*
|
||||||
|
* @since 0.1.2
|
||||||
|
* @deprecated since 3.3.0, use [`no-unsafe-negation`](https://eslint.org/docs/rules/no-unsafe-negation) instead.
|
||||||
|
* @see https://eslint.org/docs/rules/no-negated-in-lhs
|
||||||
|
*/
|
||||||
|
"no-negated-in-lhs": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow spacing between function identifiers and their applications.
|
||||||
|
*
|
||||||
|
* @since 0.1.2
|
||||||
|
* @deprecated since 3.3.0, use [`func-call-spacing`](https://eslint.org/docs/rules/func-call-spacing) instead.
|
||||||
|
* @see https://eslint.org/docs/rules/no-spaced-func
|
||||||
|
*/
|
||||||
|
"no-spaced-func": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to suggest using `Reflect` methods where applicable.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-2
|
||||||
|
* @deprecated since 3.9.0
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-reflect
|
||||||
|
*/
|
||||||
|
"prefer-reflect": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
exceptions: string[];
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require JSDoc comments.
|
||||||
|
*
|
||||||
|
* @since 1.4.0
|
||||||
|
* @deprecated since 5.10.0
|
||||||
|
* @see https://eslint.org/docs/rules/require-jsdoc
|
||||||
|
*/
|
||||||
|
"require-jsdoc": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
require: Partial<{
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
FunctionDeclaration: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
MethodDefinition: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ClassDeclaration: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ArrowFunctionExpression: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
FunctionExpression: boolean;
|
||||||
|
}>;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce valid JSDoc comments.
|
||||||
|
*
|
||||||
|
* @since 0.4.0
|
||||||
|
* @deprecated since 5.10.0
|
||||||
|
* @see https://eslint.org/docs/rules/valid-jsdoc
|
||||||
|
*/
|
||||||
|
"valid-jsdoc": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
prefer: Record<string, string>;
|
||||||
|
preferType: Record<string, string>;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
requireReturn: boolean;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
requireReturnType: boolean;
|
||||||
|
/**
|
||||||
|
* @remarks
|
||||||
|
* Also accept for regular expression pattern
|
||||||
|
*/
|
||||||
|
matchDescription: string;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
requireParamDescription: boolean;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
requireReturnDescription: boolean;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
requireParamType: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,502 @@
|
||||||
|
import { Linter } from "../index";
|
||||||
|
|
||||||
|
export interface ECMAScript6 extends Linter.RulesRecord {
|
||||||
|
/**
|
||||||
|
* Rule to require braces around arrow function bodies.
|
||||||
|
*
|
||||||
|
* @since 1.8.0
|
||||||
|
* @see https://eslint.org/docs/rules/arrow-body-style
|
||||||
|
*/
|
||||||
|
"arrow-body-style":
|
||||||
|
| Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
"as-needed",
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
requireReturnForObjectLiteral: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>
|
||||||
|
| Linter.RuleEntry<["always" | "never"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require parentheses around arrow function arguments.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-1
|
||||||
|
* @see https://eslint.org/docs/rules/arrow-parens
|
||||||
|
*/
|
||||||
|
"arrow-parens":
|
||||||
|
| Linter.RuleEntry<["always"]>
|
||||||
|
| Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
"as-needed",
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
requireForBlockBody: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce consistent spacing before and after the arrow in arrow functions.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-1
|
||||||
|
* @see https://eslint.org/docs/rules/arrow-spacing
|
||||||
|
*/
|
||||||
|
"arrow-spacing": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require `super()` calls in constructors.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.24.0
|
||||||
|
* @see https://eslint.org/docs/rules/constructor-super
|
||||||
|
*/
|
||||||
|
"constructor-super": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce consistent spacing around `*` operators in generator functions.
|
||||||
|
*
|
||||||
|
* @since 0.17.0
|
||||||
|
* @see https://eslint.org/docs/rules/generator-star-spacing
|
||||||
|
*/
|
||||||
|
"generator-star-spacing": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
| Partial<{
|
||||||
|
before: boolean;
|
||||||
|
after: boolean;
|
||||||
|
named:
|
||||||
|
| Partial<{
|
||||||
|
before: boolean;
|
||||||
|
after: boolean;
|
||||||
|
}>
|
||||||
|
| "before"
|
||||||
|
| "after"
|
||||||
|
| "both"
|
||||||
|
| "neither";
|
||||||
|
anonymous:
|
||||||
|
| Partial<{
|
||||||
|
before: boolean;
|
||||||
|
after: boolean;
|
||||||
|
}>
|
||||||
|
| "before"
|
||||||
|
| "after"
|
||||||
|
| "both"
|
||||||
|
| "neither";
|
||||||
|
method:
|
||||||
|
| Partial<{
|
||||||
|
before: boolean;
|
||||||
|
after: boolean;
|
||||||
|
}>
|
||||||
|
| "before"
|
||||||
|
| "after"
|
||||||
|
| "both"
|
||||||
|
| "neither";
|
||||||
|
}>
|
||||||
|
| "before"
|
||||||
|
| "after"
|
||||||
|
| "both"
|
||||||
|
| "neither",
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow reassigning class members.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-1
|
||||||
|
* @see https://eslint.org/docs/rules/no-class-assign
|
||||||
|
*/
|
||||||
|
"no-class-assign": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow arrow functions where they could be confused with comparisons.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-alpha-2
|
||||||
|
* @see https://eslint.org/docs/rules/no-confusing-arrow
|
||||||
|
*/
|
||||||
|
"no-confusing-arrow": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
allowParens: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow reassigning `const` variables.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-1
|
||||||
|
* @see https://eslint.org/docs/rules/no-const-assign
|
||||||
|
*/
|
||||||
|
"no-const-assign": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow duplicate class members.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 1.2.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-dupe-class-members
|
||||||
|
*/
|
||||||
|
"no-dupe-class-members": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow duplicate module imports.
|
||||||
|
*
|
||||||
|
* @since 2.5.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-duplicate-import
|
||||||
|
*/
|
||||||
|
"no-duplicate-import": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
includeExports: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `new` operators with the `Symbol` object.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-beta.1
|
||||||
|
* @see https://eslint.org/docs/rules/no-new-symbol
|
||||||
|
*/
|
||||||
|
"no-new-symbol": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow specified modules when loaded by `import`.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-alpha-1
|
||||||
|
* @see https://eslint.org/docs/rules/no-restricted-imports
|
||||||
|
*/
|
||||||
|
"no-restricted-imports": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
...Array<
|
||||||
|
| string
|
||||||
|
| {
|
||||||
|
name: string;
|
||||||
|
importNames?: string[] | undefined;
|
||||||
|
message?: string | undefined;
|
||||||
|
}
|
||||||
|
| Partial<{
|
||||||
|
paths: Array<
|
||||||
|
| string
|
||||||
|
| {
|
||||||
|
name: string;
|
||||||
|
importNames?: string[] | undefined;
|
||||||
|
message?: string | undefined;
|
||||||
|
}
|
||||||
|
>;
|
||||||
|
patterns: string[];
|
||||||
|
}>
|
||||||
|
>
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `this`/`super` before calling `super()` in constructors.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.24.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-this-before-super
|
||||||
|
*/
|
||||||
|
"no-this-before-super": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary computed property keys in object literals.
|
||||||
|
*
|
||||||
|
* @since 2.9.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-useless-computed-key
|
||||||
|
*/
|
||||||
|
"no-useless-computed-key": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary constructors.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-beta.1
|
||||||
|
* @see https://eslint.org/docs/rules/no-useless-constructor
|
||||||
|
*/
|
||||||
|
"no-useless-constructor": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow renaming import, export, and destructured assignments to the same name.
|
||||||
|
*
|
||||||
|
* @since 2.11.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-useless-rename
|
||||||
|
*/
|
||||||
|
"no-useless-rename": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreImport: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreExport: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreDestructuring: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require `let` or `const` instead of `var`.
|
||||||
|
*
|
||||||
|
* @since 0.12.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-var
|
||||||
|
*/
|
||||||
|
"no-var": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require or disallow method and property shorthand syntax for object literals.
|
||||||
|
*
|
||||||
|
* @since 0.20.0
|
||||||
|
* @see https://eslint.org/docs/rules/object-shorthand
|
||||||
|
*/
|
||||||
|
"object-shorthand":
|
||||||
|
| Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
"always" | "methods",
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
avoidQuotes: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreConstructors: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
avoidExplicitReturnArrows: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>
|
||||||
|
| Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
"properties",
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
avoidQuotes: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>
|
||||||
|
| Linter.RuleEntry<["never" | "consistent" | "consistent-as-needed"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require using arrow functions for callbacks.
|
||||||
|
*
|
||||||
|
* @since 1.2.0
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-arrow-callback
|
||||||
|
*/
|
||||||
|
"prefer-arrow-callback": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowNamedFunctions: boolean;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
allowUnboundThis: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require `const` declarations for variables that are never reassigned after declared.
|
||||||
|
*
|
||||||
|
* @since 0.23.0
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-const
|
||||||
|
*/
|
||||||
|
"prefer-const": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default 'any'
|
||||||
|
*/
|
||||||
|
destructuring: "any" | "all";
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreReadBeforeAssign: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require destructuring from arrays and/or objects.
|
||||||
|
*
|
||||||
|
* @since 3.13.0
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-destructuring
|
||||||
|
*/
|
||||||
|
"prefer-destructuring": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<
|
||||||
|
| {
|
||||||
|
VariableDeclarator: Partial<{
|
||||||
|
array: boolean;
|
||||||
|
object: boolean;
|
||||||
|
}>;
|
||||||
|
AssignmentExpression: Partial<{
|
||||||
|
array: boolean;
|
||||||
|
object: boolean;
|
||||||
|
}>;
|
||||||
|
}
|
||||||
|
| {
|
||||||
|
array: boolean;
|
||||||
|
object: boolean;
|
||||||
|
}
|
||||||
|
>,
|
||||||
|
Partial<{
|
||||||
|
enforceForRenamedProperties: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `parseInt()` and `Number.parseInt()` in favor of binary, octal, and hexadecimal literals.
|
||||||
|
*
|
||||||
|
* @since 3.5.0
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-numeric-literals
|
||||||
|
*/
|
||||||
|
"prefer-numeric-literals": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require rest parameters instead of `arguments`.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-alpha-1
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-rest-params
|
||||||
|
*/
|
||||||
|
"prefer-rest-params": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require spread operators instead of `.apply()`.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-1
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-spread
|
||||||
|
*/
|
||||||
|
"prefer-spread": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require template literals instead of string concatenation.
|
||||||
|
*
|
||||||
|
* @since 1.2.0
|
||||||
|
* @see https://eslint.org/docs/rules/prefer-template
|
||||||
|
*/
|
||||||
|
"prefer-template": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require generator functions to contain `yield`.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-1
|
||||||
|
* @see https://eslint.org/docs/rules/require-yield
|
||||||
|
*/
|
||||||
|
"require-yield": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce spacing between rest and spread operators and their expressions.
|
||||||
|
*
|
||||||
|
* @since 2.12.0
|
||||||
|
* @see https://eslint.org/docs/rules/rest-spread-spacing
|
||||||
|
*/
|
||||||
|
"rest-spread-spacing": Linter.RuleEntry<["never" | "always"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce sorted import declarations within modules.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-beta.1
|
||||||
|
* @see https://eslint.org/docs/rules/sort-imports
|
||||||
|
*/
|
||||||
|
"sort-imports": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreCase: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreDeclarationSort: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
ignoreMemberSort: boolean;
|
||||||
|
/**
|
||||||
|
* @default ['none', 'all', 'multiple', 'single']
|
||||||
|
*/
|
||||||
|
memberSyntaxSortOrder: Array<"none" | "all" | "multiple" | "single">;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require symbol descriptions.
|
||||||
|
*
|
||||||
|
* @since 3.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/symbol-description
|
||||||
|
*/
|
||||||
|
"symbol-description": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require or disallow spacing around embedded expressions of template strings.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-rc.0
|
||||||
|
* @see https://eslint.org/docs/rules/template-curly-spacing
|
||||||
|
*/
|
||||||
|
"template-curly-spacing": Linter.RuleEntry<["never" | "always"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require or disallow spacing around the `*` in `yield*` expressions.
|
||||||
|
*
|
||||||
|
* @since 2.0.0-alpha-1
|
||||||
|
* @see https://eslint.org/docs/rules/yield-star-spacing
|
||||||
|
*/
|
||||||
|
"yield-star-spacing": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
| Partial<{
|
||||||
|
before: boolean;
|
||||||
|
after: boolean;
|
||||||
|
}>
|
||||||
|
| "before"
|
||||||
|
| "after"
|
||||||
|
| "both"
|
||||||
|
| "neither",
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,21 @@
|
||||||
|
import { Linter } from "../index";
|
||||||
|
|
||||||
|
import { BestPractices } from "./best-practices";
|
||||||
|
import { Deprecated } from "./deprecated";
|
||||||
|
import { ECMAScript6 } from "./ecmascript-6";
|
||||||
|
import { NodeJSAndCommonJS } from "./node-commonjs";
|
||||||
|
import { PossibleErrors } from "./possible-errors";
|
||||||
|
import { StrictMode } from "./strict-mode";
|
||||||
|
import { StylisticIssues } from "./stylistic-issues";
|
||||||
|
import { Variables } from "./variables";
|
||||||
|
|
||||||
|
export interface ESLintRules
|
||||||
|
extends Linter.RulesRecord,
|
||||||
|
PossibleErrors,
|
||||||
|
BestPractices,
|
||||||
|
StrictMode,
|
||||||
|
Variables,
|
||||||
|
NodeJSAndCommonJS,
|
||||||
|
StylisticIssues,
|
||||||
|
ECMAScript6,
|
||||||
|
Deprecated {}
|
||||||
|
|
@ -0,0 +1,133 @@
|
||||||
|
import { Linter } from "../index";
|
||||||
|
|
||||||
|
export interface NodeJSAndCommonJS extends Linter.RulesRecord {
|
||||||
|
/**
|
||||||
|
* Rule to require `return` statements after callbacks.
|
||||||
|
*
|
||||||
|
* @since 1.0.0-rc-1
|
||||||
|
* @see https://eslint.org/docs/rules/callback-return
|
||||||
|
*/
|
||||||
|
"callback-return": Linter.RuleEntry<[string[]]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require `require()` calls to be placed at top-level module scope.
|
||||||
|
*
|
||||||
|
* @since 1.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/global-require
|
||||||
|
*/
|
||||||
|
"global-require": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require error handling in callbacks.
|
||||||
|
*
|
||||||
|
* @since 0.4.5
|
||||||
|
* @see https://eslint.org/docs/rules/handle-callback-err
|
||||||
|
*/
|
||||||
|
"handle-callback-err": Linter.RuleEntry<[string]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow use of the `Buffer()` constructor.
|
||||||
|
*
|
||||||
|
* @since 4.0.0-alpha.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-buffer-constructor
|
||||||
|
*/
|
||||||
|
"no-buffer-constructor": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `require` calls to be mixed with regular variable declarations.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-mixed-requires
|
||||||
|
*/
|
||||||
|
"no-mixed-requires": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
grouping: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowCall: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `new` operators with calls to `require`.
|
||||||
|
*
|
||||||
|
* @since 0.6.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-new-require
|
||||||
|
*/
|
||||||
|
"no-new-require": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow string concatenation when using `__dirname` and `__filename`.
|
||||||
|
*
|
||||||
|
* @since 0.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-path-concat
|
||||||
|
*/
|
||||||
|
"no-path-concat": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of `process.env`.
|
||||||
|
*
|
||||||
|
* @since 0.9.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-process-env
|
||||||
|
*/
|
||||||
|
"no-process-env": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of `process.exit()`.
|
||||||
|
*
|
||||||
|
* @since 0.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-process-exit
|
||||||
|
*/
|
||||||
|
"no-process-exit": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow specified modules when loaded by `require`.
|
||||||
|
*
|
||||||
|
* @since 0.6.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-restricted-modules
|
||||||
|
*/
|
||||||
|
"no-restricted-modules": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
...Array<
|
||||||
|
| string
|
||||||
|
| {
|
||||||
|
name: string;
|
||||||
|
message?: string | undefined;
|
||||||
|
}
|
||||||
|
| Partial<{
|
||||||
|
paths: Array<
|
||||||
|
| string
|
||||||
|
| {
|
||||||
|
name: string;
|
||||||
|
message?: string | undefined;
|
||||||
|
}
|
||||||
|
>;
|
||||||
|
patterns: string[];
|
||||||
|
}>
|
||||||
|
>
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow synchronous methods.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-sync
|
||||||
|
*/
|
||||||
|
"no-sync": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowAtRootLevel: boolean;
|
||||||
|
},
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,484 @@
|
||||||
|
import { Linter } from "../index";
|
||||||
|
|
||||||
|
export interface PossibleErrors extends Linter.RulesRecord {
|
||||||
|
/**
|
||||||
|
* Rule to enforce `for` loop update clause moving the counter in the right direction.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 4.0.0-beta.0
|
||||||
|
* @see https://eslint.org/docs/rules/for-direction
|
||||||
|
*/
|
||||||
|
"for-direction": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce `return` statements in getters.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 4.2.0
|
||||||
|
* @see https://eslint.org/docs/rules/getter-return
|
||||||
|
*/
|
||||||
|
"getter-return": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowImplicit: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow using an async function as a `Promise` executor.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 5.3.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-async-promise-executor
|
||||||
|
*/
|
||||||
|
"no-async-promise-executor": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow `await` inside of loops.
|
||||||
|
*
|
||||||
|
* @since 3.12.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-await-in-loop
|
||||||
|
*/
|
||||||
|
"no-await-in-loop": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow comparing against `-0`.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 3.17.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-compare-neg-zero
|
||||||
|
*/
|
||||||
|
"no-compare-neg-zero": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow assignment operators in conditional statements.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-cond-assign
|
||||||
|
*/
|
||||||
|
"no-cond-assign": Linter.RuleEntry<["except-parens" | "always"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of `console`.
|
||||||
|
*
|
||||||
|
* @since 0.0.2
|
||||||
|
* @see https://eslint.org/docs/rules/no-console
|
||||||
|
*/
|
||||||
|
"no-console": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
allow: Array<keyof Console>;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow constant expressions in conditions.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.4.1
|
||||||
|
* @see https://eslint.org/docs/rules/no-constant-condition
|
||||||
|
*/
|
||||||
|
"no-constant-condition": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
{
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
checkLoops: boolean;
|
||||||
|
},
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow control characters in regular expressions.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.1.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-control-regex
|
||||||
|
*/
|
||||||
|
"no-control-regex": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow the use of `debugger`.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.2
|
||||||
|
* @see https://eslint.org/docs/rules/no-debugger
|
||||||
|
*/
|
||||||
|
"no-debugger": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow duplicate arguments in `function` definitions.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.16.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-dupe-args
|
||||||
|
*/
|
||||||
|
"no-dupe-args": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow duplicate keys in object literals.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-dupe-keys
|
||||||
|
*/
|
||||||
|
"no-dupe-keys": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow a duplicate case label.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.17.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-duplicate-case
|
||||||
|
*/
|
||||||
|
"no-duplicate-case": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow empty block statements.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.2
|
||||||
|
* @see https://eslint.org/docs/rules/no-empty
|
||||||
|
*/
|
||||||
|
"no-empty": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
allowEmptyCatch: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow empty character classes in regular expressions.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.22.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-empty-character-class
|
||||||
|
*/
|
||||||
|
"no-empty-character-class": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow reassigning exceptions in `catch` clauses.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-ex-assign
|
||||||
|
*/
|
||||||
|
"no-ex-assign": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary boolean casts.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-extra-boolean-cast
|
||||||
|
*/
|
||||||
|
"no-extra-boolean-cast": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary parentheses.
|
||||||
|
*
|
||||||
|
* @since 0.1.4
|
||||||
|
* @see https://eslint.org/docs/rules/no-extra-parens
|
||||||
|
*/
|
||||||
|
"no-extra-parens":
|
||||||
|
| Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
"all",
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default true,
|
||||||
|
*/
|
||||||
|
conditionalAssign: boolean;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
returnAssign: boolean;
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
nestedBinaryExpressions: boolean;
|
||||||
|
/**
|
||||||
|
* @default 'none'
|
||||||
|
*/
|
||||||
|
ignoreJSX: "none" | "all" | "multi-line" | "single-line";
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
enforceForArrowConditionals: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>
|
||||||
|
| Linter.RuleEntry<["functions"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unnecessary semicolons.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-extra-semi
|
||||||
|
*/
|
||||||
|
"no-extra-semi": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow reassigning `function` declarations.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-func-assign
|
||||||
|
*/
|
||||||
|
"no-func-assign": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow variable or `function` declarations in nested blocks.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.6.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-inner-declarations
|
||||||
|
*/
|
||||||
|
"no-inner-declarations": Linter.RuleEntry<["functions" | "both"]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow invalid regular expression strings in `RegExp` constructors.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.1.4
|
||||||
|
* @see https://eslint.org/docs/rules/no-invalid-regexp
|
||||||
|
*/
|
||||||
|
"no-invalid-regexp": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
allowConstructorFlags: string[];
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow irregular whitespace.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.9.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-irregular-whitespace
|
||||||
|
*/
|
||||||
|
"no-irregular-whitespace": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default true
|
||||||
|
*/
|
||||||
|
skipStrings: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
skipComments: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
skipRegExps: boolean;
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
skipTemplates: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow characters which are made with multiple code points in character class syntax.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 5.3.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-misleading-character-class
|
||||||
|
*/
|
||||||
|
"no-misleading-character-class": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow calling global object properties as functions.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.9
|
||||||
|
* @see https://eslint.org/docs/rules/no-obj-calls
|
||||||
|
*/
|
||||||
|
"no-obj-calls": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow use of `Object.prototypes` builtins directly.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 2.11.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-prototype-builtins
|
||||||
|
*/
|
||||||
|
"no-prototype-builtins": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow multiple spaces in regular expressions.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-regex-spaces
|
||||||
|
*/
|
||||||
|
"no-regex-spaces": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow sparse arrays.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.4.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-sparse-arrays
|
||||||
|
*/
|
||||||
|
"no-sparse-arrays": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow template literal placeholder syntax in regular strings.
|
||||||
|
*
|
||||||
|
* @since 3.3.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-template-curly-in-string
|
||||||
|
*/
|
||||||
|
"no-template-curly-in-string": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow confusing multiline expressions.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.24.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-unexpected-multiline
|
||||||
|
*/
|
||||||
|
"no-unexpected-multiline": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow unreachable code after `return`, `throw`, `continue`, and `break` statements.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.6
|
||||||
|
* @see https://eslint.org/docs/rules/no-unreachable
|
||||||
|
*/
|
||||||
|
"no-unreachable": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow control flow statements in `finally` blocks.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 2.9.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-unsafe-finally
|
||||||
|
*/
|
||||||
|
"no-unsafe-finally": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow negating the left operand of relational operators.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 3.3.0
|
||||||
|
* @see https://eslint.org/docs/rules/no-unsafe-negation
|
||||||
|
*/
|
||||||
|
"no-unsafe-negation": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to disallow assignments that can lead to race conditions due to usage of `await` or `yield`.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 5.3.0
|
||||||
|
* @see https://eslint.org/docs/rules/require-atomic-updates
|
||||||
|
*/
|
||||||
|
"require-atomic-updates": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to require calls to `isNaN()` when checking for `NaN`.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.0.6
|
||||||
|
* @see https://eslint.org/docs/rules/use-isnan
|
||||||
|
*/
|
||||||
|
"use-isnan": Linter.RuleEntry<[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rule to enforce comparing `typeof` expressions against valid strings.
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* Recommended by ESLint, the rule was enabled in `eslint:recommended`.
|
||||||
|
*
|
||||||
|
* @since 0.5.0
|
||||||
|
* @see https://eslint.org/docs/rules/valid-typeof
|
||||||
|
*/
|
||||||
|
"valid-typeof": Linter.RuleEntry<
|
||||||
|
[
|
||||||
|
Partial<{
|
||||||
|
/**
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
requireStringLiterals: boolean;
|
||||||
|
}>,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,11 @@
|
||||||
|
import { Linter } from "../index";
|
||||||
|
|
||||||
|
export interface StrictMode extends Linter.RulesRecord {
|
||||||
|
/**
|
||||||
|
* Rule to require or disallow strict mode directives.
|
||||||
|
*
|
||||||
|
* @since 0.1.0
|
||||||
|
* @see https://eslint.org/docs/rules/strict
|
||||||
|
*/
|
||||||
|
strict: Linter.RuleEntry<["safe" | "global" | "function" | "never"]>;
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue