routie: initial dev commit — earlier work did not follow proper project guidance
This commit is contained in:
+758
@@ -0,0 +1,758 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Ivan Kopeykin @vankop
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const { parseIdentifier } = require("./identifier");
|
||||
|
||||
/** @typedef {string | (string | ConditionalMapping)[]} DirectMapping */
|
||||
/** @typedef {{ [k: string]: MappingValue }} ConditionalMapping */
|
||||
/** @typedef {ConditionalMapping | DirectMapping | null} MappingValue */
|
||||
/** @typedef {Record<string, MappingValue> | ConditionalMapping | DirectMapping} ExportsField */
|
||||
/** @typedef {Record<string, MappingValue>} ImportsField */
|
||||
|
||||
/**
|
||||
* Processing exports/imports field
|
||||
* @callback FieldProcessor
|
||||
* @param {string} request request
|
||||
* @param {Set<string>} conditionNames condition names
|
||||
* @returns {[string[], string | null]} resolved paths with used field
|
||||
*/
|
||||
|
||||
/*
|
||||
Example exports field:
|
||||
{
|
||||
".": "./main.js",
|
||||
"./feature": {
|
||||
"browser": "./feature-browser.js",
|
||||
"default": "./feature.js"
|
||||
}
|
||||
}
|
||||
Terminology:
|
||||
|
||||
Enhanced-resolve name keys ("." and "./feature") as exports field keys.
|
||||
|
||||
If value is string or string[], mapping is called as a direct mapping
|
||||
and value called as a direct export.
|
||||
|
||||
If value is key-value object, mapping is called as a conditional mapping
|
||||
and value called as a conditional export.
|
||||
|
||||
Key in conditional mapping is called condition name.
|
||||
|
||||
Conditional mapping nested in another conditional mapping is called nested mapping.
|
||||
|
||||
----------
|
||||
|
||||
Example imports field:
|
||||
{
|
||||
"#a": "./main.js",
|
||||
"#moment": {
|
||||
"browser": "./moment/index.js",
|
||||
"default": "moment"
|
||||
},
|
||||
"#moment/": {
|
||||
"browser": "./moment/",
|
||||
"default": "moment/"
|
||||
}
|
||||
}
|
||||
Terminology:
|
||||
|
||||
Enhanced-resolve name keys ("#a" and "#moment/", "#moment") as imports field keys.
|
||||
|
||||
If value is string or string[], mapping is called as a direct mapping
|
||||
and value called as a direct export.
|
||||
|
||||
If value is key-value object, mapping is called as a conditional mapping
|
||||
and value called as a conditional export.
|
||||
|
||||
Key in conditional mapping is called condition name.
|
||||
|
||||
Conditional mapping nested in another conditional mapping is called nested mapping.
|
||||
|
||||
*/
|
||||
|
||||
// Char codes compared against `charCodeAt` results in the hot paths below
// (cheaper than allocating single-character strings for comparison).
const slashCode = "/".charCodeAt(0);
const dotCode = ".".charCodeAt(0);
const hashCode = "#".charCodeAt(0);
// Matches every "*" in a mapping target; used to substitute the matched
// request subpath into pattern targets like "./lib/*.js".
const patternRegEx = /\*/g;
|
||||
|
||||
/** @typedef {Record<string, MappingValue>} RecordMapping */

/**
 * Cached `Object.keys()` for objects whose shape does not change after the
 * first observation — i.e. parsed `package.json` fields and the nested
 * conditional mappings inside them. `Object.keys` allocates a fresh array
 * on every call; since `findMatch` / `conditionalMapping` run on every
 * bare-specifier resolve, the allocation adds up quickly.
 * Keyed weakly so entries are collected together with the owning
 * description-file object.
 * @type {WeakMap<RecordMapping, string[]>}
 */
const _keysCache = new WeakMap();
|
||||
|
||||
/**
 * Returns the (possibly cached) key list of `obj`. The array is shared
 * across calls, so callers must treat it as read-only.
 * @param {RecordMapping} obj object to read keys from
 * @returns {string[]} cached keys array (DO NOT mutate)
 */
function cachedKeys(obj) {
	const cached = _keysCache.get(obj);
	if (cached !== undefined) return cached;
	const freshKeys = Object.keys(obj);
	_keysCache.set(obj, freshKeys);
	return freshKeys;
}
|
||||
|
||||
/**
 * Per-key precomputed info used by `findMatch`. Equivalent to what the
 * previous implementation recomputed inline on every resolve.
 * @typedef {object} FieldKeyInfo
 * @property {string} key the original key
 * @property {number} patternIndex position of the single "*" in the key, or -1 when absent
 * @property {string} wildcardPrefix substring before "*" (empty when patternIndex === -1)
 * @property {string} wildcardSuffix substring after "*" (empty when patternIndex === -1)
 * @property {boolean} isLegacySubpath true when key is a legacy `./foo/`-style folder key with no "*"
 * @property {boolean} isPattern true when key contains "*"
 * @property {boolean} isSubpathMapping true when key ends with "/"
 * @property {boolean} isValidPattern true when key has at most one "*"
 */

/**
 * Cached per-field key metadata, keyed by the exports/imports field
 * object. Computed lazily on first `findMatch` call and reused forever.
 * Safe because `package.json` fields are immutable JSON values.
 * @type {WeakMap<RecordMapping, FieldKeyInfo[]>}
 */
const _fieldKeyInfoCache = new WeakMap();
|
||||
|
||||
/**
 * Computes (or retrieves from the per-field cache) the precomputed
 * key metadata for an exports/imports field.
 * @param {ExportsField | ImportsField} field field object
 * @returns {FieldKeyInfo[]} precomputed per-key info
 */
function getFieldKeyInfos(field) {
	const record = /** @type {RecordMapping} */ (field);
	const cached = _fieldKeyInfoCache.get(record);
	if (cached !== undefined) return cached;

	const infos = Object.getOwnPropertyNames(field).map((key) => {
		const starIndex = key.indexOf("*");
		const hasStar = starIndex !== -1;
		const endsWithSlash =
			key.length > 0 && key.charCodeAt(key.length - 1) === slashCode;
		return {
			key,
			patternIndex: starIndex,
			// Prefix/suffix around the single "*" (empty for literal keys).
			wildcardPrefix: hasStar ? key.slice(0, starIndex) : "",
			wildcardSuffix: hasStar ? key.slice(starIndex + 1) : "",
			isLegacySubpath: !hasStar && endsWithSlash,
			isPattern: hasStar,
			isSubpathMapping: endsWithSlash,
			// A key with more than one "*" is not a valid pattern.
			isValidPattern: !hasStar || key.lastIndexOf("*") === starIndex,
		};
	});

	_fieldKeyInfoCache.set(record, infos);
	return infos;
}
|
||||
|
||||
/**
 * Orders two field keys by Node's pattern-specificity rules:
 * negative when `a` sorts first (is more specific), positive when `b` does.
 * @param {string} a first string
 * @param {string} b second string
 * @returns {number} compare result
 */
function patternKeyCompare(a, b) {
	const starA = a.indexOf("*");
	const starB = b.indexOf("*");
	// Length up to and including the "*", or the full key when literal.
	const prefixLenA = starA === -1 ? a.length : starA + 1;
	const prefixLenB = starB === -1 ? b.length : starB + 1;

	// Longer base prefix wins (sorts first).
	if (prefixLenA !== prefixLenB) return prefixLenA > prefixLenB ? -1 : 1;
	// With equal base length, a pattern key beats a literal one.
	if (starA === -1) return 1;
	if (starB === -1) return -1;
	// Both patterns with equal base length: longer overall key wins.
	if (a.length !== b.length) return a.length > b.length ? -1 : 1;
	return 0;
}
|
||||
|
||||
/**
 * Result tuple of a field match:
 * `[target, matched subpath, isSubpathMapping, isPattern, used field key]`.
 * @typedef {[MappingValue, string, boolean, boolean, string] | null} MatchTuple
 */

/**
 * Per-field memoization of `findMatch(request, field)`. For a given field
 * the result depends only on the `request` string (it does NOT depend on
 * `conditionNames` — that's applied separately by `conditionalMapping`),
 * so we can cache the tuple keyed by request.
 *
 * Typical build traffic runs the same request through the resolver
 * repeatedly (same import re-resolved from different source files, module
 * graph traversals that revisit a package, etc.), and every one of those
 * hits walks the same key list and allocates the same tuple. Caching the
 * tuple turns the second-and-onward call into a single Map lookup.
 *
 * Keyed on the field object via a module-level `WeakMap`, so the cache
 * is freed automatically when the owning description file is GC'd.
 * @type {WeakMap<RecordMapping, Map<string, MatchTuple>>}
 */
const _findMatchCache = new WeakMap();
|
||||
|
||||
/**
 * Uncached core of `findMatch`: finds the exports/imports field key that
 * matches `request`, preferring an exact key, then the most specific
 * pattern / legacy folder key per `patternKeyCompare` ordering.
 * @param {string} request request
 * @param {ExportsField | ImportsField} field exports or import field
 * @returns {MatchTuple} match result (uncached)
 */
function computeFindMatch(request, field) {
	const requestLen = request.length;
	const requestEndsWithSlash =
		requestLen > 0 && request.charCodeAt(requestLen - 1) === slashCode;
	const requestHasStar = request.includes("*");

	// Fast path: an exact key match (only valid for plain requests —
	// ones without "*" and without a trailing slash).
	if (
		!requestHasStar &&
		!requestEndsWithSlash &&
		Object.prototype.hasOwnProperty.call(field, request)
	) {
		const target = /** @type {{ [k: string]: MappingValue }} */ (field)[
			request
		];

		return [target, "", false, false, request];
	}

	/** @type {string} */
	let bestMatch = "";
	/** @type {FieldKeyInfo | null} */
	let bestMatchInfo = null;
	/** @type {string | undefined} */
	let bestMatchSubpath;

	const infos = getFieldKeyInfos(field);

	// Scan all keys, keeping the most specific match seen so far.
	for (let i = 0; i < infos.length; i++) {
		const info = infos[i];
		const { key, patternIndex } = info;

		if (patternIndex !== -1) {
			// Pattern key: request must wrap the "*" (prefix + suffix match,
			// and be long enough) and the key must beat the current best.
			if (
				!info.isValidPattern ||
				!request.startsWith(info.wildcardPrefix) ||
				requestLen < key.length ||
				!request.endsWith(info.wildcardSuffix) ||
				patternKeyCompare(bestMatch, key) !== 1
			) {
				continue;
			}
			bestMatch = key;
			bestMatchInfo = info;
			// The part of the request the "*" swallowed.
			bestMatchSubpath = request.slice(
				patternIndex,
				requestLen - info.wildcardSuffix.length,
			);
		} else if (
			// Legacy "./dir/"-style folder key: plain prefix match.
			info.isLegacySubpath &&
			request.startsWith(key) &&
			patternKeyCompare(bestMatch, key) === 1
		) {
			bestMatch = key;
			bestMatchInfo = info;
			bestMatchSubpath = request.slice(key.length);
		}
	}

	if (bestMatch === "") return null;

	const target =
		/** @type {{ [k: string]: MappingValue }} */
		(field)[bestMatch];

	return [
		target,
		/** @type {string} */ (bestMatchSubpath),
		/** @type {FieldKeyInfo} */ (bestMatchInfo).isSubpathMapping,
		/** @type {FieldKeyInfo} */ (bestMatchInfo).isPattern,
		bestMatch,
	];
}
|
||||
|
||||
/**
 * Trying to match request to field (memoized per field object — see
 * `_findMatchCache`).
 * @param {string} request request
 * @param {ExportsField | ImportsField} field exports or import field
 * @returns {MatchTuple} `[target, subpath, isSubpathMapping, isPattern, usedKey]` or `null` when nothing matches
 */
function findMatch(request, field) {
	const fieldKey = /** @type {RecordMapping} */ (field);
	let perRequest = _findMatchCache.get(fieldKey);
	if (perRequest === undefined) {
		perRequest = new Map();
		_findMatchCache.set(fieldKey, perRequest);
	} else {
		// `null` ("no match") is a valid cached value. `Map#get` returns the
		// stored value itself, so a stored `null` comes back as `null` (not
		// `undefined`) — a single `get` already distinguishes "cached null"
		// from "not cached yet"; no extra `has` call is needed. (The previous
		// `perRequest.has(request)` fallback was unreachable dead code.)
		const cached = perRequest.get(request);
		if (cached !== undefined) return cached;
	}

	const result = computeFindMatch(request, field);
	perRequest.set(request, result);
	return result;
}
|
||||
|
||||
/**
 * A conditional mapping is a non-null, non-array object
 * (i.e. a `{ condition: value }` record).
 * @param {ConditionalMapping | DirectMapping | null} mapping mapping
 * @returns {boolean} is conditional mapping
 */
function isConditionalMapping(mapping) {
	if (mapping === null) return false;
	if (typeof mapping !== "object") return false;
	return !Array.isArray(mapping);
}
|
||||
|
||||
/**
 * Sentinel stored in the conditional-mapping cache for inputs whose walk
 * returns `null` ("no condition matched"). Using a non-null marker lets the
 * cache-hit path be a single `WeakMap.get()` — we distinguish
 * "cached null" from "not cached yet" without a second `has` call.
 */
const NULL_RESULT = Symbol("NULL_RESULT");

/**
 * Memoization of `conditionalMapping(mapping, conditionNames)`. The result
 * depends only on the mapping object (immutable — owned by a parsed
 * `package.json`) and the `conditionNames` Set (owned by the resolver's
 * options and stable for its lifetime), so it is safe to cache per (mapping,
 * conditionNames) pair.
 *
 * A conditional `exports` entry that appears inside a `directMapping` array
 * (the common `"browser": [...fallback list...]` shape, plus nested
 * conditions) gets walked on every resolve that traverses the parent entry.
 * Without this cache each of those walks re-reads `Object.keys` on the
 * mapping and re-visits every condition until one matches, even though the
 * inputs are identical.
 *
 * Outer key is the conditional mapping itself; inner key is the condition
 * Set. Both are object references, so WeakMap-of-WeakMap lets both levels
 * be collected automatically when the description file or resolver go away.
 * @type {WeakMap<ConditionalMapping, WeakMap<Set<string>, DirectMapping | typeof NULL_RESULT>>}
 */
const _conditionalMappingCache = new WeakMap();
|
||||
|
||||
/**
 * Walks a conditional mapping (iteratively, with an explicit stack so
 * nested condition objects don't recurse) and returns the first direct
 * mapping whose condition is "default" or is enabled in `conditionNames`.
 * @param {ConditionalMapping} conditionalMapping_ conditional mapping
 * @param {Set<string>} conditionNames condition names
 * @returns {DirectMapping | null} direct mapping if found (uncached)
 */
function computeConditionalMapping(conditionalMapping_, conditionNames) {
	/** @type {[ConditionalMapping, string[], number][]} */
	const lookup = [[conditionalMapping_, cachedKeys(conditionalMapping_), 0]];

	loop: while (lookup.length > 0) {
		const [mapping, conditions, j] = lookup[lookup.length - 1];

		for (let i = j; i < conditions.length; i++) {
			const condition = conditions[i];

			// "default" always matches; any other condition must be enabled.
			// (The previous version duplicated the entire body of this branch
			// for the "default" case — merged, behavior unchanged.)
			if (condition === "default" || conditionNames.has(condition)) {
				const innerMapping = mapping[condition];
				// is nested
				if (isConditionalMapping(innerMapping)) {
					const nested = /** @type {ConditionalMapping} */ (innerMapping);
					// Remember where to resume in this frame, then descend.
					lookup[lookup.length - 1][2] = i + 1;
					lookup.push([nested, cachedKeys(nested), 0]);
					continue loop;
				}

				return /** @type {DirectMapping} */ (innerMapping);
			}
		}

		// Frame exhausted without a match — backtrack to the parent frame.
		lookup.pop();
	}

	return null;
}
|
||||
|
||||
/**
 * Memoized front-end of `computeConditionalMapping`
 * (see `_conditionalMappingCache` for why caching is safe).
 * @param {ConditionalMapping} conditionalMapping_ conditional mapping
 * @param {Set<string>} conditionNames condition names
 * @returns {DirectMapping | null} direct mapping if found
 */
function conditionalMapping(conditionalMapping_, conditionNames) {
	let perSet = _conditionalMappingCache.get(conditionalMapping_);
	if (perSet === undefined) {
		perSet = new WeakMap();
		_conditionalMappingCache.set(conditionalMapping_, perSet);
	} else {
		const cached = perSet.get(conditionNames);
		// NULL_RESULT marks a cached "no condition matched" outcome.
		if (cached === NULL_RESULT) return null;
		if (cached !== undefined) return /** @type {DirectMapping} */ (cached);
	}

	const result = computeConditionalMapping(conditionalMapping_, conditionNames);
	perSet.set(conditionNames, result === null ? NULL_RESULT : result);
	return result;
}
|
||||
|
||||
/**
 * Maps a matched request onto one concrete target string.
 * @param {string | undefined} remainingRequest remaining request when folder mapping, undefined for file mappings
 * @param {boolean} isPattern true, if mapping is a pattern (contains "*")
 * @param {boolean} isSubpathMapping true, for subpath mappings
 * @param {string} mappingTarget direct export
 * @param {(d: string, f: boolean) => void} assert asserting direct value
 * @returns {string} mapping result
 */
function targetMapping(
	remainingRequest,
	isPattern,
	isSubpathMapping,
	mappingTarget,
	assert,
) {
	// Exact (file) mapping: the target is used verbatim.
	if (remainingRequest === undefined) {
		assert(mappingTarget, false);

		return mappingTarget;
	}

	// Legacy folder mapping ("./dir/"): append the remaining request.
	if (isSubpathMapping) {
		assert(mappingTarget, true);

		return mappingTarget + remainingRequest;
	}

	assert(mappingTarget, false);

	if (isPattern) {
		// Substitute the matched subpath for every "*" in the target. A
		// function replacer inserts `remainingRequest` literally. The previous
		// string-replacement form pre-escaped with
		// `remainingRequest.replace(/\$/g, "$$")`, which is a no-op ("$$" in a
		// string replacement emits a single "$"), so subpaths containing
		// substitution patterns like "$&" were wrongly expanded.
		return mappingTarget.replace(/\*/g, () => remainingRequest);
	}

	return mappingTarget;
}
|
||||
|
||||
/**
 * Resolves a direct mapping (string or fallback array, possibly containing
 * nested conditional mappings) into the list of concrete target paths.
 * @param {string | undefined} remainingRequest remaining request when folder mapping, undefined for file mappings
 * @param {boolean} isPattern true, if mapping is a pattern (contains "*")
 * @param {boolean} isSubpathMapping true, for subpath mappings
 * @param {DirectMapping | null} mappingTarget direct export
 * @param {Set<string>} conditionNames condition names
 * @param {(d: string, f: boolean) => void} assert asserting direct value
 * @returns {string[]} mapping result
 */
function directMapping(
	remainingRequest,
	isPattern,
	isSubpathMapping,
	mappingTarget,
	conditionNames,
	assert,
) {
	if (mappingTarget === null) return [];

	// Single string target — map it directly.
	if (typeof mappingTarget === "string") {
		const mapped = targetMapping(
			remainingRequest,
			isPattern,
			isSubpathMapping,
			mappingTarget,
			assert,
		);
		return [mapped];
	}

	/** @type {string[]} */
	const results = [];

	// Fallback array: each entry is either a string target or a nested
	// conditional mapping whose resolution is flattened into the list.
	for (const entry of mappingTarget) {
		if (typeof entry === "string") {
			results.push(
				targetMapping(
					remainingRequest,
					isPattern,
					isSubpathMapping,
					entry,
					assert,
				),
			);
		} else {
			const resolved = conditionalMapping(entry, conditionNames);
			// Skip entries where no condition matched (or resolved to falsy).
			if (resolved) {
				results.push(
					...directMapping(
						remainingRequest,
						isPattern,
						isSubpathMapping,
						resolved,
						conditionNames,
						assert,
					),
				);
			}
		}
	}

	return results;
}
|
||||
|
||||
/**
 * Builds a `FieldProcessor` closure over one exports/imports field.
 * @param {ExportsField | ImportsField} field root
 * @param {(s: string) => string} normalizeRequest Normalize request, for `imports` field it adds `#`, for `exports` field it adds `.` or `./`
 * @param {(s: string) => string} assertRequest assertRequest
 * @param {(s: string, f: boolean) => void} assertTarget assertTarget
 * @returns {FieldProcessor} field processor
 */
function createFieldProcessor(
	field,
	normalizeRequest,
	assertRequest,
	assertTarget,
) {
	return function fieldProcessor(request, conditionNames) {
		// Validate and strip the request prefix ("./" or "#").
		const strippedRequest = assertRequest(request);

		const match = findMatch(normalizeRequest(strippedRequest), field);
		if (match === null) return [[], null];

		const [mapping, remainingRequest, isSubpathMapping, isPattern, usedField] =
			match;

		/** @type {DirectMapping | null} */
		let direct;
		if (isConditionalMapping(mapping)) {
			direct = conditionalMapping(
				/** @type {ConditionalMapping} */ (mapping),
				conditionNames,
			);
			// No condition matched — nothing to resolve.
			if (direct === null) return [[], null];
		} else {
			direct = /** @type {DirectMapping} */ (mapping);
		}

		const paths = directMapping(
			remainingRequest,
			isPattern,
			isSubpathMapping,
			direct,
			conditionNames,
			assertTarget,
		);
		return [paths, usedField];
	};
}
|
||||
|
||||
/**
 * Validates an exports-field request and strips the "./" prefix
 * ("." itself becomes the empty string).
 * @param {string} request request
 * @returns {string} updated request
 */
function assertExportsFieldRequest(request) {
	if (!request.startsWith(".")) {
		throw new Error('Request should be relative path and start with "."');
	}
	if (request.length === 1) return "";
	if (!request.startsWith("./")) {
		throw new Error('Request should be relative path and start with "./"');
	}
	// Trailing-slash (folder) requests are not allowed.
	if (request.endsWith("/")) {
		throw new Error("Only requesting file allowed");
	}
	return request.slice(2);
}
|
||||
|
||||
/**
 * Normalizes the shorthand forms of the `exports` field and validates its
 * keys. A bare string/array, or an object whose keys are all condition
 * names, is shorthand for `{ ".": value }`.
 * @param {ExportsField} field exports field
 * @returns {ExportsField} normalized exports field
 * @throws {Error} when relative-path keys and condition keys are mixed, or a key is malformed
 */
function buildExportsField(field) {
	// handle syntax sugar, if exports field is direct mapping for "."
	if (typeof field === "string" || Array.isArray(field)) {
		return { ".": field };
	}

	const keys = Object.keys(field);

	for (let i = 0; i < keys.length; i++) {
		const key = keys[i];

		if (!key.startsWith(".")) {
			// handle syntax sugar, if exports field is conditional mapping for "."
			// — only valid when NO key looks like a path (mixing is ambiguous).
			if (i === 0) {
				while (i < keys.length) {
					// Report the offending key `keys[i]`. (The previous version
					// reported `key`, i.e. always the FIRST key, which named the
					// wrong key in the error message.)
					if (keys[i].startsWith(".") || keys[i].startsWith("/")) {
						throw new Error(
							`Exports field key should be relative path and start with "." (key: ${JSON.stringify(
								keys[i],
							)})`,
						);
					}
					i++;
				}

				return { ".": field };
			}

			throw new Error(
				`Exports field key should be relative path and start with "." (key: ${JSON.stringify(
					key,
				)})`,
			);
		}

		// "." itself is a valid key.
		if (key.length === 1) {
			continue;
		}

		if (!key.startsWith("./")) {
			throw new Error(
				`Exports field key should be relative path and start with "./" (key: ${JSON.stringify(
					key,
				)})`,
			);
		}
	}

	return field;
}
|
||||
|
||||
/**
 * Asserts that an export target's file/folder shape matches the mapping
 * kind (folder mappings must end with "/", file mappings must not).
 * @param {string} exp export target
 * @param {boolean} expectFolder is folder expected
 */
function assertExportTarget(exp, expectFolder) {
	const parsed = parseIdentifier(exp);
	// Unparseable identifiers are not validated here.
	if (!parsed) return;

	const [relativePath] = parsed;
	const isFolder = relativePath.endsWith("/");
	if (isFolder === expectFolder) return;

	const message = expectFolder
		? `Expecting folder to folder mapping. ${JSON.stringify(
				exp,
			)} should end with "/"`
		: `Expecting file to file mapping. ${JSON.stringify(
				exp,
			)} should not end with "/"`;
	throw new Error(message);
}
|
||||
|
||||
/**
 * @param {ExportsField} exportsField the exports field
 * @returns {FieldProcessor} process callback
 */
module.exports.processExportsField = function processExportsField(
	exportsField,
) {
	// "" (the stripped ".") maps back to "."; everything else becomes "./…".
	const normalize = (request) => (request.length === 0 ? "." : `./${request}`);
	return createFieldProcessor(
		buildExportsField(exportsField),
		normalize,
		assertExportsFieldRequest,
		assertExportTarget,
	);
};
|
||||
|
||||
/**
 * Validates an imports-field request and strips the "#" prefix.
 * @param {string} request request
 * @returns {string} updated request
 */
function assertImportsFieldRequest(request) {
	if (!request.startsWith("#")) {
		throw new Error('Request should start with "#"');
	}
	if (request.length === 1) {
		throw new Error("Request should have at least 2 characters");
	}
	// Note: #/ patterns are now allowed per Node.js PR #60864
	// https://github.com/nodejs/node/pull/60864
	if (request.endsWith("/")) {
		throw new Error("Only requesting file allowed");
	}
	return request.slice(1);
}
|
||||
|
||||
/**
 * Asserts that an import target's file/folder shape matches the mapping
 * kind (folder mappings must end with "/", file mappings must not).
 * @param {string} imp import target
 * @param {boolean} expectFolder is folder expected
 */
function assertImportTarget(imp, expectFolder) {
	const parsed = parseIdentifier(imp);
	// Unparseable identifiers are not validated here.
	if (!parsed) return;

	const [relativePath] = parsed;
	const isFolder = relativePath.endsWith("/");
	if (isFolder === expectFolder) return;

	const message = expectFolder
		? `Expecting folder to folder mapping. ${JSON.stringify(
				imp,
			)} should end with "/"`
		: `Expecting file to file mapping. ${JSON.stringify(
				imp,
			)} should not end with "/"`;
	throw new Error(message);
}
|
||||
|
||||
/**
 * @param {ImportsField} importsField the imports field
 * @returns {FieldProcessor} process callback
 */
module.exports.processImportsField = function processImportsField(
	importsField,
) {
	// Unlike exports, the imports field has no shorthand form to normalize.
	return createFieldProcessor(
		importsField,
		(request) => `#${request}`,
		assertImportsFieldRequest,
		assertImportTarget,
	);
};
|
||||
+67
@@ -0,0 +1,67 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Natsu @xiaoxiaojx
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const stripJsonComments = require("./strip-json-comments");
|
||||
|
||||
/** @typedef {import("../Resolver").FileSystem} FileSystem */
|
||||
/** @typedef {import("../Resolver").JsonObject} JsonObject */
|
||||
|
||||
/**
|
||||
* @typedef {object} ReadJsonOptions
|
||||
* @property {boolean=} stripComments Whether to strip JSONC comments
|
||||
*/
|
||||
|
||||
/**
 * Parsed-JSON cache for the `stripComments` path, keyed on the exact
 * Buffer object returned by the file system. Only effective when the file
 * system hands back the same Buffer for repeated reads (e.g. a caching
 * file system); a plain `fs` returns a fresh Buffer every time.
 * @type {WeakMap<Buffer, JsonObject>}
 */
const _stripCommentsCache = new WeakMap();
|
||||
|
||||
/**
 * Read and parse JSON file (supports JSONC with comments).
 * @template T
 * @param {FileSystem} fileSystem the file system
 * @param {string} jsonFilePath absolute path to JSON file
 * @param {ReadJsonOptions} options Options
 * @returns {Promise<T>} parsed JSON content
 */
async function readJson(fileSystem, jsonFilePath, options = {}) {
	const { stripComments = false } = options;
	const { readJson } = fileSystem;
	// Fast path: let the file system parse JSON itself (it may cache),
	// but only when no comment stripping is requested.
	if (readJson && !stripComments) {
		return new Promise((resolve, reject) => {
			readJson(jsonFilePath, (err, content) => {
				if (err) return reject(err);
				resolve(/** @type {T} */ (content));
			});
		});
	}

	const buf = await new Promise((resolve, reject) => {
		fileSystem.readFile(jsonFilePath, (err, data) => {
			if (err) return reject(err);
			resolve(data);
		});
	});

	// `readFile` may yield a Buffer or (for some file systems) a string.
	// WeakMap keys must be objects — the previous version called
	// `_stripCommentsCache.set(buf, …)` unconditionally and threw a
	// TypeError for string results, so guard all cache operations.
	const cacheable = stripComments && typeof buf === "object" && buf !== null;
	if (cacheable) {
		const cached = _stripCommentsCache.get(buf);
		if (cached !== undefined) return /** @type {T} */ (cached);
	}

	const jsonText = /** @type {string} */ (buf.toString());
	// Strip comments to support JSONC (e.g., tsconfig.json with comments)
	const jsonWithoutComments = stripComments
		? stripJsonComments(jsonText, { trailingCommas: true, whitespace: true })
		: jsonText;
	const result = JSON.parse(jsonWithoutComments);

	if (cacheable) {
		_stripCommentsCache.set(buf, result);
	}

	return result;
}
|
||||
|
||||
// Public API: promise-based JSON/JSONC reader.
module.exports.readJson = readJson;
|
||||
+103
@@ -0,0 +1,103 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Ivan Kopeykin @vankop
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const memorize = require("./memoize");
|
||||
|
||||
// Lazily load `url` — `fileURLToPath` is only needed for "file:" identifiers.
const getUrl = memorize(() => require("url"));

// Splits "<path>(?query)(#fragment)" while honoring `\0`-escaping
// (a `\0` escapes the following character so it is not treated as a
// query/fragment separator).
const PATH_QUERY_FRAGMENT_REGEXP =
	/^(#?(?:\0.|[^?#\0])*)(\?(?:\0.|[^#\0])*)?(#.*)?$/;
// Unescapes `\0x` back to `x`.
const ZERO_ESCAPE_REGEXP = /\0(.)/g;
// Detects identifiers containing "file:" (case-insensitive, unanchored).
const FILE_REG_EXP = /file:/i;
|
||||
|
||||
/**
 * Index past a DOS device path prefix (`\\?\…` or `\\.\…`), or 0. Kept
 * out of `parseIdentifier` on purpose: inlining it back bloats the caller
 * beyond the size where V8's interpreter and JIT both handle it well
 * (the cause of the description-files-multi CodSpeed regression).
 * @param {string} identifier identifier known to start with `\`
 * @returns {number} 4 if identifier starts with a DOS device prefix, else 0
 */
function dosPrefixEnd(identifier) {
	if (identifier.length < 4) return 0;
	// Char codes: 92 = "\", 63 = "?", 46 = "."
	const hasBackslashFrame =
		identifier.charCodeAt(1) === 92 && identifier.charCodeAt(3) === 92;
	if (!hasBackslashFrame) return 0;
	const marker = identifier.charCodeAt(2);
	return marker === 63 || marker === 46 ? 4 : 0;
}
|
||||
|
||||
/**
 * Splits a request identifier into `[path, query, fragment]`. Supports
 * "file:" URLs (converted to paths first), `\0`-escaped separators, and
 * Windows DOS device paths. Kept deliberately small/branch-light for V8
 * inlining — see `dosPrefixEnd`.
 * @param {string} identifier identifier
 * @returns {[string, string, string] | null} parsed identifier
 */
function parseIdentifier(identifier) {
	if (!identifier) {
		return null;
	}

	// "file:" URLs are converted to plain file system paths first.
	if (FILE_REG_EXP.test(identifier)) {
		identifier = getUrl().fileURLToPath(identifier);
	}

	const firstEscape = identifier.indexOf("\0");

	// Handle `\0`
	if (firstEscape !== -1) {
		// Slow path: regex split that respects `\0`-escaped "?" / "#",
		// then unescape each captured part.
		const match = PATH_QUERY_FRAGMENT_REGEXP.exec(identifier);

		if (!match) return null;

		return [
			match[1].replace(ZERO_ESCAPE_REGEXP, "$1"),
			match[2] ? match[2].replace(ZERO_ESCAPE_REGEXP, "$1") : "",
			match[3] || "",
		];
	}

	// Fast path for inputs that don't use \0 escaping. DOS device paths
	// (`\\?\…`, `\\.\…`) embed a literal `?` / `.` that must not be read
	// as a query separator; skip past the prefix when the input actually
	// starts with `\`. Gate is a single char-code compare so this function
	// stays inside V8's inline budget for its hot callers (resolver parse).
	const scanStart =
		identifier.charCodeAt(0) === 92 ? dosPrefixEnd(identifier) : 0;
	const queryStart = identifier.indexOf("?", scanStart);
	// Start at index 1 (or past a DOS prefix) to ignore a possible leading hash.
	const fragmentStart = identifier.indexOf("#", scanStart || 1);

	if (fragmentStart < 0) {
		if (queryStart < 0) {
			// No fragment, no query
			return [identifier, "", ""];
		}

		// Query, no fragment
		return [identifier.slice(0, queryStart), identifier.slice(queryStart), ""];
	}

	if (queryStart < 0 || fragmentStart < queryStart) {
		// Fragment, no query
		return [
			identifier.slice(0, fragmentStart),
			"",
			identifier.slice(fragmentStart),
		];
	}

	// Query and fragment
	return [
		identifier.slice(0, queryStart),
		identifier.slice(queryStart, fragmentStart),
		identifier.slice(fragmentStart),
	];
}
|
||||
|
||||
// Single public entry point of this module.
module.exports.parseIdentifier = parseIdentifier;
|
||||
+37
@@ -0,0 +1,37 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Tobias Koppers @sokra
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @typedef {() => T} FunctionReturning
|
||||
*/
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {FunctionReturning<T>} fn memorized function
|
||||
* @returns {FunctionReturning<T>} new function
|
||||
*/
|
||||
/**
 * Wrap a zero-argument function so it is evaluated at most once; every later
 * call returns the value produced by the first call.
 * @template T
 * @param {FunctionReturning<T>} fn memorized function
 * @returns {FunctionReturning<T>} new function
 */
const memoize = (fn) => {
	let evaluated = false;
	/** @type {T | undefined} */
	let value;
	return () => {
		if (!evaluated) {
			value = fn();
			evaluated = true;
			// Drop the reference so `fn` and everything it closes over
			// becomes collectable after the first call.
			/** @type {FunctionReturning<T> | undefined} */
			(fn) = undefined;
		}
		return /** @type {T} */ (value);
	};
};
|
||||
|
||||
// Export the function itself so `require("./memoize")` is directly callable.
module.exports = memoize;
|
||||
+8
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Tobias Koppers @sokra
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
// Intentionally empty export (presumably a stub that replaces a Node-only
// module in non-Node builds — confirm against the bundler/browser field config).
module.exports = {};
|
||||
+344
@@ -0,0 +1,344 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Tobias Koppers @sokra
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const path = require("path");
|
||||
|
||||
// Char codes used by the hot-path classifiers below; comparing char codes
// avoids allocating one-character strings on every check.
const CHAR_HASH = "#".charCodeAt(0);
const CHAR_SLASH = "/".charCodeAt(0);
const CHAR_BACKSLASH = "\\".charCodeAt(0);
const CHAR_A = "A".charCodeAt(0);
const CHAR_Z = "Z".charCodeAt(0);
const CHAR_LOWER_A = "a".charCodeAt(0);
const CHAR_LOWER_Z = "z".charCodeAt(0);
const CHAR_DOT = ".".charCodeAt(0);
const CHAR_COLON = ":".charCodeAt(0);
const CHAR_QUESTION = "?".charCodeAt(0);

// Local aliases skip the repeated `path.posix.` / `path.win32.` property
// lookups in the frequently called helpers below.
const posixNormalize = path.posix.normalize;
const winNormalize = path.win32.normalize;
|
||||
|
||||
/**
|
||||
* @enum {number}
|
||||
*/
|
||||
/**
 * Classification of a path/request string, as produced by `getType`.
 * @enum {number}
 */
const PathType = Object.freeze({
	// "" — the empty string
	Empty: 0,
	// anything not matched by the other categories (e.g. bare module names)
	Normal: 1,
	// ".", "..", "./…", "../…"
	Relative: 2,
	// drive-letter paths ("C:", "C:\…", "C:/…") and DOS device paths
	AbsoluteWin: 3,
	// "/" and "/…"
	AbsolutePosix: 4,
	// "#…" — imports-field internal requests
	Internal: 5,
});
|
||||
|
||||
// Matches a whole path segment that is "." or ".." or "node_modules", with
// each character optionally percent-encoded in either case, bounded by "/",
// "\" or the string edges. Presumably used to flag deprecated-but-allowed
// exports/imports targets — confirm against the callers.
const deprecatedInvalidSegmentRegEx =
	/(^|\\|\/)((\.|%2e)(\.|%2e)?|(n|%6e|%4e)(o|%6f|%4f)(d|%64|%44)(e|%65|%45)(_|%5f)(m|%6d|%4d)(o|%6f|%4f)(d|%64|%44)(u|%75|%55)(l|%6c|%4c)(e|%65|%45)(s|%73|%53))(\\|\/|$)/i;

// Same alphabet, but the middle group is optional (`?`), so this also matches
// empty segments such as "//" or a trailing separator.
const invalidSegmentRegEx =
	/(^|\\|\/)((\.|%2e)(\.|%2e)?|(n|%6e|%4e)(o|%6f|%4f)(d|%64|%44)(e|%65|%45)(_|%5f)(m|%6d|%4d)(o|%6f|%4f)(d|%64|%44)(u|%75|%55)(l|%6c|%4c)(e|%65|%45)(s|%73|%53))?(\\|\/|$)/i;
|
||||
|
||||
/**
|
||||
* @param {string} maybePath a path known to start with `\\`
|
||||
* @returns {PathType} AbsoluteWin for `\\?\…` / `\\.\…`, otherwise Normal
|
||||
*/
|
||||
/**
 * Classify a string already known to start with `\\`.
 * @param {string} maybePath a path known to start with `\\`
 * @returns {PathType} AbsoluteWin for `\\?\…` / `\\.\…`, otherwise Normal
 */
const getDosDeviceType = (maybePath) => {
	// A DOS device path needs at least 4 chars and a `\` at index 3.
	if (maybePath.length < 4 || maybePath.charCodeAt(3) !== CHAR_BACKSLASH) {
		return PathType.Normal;
	}
	const third = maybePath.charCodeAt(2);
	return third === CHAR_QUESTION || third === CHAR_DOT
		? PathType.AbsoluteWin
		: PathType.Normal;
};
|
||||
|
||||
/**
|
||||
* @param {string} maybePath a path
|
||||
* @returns {PathType} type of path
|
||||
*/
|
||||
/**
 * Classify a path/request string by inspecting at most its first three
 * char codes. Lengths 0–2 are fully special-cased so the common short
 * inputs never fall through to the general branch.
 * @param {string} maybePath a path
 * @returns {PathType} type of path
 */
const getType = (maybePath) => {
	switch (maybePath.length) {
		case 0:
			return PathType.Empty;
		case 1: {
			// Single char: ".", "/" and "#" are the only special cases.
			const c0 = maybePath.charCodeAt(0);
			switch (c0) {
				case CHAR_DOT:
					return PathType.Relative;
				case CHAR_SLASH:
					return PathType.AbsolutePosix;
				case CHAR_HASH:
					return PathType.Internal;
			}
			return PathType.Normal;
		}
		case 2: {
			const c0 = maybePath.charCodeAt(0);
			switch (c0) {
				case CHAR_DOT: {
					// ".." and "./" are relative; ".x" is not.
					const c1 = maybePath.charCodeAt(1);
					switch (c1) {
						case CHAR_DOT:
						case CHAR_SLASH:
							return PathType.Relative;
					}
					return PathType.Normal;
				}
				case CHAR_SLASH:
					return PathType.AbsolutePosix;
				case CHAR_HASH:
					return PathType.Internal;
			}
			// Bare drive letter, e.g. "C:".
			const c1 = maybePath.charCodeAt(1);
			if (
				c1 === CHAR_COLON &&
				((c0 >= CHAR_A && c0 <= CHAR_Z) ||
					(c0 >= CHAR_LOWER_A && c0 <= CHAR_LOWER_Z))
			) {
				return PathType.AbsoluteWin;
			}
			return PathType.Normal;
		}
	}
	// General case: length >= 3.
	const c0 = maybePath.charCodeAt(0);
	switch (c0) {
		case CHAR_DOT: {
			// "./…" and "../…" are relative; anything else starting with
			// "." (e.g. ".x/…", "...") is a normal request.
			const c1 = maybePath.charCodeAt(1);
			switch (c1) {
				case CHAR_SLASH:
					return PathType.Relative;
				case CHAR_DOT: {
					const c2 = maybePath.charCodeAt(2);
					if (c2 === CHAR_SLASH) return PathType.Relative;
					return PathType.Normal;
				}
			}
			return PathType.Normal;
		}
		case CHAR_SLASH:
			return PathType.AbsolutePosix;
		case CHAR_HASH:
			return PathType.Internal;
	}
	// Drive letter followed by a separator, e.g. "C:\…" or "C:/…".
	const c1 = maybePath.charCodeAt(1);
	if (c1 === CHAR_COLON) {
		const c2 = maybePath.charCodeAt(2);
		if (
			(c2 === CHAR_BACKSLASH || c2 === CHAR_SLASH) &&
			((c0 >= CHAR_A && c0 <= CHAR_Z) ||
				(c0 >= CHAR_LOWER_A && c0 <= CHAR_LOWER_Z))
		) {
			return PathType.AbsoluteWin;
		}
	}
	// DOS device paths (`\\?\…`, `\\.\…`) are handled in a cold helper so
	// this function stays small — inlining the full check here regressed
	// `description-files-multi` under `--no-opt` interpretation. Here we
	// only pay the two-byte gate for non-DOS inputs.
	if (c0 === CHAR_BACKSLASH && c1 === CHAR_BACKSLASH) {
		return getDosDeviceType(maybePath);
	}
	return PathType.Normal;
};
|
||||
|
||||
/**
|
||||
* @param {string} maybePath a path
|
||||
* @returns {string} the normalized path
|
||||
*/
|
||||
/**
 * Normalize a path with posix or win32 semantics depending on its type.
 * Relative inputs keep an explicit "./" prefix even when `path.posix
 * .normalize` would strip it.
 * @param {string} maybePath a path
 * @returns {string} the normalized path
 */
const normalize = (maybePath) => {
	const type = getType(maybePath);
	if (type === PathType.Empty) return maybePath;
	if (type === PathType.AbsoluteWin) return winNormalize(maybePath);
	if (type === PathType.Relative) {
		const normalized = posixNormalize(maybePath);
		return getType(normalized) === PathType.Relative
			? normalized
			: `./${normalized}`;
	}
	return posixNormalize(maybePath);
};
|
||||
|
||||
/**
|
||||
* @param {string} rootPath the root path
|
||||
* @param {string | undefined} request the request path
|
||||
* @returns {string} the joined path
|
||||
*/
|
||||
/**
 * Join `request` onto `rootPath`, choosing posix vs win32 semantics from the
 * detected path types. Resolution order: absolute requests win outright,
 * then the root's type picks the separator, then degenerate roots fall back
 * to the root alone.
 * @param {string} rootPath the root path
 * @param {string | undefined} request the request path
 * @returns {string} the joined path
 */
const join = (rootPath, request) => {
	// Falsy request ("" or undefined): nothing to append.
	if (!request) return normalize(rootPath);
	const requestType = getType(request);
	// An absolute request replaces the root entirely.
	switch (requestType) {
		case PathType.AbsolutePosix:
			return posixNormalize(request);
		case PathType.AbsoluteWin:
			return winNormalize(request);
	}
	// Join using the separator family implied by the root's type.
	switch (getType(rootPath)) {
		case PathType.Normal:
		case PathType.Relative:
		case PathType.AbsolutePosix:
			return posixNormalize(`${rootPath}/${request}`);
		case PathType.AbsoluteWin:
			return winNormalize(`${rootPath}\\${request}`);
	}
	// Root is Empty or Internal ("#…"): the request cannot be joined onto
	// it, so fall back to the root by itself.
	switch (requestType) {
		case PathType.Empty:
			return rootPath;
		case PathType.Relative: {
			// Keep an explicit "./" prefix if normalization would drop it.
			const r = posixNormalize(rootPath);
			return getType(r) === PathType.Relative ? r : `./${r}`;
		}
	}
	return posixNormalize(rootPath);
};
|
||||
|
||||
/**
|
||||
* @param {string} maybePath a path
|
||||
* @returns {string} the directory name
|
||||
*/
|
||||
/**
 * Directory name of a path, using win32 rules only for Windows-absolute
 * paths and posix rules for everything else.
 * @param {string} maybePath a path
 * @returns {string} the directory name
 */
const dirname = (maybePath) =>
	getType(maybePath) === PathType.AbsoluteWin
		? path.win32.dirname(maybePath)
		: path.posix.dirname(maybePath);
|
||||
|
||||
/** @typedef {{ fn: (rootPath: string, request: string) => string, cache: Map<string, Map<string, string | undefined>> }} CachedJoin */
|
||||
|
||||
/**
|
||||
* @returns {CachedJoin} cached join
|
||||
*/
|
||||
/**
 * Build a memoized wrapper around `join`, keyed by root path and then by
 * request in a two-level Map. The cache is exposed so callers can clear it.
 * @returns {CachedJoin} cached join
 */
const createCachedJoin = () => {
	/** @type {CachedJoin["cache"]} */
	const cache = new Map();
	/** @type {CachedJoin["fn"]} */
	const fn = (rootPath, request) => {
		let byRequest = cache.get(rootPath);
		if (byRequest !== undefined) {
			const hit = byRequest.get(request);
			if (hit !== undefined) return hit;
		} else {
			byRequest = new Map();
			cache.set(rootPath, byRequest);
		}
		const joined = join(rootPath, request);
		byRequest.set(request, joined);
		return joined;
	};
	return { fn, cache };
};
|
||||
|
||||
/** @typedef {{ fn: (maybePath: string) => string, cache: Map<string, string> }} CachedDirname */
|
||||
|
||||
/**
|
||||
* @returns {CachedDirname} cached dirname
|
||||
*/
|
||||
/**
 * Build a memoized wrapper around `dirname`, keyed by the input path.
 * The cache is exposed so callers can clear it.
 * @returns {CachedDirname} cached dirname
 */
const createCachedDirname = () => {
	/** @type {CachedDirname["cache"]} */
	const cache = new Map();
	/** @type {CachedDirname["fn"]} */
	const fn = (maybePath) => {
		const hit = cache.get(maybePath);
		if (hit !== undefined) return hit;
		const computed = dirname(maybePath);
		cache.set(maybePath, computed);
		return computed;
	};
	return { fn, cache };
};
|
||||
|
||||
/** @typedef {{ fn: (maybePath: string, suffix?: string) => string, cache: Map<string, Map<string | undefined, string | undefined>> }} CachedBasename */
|
||||
|
||||
/**
|
||||
* @returns {CachedBasename} cached basename
|
||||
*/
|
||||
/**
 * Build a memoized wrapper around `path.basename`, keyed by path and then by
 * the optional suffix in a two-level Map. The cache is exposed so callers
 * can clear it.
 * @returns {CachedBasename} cached basename
 */
const createCachedBasename = () => {
	/** @type {CachedBasename["cache"]} */
	const cache = new Map();
	/** @type {CachedBasename["fn"]} */
	const fn = (maybePath, suffix) => {
		let bySuffix = cache.get(maybePath);
		if (bySuffix !== undefined) {
			const hit = bySuffix.get(suffix);
			if (hit !== undefined) return hit;
		} else {
			bySuffix = new Map();
			cache.set(maybePath, bySuffix);
		}
		// NOTE(review): this uses the platform-dependent `path.basename`,
		// unlike `dirname`/`join` above which branch on `getType` — confirm
		// Windows-style paths never reach this helper on posix hosts.
		const computed = path.basename(maybePath, suffix);
		bySuffix.set(suffix, computed);
		return computed;
	};
	return { fn, cache };
};
|
||||
|
||||
/**
|
||||
* Whether `request` is a relative request — i.e. matches `^\.\.?(?:\/|$)`.
|
||||
*
|
||||
* This is called on every `doResolve` via `UnsafeCachePlugin` and
|
||||
* `getInnerRequest`, so the char-code form is meaningfully faster than the
|
||||
* equivalent regex test: no regex state machine, no string object churn.
|
||||
* @param {string} request request string
|
||||
* @returns {boolean} true if request is relative
|
||||
*/
|
||||
/**
 * Whether `request` is a relative request — i.e. matches `^\.\.?(?:\/|$)`.
 *
 * Hot path: called on every `doResolve` via `UnsafeCachePlugin` and
 * `getInnerRequest`, so char-code comparisons are used instead of a regex.
 * @param {string} request request string
 * @returns {boolean} true if request is relative
 */
const isRelativeRequest = (request) => {
	// charCodeAt(0) is NaN for "", which also fails this gate.
	if (request.length === 0 || request.charCodeAt(0) !== CHAR_DOT) {
		return false;
	}
	switch (request.length) {
		case 1:
			return true; // "."
		case 2: {
			const second = request.charCodeAt(1);
			return second === CHAR_SLASH || second === CHAR_DOT; // "./", ".."
		}
	}
	const second = request.charCodeAt(1);
	if (second === CHAR_SLASH) return true; // "./…"
	if (second !== CHAR_DOT) return false; // ".x…"
	return request.charCodeAt(2) === CHAR_SLASH; // "../…" but not "..."
};
|
||||
|
||||
/**
|
||||
* Check if childPath is a subdirectory of parentPath.
|
||||
*
|
||||
* Called from `TsconfigPathsPlugin._selectPathsDataForContext` inside a loop
|
||||
* over every tsconfig-paths context on every resolve, so it's worth keeping
|
||||
* cheap. Compared to the previous `startsWith(normalize(parent + "/"))`
|
||||
* version, this: checks the last char with `charCodeAt` instead of two
|
||||
* `endsWith` calls; and skips `normalize()` entirely in the common case
|
||||
* (parent has no trailing separator), since all we really need is the same
|
||||
* anchoring effect — a cheap `startsWith` plus a separator char check on the
|
||||
* byte immediately after `parentPath.length`.
|
||||
* @param {string} parentPath parent directory path
|
||||
* @param {string} childPath child path to check
|
||||
* @returns {boolean} true if childPath is under parentPath
|
||||
*/
|
||||
/**
 * Check if `childPath` lies under the directory `parentPath`.
 *
 * Kept cheap (prefix test + one char-code check) because it runs in a loop
 * over every tsconfig-paths context on every resolve. A separator must
 * follow the parent prefix so "/app" does not match "/app-other".
 * @param {string} parentPath parent directory path
 * @param {string} childPath child path to check
 * @returns {boolean} true if childPath is under parentPath
 */
const isSubPath = (parentPath, childPath) => {
	const parentLen = parentPath.length;
	if (parentLen === 0) {
		// Mirror the old `normalize("" + "/") === "/"` fallback: an empty
		// parent only "contains" children starting with a forward slash.
		return childPath.length > 0 && childPath.charCodeAt(0) === CHAR_SLASH;
	}
	const last = parentPath.charCodeAt(parentLen - 1);
	if (last === CHAR_SLASH || last === CHAR_BACKSLASH) {
		// Parent already ends with a separator — prefix test suffices.
		return childPath.startsWith(parentPath);
	}
	if (childPath.length <= parentLen || !childPath.startsWith(parentPath)) {
		return false;
	}
	const boundary = childPath.charCodeAt(parentLen);
	return boundary === CHAR_SLASH || boundary === CHAR_BACKSLASH;
};
|
||||
|
||||
// Public surface of this path-utility module, exported alphabetically.
module.exports.PathType = PathType;
module.exports.createCachedBasename = createCachedBasename;
module.exports.createCachedDirname = createCachedDirname;
module.exports.createCachedJoin = createCachedJoin;
module.exports.deprecatedInvalidSegmentRegEx = deprecatedInvalidSegmentRegEx;
module.exports.dirname = dirname;
module.exports.getType = getType;
module.exports.invalidSegmentRegEx = invalidSegmentRegEx;
module.exports.isRelativeRequest = isRelativeRequest;
module.exports.isSubPath = isSubPath;
module.exports.join = join;
module.exports.normalize = normalize;
|
||||
+23
@@ -0,0 +1,23 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Tobias Koppers @sokra
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
module.exports = {
|
||||
/**
|
||||
* @type {Record<string, string>}
|
||||
*/
|
||||
versions: {},
|
||||
// eslint-disable-next-line jsdoc/reject-function-type
|
||||
/** @param {Function} fn function */
|
||||
nextTick(fn) {
|
||||
// eslint-disable-next-line prefer-rest-params
|
||||
const args = Array.prototype.slice.call(arguments, 1);
|
||||
Promise.resolve().then(() => {
|
||||
// eslint-disable-next-line prefer-spread
|
||||
fn.apply(null, args);
|
||||
});
|
||||
},
|
||||
};
|
||||
+177
@@ -0,0 +1,177 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Natsu @xiaoxiaojx
|
||||
|
||||
This file contains code ported from strip-json-comments:
|
||||
https://github.com/sindresorhus/strip-json-comments
|
||||
Original license: MIT
|
||||
Original author: Sindre Sorhus
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* @typedef {object} StripJsonCommentsOptions
|
||||
* @property {boolean=} whitespace Replace comments with whitespace
|
||||
* @property {boolean=} trailingCommas Strip trailing commas
|
||||
*/
|
||||
|
||||
// Sentinel states for the comment-stripping state machine below — unique
// symbols so they can never collide with the `false` "not in a comment" state.
const singleComment = Symbol("singleComment");
const multiComment = Symbol("multiComment");
|
||||
|
||||
/**
|
||||
* Strip without whitespace (returns empty string)
|
||||
* @param {string} _string Unused
|
||||
* @param {number} _start Unused
|
||||
* @param {number} _end Unused
|
||||
* @returns {string} Empty string for all input
|
||||
*/
|
||||
/**
 * Strip without whitespace: discard the span entirely by returning "".
 * @param {string} _string Unused
 * @param {number} _start Unused
 * @param {number} _end Unused
 * @returns {string} Empty string for all input
 */
const stripWithoutWhitespace = (_string, _start, _end) => {
	return "";
};
|
||||
|
||||
/**
|
||||
* Replace all characters except ASCII spaces, tabs and line endings with regular spaces to ensure valid JSON output.
|
||||
* @param {string} string String to process
|
||||
* @param {number} start Start index
|
||||
* @param {number} end End index
|
||||
* @returns {string} Processed string with comments replaced by whitespace
|
||||
*/
|
||||
/**
 * Replace every character in [start, end) except ASCII spaces, tabs and
 * line endings with a regular space, so the output keeps the original
 * layout while staying valid JSON.
 * @param {string} string String to process
 * @param {number} start Start index
 * @param {number} end End index
 * @returns {string} Processed string with comments replaced by whitespace
 */
const stripWithWhitespace = (string, start, end) => {
	const segment = string.slice(start, end);
	let replaced = "";
	// Iterate UTF-16 code units (same granularity as the /[^ \t\r\n]/g
	// regex this replaces), preserving only layout whitespace.
	for (let i = 0; i < segment.length; i++) {
		const ch = segment[i];
		replaced +=
			ch === " " || ch === "\t" || ch === "\r" || ch === "\n" ? ch : " ";
	}
	return replaced;
};
|
||||
|
||||
/**
|
||||
* Check if a quote is escaped
|
||||
* @param {string} jsonString JSON string
|
||||
* @param {number} quotePosition Position of the quote
|
||||
* @returns {boolean} True if the quote at the given position is escaped
|
||||
*/
|
||||
/**
 * Whether the quote at `quotePosition` is escaped: true when it is preceded
 * by an odd number of consecutive backslashes (an even count means the
 * backslashes escape each other, leaving the quote unescaped).
 * @param {string} jsonString JSON string
 * @param {number} quotePosition Position of the quote
 * @returns {boolean} True if the quote at the given position is escaped
 */
const isEscaped = (jsonString, quotePosition) => {
	let backslashes = 0;
	for (let i = quotePosition - 1; jsonString[i] === "\\"; i--) {
		backslashes++;
	}
	return backslashes % 2 === 1;
};
|
||||
|
||||
/**
|
||||
* Strip comments from JSON string
|
||||
* @param {string} jsonString JSON string with potential comments
|
||||
* @param {StripJsonCommentsOptions} options Options
|
||||
* @returns {string} JSON string without comments
|
||||
*/
|
||||
/**
 * Strip `//` and `/* … *​/` comments (and optionally trailing commas) from a
 * JSON string, replacing them with whitespace by default so line/column
 * positions in parse errors stay meaningful.
 *
 * Single-pass state machine. `offset` marks the start of the not-yet-flushed
 * run of input; `buffer` holds output that may still be rewritten if the
 * most recent comma turns out to be trailing; `result` is final output.
 * @param {string} jsonString JSON string with potential comments
 * @param {StripJsonCommentsOptions} options Options
 * @returns {string} JSON string without comments
 */
function stripJsonComments(
	jsonString,
	{ whitespace = true, trailingCommas = false } = {},
) {
	if (typeof jsonString !== "string") {
		throw new TypeError(
			`Expected argument \`jsonString\` to be a \`string\`, got \`${typeof jsonString}\``,
		);
	}

	const strip = whitespace ? stripWithWhitespace : stripWithoutWhitespace;

	let isInsideString = false;
	/** @type {false | typeof singleComment | typeof multiComment} */
	let isInsideComment = false;
	// Start of the pending (not yet flushed) run of input characters.
	let offset = 0;
	// Flushed output that may still be rewritten (trailing-comma handling).
	let buffer = "";
	let result = "";
	// Index of the most recent comma that might be trailing, or -1.
	let commaIndex = -1;

	for (let index = 0; index < jsonString.length; index++) {
		const currentCharacter = jsonString[index];
		const nextCharacter = jsonString[index + 1];

		if (!isInsideComment && currentCharacter === '"') {
			// Enter or exit string (escaped quotes don't toggle the state)
			const escaped = isEscaped(jsonString, index);
			if (!escaped) {
				isInsideString = !isInsideString;
			}
		}

		// Comment/comma markers inside strings are literal text.
		if (isInsideString) {
			continue;
		}

		if (!isInsideComment && currentCharacter + nextCharacter === "//") {
			// Enter single-line comment
			buffer += jsonString.slice(offset, index);
			offset = index;
			isInsideComment = singleComment;
			index++;
		} else if (
			isInsideComment === singleComment &&
			currentCharacter + nextCharacter === "\r\n"
		) {
			// Exit single-line comment via \r\n (the CRLF itself is kept)
			index++;
			isInsideComment = false;
			buffer += strip(jsonString, offset, index);
			offset = index;
			continue;
		} else if (isInsideComment === singleComment && currentCharacter === "\n") {
			// Exit single-line comment via \n
			isInsideComment = false;
			buffer += strip(jsonString, offset, index);
			offset = index;
		} else if (!isInsideComment && currentCharacter + nextCharacter === "/*") {
			// Enter multiline comment
			buffer += jsonString.slice(offset, index);
			offset = index;
			isInsideComment = multiComment;
			index++;
			continue;
		} else if (
			isInsideComment === multiComment &&
			currentCharacter + nextCharacter === "*/"
		) {
			// Exit multiline comment (strip through the closing "/")
			index++;
			isInsideComment = false;
			buffer += strip(jsonString, offset, index + 1);
			offset = index + 1;
			continue;
		} else if (trailingCommas && !isInsideComment) {
			if (commaIndex !== -1) {
				if (currentCharacter === "}" || currentCharacter === "]") {
					// Strip trailing comma: `buffer` starts at the comma, so
					// blanking its first character erases just the comma.
					buffer += jsonString.slice(offset, index);
					result += strip(buffer, 0, 1) + buffer.slice(1);
					buffer = "";
					offset = index;
					commaIndex = -1;
				} else if (
					currentCharacter !== " " &&
					currentCharacter !== "\t" &&
					currentCharacter !== "\r" &&
					currentCharacter !== "\n"
				) {
					// Hit non-whitespace following a comma; comma is not trailing
					buffer += jsonString.slice(offset, index);
					offset = index;
					commaIndex = -1;
				}
			} else if (currentCharacter === ",") {
				// Flush buffer prior to this point, and save new comma index
				result += buffer + jsonString.slice(offset, index);
				buffer = "";
				offset = index;
				commaIndex = index;
			}
		}
	}

	// An unterminated single-line comment at EOF must still be stripped;
	// any other tail is emitted verbatim.
	const remaining =
		isInsideComment === singleComment
			? strip(jsonString, offset, jsonString.length)
			: jsonString.slice(offset);

	return result + buffer + remaining;
}
|
||||
|
||||
// Export the function itself so `require(...)` is directly callable.
module.exports = stripJsonComments;
|
||||
Reference in New Issue
Block a user