Routie dev init, since I didn't adhere to any proper guidance up until now

This commit is contained in:
2026-04-29 22:27:29 -06:00
commit e1dabb71e2
15301 changed files with 3562618 additions and 0 deletions
+316
View File
@@ -0,0 +1,316 @@
import $Ref from "./ref.js";
import Pointer from "./pointer.js";
import * as url from "./util/url.js";
import type $Refs from "./refs.js";
import type $RefParser from "./index";
import type { ParserOptions } from "./index";
import type { JSONSchema } from "./index";
import type { BundleOptions } from "./options";
/**
 * A record of a single $ref pointer found while crawling the schema.
 * Field meanings mirror the object literal pushed in `inventory$Ref`.
 */
export interface InventoryEntry {
  $ref: any; // The JSON Reference object (e.g. { $ref: string })
  parent: any; // The object that contains this $ref pointer
  key: any; // The key in `parent` whose value is the $ref pointer
  pathFromRoot: any; // The path to the $ref pointer, from the JSON Schema root
  depth: any; // How far from the JSON Schema root this $ref pointer is
  file: any; // The file that the $ref pointer resolves to
  hash: any; // The hash within `file` that the $ref pointer resolves to
  value: any; // The resolved value of the $ref pointer
  circular: any; // Whether this $ref pointer is DIRECTLY circular (i.e. references itself)
  extended: any; // Whether this $ref has extra properties in addition to "$ref"
  external: any; // Whether this $ref points to a file other than the main JSON Schema file
  indirections: any; // The number of indirect references traversed to resolve the value
}
/**
 * Bundles all external JSON references into the main JSON schema, so the result contains
 * only *internal* references and no *external* ones.
 * This method mutates the JSON schema object, adding new references and re-mapping existing ones.
 *
 * @param parser
 * @param options
 */
function bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  parser: $RefParser<S, O>,
  options: O,
) {
  // Step 1: take stock of every $ref pointer in the JSON Schema
  const inventory: InventoryEntry[] = [];
  const rootPath = parser.$refs._root$Ref.path + "#";
  crawl<S, O>(parser, "schema", rootPath, "#", 0, inventory, parser.$refs, options);

  // Step 2: rewrite every $ref pointer so they are all internal
  remap(inventory);
}
/**
 * Recursively crawls the given value, and inventories all JSON references.
 *
 * @param parent - The object containing the value to crawl. If the value is not an object or array, it will be ignored.
 * @param key - The property key of `parent` to be crawled (`null` means crawl `parent` itself)
 * @param path - The full path of the property being crawled, possibly with a JSON Pointer in the hash
 * @param pathFromRoot - The path of the property being crawled, from the schema root
 * @param indirections
 * @param inventory - An array of already-inventoried $ref pointers
 * @param $refs
 * @param options
 */
function crawl<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  parent: object | $RefParser<S, O>,
  key: string | null,
  path: string,
  pathFromRoot: string,
  indirections: number,
  inventory: InventoryEntry[],
  $refs: $Refs<S, O>,
  options: O,
) {
  const obj = key === null ? parent : parent[key as keyof typeof parent];
  const bundleOptions = (options.bundle || {}) as BundleOptions;
  const isExcludedPath = bundleOptions.excludedPathMatcher || (() => false);

  // Ignore primitives, typed arrays, and excluded paths
  if (!obj || typeof obj !== "object" || ArrayBuffer.isView(obj) || isExcludedPath(pathFromRoot)) {
    return;
  }

  if ($Ref.isAllowed$Ref(obj)) {
    inventory$Ref(parent, key, path, pathFromRoot, indirections, inventory, $refs, options);
    return;
  }

  // Crawl the object's keys in a specific order that's optimized for bundling:
  // "definitions"/"$defs" first (most people expect references to be bundled there),
  // then shorter keys before longer ones so bundled references come out as short as
  // possible. This ordering matters because it determines how `pathFromRoot` gets
  // built, which later decides which keys get dereferenced and which get remapped.
  const compareKeys = (a: string, b: string): number => {
    if (a === "definitions" || a === "$defs") {
      return -1;
    }
    if (b === "definitions" || b === "$defs") {
      return 1;
    }
    return a.length - b.length;
  };
  const sortedKeys = Object.keys(obj).sort(compareKeys) as (keyof typeof obj)[];

  for (const childKey of sortedKeys) {
    const childPath = Pointer.join(path, childKey);
    const childPathFromRoot = Pointer.join(pathFromRoot, childKey);
    const value = obj[childKey];

    if ($Ref.isAllowed$Ref(value)) {
      inventory$Ref(obj, childKey, path, childPathFromRoot, indirections, inventory, $refs, options);
    } else {
      crawl(obj, childKey, childPath, childPathFromRoot, indirections, inventory, $refs, options);
    }

    // `value` may be nullish here (e.g. when crawling an `examples` schema),
    // so guard before probing it for a "$ref" key
    if (value && typeof value === "object" && !Array.isArray(value)) {
      if ("$ref" in value) {
        bundleOptions?.onBundle?.(value["$ref"], obj[childKey], obj as any, childKey);
      }
    }
  }
}
/**
 * Inventories the given JSON Reference (i.e. records detailed information about it so we can
 * optimize all $refs in the schema), and then crawls the resolved value.
 *
 * @param $refParent - The object that contains a JSON Reference as one of its keys
 * @param $refKey - The key in `$refParent` that is a JSON Reference (`null` means `$refParent` itself is the $ref)
 * @param path - The full path of the JSON Reference at `$refKey`, possibly with a JSON Pointer in the hash
 * @param pathFromRoot - The path of the JSON Reference at `$refKey`, from the schema root
 * @param indirections - The number of indirect references traversed so far to reach this $ref
 * @param inventory - An array of already-inventoried $ref pointers
 * @param $refs
 * @param options
 */
function inventory$Ref<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  $refParent: any,
  $refKey: string | null,
  path: string,
  pathFromRoot: string,
  indirections: number,
  inventory: InventoryEntry[],
  $refs: $Refs<S, O>,
  options: O,
) {
  // A null key means `$refParent` itself is the $ref object
  const $ref = $refKey === null ? $refParent : $refParent[$refKey];
  const $refPath = url.resolve(path, $ref.$ref);
  const pointer = $refs._resolve($refPath, pathFromRoot, options);
  if (pointer === null) {
    // The $ref could not be resolved, so there is nothing to inventory
    return;
  }
  const parsed = Pointer.parse(pathFromRoot);
  const depth = parsed.length;
  const file = url.stripHash(pointer.path);
  const hash = url.getHash(pointer.path);
  const external = file !== $refs._root$Ref.path;
  const extended = $Ref.isExtended$Ref($ref);
  indirections += pointer.indirections;
  const existingEntry = findInInventory(inventory, $refParent, $refKey);
  if (existingEntry) {
    // This $Ref has already been inventoried, so we don't need to process it again...
    if (depth < existingEntry.depth || indirections < existingEntry.indirections) {
      // ...unless this occurrence is "better" (shallower or more direct), in which case
      // we drop the old entry and re-inventory it below
      removeFromInventory(inventory, existingEntry);
    } else {
      return;
    }
  }
  inventory.push({
    $ref, // The JSON Reference (e.g. {$ref: string})
    parent: $refParent, // The object that contains this $ref pointer
    key: $refKey, // The key in `parent` that is the $ref pointer
    pathFromRoot, // The path to the $ref pointer, from the JSON Schema root
    depth, // How far from the JSON Schema root is this $ref pointer?
    file, // The file that the $ref pointer resolves to
    hash, // The hash within `file` that the $ref pointer resolves to
    value: pointer.value, // The resolved value of the $ref pointer
    circular: pointer.circular, // Is this $ref pointer DIRECTLY circular? (i.e. it references itself)
    extended, // Does this $ref extend its resolved value? (i.e. it has extra properties, in addition to "$ref")
    external, // Does this $ref pointer point to a file other than the main JSON Schema file?
    indirections, // The number of indirect references that were traversed to resolve the value
  });
  // Recursively crawl the resolved value
  if (!existingEntry || external) {
    crawl(pointer.value, null, pointer.path, pathFromRoot, indirections + 1, inventory, $refs, options);
  }
}
/**
 * Re-maps every $ref pointer, so that they're all relative to the root of the JSON Schema.
 * Each referenced value is dereferenced EXACTLY ONCE. All subsequent references to the same
 * value are re-mapped to point to the first reference.
 *
 * @example: {
 *    first: { $ref: somefile.json#/some/part },
 *    second: { $ref: somefile.json#/another/part },
 *    third: { $ref: somefile.json },
 *    fourth: { $ref: somefile.json#/some/part/sub/part }
 * }
 *
 * In this example, there are four references to the same file, but since the third reference points
 * to the ENTIRE file, that's the only one we need to dereference. The other three can just be
 * remapped to point inside the third one.
 *
 * On the other hand, if the third reference DIDN'T exist, then the first and second would both need
 * to be dereferenced, since they point to different parts of the file. The fourth reference does NOT
 * need to be dereferenced, because it can be remapped to point inside the first one.
 *
 * @param inventory
 */
function remap(inventory: InventoryEntry[]) {
  // Group & sort all the $ref pointers, so they're in the order that we need to dereference/remap them
  inventory.sort((a: InventoryEntry, b: InventoryEntry) => {
    if (a.file !== b.file) {
      // Group all the $refs that point to the same file
      return a.file < b.file ? -1 : +1;
    } else if (a.hash !== b.hash) {
      // Group all the $refs that point to the same part of the file
      return a.hash < b.hash ? -1 : +1;
    } else if (a.circular !== b.circular) {
      // If the $ref points to itself, then sort it higher than other $refs that point to this $ref
      return a.circular ? -1 : +1;
    } else if (a.extended !== b.extended) {
      // If the $ref extends the resolved value, then sort it lower than other $refs that don't extend the value
      return a.extended ? +1 : -1;
    } else if (a.indirections !== b.indirections) {
      // Sort direct references higher than indirect references
      return a.indirections - b.indirections;
    } else if (a.depth !== b.depth) {
      // Sort $refs by how close they are to the JSON Schema root
      return a.depth - b.depth;
    } else {
      // Determine how far each $ref is from the "definitions" property.
      // Most people will expect references to be bundled into the the "definitions" property if possible.
      const aDefinitionsIndex = Math.max(
        a.pathFromRoot.lastIndexOf("/definitions"),
        a.pathFromRoot.lastIndexOf("/$defs"),
      );
      const bDefinitionsIndex = Math.max(
        b.pathFromRoot.lastIndexOf("/definitions"),
        b.pathFromRoot.lastIndexOf("/$defs"),
      );
      if (aDefinitionsIndex !== bDefinitionsIndex) {
        // Give higher priority to the $ref that's closer to the "definitions" property
        return bDefinitionsIndex - aDefinitionsIndex;
      } else {
        // All else is equal, so use the shorter path, which will produce the shortest possible reference
        return a.pathFromRoot.length - b.pathFromRoot.length;
      }
    }
  });
  // Track the previous entry's target, so subsequent $refs to the same file/hash
  // can be remapped to the first (already-dereferenced) occurrence
  let file, hash, pathFromRoot;
  for (const entry of inventory) {
    // console.log('Re-mapping $ref pointer "%s" at %s', entry.$ref.$ref, entry.pathFromRoot);
    if (!entry.external) {
      // This $ref already resolves to the main JSON Schema file
      entry.$ref.$ref = entry.hash;
    } else if (entry.file === file && entry.hash === hash) {
      // This $ref points to the same value as the prevous $ref, so remap it to the same path
      entry.$ref.$ref = pathFromRoot;
    } else if (entry.file === file && entry.hash.indexOf(hash + "/") === 0) {
      // This $ref points to a sub-value of the prevous $ref, so remap it beneath that path
      entry.$ref.$ref = Pointer.join(pathFromRoot, Pointer.parse(entry.hash.replace(hash, "#")));
    } else {
      // We've moved to a new file or new hash
      file = entry.file;
      hash = entry.hash;
      pathFromRoot = entry.pathFromRoot;
      // This is the first $ref to point to this value, so dereference the value.
      // Any other $refs that point to the same value will point to this $ref instead
      entry.$ref = entry.parent[entry.key] = $Ref.dereference(entry.$ref, entry.value);
      if (entry.circular) {
        // This $ref points to itself
        entry.$ref.$ref = entry.pathFromRoot;
      }
    }
  }
  // we want to ensure that any $refs that point to another $ref are remapped to point to the final value
  // let hadChange = true;
  // while (hadChange) {
  //   hadChange = false;
  //   for (const entry of inventory) {
  //     if (entry.$ref && typeof entry.$ref === "object" && "$ref" in entry.$ref) {
  //       const resolved = inventory.find((e: InventoryEntry) => e.pathFromRoot === entry.$ref.$ref);
  //       if (resolved) {
  //         const resolvedPointsToAnotherRef =
  //           resolved.$ref && typeof resolved.$ref === "object" && "$ref" in resolved.$ref;
  //         if (resolvedPointsToAnotherRef && entry.$ref.$ref !== resolved.$ref.$ref) {
  //           // console.log('Re-mapping $ref pointer "%s" at %s', entry.$ref.$ref, entry.pathFromRoot);
  //           entry.$ref.$ref = resolved.$ref.$ref;
  //           hadChange = true;
  //         }
  //       }
  //     }
  //   }
  // }
}
/**
 * Finds the inventory entry (if any) whose parent object and key match the given $ref location.
 *
 * @param inventory - The array of already-inventoried $ref pointers to search
 * @param $refParent - The object that contains the JSON Reference
 * @param $refKey - The key in `$refParent` that is the JSON Reference
 * @returns The matching entry, or `undefined` if this $ref has not been inventoried yet
 */
function findInInventory(inventory: InventoryEntry[], $refParent: any, $refKey: any) {
  // Identity comparison on `parent` is intentional: the same $ref value can appear at
  // multiple places in the schema, and each location is a distinct inventory entry.
  return inventory.find((entry) => entry && entry.parent === $refParent && entry.key === $refKey);
}
/**
 * Removes the given entry from the inventory, if it is present.
 *
 * @param inventory - The array of inventoried $ref pointers
 * @param entry - The entry to remove (compared by identity)
 */
function removeFromInventory(inventory: InventoryEntry[], entry: any) {
  const index = inventory.indexOf(entry);
  // Guard against a missing entry: splice(-1, 1) would wrongly remove the LAST element
  if (index !== -1) {
    inventory.splice(index, 1);
  }
}
export default bundle;
@@ -0,0 +1,361 @@
import $Ref from "./ref.js";
import Pointer from "./pointer.js";
import * as url from "./util/url.js";
import type $Refs from "./refs.js";
import type { DereferenceOptions, ParserOptions } from "./options.js";
import type { JSONSchema } from "./types";
import type $RefParser from "./index";
import { TimeoutError } from "./util/errors";
export default dereference;
/**
 * Crawls the JSON schema, finds all JSON references, and dereferences them.
 * This method mutates the JSON schema object, replacing JSON references with their resolved value.
 *
 * @param parser
 * @param options
 */
function dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  parser: $RefParser<S, O>,
  options: O,
) {
  // Record the start time so crawl() can enforce `options.timeoutMs`
  const startTime = Date.now();
  const rootPath = parser.$refs._root$Ref.path!;
  const { circular, value } = crawl<S, O>(
    parser.schema,
    rootPath,
    "#",
    new Set(), // parents: objects on the current crawl path
    new Set(), // processedObjects: objects already visited
    new Map(), // dereferencedCache: resolved values keyed by $ref path
    parser.$refs,
    options,
    startTime,
  );
  parser.$refs.circular = circular;
  parser.schema = value;
}
/**
 * Recursively crawls the given value, and dereferences any JSON references.
 *
 * @param obj - The value to crawl. If it's not an object or array, it will be ignored.
 * @param path - The full path of `obj`, possibly with a JSON Pointer in the hash
 * @param pathFromRoot - The path of `obj` from the schema root
 * @param parents - A set of the parent objects currently being dereferenced (used to detect circular references)
 * @param processedObjects - A set of all the objects that have already been processed
 * @param dereferencedCache - A map of all the dereferenced objects, keyed by their resolved $ref path
 * @param $refs
 * @param options
 * @param startTime - The time when the dereferencing started (used to enforce `options.timeoutMs`)
 * @returns An object with the (possibly replaced) `value` and a `circular` flag
 */
function crawl<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  obj: any,
  path: string,
  pathFromRoot: string,
  parents: Set<any>,
  processedObjects: Set<any>,
  dereferencedCache: any,
  $refs: $Refs<S, O>,
  options: O,
  startTime: number,
) {
  let dereferenced;
  const result = {
    value: obj,
    circular: false,
  };
  // Throws a TimeoutError if we've run past the allowed time budget
  checkDereferenceTimeout<S, O>(startTime, options);
  const derefOptions = (options.dereference || {}) as DereferenceOptions;
  const isExcludedPath = derefOptions.excludedPathMatcher || (() => false);
  // When circular handling is "ignore", objects may be revisited; otherwise each object is processed at most once
  if (derefOptions?.circular === "ignore" || !processedObjects.has(obj)) {
    if (obj && typeof obj === "object" && !ArrayBuffer.isView(obj) && !isExcludedPath(pathFromRoot)) {
      parents.add(obj);
      processedObjects.add(obj);
      if ($Ref.isAllowed$Ref(obj, options)) {
        // `obj` itself is a $ref object, so dereference it directly
        dereferenced = dereference$Ref(
          obj,
          path,
          pathFromRoot,
          parents,
          processedObjects,
          dereferencedCache,
          $refs,
          options,
          startTime,
        );
        result.circular = dereferenced.circular;
        result.value = dereferenced.value;
      } else {
        // Otherwise, crawl each of `obj`'s properties
        for (const key of Object.keys(obj)) {
          checkDereferenceTimeout<S, O>(startTime, options);
          const keyPath = Pointer.join(path, key);
          const keyPathFromRoot = Pointer.join(pathFromRoot, key);
          if (isExcludedPath(keyPathFromRoot)) {
            continue;
          }
          const value = obj[key];
          let circular = false;
          if ($Ref.isAllowed$Ref(value, options)) {
            dereferenced = dereference$Ref(
              value,
              keyPath,
              keyPathFromRoot,
              parents,
              processedObjects,
              dereferencedCache,
              $refs,
              options,
              startTime,
            );
            circular = dereferenced.circular;
            // Avoid pointless mutations; breaks frozen objects to no profit
            if (obj[key] !== dereferenced.value) {
              // If we have properties we want to preserve from our dereferenced schema then we need
              // to copy them over to our new object.
              const preserved: Map<string, unknown> = new Map();
              if (derefOptions?.preservedProperties) {
                if (typeof obj[key] === "object" && !Array.isArray(obj[key])) {
                  derefOptions?.preservedProperties.forEach((prop) => {
                    if (prop in obj[key]) {
                      preserved.set(prop, obj[key][prop]);
                    }
                  });
                }
              }
              obj[key] = dereferenced.value;
              // If we have data to preserve and our dereferenced object is still an object then
              // we need copy back our preserved data into our dereferenced schema.
              if (derefOptions?.preservedProperties) {
                if (preserved.size && typeof obj[key] === "object" && !Array.isArray(obj[key])) {
                  preserved.forEach((value, prop) => {
                    obj[key][prop] = value;
                  });
                }
              }
              derefOptions?.onDereference?.(value.$ref, obj[key], obj, key);
            }
          } else {
            // Not a $ref: recurse into the value, unless it's an ancestor (circular)
            if (!parents.has(value)) {
              dereferenced = crawl(
                value,
                keyPath,
                keyPathFromRoot,
                parents,
                processedObjects,
                dereferencedCache,
                $refs,
                options,
                startTime,
              );
              circular = dereferenced.circular;
              // Avoid pointless mutations; breaks frozen objects to no profit
              if (obj[key] !== dereferenced.value) {
                obj[key] = dereferenced.value;
              }
            } else {
              circular = foundCircularReference(keyPath, $refs, options);
            }
          }
          // Set the "isCircular" flag if this or any other property is circular
          result.circular = result.circular || circular;
        }
      }
      // `obj` is no longer on the current crawl path
      parents.delete(obj);
    }
  }
  return result;
}
/**
 * Dereferences the given JSON Reference, and then crawls the resulting value.
 *
 * @param $ref - The JSON Reference to resolve
 * @param path - The full path of `$ref`, possibly with a JSON Pointer in the hash
 * @param pathFromRoot - The path of `$ref` from the schema root
 * @param parents - A set of the parent objects currently being dereferenced (used to detect circular references)
 * @param processedObjects - A set of all the objects that have already been dereferenced
 * @param dereferencedCache - A map of all the dereferenced objects, keyed by their resolved $ref path
 * @param $refs
 * @param options
 * @param startTime - The time when the dereferencing started (used to enforce `options.timeoutMs`)
 * @returns An object with the dereferenced `value` and a `circular` flag
 */
function dereference$Ref<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  $ref: any,
  path: string,
  pathFromRoot: string,
  parents: Set<any>,
  processedObjects: any,
  dereferencedCache: any,
  $refs: $Refs<S, O>,
  options: O,
  startTime: number,
) {
  const isExternalRef = $Ref.isExternal$Ref($ref);
  // With externalReferenceResolution === "root", external $refs resolve relative to the CWD instead of `path`
  const shouldResolveOnCwd = isExternalRef && options?.dereference?.externalReferenceResolution === "root";
  const $refPath = url.resolve(shouldResolveOnCwd ? url.cwd() : path, $ref.$ref);
  const cache = dereferencedCache.get($refPath);
  if (cache) {
    // If the object we found is circular we can immediately return it because it would have been
    // cached with everything we need already and we don't need to re-process anything inside it.
    //
    // If the cached object however is _not_ circular and there are additional keys alongside our
    // `$ref` pointer here we should merge them back in and return that.
    if (!cache.circular) {
      const refKeys = Object.keys($ref);
      if (refKeys.length > 1) {
        // Merge the extra sibling keys (those not already in the cached value) into a copy
        const extraKeys = {};
        for (const key of refKeys) {
          if (key !== "$ref" && !(key in cache.value)) {
            // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
            extraKeys[key] = $ref[key];
          }
        }
        return {
          circular: cache.circular,
          value: Object.assign({}, cache.value, extraKeys),
        };
      }
      return cache;
    }
    // If both our cached value and our incoming `$ref` are the same then we can return what we
    // got out of the cache, otherwise we should re-process this value. We need to do this because
    // the current dereference caching mechanism doesn't take into account that `$ref` are neither
    // unique or reference the same file.
    //
    // For example if `schema.yaml` references `definitions/child.yaml` and
    // `definitions/parent.yaml` references `child.yaml` then `$ref: 'child.yaml'` may get cached
    // for `definitions/child.yaml`, resulting in `schema.yaml` being having an invalid reference
    // to `child.yaml`.
    //
    // This check is not perfect and the design of the dereference caching mechanism needs a total
    // overhaul.
    if (typeof cache.value === "object" && "$ref" in cache.value && "$ref" in $ref) {
      if (cache.value.$ref === $ref.$ref) {
        return cache;
      } else {
        // no-op: fall through and re-process this $ref below
      }
    } else {
      return cache;
    }
  }
  const pointer = $refs._resolve($refPath, path, options);
  if (pointer === null) {
    // The $ref could not be resolved (e.g. an ignored/missing reference)
    return {
      circular: false,
      value: null,
    };
  }
  // Check for circular references
  const directCircular = pointer.circular;
  let circular = directCircular || parents.has(pointer.value);
  if (circular) {
    foundCircularReference(path, $refs, options);
  }
  // Dereference the JSON reference
  let dereferencedValue = $Ref.dereference($ref, pointer.value);
  // Crawl the dereferenced value (unless it's circular)
  if (!circular) {
    // Determine if the dereferenced value is circular
    const dereferenced = crawl(
      dereferencedValue,
      pointer.path,
      pathFromRoot,
      parents,
      processedObjects,
      dereferencedCache,
      $refs,
      options,
      startTime,
    );
    circular = dereferenced.circular;
    dereferencedValue = dereferenced.value;
  }
  if (circular && !directCircular && options.dereference?.circular === "ignore") {
    // The user has chosen to "ignore" circular references, so don't change the value
    dereferencedValue = $ref;
  }
  if (directCircular) {
    // The pointer is a DIRECT circular reference (i.e. it references itself).
    // So replace the $ref path with the absolute path from the JSON Schema root
    dereferencedValue.$ref = pathFromRoot;
  }
  const dereferencedObject = {
    circular,
    value: dereferencedValue,
  };
  // only cache if no extra properties than $ref
  if (Object.keys($ref).length === 1) {
    dereferencedCache.set($refPath, dereferencedObject);
  }
  return dereferencedObject;
}
/**
 * Check if we've run past our allowed timeout and throw an error if we have.
 *
 * @param startTime - The time when the dereferencing started.
 * @param options
 * @throws {TimeoutError} When `options.timeoutMs` is set and has been exceeded.
 */
function checkDereferenceTimeout<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  startTime: number,
  options: O,
): void {
  // No (or zero) timeout configured means the crawl can run indefinitely
  const timeoutMs = options && options.timeoutMs;
  if (!timeoutMs) {
    return;
  }
  const elapsed = Date.now() - startTime;
  if (elapsed > timeoutMs) {
    throw new TimeoutError(timeoutMs);
  }
}
/**
 * Called when a circular reference is found.
 * It sets the {@link $Refs#circular} flag, executes the options.dereference.onCircular callback,
 * and throws an error if options.dereference.circular is falsy.
 *
 * @param keyPath - The JSON Reference path of the circular reference
 * @param $refs
 * @param options
 * @returns - always returns true, to indicate that a circular reference was found
 * @throws {ReferenceError} When circular references are not allowed by the options
 */
function foundCircularReference(keyPath: any, $refs: any, options: any) {
  $refs.circular = true;
  options?.dereference?.onCircular?.(keyPath);
  // Use optional chaining consistently with the line above: previously a missing
  // `options.dereference` caused a TypeError here instead of the intended ReferenceError.
  if (!options?.dereference?.circular) {
    throw new ReferenceError(`Circular $ref pointer found at ${keyPath}`);
  }
  return true;
}
+444
View File
@@ -0,0 +1,444 @@
import $Refs from "./refs.js";
import _parse from "./parse.js";
import normalizeArgs from "./normalize-args.js";
import resolveExternal from "./resolve-external.js";
import _bundle from "./bundle.js";
import _dereference from "./dereference.js";
import * as url from "./util/url.js";
import {
JSONParserError,
InvalidPointerError,
MissingPointerError,
ResolverError,
ParserError,
UnmatchedParserError,
UnmatchedResolverError,
isHandledError,
JSONParserErrorGroup,
} from "./util/errors.js";
import maybe from "./util/maybe.js";
import type { ParserOptions } from "./options.js";
import { getJsonSchemaRefParserDefaultOptions } from "./options.js";
import type {
$RefsCallback,
JSONSchema,
SchemaCallback,
FileInfo,
Plugin,
ResolverOptions,
HTTPResolverOptions,
} from "./types/index.js";
import { isUnsafeUrl } from "./util/url.js";
export type RefParserSchema = string | JSONSchema;
/**
* This class parses a JSON schema, builds a map of its JSON references and their resolved values,
* and provides methods for traversing, manipulating, and dereferencing those references.
*
* @class
*/
export class $RefParser<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
/**
* The parsed (and possibly dereferenced) JSON schema object
*
* @type {object}
* @readonly
*/
public schema: S | null = null;
/**
* The resolved JSON references
*
* @type {$Refs}
* @readonly
*/
$refs = new $Refs<S, O>();
/**
* Parses the given JSON schema.
* This method does not resolve any JSON references.
* It just reads a single file in JSON or YAML format, and parse it as a JavaScript object.
*
* @param [path] - The file path or URL of the JSON schema
* @param [schema] - A JSON schema object. This object will be used instead of reading from `path`.
* @param [options] - Options that determine how the schema is parsed
* @param [callback] - An error-first callback. The second parameter is the parsed JSON schema object.
* @returns - The returned promise resolves with the parsed JSON schema object.
*/
public parse(schema: S | string | unknown): Promise<S>;
public parse(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
public parse(schema: S | string | unknown, options: O): Promise<S>;
public parse(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
public parse(path: string, schema: S | string | unknown, options: O): Promise<S>;
public parse(path: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
async parse() {
const args = normalizeArgs<S, O>(arguments as any);
let promise;
if (!args.path && !args.schema) {
const err = new Error(`Expected a file path, URL, or object. Got ${args.path || args.schema}`);
return maybe(args.callback, Promise.reject(err));
}
// Reset everything
this.schema = null;
this.$refs = new $Refs();
// If the path is a filesystem path, then convert it to a URL.
// NOTE: According to the JSON Reference spec, these should already be URLs,
// but, in practice, many people use local filesystem paths instead.
// So we're being generous here and doing the conversion automatically.
// This is not intended to be a 100% bulletproof solution.
// If it doesn't work for your use-case, then use a URL instead.
let pathType = "http";
if (url.isFileSystemPath(args.path)) {
args.path = url.fromFileSystemPath(args.path);
pathType = "file";
} else if (!args.path && args.schema && "$id" in args.schema && args.schema.$id) {
// when schema id has defined an URL should use that hostname to request the references,
// instead of using the current page URL
const params = url.parse(args.schema.$id as string);
const port = params.protocol === "https:" ? 443 : 80;
args.path = `${params.protocol}//${params.hostname}:${port}`;
}
// Resolve the absolute path of the schema
args.path = url.resolve(url.cwd(), args.path);
if (args.schema && typeof args.schema === "object") {
// A schema object was passed-in.
// So immediately add a new $Ref with the schema object as its value
const $ref = this.$refs._add(args.path);
$ref.value = args.schema;
$ref.pathType = pathType;
promise = Promise.resolve(args.schema);
} else {
// Parse the schema file/url
promise = _parse<S, typeof args.options>(args.path, this.$refs, args.options);
}
try {
const result = await promise;
if (result !== null && typeof result === "object" && !Buffer.isBuffer(result)) {
this.schema = result;
return maybe(args.callback, Promise.resolve(this.schema!));
} else if (args.options.continueOnError) {
this.schema = null; // it's already set to null at line 79, but let's set it again for the sake of readability
return maybe(args.callback, Promise.resolve(this.schema!));
} else {
throw new SyntaxError(`"${this.$refs._root$Ref.path || result}" is not a valid JSON Schema`);
}
} catch (err) {
if (!args.options.continueOnError || !isHandledError(err)) {
return maybe(args.callback, Promise.reject(err));
}
if (this.$refs._$refs[url.stripHash(args.path)]) {
this.$refs._$refs[url.stripHash(args.path)].addError(err);
}
return maybe(args.callback, Promise.resolve(null));
}
}
public static parse<S extends object = JSONSchema>(schema: S | string | unknown): Promise<S>;
public static parse<S extends object = JSONSchema>(
schema: S | string | unknown,
callback: SchemaCallback<S>,
): Promise<void>;
public static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
schema: S | string | unknown,
options: O,
): Promise<S>;
public static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
schema: S | string | unknown,
options: O,
callback: SchemaCallback<S>,
): Promise<void>;
public static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
path: string,
schema: S | string | unknown,
options: O,
): Promise<S>;
public static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
path: string,
schema: S | string | unknown,
options: O,
callback: SchemaCallback<S>,
): Promise<void>;
public static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>():
| Promise<S>
| Promise<void> {
const parser = new $RefParser<S, O>();
return parser.parse.apply(parser, arguments as any);
}
/**
* *This method is used internally by other methods, such as `bundle` and `dereference`. You probably won't need to call this method yourself.*
*
* Resolves all JSON references (`$ref` pointers) in the given JSON Schema file. If it references any other files/URLs, then they will be downloaded and resolved as well. This method **does not** dereference anything. It simply gives you a `$Refs` object, which is a map of all the resolved references and their values.
*
* See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#resolveschema-options-callback
*
* @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
* @param options (optional)
* @param callback (optional) A callback that will receive a `$Refs` object
*/
public resolve(schema: S | string | unknown): Promise<$Refs<S, O>>;
public resolve(schema: S | string | unknown, callback: $RefsCallback<S, O>): Promise<void>;
public resolve(schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
public resolve(schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
public resolve(path: string, schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
public resolve(path: string, schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
async resolve() {
const args = normalizeArgs<S, O>(arguments);
try {
await this.parse(args.path, args.schema, args.options);
await resolveExternal(this, args.options);
finalize(this);
return maybe(args.callback, Promise.resolve(this.$refs));
} catch (err) {
return maybe(args.callback, Promise.reject(err));
}
}
/**
* *This method is used internally by other methods, such as `bundle` and `dereference`. You probably won't need to call this method yourself.*
*
* Resolves all JSON references (`$ref` pointers) in the given JSON Schema file. If it references any other files/URLs, then they will be downloaded and resolved as well. This method **does not** dereference anything. It simply gives you a `$Refs` object, which is a map of all the resolved references and their values.
*
* See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#resolveschema-options-callback
*
* @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
* @param options (optional)
* @param callback (optional) A callback that will receive a `$Refs` object
*/
public static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  schema: S | string | unknown,
): Promise<$Refs<S, O>>;
public static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  schema: S | string | unknown,
  callback: $RefsCallback<S, O>,
): Promise<void>;
public static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  schema: S | string | unknown,
  options: O,
): Promise<$Refs<S, O>>;
public static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  schema: S | string | unknown,
  options: O,
  callback: $RefsCallback<S, O>,
): Promise<void>;
public static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  path: string,
  schema: S | string | unknown,
  options: O,
): Promise<$Refs<S, O>>;
public static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  path: string,
  schema: S | string | unknown,
  options: O,
  callback: $RefsCallback<S, O>,
): Promise<void>;
/**
 * Implementation for all `resolve` overloads above. Delegates to a fresh
 * parser instance so the static form shares no state between calls.
 *
 * Fix: the implementation signature previously claimed `Promise<S>`, but every
 * non-callback overload resolves with a `$Refs` map, not the schema itself.
 */
static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>():
  | Promise<$Refs<S, O>>
  | Promise<void> {
  const instance = new $RefParser<S, O>();
  return instance.resolve.apply(instance, arguments as any);
}
/**
* Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
*
* This also eliminates the risk of circular references, so the schema can be safely serialized using `JSON.stringify()`.
*
* See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#bundleschema-options-callback
*
* @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
* @param options (optional)
* @param callback (optional) A callback that will receive the bundled schema object
*/
public static bundle<S extends object = JSONSchema>(schema: S | string | unknown): Promise<S>;
public static bundle<S extends object = JSONSchema>(
  schema: S | string | unknown,
  callback: SchemaCallback<S>,
): Promise<void>;
public static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  schema: S | string | unknown,
  options: O,
): Promise<S>;
public static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  schema: S | string | unknown,
  options: O,
  callback: SchemaCallback<S>,
): Promise<void>;
public static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  path: string,
  schema: S | string | unknown,
  options: O,
): Promise<S>;
public static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  path: string,
  schema: S | string | unknown,
  options: O,
  // Fix: this callback overload previously returned Promise<S>; every other
  // callback overload (here and on `dereference`) returns Promise<void>.
  callback: SchemaCallback<S>,
): Promise<void>;
/**
 * Implementation for all `bundle` overloads above. Delegates to a fresh
 * parser instance so the static form shares no state between calls.
 */
static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>():
  | Promise<S>
  | Promise<void> {
  const instance = new $RefParser<S, O>();
  return instance.bundle.apply(instance, arguments as any);
}
/**
* Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
*
* This also eliminates the risk of circular references, so the schema can be safely serialized using `JSON.stringify()`.
*
* See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#bundleschema-options-callback
*
* @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
* @param options (optional)
* @param callback (optional) A callback that will receive the bundled schema object
*/
public bundle(schema: S | string | unknown): Promise<S>;
public bundle(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
public bundle(schema: S | string | unknown, options: O): Promise<S>;
public bundle(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
public bundle(path: string, schema: S | string | unknown, options: O): Promise<S>;
public bundle(path: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
/**
 * Implementation backing the `bundle(...)` overloads above.
 * Resolves everything first, then rewrites external $refs into internal ones.
 */
async bundle() {
  const { path, schema, options, callback } = normalizeArgs<S, O>(arguments);
  try {
    await this.resolve(path, schema, options);
    _bundle<S, O>(this, options);
    finalize(this);
    return maybe(callback, Promise.resolve(this.schema!));
  } catch (err) {
    return maybe(callback, Promise.reject(err));
  }
}
/**
* Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
*
* The dereference method maintains object reference equality, meaning that all `$ref` pointers that point to the same object will be replaced with references to the same object. Again, this is great for programmatic usage, but it does introduce the risk of circular references, so be careful if you intend to serialize the schema using `JSON.stringify()`. Consider using the bundle method instead, which does not create circular references.
*
* See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#dereferenceschema-options-callback
*
* @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
* @param options (optional)
* @param callback (optional) A callback that will receive the dereferenced schema object
*/
public static dereference<S extends object = JSONSchema>(schema: S | string | unknown): Promise<S>;
public static dereference<S extends object = JSONSchema>(
  schema: S | string | unknown,
  callback: SchemaCallback<S>,
): Promise<void>;
public static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  schema: S | string | unknown,
  options: O,
): Promise<S>;
public static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  schema: S | string | unknown,
  options: O,
  callback: SchemaCallback<S>,
): Promise<void>;
public static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  path: string,
  schema: S | string | unknown,
  options: O,
): Promise<S>;
public static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  path: string,
  schema: S | string | unknown,
  options: O,
  callback: SchemaCallback<S>,
): Promise<void>;
/**
 * Implementation for all `dereference` overloads above. Delegates to a fresh
 * parser instance so the static form shares no state between calls.
 */
static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>():
  | Promise<S>
  | Promise<void> {
  const parser = new $RefParser<S, O>();
  return parser.dereference.apply(parser, arguments as any);
}
/**
* Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
*
* The dereference method maintains object reference equality, meaning that all `$ref` pointers that point to the same object will be replaced with references to the same object. Again, this is great for programmatic usage, but it does introduce the risk of circular references, so be careful if you intend to serialize the schema using `JSON.stringify()`. Consider using the bundle method instead, which does not create circular references.
*
* See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#dereferenceschema-options-callback
*
* @param path
* @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
* @param options (optional)
* @param callback (optional) A callback that will receive the dereferenced schema object
*/
public dereference(
  path: string,
  schema: S | string | unknown,
  options: O,
  callback: SchemaCallback<S>,
): Promise<void>;
public dereference(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
public dereference(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
public dereference(path: string, schema: S | string | unknown, options: O): Promise<S>;
public dereference(schema: S | string | unknown, options: O): Promise<S>;
public dereference(schema: S | string | unknown): Promise<S>;
/**
 * Implementation backing the `dereference(...)` overloads above.
 * Resolves everything first, then replaces every $ref with its resolved value.
 */
async dereference() {
  const { path, schema, options, callback } = normalizeArgs<S, O>(arguments);
  try {
    await this.resolve(path, schema, options);
    _dereference(this, options);
    finalize(this);
    return maybe<S>(callback, Promise.resolve(this.schema!) as Promise<S>);
  } catch (err) {
    return maybe<S>(callback, Promise.reject(err));
  }
}
}
export default $RefParser;
/**
 * Throws a JSONParserErrorGroup if the given parser accumulated any handled
 * parser errors; otherwise does nothing.
 */
function finalize<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  parser: $RefParser<S, O>,
) {
  if (JSONParserErrorGroup.getParserErrors(parser).length > 0) {
    throw new JSONParserErrorGroup(parser);
  }
}
// Convenience re-exports so consumers can call the static API without
// referencing the $RefParser class directly.
export const parse = $RefParser.parse;
export const resolve = $RefParser.resolve;
export const bundle = $RefParser.bundle;
export const dereference = $RefParser.dereference;
export {
UnmatchedResolverError,
JSONParserError,
JSONSchema,
InvalidPointerError,
MissingPointerError,
ResolverError,
ParserError,
UnmatchedParserError,
ParserOptions,
$RefsCallback,
isHandledError,
JSONParserErrorGroup,
SchemaCallback,
FileInfo,
Plugin,
ResolverOptions,
HTTPResolverOptions,
_dereference as dereferenceInternal,
normalizeArgs as jsonSchemaParserNormalizeArgs,
getJsonSchemaRefParserDefaultOptions,
$Refs,
isUnsafeUrl,
};
@@ -0,0 +1,68 @@
import type { Options, ParserOptions } from "./options.js";
import { getNewOptions } from "./options.js";
import type { JSONSchema, SchemaCallback } from "./types";
// I really dislike this function and the way it's written. It's not clear what it's doing, and it's way too flexible
// In the future, I'd like to deprecate the api and accept only named parameters in index.ts
/**
 * The result of untangling the overloaded `(path?, schema?, options?, callback?)`
 * argument list accepted by the public API methods.
 */
export interface NormalizedArguments<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
  // Path/URL of the schema, or "" when a schema object was passed directly.
  path: string;
  // NOTE(review): declared as S, but normalizeArgs produces `undefined` when a
  // path is given without an inline schema — treat as possibly undefined.
  schema: S;
  // Fully-populated options (defaults merged with user overrides).
  options: O & Options<S>;
  // NOTE(review): undefined at runtime when the promise API is used (no callback).
  callback: SchemaCallback<S>;
}
/**
 * Normalizes the overloaded `(path?, schema?, options?, callback?)` argument
 * list used by the public API methods into a single, predictable shape.
 */
export function normalizeArgs<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  _args: Partial<IArguments>,
): NormalizedArguments<S, O> {
  const args = Array.prototype.slice.call(_args) as any[];

  // A trailing function argument is always the user's callback.
  const callback = typeof args[args.length - 1] === "function" ? args.pop() : undefined;

  let path: string;
  let schema: any;
  let options: Options<S> & O;

  if (typeof args[0] === "string") {
    // Signature: (path, [schema], options)
    path = args[0];
    const hasInlineSchema = typeof args[2] === "object";
    schema = hasInlineSchema ? args[1] : undefined;
    options = hasInlineSchema ? args[2] : args[1];
  } else {
    // Signature: (schema, [options])
    path = "";
    schema = args[0];
    options = args[1];
  }

  try {
    options = getNewOptions<S, O>(options);
  } catch (e) {
    console.error(`JSON Schema Ref Parser: Error normalizing options: ${e}`);
  }

  if (!options.mutateInputSchema && typeof schema === "object") {
    // Deep-clone so the caller's original object is never mutated.
    schema = JSON.parse(JSON.stringify(schema));
  }

  return { path, schema, options, callback };
}
export default normalizeArgs;
@@ -0,0 +1,288 @@
import jsonParser from "./parsers/json.js";
import yamlParser from "./parsers/yaml.js";
import textParser from "./parsers/text.js";
import binaryParser from "./parsers/binary.js";
import fileResolver from "./resolvers/file.js";
import httpResolver from "./resolvers/http.js";
import type { HTTPResolverOptions, JSONSchema, JSONSchemaObject, Plugin, ResolverOptions } from "./types/index.js";
/**
 * Recursively makes every property of `T` (and of its nested objects) optional.
 * Used below to let callers supply sparse options that are merged over defaults.
 */
export type DeepPartial<T> = T extends object
  ? {
      [P in keyof T]?: DeepPartial<T[P]>;
    }
  : T;
/**
 * Options controlling the `bundle` operation (see `$RefParserOptions.bundle`).
 */
export interface BundleOptions {
  /**
   * A function, called for each path, which can return true to stop this path and all
   * subpaths from being processed further. This is useful in schemas where some
   * subpaths contain literal $ref keys that should not be changed.
   */
  excludedPathMatcher?(path: string): boolean;
  /**
   * Callback invoked during bundling.
   *
   * @argument {string} path - The path being processed (ie. the `$ref` string)
   * @argument {JSONSchemaObject} value - The JSON-Schema that the `$ref` resolved to
   * @argument {JSONSchemaObject} parent - The parent of the processed object
   * @argument {string} parentPropName - The prop name of the parent object whose value was processed
   */
  onBundle?(path: string, value: JSONSchemaObject, parent?: JSONSchemaObject, parentPropName?: string): void;
}
/**
 * Options controlling the `dereference` operation (see `$RefParserOptions.dereference`).
 */
export interface DereferenceOptions {
  /**
   * Determines whether circular `$ref` pointers are handled.
   *
   * If set to `false`, then a `ReferenceError` will be thrown if the schema contains any circular references.
   *
   * If set to `"ignore"`, then circular references will simply be ignored. No error will be thrown, but the `$Refs.circular` property will still be set to `true`.
   */
  circular?: boolean | "ignore";
  /**
   * A function, called for each path, which can return true to stop this path and all
   * subpaths from being dereferenced further. This is useful in schemas where some
   * subpaths contain literal $ref keys that should not be dereferenced.
   */
  excludedPathMatcher?(path: string): boolean;
  /**
   * Callback invoked during circular reference detection.
   *
   * @argument {string} path - The path that is circular (ie. the `$ref` string)
   */
  onCircular?(path: string): void;
  /**
   * Callback invoked during dereferencing.
   *
   * @argument {string} path - The path being dereferenced (ie. the `$ref` string)
   * @argument {JSONSchemaObject} value - The JSON-Schema that the `$ref` resolved to
   * @argument {JSONSchemaObject} parent - The parent of the dereferenced object
   * @argument {string} parentPropName - The prop name of the parent object whose value was dereferenced
   */
  onDereference?(path: string, value: JSONSchemaObject, parent?: JSONSchemaObject, parentPropName?: string): void;
  /**
   * An array of properties to preserve when dereferencing a `$ref` schema. Useful if you want to
   * enforce non-standard dereferencing behavior like present in the OpenAPI 3.1 specification where
   * `description` and `summary` properties are preserved when alongside a `$ref` pointer.
   *
   * If none supplied then no properties will be preserved and the object will be fully replaced
   * with the dereferenced `$ref`.
   */
  preservedProperties?: string[];
  /**
   * Whether a reference should resolve relative to its directory/path, or from the cwd
   *
   * Default: `relative`
   */
  externalReferenceResolution?: "relative" | "root";
}
/**
* Options that determine how JSON schemas are parsed, resolved, and dereferenced.
*
* @param [options] - Overridden options
* @class
*/
export interface $RefParserOptions<S extends object = JSONSchema> {
  /**
   * The `parse` options determine how different types of files will be parsed.
   *
   * JSON Schema `$Ref` Parser comes with built-in JSON, YAML, plain-text, and binary parsers, any of which you can configure or disable. You can also add your own custom parsers if you want.
   */
  parse: {
    json?: Plugin | boolean;
    yaml?: Plugin | boolean;
    binary?: Plugin | boolean;
    text?: Plugin | boolean;
    // Additional, custom parser plugins keyed by name.
    [key: string]: Plugin | boolean | undefined;
  };
  /**
   * The `resolve` options control how JSON Schema $Ref Parser will resolve file paths and URLs, and how those files will be read/downloaded.
   *
   * JSON Schema `$Ref` Parser comes with built-in support for HTTP and HTTPS, as well as support for local files (when running in Node.js). You can configure or disable either of these built-in resolvers. You can also add your own custom resolvers if you want.
   */
  resolve: {
    /**
     * Determines whether external $ref pointers will be resolved. If this option is disabled, then external `$ref` pointers will simply be ignored.
     */
    external?: boolean;
    file?: Partial<ResolverOptions<S>> | boolean;
    http?: HTTPResolverOptions<S> | boolean;
  } & {
    // Additional, custom resolver plugins keyed by name.
    [key: string]: Partial<ResolverOptions<S>> | HTTPResolverOptions<S> | boolean | undefined;
  };
  /**
   * By default, JSON Schema $Ref Parser throws the first error it encounters. Setting `continueOnError` to `true`
   * causes it to keep processing as much as possible and then throw a single error that contains all errors
   * that were encountered.
   */
  continueOnError: boolean;
  /**
   * The `bundle` options control how JSON Schema `$Ref` Parser will process `$ref` pointers within the JSON schema.
   */
  bundle: BundleOptions;
  /**
   * The `dereference` options control how JSON Schema `$Ref` Parser will dereference `$ref` pointers within the JSON schema.
   */
  dereference: DereferenceOptions;
  /**
   * Whether to clone the schema before dereferencing it.
   * This is useful when you want to dereference the same schema multiple times, but you don't want to modify the original schema.
   * Default: `true` due to mutating the input being the default behavior historically
   */
  mutateInputSchema?: boolean;
  /**
   * The maximum amount of time (in milliseconds) that JSON Schema $Ref Parser will spend dereferencing a single schema.
   * It will throw a timeout error if the operation takes longer than this.
   */
  // NOTE(review): no default is set in getJsonSchemaRefParserDefaultOptions, so
  // omitting this presumably disables the timeout — confirm at the call site.
  timeoutMs?: number;
}
/**
 * Builds a fresh set of default options. A new object is created on every call
 * so that merged user options never leak between parser instances.
 */
export const getJsonSchemaRefParserDefaultOptions = () => {
  const defaults = {
    /**
     * Determines how different types of files will be parsed.
     *
     * You can add additional parsers of your own, replace an existing one with
     * your own implementation, or disable any parser by setting it to false.
     */
    parse: {
      json: { ...jsonParser },
      yaml: { ...yamlParser },
      text: { ...textParser },
      binary: { ...binaryParser },
    },
    /**
     * Determines how JSON References will be resolved.
     *
     * You can add additional resolvers of your own, replace an existing one with
     * your own implementation, or disable any resolver by setting it to false.
     */
    resolve: {
      file: { ...fileResolver },
      http: { ...httpResolver },
      /**
       * Determines whether external $ref pointers will be resolved.
       * If this option is disabled, then none of above resolvers will be called.
       * Instead, external $ref pointers will simply be ignored.
       *
       * @type {boolean}
       */
      external: true,
    },
    /**
     * By default, JSON Schema $Ref Parser throws the first error it encounters. Setting `continueOnError` to `true`
     * causes it to keep processing as much as possible and then throw a single error that contains all errors
     * that were encountered.
     */
    continueOnError: false,
    /**
     * Determines the types of JSON references that are allowed.
     */
    bundle: {
      /**
       * A function, called for each path, which can return true to stop this path and all
       * subpaths from being processed further. This is useful in schemas where some
       * subpaths contain literal $ref keys that should not be changed.
       *
       * @type {function}
       */
      excludedPathMatcher: () => false,
    },
    /**
     * Determines the types of JSON references that are allowed.
     */
    dereference: {
      /**
       * Dereference circular (recursive) JSON references?
       * If false, then a {@link ReferenceError} will be thrown if a circular reference is found.
       * If "ignore", then circular references will not be dereferenced.
       *
       * @type {boolean|string}
       */
      circular: true,
      /**
       * A function, called for each path, which can return true to stop this path and all
       * subpaths from being dereferenced further. This is useful in schemas where some
       * subpaths contain literal $ref keys that should not be dereferenced.
       *
       * @type {function}
       */
      excludedPathMatcher: () => false,
      // Misnamed legacy key; kept so any existing readers keep working.
      referenceResolution: "relative",
      // Fix: DereferenceOptions declares `externalReferenceResolution` with a
      // documented default of "relative", but only the misnamed key above was
      // set, so the documented default was never actually applied.
      externalReferenceResolution: "relative",
    },
    mutateInputSchema: true,
  } as $RefParserOptions<JSONSchema>;
  return defaults;
};
export const getNewOptions = <S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
options: O | undefined,
): O & $RefParserOptions<S> => {
const newOptions = getJsonSchemaRefParserDefaultOptions();
if (options) {
merge(newOptions, options);
}
return newOptions as O & $RefParserOptions<S>;
};
// Fully-resolved options, with all defaults present.
export type Options<S extends object = JSONSchema> = $RefParserOptions<S>;
// User-facing options: every field optional; merged over the defaults.
export type ParserOptions<S extends object = JSONSchema> = DeepPartial<$RefParserOptions<S>>;
/**
 * Recursively copies the own enumerable properties of `source` onto `target`,
 * mutating and returning `target`.
 *
 * @param target - The object being populated
 * @param source - The options being merged in (ignored unless it is a plain object)
 * @returns the (mutated) target object
 */
function merge(target: any, source: any) {
  if (!isMergeable(source)) {
    return target;
  }
  // Keys that could pollute the prototype chain are never copied.
  const unsafeKeys = ["__proto__", "constructor", "prototype"];
  for (const key of Object.keys(source)) {
    if (unsafeKeys.includes(key)) {
      continue;
    }
    const incoming = source[key];
    if (isMergeable(incoming)) {
      // Nested plain object: merge into the existing value (or a fresh object).
      target[key] = merge(target[key] || {}, incoming);
    } else if (incoming !== undefined) {
      // Scalar, array, function, RegExp, or Date: overwrite wholesale.
      target[key] = incoming;
    }
  }
  return target;
}

/**
 * Determines whether the given value can be merged (i.e. is a plain object),
 * or whether it should simply overwrite the target value.
 */
function isMergeable(val: any) {
  return val && typeof val === "object" && !(val instanceof Date) && !(val instanceof RegExp) && !Array.isArray(val);
}
export default $RefParserOptions;
+169
View File
@@ -0,0 +1,169 @@
import * as url from "./util/url.js";
import * as plugins from "./util/plugins.js";
import {
ResolverError,
ParserError,
UnmatchedParserError,
UnmatchedResolverError,
isHandledError,
} from "./util/errors.js";
import type $Refs from "./refs.js";
import type { ParserOptions } from "./options.js";
import type { FileInfo, JSONSchema } from "./types/index.js";
/**
* Reads and parses the specified file path or URL.
*/
async function parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  path: string,
  $refs: $Refs<S, O>,
  options: O,
) {
  // Split off the URL fragment (JSON pointer), if present.
  let hash = "";
  const hashIndex = path.indexOf("#");
  if (hashIndex !== -1) {
    hash = path.substring(hashIndex);
    path = path.substring(0, hashIndex);
  }

  // Register a $Ref before reading, so that concurrent resolution of the same
  // file reuses this entry instead of reading & parsing the file twice.
  const $ref = $refs._add(path);

  // The "file object" handed to every resolver and parser plugin.
  const file = {
    url: path,
    hash,
    extension: url.getExtension(path),
  } as FileInfo;

  // Read the file, then parse its contents.
  try {
    const resolver = await readFile<S, O>(file, options, $refs);
    $ref.pathType = resolver.plugin.name;
    file.data = resolver.result;
    const parser = await parseFile<S, O>(file, options, $refs);
    $ref.value = parser.result;
    return parser.result;
  } catch (err) {
    // Remember handled errors on the $Ref before rethrowing.
    if (isHandledError(err)) {
      $ref.value = err;
    }
    throw err;
  }
}
/**
* Reads the given file, using the configured resolver plugins
*
* @param file - An object containing information about the referenced file
* @param file.url - The full URL of the referenced file
* @param file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.)
* @param options
* @param $refs
* @returns
* The promise resolves with the raw file contents and the resolver that was used.
*/
async function readFile<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  file: FileInfo,
  options: O,
  $refs: $Refs<S, O>,
): Promise<any> {
  // console.log('Reading %s', file.url);
  // Find the resolvers that can read this file
  let resolvers = plugins.all(options.resolve);
  resolvers = plugins.filter(resolvers, "canRead", file);
  // Run the resolvers, in order, until one of them succeeds
  plugins.sort(resolvers);
  try {
    const data = await plugins.run(resolvers, "read", file, $refs);
    return data;
  } catch (err: any) {
    // Classify the failure. NOTE(review): these branches assume a particular
    // rejection shape from plugins.run — confirm against util/plugins.
    if (!err && options.continueOnError) {
      // No resolver could be matched
      throw new UnmatchedResolverError(file.url);
    } else if (!err || !("error" in err)) {
      // Falsy rejection (with continueOnError off), or an object without an
      // `error` property. NOTE(review): if a primitive such as a string were
      // thrown, the `"error" in err` check itself would throw a TypeError.
      // Throw a generic, friendly error.
      throw new SyntaxError(`Unable to resolve $ref pointer "${file.url}"`);
    }
    // Throw the original error, if it's one of our own (user-friendly) errors.
    else if (err.error instanceof ResolverError) {
      throw err.error;
    } else {
      // Wrap anything else so callers always receive a ResolverError.
      throw new ResolverError(err, file.url);
    }
  }
}
/**
* Parses the given file's contents, using the configured parser plugins.
*
* @param file - An object containing information about the referenced file
* @param file.url - The full URL of the referenced file
* @param file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.)
* @param file.data - The file contents. This will be whatever data type was returned by the resolver
* @param options
* @param $refs
*
* @returns
* The promise resolves with the parsed file contents and the parser that was used.
*/
async function parseFile<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  file: FileInfo,
  options: O,
  $refs: $Refs<S, O>,
) {
  // Find the parsers that can read this file type.
  // If none of the parsers are an exact match for this file, then we'll try ALL of them.
  // This handles situations where the file IS a supported type, just with an unknown extension.
  const allParsers = plugins.all(options.parse);
  const filteredParsers = plugins.filter(allParsers, "canParse", file);
  const parsers = filteredParsers.length > 0 ? filteredParsers : allParsers;
  // Run the parsers, in order, until one of them succeeds
  plugins.sort(parsers);
  try {
    const parser = await plugins.run<S, O>(parsers, "parse", file, $refs);
    // Reject empty results unless the winning parser explicitly allows them.
    if (!parser.plugin.allowEmpty && isEmpty(parser.result)) {
      throw new SyntaxError(`Error parsing "${file.url}" as ${parser.plugin.name}. \nParsed value is empty`);
    } else {
      return parser;
    }
  } catch (err: any) {
    // Classify the failure. NOTE(review): these branches assume a particular
    // rejection shape from plugins.run — confirm against util/plugins.
    if (!err && options.continueOnError) {
      // No parser could be matched (comment fixed: previously said "resolver")
      throw new UnmatchedParserError(file.url);
    } else if (err && err.message && err.message.startsWith("Error parsing")) {
      // Re-throw the "Parsed value is empty" SyntaxError raised above as-is.
      throw err;
    } else if (!err || !("error" in err)) {
      throw new SyntaxError(`Unable to parse ${file.url}`);
    } else if (err.error instanceof ParserError) {
      throw err.error;
    } else {
      // Wrap anything else so callers always receive a ParserError.
      throw new ParserError(err.error.message, file.url);
    }
  }
}
/**
* Determines whether the parsed value is "empty".
*
* @param value
* @returns
*/
/**
 * Determines whether the parsed value is "empty": undefined or null, an object
 * or array with no keys, a whitespace-only string, or a zero-length Buffer.
 *
 * @param value - the parsed result to inspect
 * @returns true if the value should be considered empty
 */
function isEmpty(value: any) {
  return (
    value === undefined ||
    // Fix: null previously fell into the object branch, where Object.keys(null)
    // throws a TypeError. Treat null as empty instead of crashing.
    value === null ||
    (typeof value === "object" && Object.keys(value).length === 0) ||
    (typeof value === "string" && value.trim().length === 0) ||
    (Buffer.isBuffer(value) && value.length === 0)
  );
}
export default parse;
@@ -0,0 +1,39 @@
import type { FileInfo } from "../types/index.js";
import type { Plugin } from "../types/index.js";
const BINARY_REGEXP = /\.(jpeg|jpg|gif|png|bmp|ico)$/i;
export default {
  /**
   * The order that this parser runs in, relative to the other parsers.
   */
  order: 400,
  /**
   * Whether to allow "empty" files (zero bytes).
   */
  allowEmpty: true,
  /**
   * Determines whether this parser can parse a given file reference.
   * Parsers that return true will be tried, in order, until one successfully parses the file.
   * Parsers that return false will be skipped, UNLESS all parsers returned false, in which case
   * every parser will be tried.
   */
  canParse(file: FileInfo) {
    // Only Buffers whose URL has a recognized binary extension are handled here.
    return Buffer.isBuffer(file.data) && BINARY_REGEXP.test(file.url);
  },
  /**
   * Returns the file contents as a Buffer (byte array).
   */
  parse(file: FileInfo) {
    // Buffer.from rejects anything other than a string or typed array.
    return Buffer.isBuffer(file.data) ? file.data : Buffer.from(file.data);
  },
} as Plugin;
@@ -0,0 +1,64 @@
import { ParserError } from "../util/errors.js";
import type { FileInfo } from "../types/index.js";
import type { Plugin } from "../types/index.js";
const jsonParser = {
  /**
   * The order that this parser will run, in relation to other parsers.
   */
  order: 100,
  /**
   * Whether to allow "empty" files. This includes zero-byte files, as well as empty JSON objects.
   */
  allowEmpty: true,
  /**
   * Determines whether this parser can parse a given file reference.
   * Parsers that match will be tried, in order, until one successfully parses the file.
   * Parsers that don't match will be skipped, UNLESS none of the parsers match, in which case
   * every parser will be tried.
   */
  canParse: ".json",
  /**
   * Allow JSON files with byte order marks (BOM)
   */
  allowBOM: true,
  /**
   * Parses the given file as JSON.
   *
   * @param file - the file info whose `data` (string, Buffer, or already-parsed value) is parsed
   * @returns the parsed value, or `undefined` for whitespace-only input (mirrors the YAML parser)
   * @throws ParserError when the data cannot be parsed as JSON
   */
  async parse(file: FileInfo): Promise<object | undefined> {
    let data = file.data;
    if (Buffer.isBuffer(data)) {
      data = data.toString();
    }
    if (typeof data !== "string") {
      // data is already a JavaScript value (object, array, number, null, NaN, etc.)
      return data as object;
    }
    if (data.trim().length === 0) {
      return; // This mirrors the YAML behavior
    }
    try {
      return JSON.parse(data);
    } catch (e: any) {
      if (this.allowBOM) {
        try {
          const firstCurlyBrace = data.indexOf("{");
          if (firstCurlyBrace !== -1) {
            // Drop any BOM/junk before the first curly brace (legacy behavior).
            return JSON.parse(data.slice(firstCurlyBrace));
          }
          // Fix: no "{" at all (e.g. a BOM-prefixed JSON array). Previously this
          // sliced with index -1, keeping only the LAST character, which could
          // never parse. Strip a leading UTF-8 BOM and retry instead.
          return JSON.parse(data.replace(/^\uFEFF/, ""));
        } catch (e2: any) {
          throw new ParserError(e2.message, file.url);
        }
      }
      throw new ParserError(e.message, file.url);
    }
  },
} as Plugin;
export default jsonParser;
@@ -0,0 +1,46 @@
import { ParserError } from "../util/errors.js";
import type { FileInfo } from "../types/index.js";
import type { Plugin } from "../types/index.js";
const TEXT_REGEXP = /\.(txt|htm|html|md|xml|js|min|map|css|scss|less|svg)$/i;
export default {
/**
* The order that this parser will run, in relation to other parsers.
*/
order: 300,
/**
* Whether to allow "empty" files (zero bytes).
*/
allowEmpty: true,
/**
* The encoding that the text is expected to be in.
*/
encoding: "utf8" as BufferEncoding,
/**
* Determines whether this parser can parse a given file reference.
* Parsers that return true will be tried, in order, until one successfully parses the file.
* Parsers that return false will be skipped, UNLESS all parsers returned false, in which case
* every parser will be tried.
*/
canParse(file: FileInfo) {
// Use this parser if the file is a string or Buffer, and has a known text-based extension
return (typeof file.data === "string" || Buffer.isBuffer(file.data)) && TEXT_REGEXP.test(file.url);
},
/**
* Parses the given file as text
*/
parse(file: FileInfo) {
if (typeof file.data === "string") {
return file.data;
} else if (Buffer.isBuffer(file.data)) {
return file.data.toString(this.encoding);
} else {
throw new ParserError("data is not text", file.url);
}
},
} as Plugin;
@@ -0,0 +1,57 @@
import { ParserError } from "../util/errors.js";
import yaml from "js-yaml";
import { JSON_SCHEMA } from "js-yaml";
import type { FileInfo } from "../types/index.js";
import type { Plugin } from "../types/index.js";
export default {
  /**
   * The order that this parser runs in, relative to the other parsers.
   */
  order: 200,
  /**
   * Whether to allow "empty" files. This includes zero-byte files, as well as empty JSON objects.
   */
  allowEmpty: true,
  /**
   * Determines whether this parser can parse a given file reference.
   * Parsers that match will be tried, in order, until one successfully parses the file.
   * Parsers that don't match will be skipped, UNLESS none of the parsers match, in which case
   * every parser will be tried.
   */
  canParse: [".yaml", ".yml", ".json"], // JSON is valid YAML
  /**
   * Parses the given file as YAML.
   *
   * @param file - the file info whose `data` (string, Buffer, or already-parsed value) is parsed
   * @returns the parsed value
   * @throws ParserError when the data cannot be parsed under either YAML schema
   */
  async parse(file: FileInfo) {
    const raw = Buffer.isBuffer(file.data) ? file.data.toString() : file.data;
    if (typeof raw !== "string") {
      // data is already a JavaScript value (object, array, number, null, NaN, etc.)
      return raw;
    }
    try {
      // Prefer the JSON-compatible schema first.
      return yaml.load(raw, { schema: JSON_SCHEMA });
    } catch (e: any) {
      try {
        // fallback to non JSON_SCHEMA
        return yaml.load(raw);
      } catch (e2: any) {
        throw new ParserError(e2?.message || "Parser Error", file.url);
      }
    }
  },
} as Plugin;
@@ -0,0 +1,346 @@
import type { ParserOptions } from "./options.js";
import $Ref from "./ref.js";
import * as url from "./util/url.js";
import { JSONParserError, InvalidPointerError, MissingPointerError, isHandledError } from "./util/errors.js";
import type { JSONSchema } from "./types";
export const nullSymbol = Symbol("null");
const slashes = /\//g;
const tildes = /~/g;
const escapedSlash = /~1/g;
const escapedTilde = /~0/g;
// Decodes a URI component, returning the raw input unchanged when it contains
// malformed escape sequences (e.g. a lone "%"), which make decodeURIComponent throw.
const safeDecodeURIComponent = (value: string): string => {
  try {
    return decodeURIComponent(value);
  } catch {
    return value;
  }
};
/**
 * This class represents a single JSON pointer and its resolved value.
 *
 * @param $ref
 * @param path
 * @param [friendlyPath] - The original user-specified path (used for error messages)
 * @class
 */
class Pointer<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
  /**
   * The {@link $Ref} object that contains this {@link Pointer} object.
   */
  $ref: $Ref<S, O>;

  /**
   * The file path or URL, containing the JSON pointer in the hash.
   * This path is relative to the path of the main JSON schema file.
   */
  path: string;

  /**
   * The original path or URL, used for error messages.
   * `path` itself may be rewritten while following $refs during resolution.
   */
  originalPath: string;

  /**
   * The value of the JSON pointer.
   * Can be any JSON type, not just objects. Unknown file types are represented as Buffers (byte arrays).
   */
  value: any;

  /**
   * Indicates whether the pointer references itself.
   */
  circular: boolean;

  /**
   * The number of indirect references that were traversed to resolve the value.
   * Resolving a single pointer may require resolving multiple $Refs.
   */
  indirections: number;

  constructor($ref: $Ref<S, O>, path: string, friendlyPath?: string) {
    this.$ref = $ref;
    this.path = path;
    this.originalPath = friendlyPath || path;
    this.value = undefined;
    this.circular = false;
    this.indirections = 0;
  }

  /**
   * Resolves the value of a nested property within the given object.
   *
   * @param obj - The object that will be crawled
   * @param options
   * @param pathFromRoot - the path of place that initiated resolving
   *
   * @returns
   * Returns a JSON pointer whose {@link Pointer#value} is the resolved value.
   * If resolving this value required resolving other JSON references, then
   * the {@link Pointer#$ref} and {@link Pointer#path} will reflect the resolution path
   * of the resolved value.
   */
  resolve(obj: S, options?: O, pathFromRoot?: string) {
    const tokens = Pointer.parse(this.path, this.originalPath);

    // Tokens successfully traversed so far (used to build MissingPointerError details)
    const found: any = [];

    // Crawl the object, one token at a time
    this.value = unwrapOrThrow(obj);

    for (let i = 0; i < tokens.length; i++) {
      if (resolveIf$Ref(this, options, pathFromRoot)) {
        // The $ref path has changed, so append the remaining tokens to the path
        this.path = Pointer.join(this.path, tokens.slice(i));
      }

      const token = tokens[i];
      if (this.value[token] === undefined || (this.value[token] === null && i === tokens.length - 1)) {
        // one final case is if the entry itself includes slashes, and was parsed out as a token - we can join the remaining tokens and try again
        let didFindSubstringSlashMatch = false;
        for (let j = tokens.length - 1; j > i; j--) {
          const joinedToken = tokens.slice(i, j + 1).join("/");
          if (this.value[joinedToken] !== undefined) {
            this.value = this.value[joinedToken];
            i = j; // skip the tokens that were consumed by the joined key
            didFindSubstringSlashMatch = true;
            break;
          }
        }
        if (didFindSubstringSlashMatch) {
          continue;
        }

        // If the token we're looking for ended up not containing any slashes but is
        // actually instead pointing to an existing `null` value then we should use that
        // `null` value.
        if (token in this.value && this.value[token] === null) {
          // We use a `null` symbol for internal tracking to differentiate between a general `null`
          // value and our expected `null` value.
          this.value = nullSymbol;
          continue;
        }

        this.value = null;

        // Build a detailed error describing where resolution stopped
        const path = this.$ref.path || "";
        const targetRef = this.path.replace(path, "");
        const targetFound = Pointer.join("", found);
        const parentPath = pathFromRoot?.replace(path, "");

        throw new MissingPointerError(token, decodeURI(this.originalPath), targetRef, targetFound, parentPath);
      } else {
        this.value = this.value[token];
      }

      found.push(token);
    }

    // Resolve the final value
    if (!this.value || (this.value.$ref && url.resolve(this.path, this.value.$ref) !== pathFromRoot)) {
      resolveIf$Ref(this, options, pathFromRoot);
    }

    return this;
  }

  /**
   * Sets the value of a nested property within the given object.
   *
   * @param obj - The object that will be crawled
   * @param value - the value to assign
   * @param options
   *
   * @returns
   * Returns the modified object, or an entirely new object if the entire object is overwritten.
   */
  set(obj: S, value: any, options?: O) {
    const tokens = Pointer.parse(this.path);
    let token;

    if (tokens.length === 0) {
      // There are no tokens, replace the entire object with the new value
      this.value = value;
      return value;
    }

    // Crawl the object, one token at a time
    this.value = unwrapOrThrow(obj);

    // Walk every token except the last, creating intermediate objects as needed
    for (let i = 0; i < tokens.length - 1; i++) {
      resolveIf$Ref(this, options);

      token = tokens[i];
      if (this.value && this.value[token] !== undefined) {
        // The token exists
        this.value = this.value[token];
      } else {
        // The token doesn't exist, so create it
        this.value = setValue(this, token, {});
      }
    }

    // Set the value of the final token
    resolveIf$Ref(this, options);
    token = tokens[tokens.length - 1];
    setValue(this, token, value);

    // Return the updated object
    return obj;
  }

  /**
   * Parses a JSON pointer (or a path containing a JSON pointer in the hash)
   * and returns an array of the pointer's tokens.
   * (e.g. "schema.json#/definitions/person/name" => ["definitions", "person", "name"])
   *
   * The pointer is parsed according to RFC 6901
   * {@link https://tools.ietf.org/html/rfc6901#section-3}
   *
   * @param path
   * @param [originalPath]
   * @returns
   * @throws {InvalidPointerError} if the hash fragment is not a valid JSON Pointer
   */
  static parse(path: string, originalPath?: string): string[] {
    // Get the JSON pointer from the path's hash
    const pointer = url.getHash(path).substring(1);

    // If there's no pointer, then there are no tokens,
    // so return an empty array
    if (!pointer) {
      return [];
    }

    // Split into an array
    const split = pointer.split("/");

    // Decode each part, according to RFC 6901 ("~1" => "/", "~0" => "~")
    for (let i = 0; i < split.length; i++) {
      split[i] = safeDecodeURIComponent(split[i].replace(escapedSlash, "/").replace(escapedTilde, "~"));
    }

    // A valid JSON Pointer always begins with "/", so the first split entry must be empty
    if (split[0] !== "") {
      throw new InvalidPointerError(pointer, originalPath === undefined ? path : originalPath);
    }

    return split.slice(1);
  }

  /**
   * Creates a JSON pointer path, by joining one or more tokens to a base path.
   *
   * @param base - The base path (e.g. "schema.json#/definitions/person")
   * @param tokens - The token(s) to append (e.g. ["name", "first"])
   * @returns
   */
  static join(base: string, tokens: string | string[]) {
    // Ensure that the base path contains a hash
    if (base.indexOf("#") === -1) {
      base += "#";
    }

    // Append each token to the base path
    tokens = Array.isArray(tokens) ? tokens : [tokens];
    for (let i = 0; i < tokens.length; i++) {
      const token = tokens[i];
      // Encode the token, according to RFC 6901 ("~" => "~0", "/" => "~1")
      base += "/" + encodeURIComponent(token.replace(tildes, "~0").replace(slashes, "~1"));
    }

    return base;
  }
}
/**
 * If the given pointer's {@link Pointer#value} is a JSON reference,
 * then the reference is resolved and {@link Pointer#value} is replaced with the resolved value.
 * In addition, {@link Pointer#path} and {@link Pointer#$ref} are updated to reflect the
 * resolution path of the new value.
 *
 * @param pointer - The {@link Pointer} whose value may be a JSON reference
 * @param options - Parser options (determine whether external $refs are allowed)
 * @param [pathFromRoot] - the path of place that initiated resolving
 * @returns - Returns `true` if the resolution path changed, otherwise `false`
 */
function resolveIf$Ref(pointer: any, options: any, pathFromRoot?: any): boolean {
  // Is the value a JSON reference? (and allowed?)
  if ($Ref.isAllowed$Ref(pointer.value, options)) {
    const $refPath = url.resolve(pointer.path, pointer.value.$ref);

    if ($refPath === pointer.path && !isRootPath(pathFromRoot)) {
      // The value is a reference to itself, so there's nothing to do.
      pointer.circular = true;
    } else {
      const resolved = pointer.$ref.$refs._resolve($refPath, pointer.path, options);
      if (resolved === null) {
        return false;
      }

      pointer.indirections += resolved.indirections + 1;

      if ($Ref.isExtended$Ref(pointer.value)) {
        // This JSON reference "extends" the resolved value, rather than simply pointing to it.
        // So the resolved path does NOT change. Just the value does.
        pointer.value = $Ref.dereference(pointer.value, resolved.value);
        return false;
      } else {
        // Resolve the reference
        pointer.$ref = resolved.$ref;
        pointer.path = resolved.path;
        pointer.value = resolved.value;
      }

      return true;
    }
  }

  // Not a reference (or a self-reference); the resolution path did not change.
  // Previously fell through with `return undefined`, which violated the documented
  // boolean contract; callers only test truthiness, so `false` is behavior-compatible.
  return false;
}
export default Pointer;
/**
 * Sets the specified token value of the {@link Pointer#value}.
 *
 * The token is evaluated according to RFC 6901.
 * {@link https://tools.ietf.org/html/rfc6901#section-4}
 *
 * @param pointer - The JSON Pointer whose value will be modified
 * @param token - A JSON Pointer token that indicates how to modify `obj`
 * @param value - The value to assign
 * @returns - Returns the assigned value
 * @throws {JSONParserError} if the pointer's current value is not an object
 */
function setValue(pointer: any, token: any, value: any) {
  const target = pointer.value;

  if (!target || typeof target !== "object") {
    // Only objects and arrays can have members assigned
    throw new JSONParserError(
      `Error assigning $ref pointer "${pointer.path}". \nCannot set "${token}" of a non-object.`,
    );
  }

  if (Array.isArray(target) && token === "-") {
    // Per RFC 6901, "-" refers to the position after the last array element
    target.push(value);
  } else {
    target[token] = value;
  }

  return value;
}
// Returns the value unchanged, unless it is a handled parser error,
// in which case the error is rethrown so it propagates to the caller.
function unwrapOrThrow(value: any) {
  if (!isHandledError(value)) {
    return value;
  }
  throw value;
}
/**
 * Determines whether the given path points at the document root, i.e. it is a string
 * whose JSON Pointer portion contains no tokens (e.g. "#" or no hash at all).
 * Uses strict equality, consistent with the rest of the file.
 */
function isRootPath(pathFromRoot: any): boolean {
  return typeof pathFromRoot === "string" && Pointer.parse(pathFromRoot).length === 0;
}
+306
View File
@@ -0,0 +1,306 @@
import Pointer, { nullSymbol } from "./pointer.js";
import type { JSONParserError, MissingPointerError, ParserError, ResolverError } from "./util/errors.js";
import { InvalidPointerError, isHandledError, normalizeError } from "./util/errors.js";
import { safePointerToPath, stripHash, getHash } from "./util/url.js";
import type $Refs from "./refs.js";
import type { ParserOptions } from "./options.js";
import type { JSONSchema } from "./types";
export type $RefError = JSONParserError | ResolverError | ParserError | MissingPointerError;
/**
 * This class represents a single JSON reference and its resolved value.
 *
 * @class
 */
class $Ref<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
  /**
   * The file path or URL of the referenced file.
   * This path is relative to the path of the main JSON schema file.
   *
   * This path does NOT contain document fragments (JSON pointers). It always references an ENTIRE file.
   * Use methods such as {@link $Ref#get}, {@link $Ref#resolve}, and {@link $Ref#exists} to get
   * specific JSON pointers within the file.
   *
   * @type {string}
   */
  path: undefined | string;

  /**
   * The resolved value of the JSON reference.
   * Can be any JSON type, not just objects. Unknown file types are represented as Buffers (byte arrays).
   *
   * @type {?*}
   */
  value: any;

  /**
   * The {@link $Refs} object that contains this {@link $Ref} object.
   *
   * @type {$Refs}
   */
  $refs: $Refs<S, O>;

  /**
   * Indicates the type of {@link $Ref#path} (e.g. "file", "http", etc.)
   */
  pathType: string | unknown;

  /**
   * List of all errors. Undefined if no errors.
   */
  errors: Array<$RefError> = [];

  constructor($refs: $Refs<S, O>) {
    this.$refs = $refs;
  }

  /**
   * Pushes an error to errors array.
   *
   * @param err - The error to be pushed
   * @returns
   */
  addError(err: $RefError) {
    // Defensive: the field initializer sets `errors` to [], so this should never trigger
    if (this.errors === undefined) {
      this.errors = [];
    }

    // Footprints of already-recorded errors, used for de-duplication below
    const existingErrors = this.errors.map(({ footprint }: any) => footprint);

    // the path has been almost certainly set at this point,
    // but just in case something went wrong, normalizeError injects path if necessary
    // moreover, certain errors might point at the same spot, so filter them out to reduce noise
    if ("errors" in err && Array.isArray(err.errors)) {
      this.errors.push(
        ...err.errors.map(normalizeError).filter(({ footprint }: any) => !existingErrors.includes(footprint)),
      );
    } else if (!("footprint" in err) || !existingErrors.includes(err.footprint)) {
      this.errors.push(normalizeError(err));
    }
  }

  /**
   * Determines whether the given JSON reference exists within this {@link $Ref#value}.
   *
   * @param path - The full path being resolved, optionally with a JSON pointer in the hash
   * @param options
   * @returns - `true` if the path resolves without throwing, otherwise `false`
   */
  exists(path: string, options?: O) {
    try {
      this.resolve(path, options);
      return true;
    } catch {
      return false;
    }
  }

  /**
   * Resolves the given JSON reference within this {@link $Ref#value} and returns the resolved value.
   *
   * @param path - The full path being resolved, optionally with a JSON pointer in the hash
   * @param options
   * @returns - Returns the resolved value
   */
  get(path: string, options?: O) {
    return this.resolve(path, options)?.value;
  }

  /**
   * Resolves the given JSON reference within this {@link $Ref#value}.
   *
   * @param path - The full path being resolved, optionally with a JSON pointer in the hash
   * @param options
   * @param friendlyPath - The original user-specified path (used for error messages)
   * @param pathFromRoot - The path of `obj` from the schema root
   * @returns - The resolved {@link Pointer}, or `null` when the error was recorded
   * under `continueOnError` instead of thrown
   */
  resolve(path: string, options?: O, friendlyPath?: string, pathFromRoot?: string) {
    const pointer = new Pointer<S, O>(this, path, friendlyPath);
    try {
      const resolved = pointer.resolve(this.value, options, pathFromRoot);
      // Translate the internal null-tracking symbol back into a real `null`
      if (resolved.value === nullSymbol) {
        resolved.value = null;
      }
      return resolved;
    } catch (err: any) {
      if (!options || !options.continueOnError || !isHandledError(err)) {
        throw err;
      }
      if (err.path === null) {
        err.path = safePointerToPath(getHash(pathFromRoot));
      }
      if (err instanceof InvalidPointerError) {
        err.source = decodeURI(stripHash(pathFromRoot));
      }
      this.addError(err);
      return null;
    }
  }

  /**
   * Sets the value of a nested property within this {@link $Ref#value}.
   * If the property, or any of its parents don't exist, they will be created.
   *
   * @param path - The full path of the property to set, optionally with a JSON pointer in the hash
   * @param value - The value to assign
   */
  set(path: string, value: any) {
    const pointer = new Pointer(this, path);
    this.value = pointer.set(this.value, value);
    // Translate the internal null-tracking symbol back into a real `null`
    if (this.value === nullSymbol) {
      this.value = null;
    }
  }

  /**
   * Determines whether the given value is a JSON reference.
   * A JSON reference is an object with a non-empty string "$ref" property.
   *
   * @param value - The value to inspect
   * @returns
   */
  static is$Ref(value: unknown): value is { $ref: string; length?: number } {
    return (
      Boolean(value) &&
      typeof value === "object" &&
      value !== null &&
      "$ref" in value &&
      typeof value.$ref === "string" &&
      value.$ref.length > 0
    );
  }

  /**
   * Determines whether the given value is an external JSON reference
   * (i.e. its "$ref" does not begin with "#").
   *
   * @param value - The value to inspect
   * @returns
   */
  static isExternal$Ref(value: unknown): boolean {
    return $Ref.is$Ref(value) && value.$ref![0] !== "#";
  }

  /**
   * Determines whether the given value is a JSON reference, and whether it is allowed by the options.
   * For example, if it references an external file, then options.resolve.external must be true.
   *
   * @param value - The value to inspect
   * @param options
   * @returns - `true` if the reference is allowed; otherwise `undefined` (falsy)
   */
  static isAllowed$Ref<S extends object = JSONSchema>(value: unknown, options?: ParserOptions<S>) {
    if (this.is$Ref(value)) {
      if (value.$ref.substring(0, 2) === "#/" || value.$ref === "#") {
        // It's a JSON Pointer reference, which is always allowed
        return true;
      } else if (value.$ref[0] !== "#" && (!options || options.resolve?.external)) {
        // It's an external reference, which is allowed by the options
        return true;
      }
    }
    return undefined;
  }

  /**
   * Determines whether the given value is a JSON reference that "extends" its resolved value.
   * That is, it has extra properties (in addition to "$ref"), so rather than simply pointing to
   * an existing value, this $ref actually creates a NEW value that is a shallow copy of the resolved
   * value, plus the extra properties.
   *
   * @example: {
   *   person: {
   *     properties: {
   *       firstName: { type: string }
   *       lastName: { type: string }
   *     }
   *   }
   *   employee: {
   *     properties: {
   *       $ref: #/person/properties
   *       salary: { type: number }
   *     }
   *   }
   * }
   * In this example, "employee" is an extended $ref, since it extends "person" with an additional
   * property (salary). The result is a NEW value that looks like this:
   *
   * {
   *   properties: {
   *     firstName: { type: string }
   *     lastName: { type: string }
   *     salary: { type: number }
   *   }
   * }
   *
   * @param value - The value to inspect
   * @returns
   */
  static isExtended$Ref(value: unknown) {
    return $Ref.is$Ref(value) && Object.keys(value).length > 1;
  }

  /**
   * Returns the resolved value of a JSON Reference.
   * If necessary, the resolved value is merged with the JSON Reference to create a new object
   *
   * @example: {
   *   person: {
   *     properties: {
   *       firstName: { type: string }
   *       lastName: { type: string }
   *     }
   *   }
   *   employee: {
   *     properties: {
   *       $ref: #/person/properties
   *       salary: { type: number }
   *     }
   *   }
   * } When "person" and "employee" are merged, you end up with the following object:
   *
   * {
   *   properties: {
   *     firstName: { type: string }
   *     lastName: { type: string }
   *     salary: { type: number }
   *   }
   * }
   *
   * @param $ref - The JSON reference object (the one with the "$ref" property)
   * @param resolvedValue - The resolved value, which can be any type
   * @returns - Returns the dereferenced value
   */
  static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
    $ref: $Ref<S, O>,
    resolvedValue: S,
  ): S {
    if (resolvedValue && typeof resolvedValue === "object" && $Ref.isExtended$Ref($ref)) {
      const merged = {};
      // Extra properties on the $ref object take precedence...
      for (const key of Object.keys($ref)) {
        if (key !== "$ref") {
          // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
          merged[key] = $ref[key];
        }
      }
      // ...then the resolved value's properties fill in the rest
      for (const key of Object.keys(resolvedValue)) {
        if (!(key in merged)) {
          // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
          merged[key] = resolvedValue[key];
        }
      }
      return merged as S;
    } else {
      // Completely replace the original reference with the resolved value
      return resolvedValue;
    }
  }
}
export default $Ref;
+238
View File
@@ -0,0 +1,238 @@
import $Ref from "./ref.js";
import * as url from "./util/url.js";
import type { JSONSchema4Type, JSONSchema6Type, JSONSchema7Type } from "json-schema";
import type { ParserOptions } from "./options.js";
import convertPathToPosix from "./util/convert-path-to-posix";
import type { JSONSchema } from "./types";
/**
 * Map from a file's hash-less path/URL to the {@link $Ref} that holds its resolved value.
 */
interface $RefsMap<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
  [url: string]: $Ref<S, O>;
}
/**
 * When you call the resolve method, the value that gets passed to the callback function (or Promise) is a $Refs object. This same object is accessible via the parser.$refs property of $RefParser objects.
 *
 * This object is a map of JSON References and their resolved values. It also has several convenient helper methods that make it easy for you to navigate and manipulate the JSON References.
 *
 * See https://apidevtools.com/json-schema-ref-parser/docs/refs.html
 */
export default class $Refs<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
  /**
   * This property is true if the schema contains any circular references. You may want to check this property before serializing the dereferenced schema as JSON, since JSON.stringify() does not support circular references by default.
   *
   * See https://apidevtools.com/json-schema-ref-parser/docs/refs.html#circular
   */
  public circular: boolean;

  /**
   * Returns the paths/URLs of all the files in your schema (including the main schema file).
   *
   * See https://apidevtools.com/json-schema-ref-parser/docs/refs.html#pathstypes
   *
   * @param types (optional) Optionally only return certain types of paths ("file", "http", etc.)
   */
  paths(...types: (string | string[])[]): string[] {
    const paths = getPaths(this._$refs, types.flat());
    return paths.map((path) => {
      return convertPathToPosix(path.decoded);
    });
  }

  /**
   * Returns a map of paths/URLs and their correspond values.
   *
   * See https://apidevtools.com/json-schema-ref-parser/docs/refs.html#valuestypes
   *
   * @param types (optional) Optionally only return values from certain locations ("file", "http", etc.)
   */
  values(...types: (string | string[])[]): S {
    const $refs = this._$refs;
    const paths = getPaths($refs, types.flat());
    return paths.reduce<Record<string, any>>((obj, path) => {
      obj[convertPathToPosix(path.decoded)] = $refs[path.encoded].value;
      return obj;
    }, {}) as S;
  }

  /**
   * Returns `true` if the given path exists in the schema; otherwise, returns `false`.
   *
   * See https://apidevtools.com/json-schema-ref-parser/docs/refs.html#existsref
   *
   * @param path - The JSON Reference path, optionally with a JSON Pointer in the hash
   * @param [options]
   * @returns
   */
  exists(path: string, options: any) {
    try {
      this._resolve(path, "", options);
      return true;
    } catch {
      return false;
    }
  }

  /**
   * Resolves the given JSON reference and returns the resolved value.
   *
   * @param path - The path being resolved, with a JSON pointer in the hash
   * @param [options]
   * @returns - Returns the resolved value
   */
  get(path: string, options?: O): JSONSchema4Type | JSONSchema6Type | JSONSchema7Type {
    return this._resolve(path, "", options)!.value;
  }

  /**
   * Sets the value at the given path in the schema. If the property, or any of its parents, don't exist, they will be created.
   *
   * @param path The JSON Reference path, optionally with a JSON Pointer in the hash
   * @param value The value to assign. Can be anything (object, string, number, etc.)
   */
  set(path: string, value: JSONSchema4Type | JSONSchema6Type | JSONSchema7Type) {
    const absPath = url.resolve(this._root$Ref.path!, path);
    const withoutHash = url.stripHash(absPath);
    const $ref = this._$refs[withoutHash];

    if (!$ref) {
      throw new Error(`Error resolving $ref pointer "${path}". \n"${withoutHash}" not found.`);
    }

    $ref.set(absPath, value);
  }

  /**
   * Returns the specified {@link $Ref} object, or undefined.
   *
   * @param path - The path being resolved, optionally with a JSON pointer in the hash
   * @returns
   * @protected
   */
  _get$Ref(path: string) {
    path = url.resolve(this._root$Ref.path!, path);
    const withoutHash = url.stripHash(path);
    return this._$refs[withoutHash];
  }

  /**
   * Creates a new {@link $Ref} object and adds it to this {@link $Refs} object.
   *
   * @param path - The file path or URL of the referenced file
   */
  _add(path: string) {
    const withoutHash = url.stripHash(path);

    const $ref = new $Ref<S, O>(this);
    $ref.path = withoutHash;

    this._$refs[withoutHash] = $ref;
    // The first $ref that is added becomes the root of the schema
    this._root$Ref = this._root$Ref || $ref;

    return $ref;
  }

  /**
   * Resolves the given JSON reference.
   *
   * @param path - The path being resolved, optionally with a JSON pointer in the hash
   * @param pathFromRoot - The path of `obj` from the schema root
   * @param [options]
   * @returns
   * @protected
   */
  _resolve(path: string, pathFromRoot: string, options?: O) {
    const absPath = url.resolve(this._root$Ref.path!, path);
    const withoutHash = url.stripHash(absPath);
    const $ref = this._$refs[withoutHash];

    if (!$ref) {
      throw new Error(`Error resolving $ref pointer "${path}". \n"${withoutHash}" not found.`);
    }

    return $ref.resolve(absPath, options, path, pathFromRoot);
  }

  /**
   * A map of paths/urls to {@link $Ref} objects
   *
   * @type {object}
   * @protected
   */
  _$refs: $RefsMap<S, O> = {};

  /**
   * The {@link $Ref} object that is the root of the JSON schema.
   *
   * @type {$Ref}
   * @protected
   */
  _root$Ref: $Ref<S, O>;

  constructor() {
    /**
     * Indicates whether the schema contains any circular references.
     *
     * @type {boolean}
     */
    this.circular = false;
    this._$refs = {};
    // @ts-ignore
    this._root$Ref = null;
  }

  /**
   * Returns a POJO (plain old JavaScript object) for serialization as JSON.
   *
   * @returns {object}
   */
  toJSON = this.values;
}
/**
 * Returns the encoded and decoded paths keys of the given object.
 *
 * @param $refs - The object whose keys are URL-encoded paths
 * @param [types] - Only return paths of the given types ("file", "http", etc.)
 * @returns
 */
function getPaths<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  $refs: $RefsMap<S, O>,
  types: string[],
) {
  let paths = Object.keys($refs);

  // Filter the paths by type
  types = Array.isArray(types[0]) ? types[0] : Array.prototype.slice.call(types);
  if (types.length > 0 && types[0]) {
    paths = paths.filter((key) => types.includes($refs[key].pathType as string));
  }

  // Decode local filesystem paths
  return paths.map((path) => ({
    encoded: path,
    decoded: $refs[path].pathType === "file" ? url.toFileSystemPath(path, true) : path,
  }));
}
@@ -0,0 +1,136 @@
import $Ref from "./ref.js";
import Pointer from "./pointer.js";
import parse from "./parse.js";
import * as url from "./util/url.js";
import { isHandledError } from "./util/errors.js";
import type $Refs from "./refs.js";
import type { ParserOptions } from "./options.js";
import type { JSONSchema } from "./types/index.js";
import type $RefParser from "./index.js";
/**
 * Crawls the JSON schema, finds all external JSON references, and resolves their values.
 * This method does not mutate the JSON schema. The resolved values are added to {@link $RefParser#$refs}.
 *
 * NOTE: We only care about EXTERNAL references here. INTERNAL references are only relevant when dereferencing.
 *
 * @returns
 * The promise resolves once all JSON references in the schema have been resolved,
 * including nested references that are contained in externally-referenced files.
 */
function resolveExternal<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  parser: $RefParser<S, O>,
  options: O,
) {
  if (!options.resolve?.external) {
    // External resolution is disabled, so exit early
    return Promise.resolve();
  }

  try {
    // Crawl the root schema; `crawl` returns one promise per external $ref found
    const rootPath = parser.$refs._root$Ref.path + "#";
    return Promise.all(crawl(parser.schema, rootPath, parser.$refs, options));
  } catch (e) {
    // Surface synchronous crawl failures as a rejected promise
    return Promise.reject(e);
  }
}
/**
 * Recursively crawls the given value, and resolves any external JSON references.
 *
 * @param obj - The value to crawl. If it's not an object or array, it will be ignored.
 * @param path - The full path of `obj`, possibly with a JSON Pointer in the hash
 * @param $refs
 * @param options
 * @param seen - Internal. Tracks already-visited objects to avoid infinite recursion.
 * @param external - Whether `obj` was found in an external document.
 *
 * @returns
 * Returns an array of promises. There will be one promise for each JSON reference in `obj`.
 * If `obj` does not contain any JSON references, then the array will be empty.
 * If any of the JSON references point to files that contain additional JSON references,
 * then the corresponding promise will internally reference an array of promises.
 */
function crawl<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  obj: string | Buffer | S | undefined | null,
  path: string,
  $refs: $Refs<S, O>,
  options: O,
  seen?: Set<any>,
  external?: boolean,
) {
  seen ||= new Set();
  const promises: any[] = [];

  // Only crawl plain objects/arrays; skip primitives, typed arrays/Buffers, and repeats
  if (!obj || typeof obj !== "object" || ArrayBuffer.isView(obj) || seen.has(obj)) {
    return promises;
  }

  seen.add(obj); // Track previously seen objects to avoid infinite recursion

  if ($Ref.isExternal$Ref(obj)) {
    promises.push(resolve$Ref<S, O>(obj, path, $refs, options));
  }

  // Recurse into every property, collecting the promises from nested $refs
  for (const key of Object.keys(obj)) {
    const keyPath = Pointer.join(path, key);
    const value = obj[key as keyof typeof obj] as string | JSONSchema | Buffer | undefined;
    promises.push(...crawl(value, keyPath, $refs, options, seen, external));
  }

  return promises;
}
/**
 * Resolves the given JSON Reference, and then crawls the resulting value.
 *
 * @param $ref - The JSON Reference to resolve
 * @param path - The full path of `$ref`, possibly with a JSON Pointer in the hash
 * @param $refs
 * @param options
 *
 * @returns
 * The promise resolves once all JSON references in the object have been resolved,
 * including nested references that are contained in externally-referenced files.
 */
async function resolve$Ref<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  $ref: S,
  path: string,
  $refs: $Refs<S, O>,
  options: O,
) {
  // Optionally resolve external $refs against the process cwd instead of the referring file
  const shouldResolveOnCwd = options.dereference?.externalReferenceResolution === "root";
  const resolvedPath = url.resolve(shouldResolveOnCwd ? url.cwd() : path, ($ref as JSONSchema).$ref!);
  const withoutHash = url.stripHash(resolvedPath);

  // Do we already have this $ref?
  const ref = $refs._$refs[withoutHash];
  if (ref) {
    // We've already parsed this $ref, so use the existing value
    return Promise.resolve(ref.value);
  }

  // Parse the $referenced file/url
  try {
    const result = await parse(resolvedPath, $refs, options);

    // Crawl the parsed value for further external references
    const promises = crawl(result, withoutHash + "#", $refs, options, new Set(), true);

    return Promise.all(promises);
  } catch (err: any) {
    // `err: any` for consistency with the other catch blocks in this file, and because
    // properties (`source`, `path`) are assigned on handled errors below — a bare
    // `catch (err)` leaves `err` as `unknown` under strict mode, which rejects those writes.
    if (!options?.continueOnError || !isHandledError(err)) {
      throw err;
    }

    if ($refs._$refs[withoutHash]) {
      err.source = decodeURI(url.stripHash(path));
      err.path = url.safePointerToPath(url.getHash(path));
    }

    return [];
  }
}
export default resolveExternal;
@@ -0,0 +1,46 @@
import fs from "fs";
import * as url from "../util/url.js";
import { ResolverError } from "../util/errors.js";
import type { JSONSchema, ResolverOptions } from "../types/index.js";
import type { FileInfo } from "../types/index.js";
export default {
  /**
   * The order that this resolver will run, in relation to other resolvers.
   */
  order: 100,

  /**
   * Determines whether this resolver can read a given file reference.
   * Resolvers that return true will be tried, in order, until one successfully resolves the file.
   * Resolvers that return false will not be given a chance to resolve the file.
   */
  canRead(file: FileInfo) {
    return url.isFileSystemPath(file.url);
  },

  /**
   * Reads the given file and returns its raw contents as a Buffer.
   *
   * @throws {ResolverError} if the URL is malformed or the file cannot be opened
   */
  async read(file: FileInfo): Promise<Buffer> {
    let path: string | undefined;
    try {
      path = url.toFileSystemPath(file.url);
    } catch (err: any) {
      const cause = err as Error;
      cause.message = `Malformed URI: ${file.url}: ${cause.message}`;
      throw new ResolverError(cause, file.url);
    }

    // strip trailing slashes
    const normalized = path.endsWith("/") || path.endsWith("\\") ? path.slice(0, -1) : path;

    try {
      return await fs.promises.readFile(normalized);
    } catch (err: any) {
      const cause = err as Error;
      cause.message = `Error opening file ${normalized}: ${cause.message}`;
      throw new ResolverError(cause, normalized);
    }
  },
} as ResolverOptions<JSONSchema>;
@@ -0,0 +1,140 @@
import * as url from "../util/url.js";
import { ResolverError } from "../util/errors.js";
import type { FileInfo, HTTPResolverOptions, JSONSchema } from "../types/index.js";
export default {
  /**
   * The order that this resolver will run, in relation to other resolvers.
   */
  order: 200,

  /**
   * HTTP headers to send when downloading files.
   *
   * @example:
   * {
   *   "User-Agent": "JSON Schema $Ref Parser",
   *   Accept: "application/json"
   * }
   */
  headers: null,

  /**
   * HTTP request timeout (in milliseconds).
   */
  timeout: 60_000, // 60 seconds

  /**
   * The maximum number of HTTP redirects to follow.
   * To disable automatic following of redirects, set this to zero.
   */
  redirects: 5,

  /**
   * The `withCredentials` option of XMLHttpRequest.
   * Set this to `true` if you're downloading files from a CORS-enabled server that requires authentication
   */
  withCredentials: false,

  /**
   * Set this to `false` if you want to allow unsafe URLs (e.g., `127.0.0.1`, localhost, and other internal URLs).
   */
  safeUrlResolver: true,

  /**
   * Determines whether this resolver can read a given file reference.
   * Resolvers that return true will be tried in order, until one successfully resolves the file.
   * Resolvers that return false will not be given a chance to resolve the file.
   */
  canRead(file: FileInfo) {
    if (!url.isHttp(file.url)) {
      return false;
    }
    // Reject internal/loopback URLs unless the safety check is disabled
    return !this.safeUrlResolver || !url.isUnsafeUrl(file.url);
  },

  /**
   * Reads the given URL and returns its raw contents as a Buffer.
   */
  read(file: FileInfo) {
    const target = url.parse(file.url);

    if (typeof window !== "undefined" && !target.protocol) {
      // Running in a browser with a protocol-relative URL: use the protocol of the current page
      target.protocol = url.parse(location.href).protocol;
    }

    return download(target, this);
  },
} as HTTPResolverOptions<JSONSchema>;
/**
 * Downloads the given file.
 *
 * @param u - The URL to download (string or URL object)
 * @param httpOptions - HTTP resolver options (headers, timeout, redirect limit, ...)
 * @param _redirects - Internal: the chain of URLs already visited while following redirects
 * @returns
 * The promise resolves with the raw downloaded data, or rejects with a ResolverError
 * if there is an HTTP error, too many redirects, or a network failure.
 */
async function download<S extends object = JSONSchema>(
  u: URL | string,
  httpOptions: HTTPResolverOptions<S>,
  _redirects?: string[],
): Promise<Buffer> {
  u = url.parse(u);
  const redirects = _redirects || [];
  redirects.push(u.href);
  try {
    const res = await get(u, httpOptions);
    if (res.status >= 400) {
      const error = new Error(`HTTP ERROR ${res.status}`) as Error & { status?: number };
      error.status = res.status;
      throw error;
    } else if (res.status >= 300) {
      // FIX: `res.headers` is a WHATWG `Headers` object, so the Location header
      // must be read via `.get()` — `"location" in res.headers` /
      // `res.headers.location` never found the header.
      const location = res.headers.get("location");
      if (!Number.isNaN(httpOptions.redirects) && redirects.length > httpOptions.redirects!) {
        // FIX: throw a plain Error and let the catch block below wrap it exactly
        // once — previously a ResolverError was thrown here and then wrapped in
        // a second ResolverError by the catch block.
        const error = new Error(
          `Error downloading ${redirects[0]}. \nToo many redirects: \n ${redirects.join(" \n ")}`,
        ) as Error & { status?: number };
        error.status = res.status;
        throw error;
      } else if (!location) {
        const error = new Error(`HTTP ${res.status} redirect with no location header`) as Error & { status?: number };
        error.status = res.status;
        throw error;
      } else {
        // Relative Location values are resolved against the current URL
        const redirectTo = url.resolve(u.href, location);
        return download(redirectTo, httpOptions, redirects);
      }
    } else {
      // Success: return the body as a Buffer (empty Buffer for empty bodies)
      if (res.body) {
        const buf = await res.arrayBuffer();
        return Buffer.from(buf);
      }
      return Buffer.alloc(0);
    }
  } catch (err: any) {
    const e = err as Error;
    e.message = `Error downloading ${u.href}: ${e.message}`;
    throw new ResolverError(e, u.href);
  }
}
/**
 * Sends an HTTP GET request, honoring the configured headers, credentials
 * mode, and timeout (implemented via AbortController).
 * The promise resolves with the HTTP Response object.
 */
async function get<S extends object = JSONSchema>(u: RequestInfo | URL, httpOptions: HTTPResolverOptions<S>) {
  let controller: any;
  let timeoutId: any;
  if (httpOptions.timeout) {
    controller = new AbortController();
    timeoutId = setTimeout(() => controller.abort(), httpOptions.timeout);
  }
  try {
    return await fetch(u, {
      method: "GET",
      headers: httpOptions.headers || {},
      credentials: httpOptions.withCredentials ? "include" : "same-origin",
      signal: controller ? controller.signal : null,
    });
  } finally {
    // FIX: clear the timer even when fetch() rejects (network error / abort).
    // Previously a rejected fetch skipped clearTimeout, leaving a live timer
    // that kept the event loop alive and fired a stray abort() later.
    if (timeoutId) {
      clearTimeout(timeoutId);
    }
  }
}
@@ -0,0 +1,156 @@
import type {
JSONSchema4,
JSONSchema4Object,
JSONSchema6,
JSONSchema6Object,
JSONSchema7,
JSONSchema7Object,
} from "json-schema";
import type $Refs from "../refs.js";
import type { ParserOptions } from "../options";
/** Any supported draft of a JSON Schema document (draft 4, 6, or 7). */
export type JSONSchema = JSONSchema4 | JSONSchema6 | JSONSchema7;
/** A JSON Schema that is an object (excludes boolean schemas). */
export type JSONSchemaObject = JSONSchema4Object | JSONSchema6Object | JSONSchema7Object;
/** Node-style error-first callback that receives the resulting schema. */
export type SchemaCallback<S extends object = JSONSchema> = (err: Error | null, schema?: S | object | null) => any;
/** Node-style error-first callback that receives the resolved `$Refs` map. */
export type $RefsCallback<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> = (
  err: Error | null,
  $refs?: $Refs<S, O>,
) => any;
/**
 * Options for the built-in HTTP(S) resolver.
 * See https://apidevtools.com/json-schema-ref-parser/docs/options.html
 */
export interface HTTPResolverOptions<S extends object = JSONSchema> extends Partial<ResolverOptions<S>> {
  /**
   * You can specify any HTTP headers that should be sent when downloading files. For example, some servers may require you to set the `Accept` or `Referrer` header.
   */
  headers?: RequestInit["headers"] | null;
  /**
   * The amount of time (in milliseconds) to wait for a response from the server when downloading files.
   * The default is 60 seconds (the built-in HTTP resolver sets `timeout: 60_000`).
   */
  timeout?: number;
  /**
   * The maximum number of HTTP redirects to follow per file. The default is 5. To disable automatic following of redirects, set this to zero.
   */
  redirects?: number;
  /**
   * Set this to `true` if you're downloading files from a CORS-enabled server that requires authentication
   */
  withCredentials?: boolean;
  /**
   * Set this to `false` if you want to allow unsafe URLs (e.g., `127.0.0.1`, localhost, and other internal URLs).
   */
  safeUrlResolver?: boolean;
}
/**
 * JSON Schema `$Ref` Parser comes with built-in resolvers for HTTP and HTTPS URLs, as well as local filesystem paths (when running in Node.js). You can add your own custom resolvers to support additional protocols, or even replace any of the built-in resolvers with your own custom implementation.
 *
 * See https://apidevtools.com/json-schema-ref-parser/docs/plugins/resolvers.html
 */
export interface ResolverOptions<S extends object = JSONSchema> {
  /** The resolver's name; assigned automatically from its key in the options map. */
  name?: string;
  /**
   * All resolvers have an order property, even the built-in resolvers. If you don't specify an order property, then your resolver will run last. Specifying `order: 1`, like we did in this example, will make your resolver run first. Or you can squeeze your resolver in-between some of the built-in resolvers. For example, `order: 101` would make it run after the file resolver, but before the HTTP resolver. You can see the order of all the built-in resolvers by looking at their source code.
   *
   * The order property and canRead property are related to each other. For each file that JSON Schema $Ref Parser needs to resolve, it first determines which resolvers can read that file by checking their canRead property. If only one resolver matches a file, then only that one resolver is called, regardless of its order. If multiple resolvers match a file, then those resolvers are tried in order until one of them successfully reads the file. Once a resolver successfully reads the file, the rest of the resolvers are skipped.
   */
  order?: number;
  /**
   * The `canRead` property tells JSON Schema `$Ref` Parser what kind of files your resolver can read. In this example, we've simply specified a regular expression that matches "mongodb://" URLs, but we could have used a simple boolean, or even a function with custom logic to determine which files to resolve. Here are examples of each approach:
   */
  canRead: boolean | RegExp | string | string[] | ((file: FileInfo) => boolean);
  /**
   * This is where the real work of a resolver happens. The `read` method accepts the same file info object as the `canRead` function, but rather than returning a boolean value, the `read` method should return the contents of the file. The file contents should be returned in as raw a form as possible, such as a string or a byte array. Any further parsing or processing should be done by parsers.
   *
   * Unlike the `canRead` function, the `read` method can also be asynchronous. This might be important if your resolver needs to read data from a database or some other external source. You can return your asynchronous value using either an ES6 Promise or a Node.js-style error-first callback. Of course, if your resolver has the ability to return its data synchronously, then that's fine too. Here are examples of all three approaches:
   */
  read:
    | string
    | object
    | ((
        file: FileInfo,
        callback?: (error: Error | null, data: string | null) => any,
      ) => string | Buffer | S | Promise<string | Buffer | S>);
}
/** Options for a parser plugin (JSON, YAML, text, binary, or a custom parser). */
export interface Plugin {
  /** The parser's name; assigned automatically from its key in the options map. */
  name?: string;
  /**
   * Parsers run in a specific order, relative to other parsers. For example, a parser with `order: 5` will run before a parser with `order: 10`. If a parser is unable to successfully parse a file, then the next parser is tried, until one succeeds or they all fail.
   *
   * You can change the order in which parsers run, which is useful if you know that most of your referenced files will be a certain type, or if you add your own custom parser that you want to run first.
   */
  order?: number;
  /**
   * All of the built-in parsers allow empty files by default. The JSON and YAML parsers will parse empty files as `undefined`. The text parser will parse empty files as an empty string. The binary parser will parse empty files as an empty byte array.
   *
   * You can set `allowEmpty: false` on any parser, which will cause an error to be thrown if a file is empty.
   */
  allowEmpty?: boolean;
  /**
   * Specifies whether a Byte Order Mark (BOM) is allowed or not. Only applies to JSON parsing.
   *
   * @default true
   */
  allowBOM?: boolean;
  /**
   * The encoding that the text is expected to be in.
   */
  encoding?: BufferEncoding;
  /**
   * Determines which parsers will be used for which files.
   *
   * A regular expression can be used to match files by their full path. A string (or array of strings) can be used to match files by their file extension. Or a function can be used to perform more complex matching logic. See the custom parser docs for details.
   */
  canParse?: boolean | RegExp | string | string[] | ((file: FileInfo) => boolean);
  /**
   * This is where the real work of a parser happens. The `parse` method accepts the same file info object as the `canParse` function, but rather than returning a boolean value, the `parse` method should return a JavaScript representation of the file contents. For our CSV parser, that is a two-dimensional array of lines and values. For your parser, it might be an object, a string, a custom class, or anything else.
   *
   * Unlike the `canParse` function, the `parse` method can also be asynchronous. This might be important if your parser needs to retrieve data from a database or if it relies on an external HTTP service to return the parsed value. You can return your asynchronous value via a [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) or a Node.js-style error-first callback. Here are examples of both approaches:
   */
  parse:
    | ((file: FileInfo, callback?: (error: Error | null, data: string | null) => any) => unknown | Promise<unknown>)
    | number
    | string;
}
/**
 * JSON Schema `$Ref` Parser supports plug-ins, such as resolvers and parsers. These plug-ins can have methods such as `canRead()`, `read()`, `canParse()`, and `parse()`. All of these methods accept the same object as their parameter: an object containing information about the file being read or parsed.
 *
 * The file info object currently only consists of a few properties, but it may grow in the future if plug-ins end up needing more information.
 *
 * See https://apidevtools.com/json-schema-ref-parser/docs/plugins/file-info-object.html
 */
export interface FileInfo {
  /**
   * The full URL of the file. This could be any type of URL, including "http://", "https://", "file://", "ftp://", "mongodb://", or even a local filesystem path (when running in Node.js).
   */
  url: string;
  /**
   * The hash (URL fragment) of the file URL, including the # symbol. If the URL doesn't have a hash, then this will be an empty string.
   */
  hash: string;
  /**
   * The lowercase file extension, such as ".json", ".yaml", ".txt", etc.
   */
  extension: string;
  /**
   * The raw file contents, in whatever form they were returned by the resolver
   * that read the file (e.g. a Buffer from the file/HTTP resolvers).
   */
  data: string | Buffer;
}
@@ -0,0 +1,11 @@
import path from "path";
/**
 * Converts a Windows-style path (backslash separators) into a POSIX-style
 * path (forward slashes). Windows extended-length paths ("\\?\...") are
 * returned unchanged, since rewriting their separators would break them.
 */
export default function convertPathToPosix(filePath: string) {
  if (filePath.startsWith("\\\\?\\")) {
    // Extended-length path — leave as-is.
    return filePath;
  }
  // Replace every Windows separator ("\") with the POSIX one ("/").
  return filePath.replace(/\\/g, "/");
}
@@ -0,0 +1,214 @@
import { getHash, stripHash, toFileSystemPath } from "./url.js";
import type $RefParser from "../index.js";
import type { ParserOptions } from "../index.js";
import type { JSONSchema } from "../index.js";
import type $Ref from "../ref";
/**
 * Machine-readable error codes carried by the error classes in this module
 * (each `*Error` class below sets exactly one of these as its `code`).
 */
export type JSONParserErrorType =
  | "EUNKNOWN"
  | "EPARSER"
  | "EUNMATCHEDPARSER"
  | "ETIMEOUT"
  | "ERESOLVER"
  | "EUNMATCHEDRESOLVER"
  | "EMISSINGPOINTER"
  | "EINVALIDPOINTER";
// Value types that cannot be represented in JSON; skipped by toJSON() below.
const nonJsonTypes = ["function", "symbol", "undefined"];
// Keys never copied/returned when crawling objects (prototype-pollution guard).
const protectedProps = ["constructor", "prototype", "__proto__"];
// Cached base Object prototype — the stopping point when walking prototype chains.
const objectPrototype = Object.getPrototypeOf({});
/**
 * Custom JSON serializer for Error objects.
 * Returns all built-in error properties, as well as extended properties,
 * skipping values that cannot be represented in JSON.
 */
export function toJSON<T extends Error>(this: T): Error & T {
  // HACK: cast to `any` so we can use symbol indexers.
  // see https://github.com/Microsoft/TypeScript/issues/1863
  const source = this as any;
  const serializable: any = {};
  for (const key of getDeepKeys(source)) {
    if (typeof key !== "string") {
      continue; // symbol keys can't appear in JSON
    }
    const value = source[key];
    if (!nonJsonTypes.includes(typeof value)) {
      serializable[key] = value;
    }
  }
  return serializable as Error & T;
}
/**
 * Returns own, inherited, enumerable, non-enumerable, string, and symbol keys of `obj`.
 * Does NOT return members of the base Object prototype, or the specified omitted keys.
 */
export function getDeepKeys(obj: object, omit: Array<string | symbol> = []): Set<string | symbol> {
  const uniqueKeys = new Set<string | symbol>();
  // Walk the prototype chain, collecting string and symbol keys at each level
  // (stopping before the base Object prototype).
  let current: object | null = obj;
  while (current && current !== Object.prototype) {
    for (const name of Object.getOwnPropertyNames(current)) {
      uniqueKeys.add(name);
    }
    for (const sym of Object.getOwnPropertySymbols(current)) {
      uniqueKeys.add(sym);
    }
    current = Object.getPrototypeOf(current) as object;
  }
  // Remove omitted keys and protected (prototype-pollution-prone) keys
  for (const key of [...omit, "constructor", "prototype", "__proto__"]) {
    uniqueKeys.delete(key);
  }
  return uniqueKeys;
}
/**
 * Base class for all errors raised by this library.
 * Carries a machine-readable `code`, the `source` (file URL/path) that
 * produced the error, and the JSON `path` within that source (if known).
 */
export class JSONParserError extends Error {
  public readonly name: string;
  public readonly message: string;
  // File URL or filesystem path where the error originated (if known)
  public source: string | undefined;
  // JSON path segments to the offending value; `null` until assigned
  public path: Array<string | number> | null;
  // NOTE: subclasses override this via field initializers, which run after super()
  public readonly code: JSONParserErrorType;
  public constructor(message: string, source?: string) {
    super();
    this.code = "EUNKNOWN";
    this.name = "JSONParserError";
    this.message = message;
    this.source = source;
    this.path = null;
  }
  // Bound instance property so serialization works even when the method is
  // detached (e.g. passed to JSON.stringify)
  toJSON = toJSON.bind(this);
  // Identity string combining path/source/code/message (usable for de-duplication)
  get footprint() {
    return `${this.path}+${this.source}+${this.code}+${this.message}`;
  }
}
/**
 * Aggregates every error collected across all files referenced by a parser run.
 * NOTE(review): presumably surfaced when errors are being collected rather than
 * thrown immediately — confirm against the parser's error-handling option.
 */
export class JSONParserErrorGroup<
  S extends object = JSONSchema,
  O extends ParserOptions<S> = ParserOptions<S>,
> extends Error {
  // The parser whose $refs map is scanned for per-file errors
  files: $RefParser<S, O>;
  constructor(parser: $RefParser<S, O>) {
    super();
    this.files = parser;
    this.name = "JSONParserErrorGroup";
    this.message = `${this.errors.length} error${
      this.errors.length > 1 ? "s" : ""
    } occurred while reading '${toFileSystemPath(parser.$refs._root$Ref!.path)}'`;
  }
  // Bound so serialization works when the method is detached (e.g. JSON.stringify)
  toJSON = toJSON.bind(this);
  /** Collects the errors recorded on every $ref in the parser's $refs map. */
  static getParserErrors<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
    parser: $RefParser<S, O>,
  ) {
    const errors = [];
    for (const $ref of Object.values(parser.$refs._$refs) as $Ref<S, O>[]) {
      if ($ref.errors) {
        errors.push(...$ref.errors);
      }
    }
    return errors;
  }
  /** Live view of all errors currently recorded on the parser's $refs. */
  get errors(): Array<
    | JSONParserError
    | InvalidPointerError
    | ResolverError
    | ParserError
    | MissingPointerError
    | UnmatchedParserError
    | UnmatchedResolverError
  > {
    return JSONParserErrorGroup.getParserErrors<S, O>(this.files);
  }
}
/** Thrown when a parser plugin fails to parse a file's contents. */
export class ParserError extends JSONParserError {
  code = "EPARSER" as JSONParserErrorType;
  name = "ParserError";
  constructor(message: any, source: any) {
    super(`Error parsing ${source}: ${message}`, source);
  }
}
/** Thrown when no parser plugin's `canParse` matched the given file. */
export class UnmatchedParserError extends JSONParserError {
  code = "EUNMATCHEDPARSER" as JSONParserErrorType;
  name = "UnmatchedParserError";
  constructor(source: string) {
    super(`Could not find parser for "${source}"`, source);
  }
}
/** Thrown when a resolver plugin fails to read a file (filesystem, HTTP, etc.). */
export class ResolverError extends JSONParserError {
  code = "ERESOLVER" as JSONParserErrorType;
  name = "ResolverError";
  // The underlying I/O error code (e.g. "ENOENT"), when the wrapped error had one
  ioErrorCode?: string;
  constructor(ex: Error | any, source?: string) {
    super(ex.message || `Error reading file "${source}"`, source);
    if ("code" in ex) {
      this.ioErrorCode = String(ex.code);
    }
  }
}
/** Thrown when no resolver plugin's `canRead` matched the given file. */
export class UnmatchedResolverError extends JSONParserError {
  code = "EUNMATCHEDRESOLVER" as JSONParserErrorType;
  name = "UnmatchedResolverError";
  constructor(source: any) {
    super(`Could not find resolver for "${source}"`, source);
  }
}
/** Thrown when a JSON Pointer token does not exist in the target document. */
export class MissingPointerError extends JSONParserError {
  code = "EMISSINGPOINTER" as JSONParserErrorType;
  name = "MissingPointerError";
  // The pointer token that could not be resolved
  public targetToken: any;
  // Additional resolution context supplied by the caller
  public targetRef: string;
  public targetFound: string;
  public parentPath: string;
  constructor(token: any, path: any, targetRef: any, targetFound: any, parentPath: any) {
    super(`Missing $ref pointer "${getHash(path)}". Token "${token}" does not exist.`, stripHash(path));
    this.targetToken = token;
    this.targetRef = targetRef;
    this.targetFound = targetFound;
    this.parentPath = parentPath;
  }
}
/** Thrown when dereferencing exceeds the configured timeout (in milliseconds). */
export class TimeoutError extends JSONParserError {
  code = "ETIMEOUT" as JSONParserErrorType;
  name = "TimeoutError";
  constructor(timeout: number) {
    super(`Dereferencing timeout reached: ${timeout}ms`);
  }
}
/** Thrown when a $ref pointer is syntactically invalid (doesn't begin with "#/"). */
export class InvalidPointerError extends JSONParserError {
  // FIX: was "EUNMATCHEDRESOLVER" — a copy-paste from UnmatchedResolverError.
  // "EINVALIDPOINTER" is declared in JSONParserErrorType and was otherwise unused.
  code = "EINVALIDPOINTER" as JSONParserErrorType;
  name = "InvalidPointerError";
  constructor(pointer: string, path: string) {
    super(`Invalid $ref pointer "${pointer}". Pointers must begin with "#/"`, stripHash(path));
  }
}
/**
 * Type guard: true when the error was produced by this library (and has the
 * JSONParserError shape), as opposed to an unexpected runtime error.
 */
export function isHandledError(err: any): err is JSONParserError {
  return err instanceof JSONParserError || err instanceof JSONParserErrorGroup;
}
/**
 * Ensures the error's `path` is always an array: a `null` path (the initial
 * value on JSONParserError) is replaced with `[]`. Mutates and returns `err`.
 */
export function normalizeError(err: any) {
  const pathIsUnset = err.path === null;
  if (pathIsUnset) {
    err.path = [];
  }
  return err;
}
@@ -0,0 +1,2 @@
// Detect the platform once at module load; environments without a `process`
// global (browsers) are treated as non-Windows.
const runningOnWindows = globalThis.process ? globalThis.process.platform.startsWith("win") : false;
/** Returns `true` when running on a Windows platform (cached at module load). */
export const isWindows = () => runningOnWindows;
@@ -0,0 +1,22 @@
import next from "./next.js";
type MaybeParams<T> = (err: Error | any | null, result?: T) => void;
/**
 * Bridges promise-style and callback-style APIs: when `cb` is provided, the
 * promise's outcome is delivered to it on a later tick (via `next`) and
 * nothing is returned; otherwise the promise itself is returned to the caller.
 */
export default function maybe<T>(cb: MaybeParams<T> | undefined, promise: Promise<T>): Promise<T> | void {
  if (!cb) {
    return promise;
  }
  promise.then(
    (result) => {
      // Defer so the callback never runs synchronously inside the resolution
      next(() => cb(null, result));
    },
    (err) => {
      next(() => cb(err));
    },
  );
  return undefined;
}
@@ -0,0 +1,13 @@
/**
 * Picks the best available "run this callback on a later tick" primitive for
 * the current environment: `process.nextTick` (Node), `setImmediate`, or a
 * zero-delay `setTimeout` fallback (browsers).
 */
function makeNext() {
  if (typeof process === "object" && typeof process.nextTick === "function") {
    return process.nextTick;
  }
  if (typeof setImmediate === "function") {
    return setImmediate;
  }
  return function next(f: () => void) {
    setTimeout(f, 0);
  };
}
export default makeNext();
@@ -0,0 +1,159 @@
import type { FileInfo, JSONSchema } from "../types/index.js";
import type { ParserOptions } from "../options.js";
import type { ResolverOptions } from "../types/index.js";
import type $Refs from "../refs.js";
import type { Plugin } from "../types/index.js";
/**
 * Returns the given plugins as an array, rather than an object map.
 * All other methods in this module expect an array of plugins rather than an object map.
 * Each plugin's `name` property is set to its key in the map.
 *
 * @returns
 */
export function all<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  plugins: O["resolve"],
): Plugin[] {
  const result: Plugin[] = [];
  for (const key of Object.keys(plugins || {}) as (keyof ResolverOptions<S>)[]) {
    // Only object-valued entries are plugins; boolean flags etc. are skipped
    if (typeof plugins![key] === "object") {
      (plugins![key] as ResolverOptions<S>).name = key;
      result.push(plugins![key] as Plugin);
    }
  }
  return result;
}
/**
 * Filters the given plugins, returning only the ones that return a truthy
 * value for the given method.
 */
export function filter(plugins: Plugin[], method: any, file: any) {
  return plugins.filter((plugin) => Boolean(getResult(plugin, method, file)));
}
/**
 * Sorts the given plugins, in place, by their `order` property.
 * Plugins without an `order` are assigned `Number.MAX_SAFE_INTEGER`, so they run last.
 */
export function sort(plugins: Plugin[]) {
  for (const plugin of plugins) {
    // A missing/falsy order sorts last
    plugin.order = plugin.order || Number.MAX_SAFE_INTEGER;
  }
  return plugins.sort((a: any, b: any) => a.order - b.order);
}
/** The outcome of running one plugin: the plugin itself, and its result or error. */
export interface PluginResult<S extends object = JSONSchema> {
  plugin: Plugin;
  result?: string | Buffer | S;
  error?: any;
}
/**
 * Runs the specified method of the given plugins, in order, until one of them returns a successful result.
 * Each method can return a synchronous value, a Promise, or call an error-first callback.
 * If the promise resolves successfully, or the callback is called without an error, then the result
 * is immediately returned and no further plugins are called.
 * If the promise rejects, or the callback is called with an error, then the next plugin is called.
 * If ALL plugins fail, then the last error is thrown.
 *
 * @param plugins - Plugins already filtered/sorted for this file
 * @param method - The plugin method to invoke (e.g. "read" or "parse")
 * @param file - The file info object passed to each plugin
 * @param $refs - The $refs map, passed through to plugin functions
 */
export async function run<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  plugins: Plugin[],
  method: keyof Plugin | keyof ResolverOptions<S>,
  file: FileInfo,
  $refs: $Refs<S, O>,
) {
  let plugin: Plugin;
  let lastError: PluginResult<S>;
  let index = 0;
  return new Promise<PluginResult<S>>((resolve, reject) => {
    runNextPlugin();
    // Tries one plugin; on failure, advances to the next one.
    function runNextPlugin() {
      plugin = plugins[index++];
      if (!plugin) {
        // There are no more functions, so re-throw the last error
        return reject(lastError);
      }
      try {
        // console.log('  %s', plugin.name);
        const result = getResult(plugin, method, file, callback, $refs);
        if (result && typeof result.then === "function") {
          // A promise was returned
          result.then(onSuccess, onError);
        } else if (result !== undefined) {
          // A synchronous result was returned
          onSuccess(result);
        } else if (index === plugins.length) {
          // Last plugin returned undefined and never called the callback —
          // treat as a plugin-authoring error rather than hanging forever.
          throw new Error("No promise has been returned or callback has been called.");
        }
        // Otherwise: the plugin is expected to call `callback` asynchronously.
      } catch (e) {
        onError(e);
      }
    }
    // Node-style error-first callback handed to plugins that use callbacks.
    function callback(err: PluginResult<S>["error"], result: PluginResult<S>["result"]) {
      if (err) {
        onError(err);
      } else {
        onSuccess(result);
      }
    }
    function onSuccess(result: PluginResult<S>["result"]) {
      // console.log('    success');
      resolve({
        plugin,
        result: result!,
      });
    }
    // Remember the failure (with the plugin that produced it) and try the next plugin.
    function onError(error: PluginResult<S>["error"]) {
      // console.log('    %s', err.message || err);
      lastError = {
        plugin,
        error,
      };
      runNextPlugin();
    }
  });
}
/**
 * Returns the value of the given plugin property.
 * If the property is a function, then the result of calling it (with the plugin
 * as `this`) is returned.
 * If the value is a RegExp, then it will be tested against the file URL.
 * If the value is a string or array, it will be compared against the file extension.
 */
function getResult<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(
  obj: Plugin,
  prop: keyof Plugin | keyof ResolverOptions<S>,
  file: FileInfo,
  callback?: (err?: Error, result?: any) => void,
  $refs?: $Refs<S, O>,
) {
  const value = obj[prop as keyof typeof obj] as unknown;
  if (typeof value === "function") {
    // Invoke with the plugin as `this`
    return value.apply(obj, [file, callback, $refs]);
  }
  if (!callback) {
    // The synchronous plugin functions (canParse and canRead) allow a
    // "shorthand" syntax: match files by RegExp or by file extension.
    if (value instanceof RegExp) {
      return value.test(file.url);
    }
    if (typeof value === "string") {
      return value === file.extension;
    }
    if (Array.isArray(value)) {
      return value.includes(file.extension);
    }
  }
  return value;
}
@@ -0,0 +1,493 @@
import convertPathToPosix from "./convert-path-to-posix";
import path, { win32 } from "path";
// Matches every forward slash (used when converting URLs to Windows paths).
const forwardSlashPattern = /\//g;
// Matches a URL protocol prefix of 2+ word characters, e.g. "http://".
// Single letters are excluded so Windows drive letters like "C:/" don't match.
const protocolPattern = /^(\w{2,}):\/\//i;
// JSON Pointer escape sequences (RFC 6901): "~1" encodes "/" and "~0" encodes "~".
const jsonPointerSlash = /~1/g;
const jsonPointerTilde = /~0/g;
import { join } from "path";
import { isWindows } from "./is-windows";
// RegExp patterns to URL-encode special characters in local filesystem paths
const urlEncodePatterns = [
  [/\?/g, "%3F"],
  [/#/g, "%23"],
] as [RegExp, string][];
// RegExp patterns to URL-decode special characters for local filesystem paths.
// NOTE: this is a FLAT array of alternating pattern/replacement entries —
// toFileSystemPath() iterates it with a stride of 2.
const urlDecodePatterns = [/%23/g, "#", /%24/g, "$", /%26/g, "&", /%2C/g, ",", /%40/g, "@"];
/** Parses the given string or URL into a WHATWG `URL` object (throws on invalid input). */
export const parse = (u: string | URL) => new URL(u);
/**
 * Returns resolved target URL relative to a base URL in a manner similar to that of a Web browser resolving an anchor tag HREF.
 *
 * @param from - The base URL or path to resolve against
 * @param to - The (possibly relative) target URL or path
 * @returns The resolved URL or path; trailing whitespace from `to` is preserved
 */
export function resolve(from: string, to: string) {
  // Resolve against a sentinel origin so relative inputs can be detected:
  // if the result still has the sentinel hostname, `from` was relative.
  const fromUrl = new URL(convertPathToPosix(from), "https://aaa.nonexistanturl.com");
  const resolvedUrl = new URL(convertPathToPosix(to), fromUrl);
  // The URL parser strips trailing whitespace; capture it so it can be re-appended
  const endSpaces = to.match(/(\s*)$/)?.[1] || "";
  if (resolvedUrl.hostname === "aaa.nonexistanturl.com") {
    // `from` is a relative URL.
    const { pathname, search, hash } = resolvedUrl;
    return pathname + search + hash + endSpaces;
  }
  return resolvedUrl.toString() + endSpaces;
}
/**
 * Returns the current working directory (in Node) or the current page URL (in browsers).
 * In Node, the result always ends with a path separator.
 *
 * @returns
 */
export function cwd() {
  // Browser environment: use the current page URL
  if (typeof window !== "undefined" && window.location && window.location.href) {
    const href = window.location.href;
    if (!href || !href.startsWith("http")) {
      // try parsing as url, and if it fails, return root url /
      try {
        new URL(href);
        return href;
      } catch {
        return "/";
      }
    }
    return href;
  }
  // Node environment: process.cwd(), normalized to end with a separator.
  // (Renamed from `path` to avoid shadowing the imported `path` module.)
  if (typeof process !== "undefined" && process.cwd) {
    const workingDir = process.cwd();
    const lastChar = workingDir.slice(-1);
    return lastChar === "/" || lastChar === "\\" ? workingDir : workingDir + "/";
  }
  // Unknown environment — fall back to the root path
  return "/";
}
/**
 * Returns the protocol of the given URL (lowercased), or `undefined` if it has no protocol.
 *
 * @param path
 * @returns
 */
export function getProtocol(path: string | undefined) {
  // A protocol is 2+ word characters followed by "://"; single letters are
  // excluded so Windows drive letters such as "C:/" don't match.
  const match = /^(\w{2,}):\/\//i.exec(path || "");
  return match ? match[1].toLowerCase() : undefined;
}
/**
 * Returns the lowercased file extension of the given URL (query string removed),
 * or an empty string if it has no extension.
 *
 * @param path
 * @returns
 */
export function getExtension(path: any) {
  const lastDot = path.lastIndexOf(".");
  if (lastDot < 0) {
    return "";
  }
  return stripQuery(path.substring(lastDot).toLowerCase());
}
/**
 * Removes the query string, if any, from the given path.
 *
 * @param path
 * @returns
 */
export function stripQuery(path: any) {
  const queryIndex = path.indexOf("?");
  return queryIndex >= 0 ? path.substring(0, queryIndex) : path;
}
/**
 * Returns the hash (URL fragment), of the given path, including the "#".
 * If there is no hash, then the root hash ("#") is returned.
 *
 * @param path
 * @returns
 */
export function getHash(path: undefined | string) {
  if (path) {
    const hashIndex = path.indexOf("#");
    if (hashIndex >= 0) {
      return path.substring(hashIndex);
    }
  }
  return "#";
}
/**
 * Removes the hash (URL fragment), if any, from the given path.
 * Returns an empty string for a missing/empty path.
 *
 * @param path
 * @returns
 */
export function stripHash(path?: string | undefined) {
  if (!path) {
    return "";
  }
  const hashIndex = path.indexOf("#");
  return hashIndex >= 0 ? path.substring(0, hashIndex) : path;
}
/**
* Determines whether the given path is an HTTP(S) URL.
*
* @param path
* @returns
*/
export function isHttp(path: string) {
const protocol = getProtocol(path);
if (protocol === "http" || protocol === "https") {
return true;
} else if (protocol === undefined) {
// There is no protocol. If we're running in a browser, then assume it's HTTP.
return typeof window !== "undefined";
} else {
// It's some other protocol, such as "ftp://", "mongodb://", etc.
return false;
}
}
/**
 * Determines whether the given url is an unsafe or internal url.
 * Used as an SSRF guard by the HTTP resolver (when `safeUrlResolver` is enabled).
 * Fails safe: non-string or unparseable suspicious inputs are treated as unsafe.
 *
 * @param path - The URL or path to check
 * @returns true if the URL is unsafe/internal, false otherwise
 */
export function isUnsafeUrl(path: string | unknown): boolean {
  // Fail safe: anything that isn't a non-empty string is unsafe
  if (!path || typeof path !== "string") {
    return true;
  }
  // Trim whitespace and convert to lowercase for comparison
  const normalizedPath = path.trim().toLowerCase();
  // Empty or just whitespace
  if (!normalizedPath) {
    return true;
  }
  // Script-injection protocols
  if (
    normalizedPath.startsWith("javascript:") ||
    normalizedPath.startsWith("vbscript:") ||
    normalizedPath.startsWith("data:")
  ) {
    return true;
  }
  // File protocol (local filesystem access)
  if (normalizedPath.startsWith("file:")) {
    return true;
  }
  // if we're in the browser, we assume that it is safe
  if (typeof window !== "undefined" && window.location && window.location.href) {
    return false;
  }
  // Local/internal network addresses
  const localPatterns = [
    // Localhost variations
    "localhost",
    "127.0.0.1",
    "::1",
    // Private IP ranges (RFC 1918)
    "10.",
    "172.16.",
    "172.17.",
    "172.18.",
    "172.19.",
    "172.20.",
    "172.21.",
    "172.22.",
    "172.23.",
    "172.24.",
    "172.25.",
    "172.26.",
    "172.27.",
    "172.28.",
    "172.29.",
    "172.30.",
    "172.31.",
    "192.168.",
    // Link-local addresses
    "169.254.",
    // Internal domains
    ".local",
    ".internal",
    ".intranet",
    ".corp",
    ".home",
    ".lan",
  ];
  try {
    // Try to parse as URL (protocol-relative "//host" inputs get an "http:" prefix)
    const url = new URL(normalizedPath.startsWith("//") ? "http:" + normalizedPath : normalizedPath);
    const hostname = url.hostname.toLowerCase();
    // Check against local patterns.
    // NOTE: matching is deliberately broad — a hostname that equals, starts
    // with, OR ends with any pattern is rejected (conservative by design).
    for (const pattern of localPatterns) {
      if (hostname === pattern || hostname.startsWith(pattern) || hostname.endsWith(pattern)) {
        return true;
      }
    }
    // Check for IP addresses in private ranges
    if (isPrivateIP(hostname)) {
      return true;
    }
    // Check for non-standard ports that might indicate internal services
    const port = url.port;
    if (port && isInternalPort(parseInt(port))) {
      return true;
    }
  } catch {
    // If URL parsing fails, check if it's a relative path or contains suspicious patterns
    // Relative paths starting with / are generally safe for same-origin
    if (normalizedPath.startsWith("/") && !normalizedPath.startsWith("//")) {
      return false;
    }
    // Check for localhost patterns in non-URL strings
    for (const pattern of localPatterns) {
      if (normalizedPath.includes(pattern)) {
        return true;
      }
    }
  }
  return false;
}
/**
 * Helper function to check if an IPv4 address is in a private range
 * (RFC 1918 ranges, loopback 127.x, or link-local 169.254.x).
 * Malformed addresses (non-dotted-quad or out-of-range octets) return false.
 */
function isPrivateIP(ip: string): boolean {
  const match = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/.exec(ip);
  if (!match) {
    return false;
  }
  const octets = match.slice(1).map(Number);
  // Reject malformed addresses with out-of-range octets
  if (octets.some((octet) => octet > 255)) {
    return false;
  }
  const [a, b] = octets;
  return (
    a === 10 || // 10.0.0.0/8
    a === 127 || // loopback
    (a === 172 && b >= 16 && b <= 31) || // 172.16.0.0/12
    (a === 192 && b === 168) || // 192.168.0.0/16
    (a === 169 && b === 254) // Link-local
  );
}
/**
 * Helper function to check if a port is typically used for internal services:
 * SSH (22), Telnet (23), SMTP (25), DNS (53), RPC (135), NetBIOS (139),
 * SMB (445), IMAPS (993), POP3S (995), SQL Server (1433), Oracle (1521),
 * MySQL (3306), RDP (3389), PostgreSQL (5432), VNC (5900), Redis (6379),
 * internal web (8080/8443), Elasticsearch (9200), MongoDB (27017).
 */
function isInternalPort(port: number): boolean {
  const internalPorts = new Set([
    22, 23, 25, 53, 135, 139, 445, 993, 995, 1433, 1521, 3306, 3389, 5432, 5900, 6379, 8080, 8443, 9200, 27017,
  ]);
  return internalPorts.has(port);
}
/**
 * Determines whether the given path is a filesystem path.
 * This includes "file://" URLs and protocol-less paths (when running in Node.js).
 * In browsers, every path is treated as a URL — even relative ones.
 *
 * @param path
 * @returns
 */
export function isFileSystemPath(path: string | undefined) {
  // @ts-ignore — `process.browser` is set by some bundlers, not typed in Node
  const runningInBrowser = typeof window !== "undefined" || (typeof process !== "undefined" && process.browser);
  if (runningInBrowser) {
    // Treat all paths as URLs so relative paths resolve like URLs, not filesystem paths
    return false;
  }
  const protocol = getProtocol(path);
  return protocol === undefined || protocol === "file";
}
/**
 * Converts a filesystem path to a properly-encoded URL.
 *
 * This is intended to handle situations where JSON Schema $Ref Parser is called
 * with a filesystem path that contains characters which are not allowed in URLs.
 *
 * @example
 * The following filesystem paths would be converted to the following URLs:
 *
 *    <"!@#$%^&*+=?'>.json              ==>   %3C%22!@%23$%25%5E&*+=%3F\'%3E.json
 *    C:\\My Documents\\File (1).json   ==>   C:/My%20Documents/File%20(1).json
 *    file://Project #42/file.json      ==>   file://Project%20%2342/file.json
 *
 * @param path
 * @returns
 */
export function fromFileSystemPath(path: string) {
  // Step 1: On Windows, replace backslashes with forward slashes,
  // rather than encoding them as "%5C"
  if (isWindows()) {
    const projectDir = cwd();
    const upperPath = path.toUpperCase();
    const projectDirPosixPath = convertPathToPosix(projectDir);
    const posixUpper = projectDirPosixPath.toUpperCase();
    // FIX: the original computed `hasProjectDir` and `hasProjectUri` with
    // byte-identical expressions; collapsed into a single check.
    const hasProjectDir = upperPath.includes(posixUpper);
    const isAbsolutePath =
      win32?.isAbsolute(path) ||
      path.startsWith("http://") ||
      path.startsWith("https://") ||
      path.startsWith("file://");
    // Make relative paths absolute by prefixing the project directory
    if (!(hasProjectDir || isAbsolutePath) && !projectDir.startsWith("http")) {
      path = join(projectDir, path);
    }
    path = convertPathToPosix(path);
  }
  // Step 2: `encodeURI` will take care of MOST characters
  path = encodeURI(path);
  // Step 3: Manually encode characters that are not encoded by `encodeURI`.
  // This includes characters such as "#" and "?", which have special meaning in URLs,
  // but are just normal characters in a filesystem path.
  for (const pattern of urlEncodePatterns) {
    path = path.replace(pattern[0], pattern[1]);
  }
  return path;
}
/**
 * Converts a URL to a local filesystem path.
 *
 * @param path - The (possibly "file://") URL to convert
 * @param keepFileProtocol - If `true`, return a normalized "file:///" URL instead of a bare path
 */
export function toFileSystemPath(path: string | undefined, keepFileProtocol?: boolean): string {
  // Step 1: `decodeURI` will decode characters such as Cyrillic characters, spaces, etc.
  path = decodeURI(path!);
  // Step 2: Manually decode characters that are not decoded by `decodeURI`.
  // This includes characters such as "#" and "?", which have special meaning in URLs,
  // but are just normal characters in a filesystem path.
  // (`urlDecodePatterns` is a flat pattern/replacement array, hence the stride of 2.)
  for (let i = 0; i < urlDecodePatterns.length; i += 2) {
    path = path.replace(urlDecodePatterns[i], urlDecodePatterns[i + 1] as string);
  }
  // Step 3: If it's a "file://" URL, then format it consistently
  // or convert it to a local filesystem path
  let isFileUrl = path.toLowerCase().startsWith("file://");
  if (isFileUrl) {
    // Strip-off the protocol, and the initial "/", if there is one
    path = path.replace(/^file:\/\//, "").replace(/^\//, "");
    // insert a colon (":") after the drive letter on Windows
    if (isWindows() && path[1] === "/") {
      path = `${path[0]}:${path.substring(1)}`;
    }
    if (keepFileProtocol) {
      // Return the consistently-formatted "file://" URL
      path = "file:///" + path;
    } else {
      // Convert the "file://" URL to a local filesystem path.
      // On Windows, it will start with something like "C:/".
      // On Posix, it will start with "/"
      isFileUrl = false;
      path = isWindows() ? path : "/" + path;
    }
  }
  // Step 4: Normalize Windows paths (unless it's a "file://" URL)
  if (isWindows() && !isFileUrl) {
    // Replace forward slashes with backslashes
    path = path.replace(forwardSlashPattern, "\\");
    // Capitalize the drive letter
    if (path.match(/^[a-z]:\\/i)) {
      path = path[0].toUpperCase() + path.substring(1);
    }
  }
  return path;
}
/**
 * Converts a $ref pointer to a valid JSON Path.
 *
 * @param pointer - A JSON Pointer string (e.g. "#/definitions/person")
 * @returns An array of decoded path segments; empty when the pointer is not
 *          a "#/"-rooted pointer (e.g. just "#", or a non-fragment string)
 */
export function safePointerToPath(pointer: any) {
  // Anything other than a "#/..." pointer has no addressable segments.
  const isRootedPointer = pointer.length > 1 && pointer[0] === "#" && pointer[1] === "/";
  if (!isRootedPointer) {
    return [];
  }
  const segments = pointer.slice(2).split("/");
  // Per RFC 6901, "~1" decodes to "/" and "~0" to "~" — in that order,
  // so that "~01" correctly yields "~1" rather than "/".
  return segments.map((segment: any) => {
    return decodeURIComponent(segment).replace(jsonPointerSlash, "/").replace(jsonPointerTilde, "~");
  });
}
/**
 * Computes the relative path from one location to another, preserving the
 * target's hash fragment.
 *
 * @param from - The starting path or URL
 * @param to - The destination path or URL (its "#..." fragment is kept)
 * @returns The relative filesystem path plus the target's hash, or a resolved
 *          URL when either argument is not a filesystem path
 */
export function relative(from: string, to: string) {
  // URLs (non-filesystem locations) can't use path.relative; defer to URL resolution.
  const bothAreFiles = isFileSystemPath(from) && isFileSystemPath(to);
  if (!bothAreFiles) {
    return resolve(from, to);
  }
  const baseDir = path.dirname(stripHash(from));
  const relativePath = path.relative(baseDir, stripHash(to));
  // Re-attach the destination's fragment (empty string when there is none).
  return relativePath + getHash(to);
}