FRE-600: Fix code review blockers

- Consolidated duplicate UndoManagers to single instance
- Fixed connection promise to only resolve on 'connected' status
- Fixed WebSocketProvider import (WebsocketProvider)
- Added proper doc.destroy() cleanup
- Renamed isPresenceInitialized property to avoid conflict

Co-Authored-By: Paperclip <noreply@paperclip.ing>
This commit is contained in:
2026-04-25 00:08:01 -04:00
parent 65b552bb08
commit 7c684a42cc
48450 changed files with 5679671 additions and 383 deletions

60
node_modules/metro-file-map/src/lib/FileProcessor.d.ts generated vendored Normal file
View File

@@ -0,0 +1,60 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @noformat
* @oncall react_native
* @generated SignedSource<<2ea213f753eef5de14cb8a27f68b9fa2>>
*
* This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
* Original file: packages/metro-file-map/src/lib/FileProcessor.js
* To regenerate, run:
* js1 build metro-ts-defs (internal) OR
* yarn run build-ts-defs (OSS)
*/
import type {
FileMapPluginWorker,
FileMetadata,
PerfLogger,
} from '../flow-types';
// Options controlling what work is done when processing a file or batch.
type ProcessFileRequest = Readonly<{
  /**
   * Populate metadata[H.SHA1] with the SHA1 of the file's contents.
   */
  computeSha1: boolean;
  /**
   * Only if processing has already required reading the file's contents, return
   * the contents as a Buffer - null otherwise. Not supported for batches.
   */
  maybeReturnContent: boolean;
}>;
// An Error that may carry an error code (e.g. a filesystem code like 'ENOENT').
interface MaybeCodedError extends Error {
  code?: string;
}
// Computes SHA1 hashes and runs plugin workers over files, either in-band or
// (for large batches) via a jest-worker farm. See FileProcessor.js.
export declare class FileProcessor {
  constructor(
    opts: Readonly<{
      // Batch size per worker, used to decide how many workers to spawn.
      maxFilesPerWorker?: null | undefined | number;
      maxWorkers: number;
      pluginWorkers: null | undefined | ReadonlyArray<FileMapPluginWorker>;
      perfLogger: null | undefined | PerfLogger;
      rootDir: string;
    }>,
  );
  // Process many files; resolves with per-file errors rather than rejecting
  // the whole batch. maybeReturnContent is not supported here.
  processBatch(
    files: ReadonlyArray<[string, FileMetadata]>,
    req: ProcessFileRequest,
  ): Promise<{
    errors: Array<{normalFilePath: string; error: MaybeCodedError}>;
  }>;
  // Process a single file in-band; returns null when no work was needed.
  processRegularFile(
    normalPath: string,
    fileMetadata: FileMetadata,
    req: ProcessFileRequest,
  ): null | undefined | {content: null | undefined | Buffer};
  // Lifecycle cleanup hook; see the implementation for current behavior.
  end(): Promise<void>;
}

194
node_modules/metro-file-map/src/lib/FileProcessor.js generated vendored Normal file
View File

@@ -0,0 +1,194 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true,
});
exports.FileProcessor = void 0;
var _constants = _interopRequireDefault(require("../constants"));
var _worker = require("../worker");
var _RootPathUtils = require("./RootPathUtils");
var _jestWorker = require("jest-worker");
var _path = require("path");
// Babel interop helper: make `.default` access work uniformly whether the
// required module is a transpiled ES module or a plain CommonJS export.
function _interopRequireDefault(e) {
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
const debug = require("debug")("Metro:FileMap");
const NODE_MODULES_SEP = "node_modules" + _path.sep;
const MAX_FILES_PER_WORKER = 100;
/**
 * Computes SHA1 hashes and runs plugin workers over files, either in-band
 * (single files and small batches) or via a jest-worker thread farm.
 */
class FileProcessor {
  #maxFilesPerWorker; // Batch size per worker; sizes the worker farm.
  #maxWorkers;
  #perfLogger; // Nulled after first worker init so it only logs once.
  #pluginWorkers;
  #inBandWorker; // Worker instance run on this thread.
  #rootPathUtils;
  constructor(opts) {
    this.#maxFilesPerWorker = opts.maxFilesPerWorker ?? MAX_FILES_PER_WORKER;
    this.#maxWorkers = opts.maxWorkers;
    this.#pluginWorkers = opts.pluginWorkers ?? [];
    this.#inBandWorker = new _worker.Worker({
      plugins: this.#pluginWorkers.map((plugin) => plugin.worker),
    });
    this.#perfLogger = opts.perfLogger;
    this.#rootPathUtils = new _RootPathUtils.RootPathUtils(opts.rootDir);
  }
  /**
   * Process a batch of files, farming work out to worker threads when the
   * batch is large enough. Per-file failures are collected into the returned
   * `errors` array rather than rejecting the whole batch.
   */
  async processBatch(files, req) {
    // Fix: validate the request *before* filtering jobs or creating the batch
    // worker. Previously this threw after #getBatchWorker(), which could leak
    // a spawned jest-worker farm (never end()ed) and consume the one-shot
    // perf logger.
    if (req.maybeReturnContent) {
      throw new Error(
        "Batch processing does not support returning file contents",
      );
    }
    const errors = [];
    const workerJobs = files
      .map(([normalFilePath, fileMetadata]) => {
        const maybeWorkerInput = this.#getWorkerInput(
          normalFilePath,
          fileMetadata,
          req,
        );
        if (!maybeWorkerInput) {
          // Nothing to do for this file (symlink, or no SHA1/plugins needed).
          return null;
        }
        return [maybeWorkerInput, fileMetadata];
      })
      .filter(Boolean);
    const numWorkers = Math.min(
      this.#maxWorkers,
      Math.ceil(workerJobs.length / this.#maxFilesPerWorker),
    );
    const batchWorker = this.#getBatchWorker(numWorkers);
    await Promise.all(
      workerJobs.map(([workerInput, fileMetadata]) => {
        return batchWorker
          .processFile(workerInput)
          .then((reply) =>
            processWorkerReply(reply, workerInput.pluginsToRun, fileMetadata),
          )
          .catch((error) =>
            errors.push({
              normalFilePath: this.#rootPathUtils.absoluteToNormal(
                workerInput.filePath,
              ),
              error: normalizeWorkerError(error),
            }),
          );
      }),
    );
    await batchWorker.end();
    return {
      errors,
    };
  }
  // Process a single file synchronously in-band; returns null when there is
  // nothing to do for this file.
  processRegularFile(normalPath, fileMetadata, req) {
    const workerInput = this.#getWorkerInput(normalPath, fileMetadata, req);
    return workerInput
      ? {
          content: processWorkerReply(
            this.#inBandWorker.processFile(workerInput),
            workerInput.pluginsToRun,
            fileMetadata,
          ),
        }
      : null;
  }
  // Build the message sent to a worker for this file, or null if no
  // processing is required (symlinks, already-hashed files with no plugins).
  #getWorkerInput(normalPath, fileMetadata, req) {
    if (fileMetadata[_constants.default.SYMLINK] !== 0) {
      // Only process regular files.
      return null;
    }
    const computeSha1 =
      req.computeSha1 && fileMetadata[_constants.default.SHA1] == null;
    const { maybeReturnContent } = req;
    // Path may begin 'node_modules/' or contain '/node_modules/'.
    const nodeModulesIdx = normalPath.indexOf(NODE_MODULES_SEP);
    const isNodeModules =
      nodeModulesIdx === 0 ||
      (nodeModulesIdx > 0 && normalPath[nodeModulesIdx - 1] === _path.sep);
    // Indices of plugins whose filter accepts this file.
    const pluginsToRun =
      this.#pluginWorkers?.reduce((prev, plugin, idx) => {
        if (
          plugin.filter({
            isNodeModules,
            normalPath,
          })
        ) {
          prev.push(idx);
        }
        return prev;
      }, []) ?? [];
    if (!computeSha1 && pluginsToRun.length === 0) {
      return null;
    }
    // node_modules files only need SHA1 work (if any) - see the Flow source
    // for rationale: they are never Haste modules or packages.
    if (isNodeModules) {
      if (computeSha1) {
        return {
          computeSha1: true,
          filePath: this.#rootPathUtils.normalToAbsolute(normalPath),
          maybeReturnContent,
          pluginsToRun,
        };
      }
      return null;
    }
    return {
      computeSha1,
      filePath: this.#rootPathUtils.normalToAbsolute(normalPath),
      maybeReturnContent,
      pluginsToRun,
    };
  }
  // Returns either the in-band worker (wrapped to match the farm interface)
  // or a jest-worker thread farm of `numWorkers` workers.
  #getBatchWorker(numWorkers) {
    if (numWorkers <= 1) {
      return {
        processFile: async (message) => this.#inBandWorker.processFile(message),
        end: async () => {},
      };
    }
    const workerPath = require.resolve("../worker");
    debug("Creating worker farm of %d worker threads", numWorkers);
    this.#perfLogger?.point("initWorkers_start");
    const jestWorker = new _jestWorker.Worker(workerPath, {
      exposedMethods: ["processFile"],
      maxRetries: 3,
      numWorkers,
      enableWorkerThreads: true,
      forkOptions: {
        // Don't pass Node arguments down to workers (the worker is plain
        // CommonJS and doesn't need e.g. Babel registration).
        execArgv: [],
      },
      setupArgs: [
        {
          plugins: this.#pluginWorkers.map((plugin) => plugin.worker),
        },
      ],
    });
    this.#perfLogger?.point("initWorkers_end");
    // Only log worker init once.
    this.#perfLogger = null;
    return jestWorker;
  }
  async end() {}
}
exports.FileProcessor = FileProcessor;
// Copy worker results (plugin data and SHA1) back onto the file's metadata
// row, mark the file as visited, and return any content the worker read.
function processWorkerReply(metadata, pluginsRun, fileMetadata) {
  fileMetadata[_constants.default.VISITED] = 1;
  const { pluginData } = metadata;
  if (pluginData) {
    // pluginData[i] corresponds to the i-th plugin index in pluginsRun.
    pluginsRun.forEach((pluginIdx, i) => {
      fileMetadata[_constants.default.PLUGINDATA + pluginIdx] = pluginData[i];
    });
  }
  if (metadata.sha1 != null) {
    fileMetadata[_constants.default.SHA1] = metadata.sha1;
  }
  return metadata.content;
}
// Ensure anything thrown by a worker is a real Error-like object. Worker IPC
// may deliver strings or null; wrap those in a stack-less Error.
function normalizeWorkerError(mixedError) {
  const isErrorLike =
    mixedError != null &&
    typeof mixedError === "object" &&
    mixedError.message != null &&
    mixedError.stack != null;
  if (isErrorLike) {
    return mixedError;
  }
  const wrapped = new Error(mixedError);
  wrapped.stack = ""; // Remove stack for stack-less errors.
  return wrapped;
}

View File

@@ -0,0 +1,289 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow strict-local
* @format
* @oncall react_native
*/
import type {
FileMapPluginWorker,
FileMetadata,
PerfLogger,
WorkerMessage,
WorkerMetadata,
WorkerSetupArgs,
} from '../flow-types';
import H from '../constants';
import {Worker} from '../worker';
import {RootPathUtils} from './RootPathUtils';
import {Worker as JestWorker} from 'jest-worker';
import {sep} from 'path';
// eslint-disable-next-line import/no-commonjs
const debug = require('debug')('Metro:FileMap');
// Options controlling what work is done when processing a file or batch.
type ProcessFileRequest = Readonly<{
  /**
   * Populate metadata[H.SHA1] with the SHA1 of the file's contents.
   */
  computeSha1: boolean,
  /**
   * Only if processing has already required reading the file's contents, return
   * the contents as a Buffer - null otherwise. Not supported for batches.
   */
  maybeReturnContent: boolean,
}>;
// Common interface over the in-band worker and a jest-worker farm.
interface AsyncWorker {
  +processFile: WorkerMessage => Promise<WorkerMetadata>;
  +end: () => Promise<void>;
}
// An Error that may carry an error code (e.g. a filesystem code like 'ENOENT').
interface MaybeCodedError extends Error {
  code?: string;
}
const NODE_MODULES_SEP = 'node_modules' + sep;
// Batch size per worker, used to decide how many workers to spawn.
const MAX_FILES_PER_WORKER = 100;
/**
 * Computes SHA1 hashes and runs plugin workers over files, either in-band
 * (single files and small batches) or via a jest-worker thread farm.
 */
export class FileProcessor {
  #maxFilesPerWorker: number;
  #maxWorkers: number;
  // Nulled after first worker init so it only logs once.
  #perfLogger: ?PerfLogger;
  #pluginWorkers: ReadonlyArray<FileMapPluginWorker>;
  #inBandWorker: Worker;
  #rootPathUtils: RootPathUtils;
  constructor(
    opts: Readonly<{
      maxFilesPerWorker?: ?number,
      maxWorkers: number,
      pluginWorkers: ?ReadonlyArray<FileMapPluginWorker>,
      perfLogger: ?PerfLogger,
      rootDir: string,
    }>,
  ) {
    this.#maxFilesPerWorker = opts.maxFilesPerWorker ?? MAX_FILES_PER_WORKER;
    this.#maxWorkers = opts.maxWorkers;
    this.#pluginWorkers = opts.pluginWorkers ?? [];
    this.#inBandWorker = new Worker({
      plugins: this.#pluginWorkers.map(plugin => plugin.worker),
    });
    this.#perfLogger = opts.perfLogger;
    this.#rootPathUtils = new RootPathUtils(opts.rootDir);
  }
  /**
   * Process a batch of files, farming work out to worker threads when the
   * batch is large enough. Per-file failures are collected into the returned
   * `errors` array rather than rejecting the whole batch.
   */
  async processBatch(
    files: ReadonlyArray<[string /*relativePath*/, FileMetadata]>,
    req: ProcessFileRequest,
  ): Promise<{
    errors: Array<{
      normalFilePath: string,
      error: MaybeCodedError,
    }>,
  }> {
    // Fix: validate the request *before* filtering jobs or creating the batch
    // worker. Previously this threw after #getBatchWorker(), which could leak
    // a spawned jest-worker farm (never end()ed) and consume the one-shot
    // perf logger.
    if (req.maybeReturnContent) {
      throw new Error(
        'Batch processing does not support returning file contents',
      );
    }
    const errors = [];
    const workerJobs = files
      .map(([normalFilePath, fileMetadata]) => {
        const maybeWorkerInput = this.#getWorkerInput(
          normalFilePath,
          fileMetadata,
          req,
        );
        if (!maybeWorkerInput) {
          return null;
        }
        return [maybeWorkerInput, fileMetadata];
      })
      .filter(Boolean);
    const numWorkers = Math.min(
      this.#maxWorkers,
      Math.ceil(workerJobs.length / this.#maxFilesPerWorker),
    );
    const batchWorker = this.#getBatchWorker(numWorkers);
    await Promise.all(
      workerJobs.map(([workerInput, fileMetadata]) => {
        return batchWorker
          .processFile(workerInput)
          .then(reply =>
            processWorkerReply(reply, workerInput.pluginsToRun, fileMetadata),
          )
          .catch(error =>
            errors.push({
              normalFilePath: this.#rootPathUtils.absoluteToNormal(
                workerInput.filePath,
              ),
              error: normalizeWorkerError(error),
            }),
          );
      }),
    );
    await batchWorker.end();
    return {errors};
  }
  // Process a single file synchronously in-band; returns null when there is
  // nothing to do for this file.
  processRegularFile(
    normalPath: string,
    fileMetadata: FileMetadata,
    req: ProcessFileRequest,
  ): ?{content: ?Buffer} {
    const workerInput = this.#getWorkerInput(normalPath, fileMetadata, req);
    return workerInput
      ? {
          content: processWorkerReply(
            this.#inBandWorker.processFile(workerInput),
            workerInput.pluginsToRun,
            fileMetadata,
          ),
        }
      : null;
  }
  // Build the message sent to a worker for this file, or null if no
  // processing is required.
  #getWorkerInput(
    normalPath: string,
    fileMetadata: FileMetadata,
    req: ProcessFileRequest,
  ): ?WorkerMessage {
    if (fileMetadata[H.SYMLINK] !== 0) {
      // Only process regular files
      return null;
    }
    const computeSha1 = req.computeSha1 && fileMetadata[H.SHA1] == null;
    const {maybeReturnContent} = req;
    const nodeModulesIdx = normalPath.indexOf(NODE_MODULES_SEP);
    // Path may begin 'node_modules/' or contain '/node_modules/'.
    const isNodeModules =
      nodeModulesIdx === 0 ||
      (nodeModulesIdx > 0 && normalPath[nodeModulesIdx - 1] === sep);
    // Indices of plugins with a passing filter
    const pluginsToRun =
      this.#pluginWorkers?.reduce((prev, plugin, idx) => {
        if (plugin.filter({isNodeModules, normalPath})) {
          prev.push(idx);
        }
        return prev;
      }, [] as Array<number>) ?? [];
    if (!computeSha1 && pluginsToRun.length === 0) {
      // Nothing to process
      return null;
    }
    // Use a cheaper worker configuration for node_modules files, because
    // they may never be Haste modules or packages.
    //
    // Note that we'd only expect node_modules files to reach this point if
    // retainAllFiles is true, or they're touched during watch mode.
    if (isNodeModules) {
      if (computeSha1) {
        return {
          computeSha1: true,
          filePath: this.#rootPathUtils.normalToAbsolute(normalPath),
          maybeReturnContent,
          pluginsToRun,
        };
      }
      return null;
    }
    return {
      computeSha1,
      filePath: this.#rootPathUtils.normalToAbsolute(normalPath),
      maybeReturnContent,
      pluginsToRun,
    };
  }
  /**
   * Creates workers or parses files and extracts metadata in-process.
   */
  #getBatchWorker(numWorkers: number): AsyncWorker {
    if (numWorkers <= 1) {
      // In-band worker with the same interface as a Jest worker farm
      return {
        processFile: async message => this.#inBandWorker.processFile(message),
        end: async () => {},
      };
    }
    const workerPath = require.resolve('../worker');
    debug('Creating worker farm of %d worker threads', numWorkers);
    this.#perfLogger?.point('initWorkers_start');
    const jestWorker = new JestWorker<{
      processFile: WorkerMessage => Promise<WorkerMetadata>,
    }>(workerPath, {
      exposedMethods: ['processFile'],
      maxRetries: 3,
      numWorkers,
      enableWorkerThreads: true,
      forkOptions: {
        // Don't pass Node arguments down to workers. In particular, avoid
        // unnecessarily registering Babel when we're running Metro from
        // source (our worker is plain CommonJS).
        execArgv: [],
      },
      setupArgs: [
        {
          plugins: this.#pluginWorkers.map(plugin => plugin.worker),
        } as WorkerSetupArgs,
      ],
    });
    this.#perfLogger?.point('initWorkers_end');
    // Only log worker init once
    this.#perfLogger = null;
    return jestWorker;
  }
  async end(): Promise<void> {}
}
// Copy worker results (plugin data and SHA1) back onto the file's metadata
// row, mark the file as visited, and return any content the worker read.
function processWorkerReply(
  metadata: WorkerMetadata,
  pluginsRun: ReadonlyArray<number>,
  fileMetadata: FileMetadata,
) {
  fileMetadata[H.VISITED] = 1;
  const {pluginData} = metadata;
  if (pluginData) {
    // pluginData[i] corresponds to the i-th plugin index in pluginsRun.
    pluginsRun.forEach((pluginIdx, i) => {
      // $FlowFixMe[invalid-tuple-index]
      fileMetadata[H.PLUGINDATA + pluginIdx] = pluginData[i];
    });
  }
  if (metadata.sha1 != null) {
    fileMetadata[H.SHA1] = metadata.sha1;
  }
  return metadata.content;
}
// Ensure anything thrown by a worker is a real Error-like object. Worker IPC
// may deliver strings or null; wrap those in a stack-less Error.
function normalizeWorkerError(mixedError: ?Error | string): MaybeCodedError {
  const isErrorLike =
    mixedError != null &&
    typeof mixedError === 'object' &&
    mixedError.message != null &&
    mixedError.stack != null;
  if (isErrorLike) {
    return mixedError;
  }
  const wrapped = new Error(mixedError);
  wrapped.stack = ''; // Remove stack for stack-less errors.
  return wrapped;
}

View File

@@ -0,0 +1,40 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @noformat
* @oncall react_native
* @generated SignedSource<<5feda1b197530a9a5fdbc57200633ac5>>
*
* This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
* Original file: packages/metro-file-map/src/lib/FileSystemChangeAggregator.js
* To regenerate, run:
* js1 build metro-ts-defs (internal) OR
* yarn run build-ts-defs (OSS)
*/
import type {
CanonicalPath,
FileMetadata,
FileSystemListener,
ReadonlyFileSystemChanges,
} from '../flow-types';
// Accumulates file-system change events into a net per-batch delta: paired
// add/remove events cancel out, a remove followed by an add becomes a
// modification, and removed files report the metadata they had before the
// batch began. See FileSystemChangeAggregator.js.
export declare class FileSystemChangeAggregator implements FileSystemListener {
  directoryAdded(canonicalPath: CanonicalPath): void;
  directoryRemoved(canonicalPath: CanonicalPath): void;
  fileAdded(canonicalPath: CanonicalPath, data: FileMetadata): void;
  fileModified(
    canonicalPath: CanonicalPath,
    oldData: FileMetadata,
    newData: FileMetadata,
  ): void;
  fileRemoved(canonicalPath: CanonicalPath, data: FileMetadata): void;
  // Total number of pending (net) changes across all categories.
  getSize(): number;
  getView(): ReadonlyFileSystemChanges<FileMetadata>;
  // Like getView, but lazily maps each file's metadata through metadataMapFn.
  getMappedView<T>(
    metadataMapFn: (metadata: FileMetadata) => T,
  ): ReadonlyFileSystemChanges<T>;
}

View File

@@ -0,0 +1,89 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true,
});
exports.FileSystemChangeAggregator = void 0;
/**
 * Collects file-system change events and exposes the *net* change set for a
 * batch: paired add/remove events cancel out, a remove followed by an add
 * becomes a modification, and removed/modified files are reported against
 * the metadata they had before the batch started.
 */
class FileSystemChangeAggregator {
  #addedDirectories = new Set();
  #removedDirectories = new Set();
  #addedFiles = new Map();
  #modifiedFiles = new Map();
  #removedFiles = new Map();
  // First-seen (pre-batch) metadata for files modified/removed in this batch.
  #initialMetadata = new Map();
  directoryAdded(canonicalPath) {
    // A removal earlier in the batch cancels out this addition.
    const cancelledRemoval = this.#removedDirectories.delete(canonicalPath);
    if (!cancelledRemoval) {
      this.#addedDirectories.add(canonicalPath);
    }
  }
  directoryRemoved(canonicalPath) {
    const cancelledAddition = this.#addedDirectories.delete(canonicalPath);
    if (!cancelledAddition) {
      this.#removedDirectories.add(canonicalPath);
    }
  }
  fileAdded(canonicalPath, data) {
    // Removed then re-added within the batch => net modification.
    const target = this.#removedFiles.delete(canonicalPath)
      ? this.#modifiedFiles
      : this.#addedFiles;
    target.set(canonicalPath, data);
  }
  fileModified(canonicalPath, oldData, newData) {
    if (this.#addedFiles.has(canonicalPath)) {
      // The file is new in this batch; just refresh its metadata.
      this.#addedFiles.set(canonicalPath, newData);
      return;
    }
    if (!this.#initialMetadata.has(canonicalPath)) {
      this.#initialMetadata.set(canonicalPath, oldData);
    }
    this.#modifiedFiles.set(canonicalPath, newData);
  }
  fileRemoved(canonicalPath, data) {
    if (this.#addedFiles.delete(canonicalPath)) {
      // Added then removed in the same batch => no net change.
      return;
    }
    let initialData = this.#initialMetadata.get(canonicalPath);
    if (!initialData) {
      initialData = data;
      this.#initialMetadata.set(canonicalPath, initialData);
    }
    // Report the removal against the file's pre-batch metadata.
    this.#modifiedFiles.delete(canonicalPath);
    this.#removedFiles.set(canonicalPath, initialData);
  }
  getSize() {
    const fileChanges =
      this.#addedFiles.size + this.#modifiedFiles.size + this.#removedFiles.size;
    const directoryChanges =
      this.#addedDirectories.size + this.#removedDirectories.size;
    return fileChanges + directoryChanges;
  }
  getView() {
    return {
      addedDirectories: this.#addedDirectories,
      removedDirectories: this.#removedDirectories,
      addedFiles: this.#addedFiles,
      modifiedFiles: this.#modifiedFiles,
      removedFiles: this.#removedFiles,
    };
  }
  getMappedView(metadataMapFn) {
    return {
      addedDirectories: this.#addedDirectories,
      removedDirectories: this.#removedDirectories,
      addedFiles: mapIterable(this.#addedFiles, metadataMapFn),
      modifiedFiles: mapIterable(this.#modifiedFiles, metadataMapFn),
      removedFiles: mapIterable(this.#removedFiles, metadataMapFn),
    };
  }
}
exports.FileSystemChangeAggregator = FileSystemChangeAggregator;
// Wrap a metadata map in a lazy, re-iterable view that applies metadataMapFn
// to each value as it is consumed, without copying the underlying map.
function mapIterable(map, metadataMapFn) {
  return {
    *[Symbol.iterator]() {
      for (const entry of map) {
        yield [entry[0], metadataMapFn(entry[1])];
      }
    },
  };
}

View File

@@ -0,0 +1,143 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow strict-local
* @format
* @oncall react_native
*/
import type {
CanonicalPath,
FileMetadata,
FileSystemListener,
ReadonlyFileSystemChanges,
} from '../flow-types';
/**
 * Collects file-system change events and exposes the *net* change set for a
 * batch: paired add/remove events cancel out, a remove followed by an add
 * becomes a modification, and removed/modified files are reported against
 * the metadata they had before the batch started.
 */
export class FileSystemChangeAggregator implements FileSystemListener {
  // Mutually exclusive with removedDirectories
  +#addedDirectories: Set<CanonicalPath> = new Set();
  // Mutually exclusive with addedDirectories
  +#removedDirectories: Set<CanonicalPath> = new Set();
  // Mutually exclusive with modified and removed files
  +#addedFiles: Map<CanonicalPath, FileMetadata> = new Map();
  // Mutually exclusive with added and removed files
  +#modifiedFiles: Map<CanonicalPath, FileMetadata> = new Map();
  // Mutually exclusive with added and modified files
  +#removedFiles: Map<CanonicalPath, FileMetadata> = new Map();
  // The metadata each file had the first time it was modified or removed
  // within this batch - i.e., its last state observable by consumers before
  // the batch. Kept so that remove/re-add/remove sequences still report the
  // pre-batch metadata, which matters if e.g. a regular file is replaced by
  // a symlink, or vice-versa.
  +#initialMetadata: Map<CanonicalPath, FileMetadata> = new Map();
  directoryAdded(canonicalPath: CanonicalPath): void {
    // A removal earlier in the batch cancels out this addition - the net
    // effect of remove-then-add is no directory change.
    const cancelledRemoval = this.#removedDirectories.delete(canonicalPath);
    if (!cancelledRemoval) {
      this.#addedDirectories.add(canonicalPath);
    }
  }
  directoryRemoved(canonicalPath: CanonicalPath): void {
    const cancelledAddition = this.#addedDirectories.delete(canonicalPath);
    if (!cancelledAddition) {
      this.#removedDirectories.add(canonicalPath);
    }
  }
  fileAdded(canonicalPath: CanonicalPath, data: FileMetadata): void {
    if (this.#removedFiles.delete(canonicalPath)) {
      // Removed then re-added within the batch => net modification.
      this.#modifiedFiles.set(canonicalPath, data);
      return;
    }
    // Genuinely new file.
    this.#addedFiles.set(canonicalPath, data);
  }
  fileModified(
    canonicalPath: CanonicalPath,
    oldData: FileMetadata,
    newData: FileMetadata,
  ): void {
    if (this.#addedFiles.has(canonicalPath)) {
      // The file did not exist before this batch; just refresh its metadata.
      this.#addedFiles.set(canonicalPath, newData);
      return;
    }
    if (!this.#initialMetadata.has(canonicalPath)) {
      this.#initialMetadata.set(canonicalPath, oldData);
    }
    this.#modifiedFiles.set(canonicalPath, newData);
  }
  fileRemoved(canonicalPath: CanonicalPath, data: FileMetadata): void {
    if (this.#addedFiles.delete(canonicalPath)) {
      // Added then removed in the same batch - no net change.
      return;
    }
    let initialData = this.#initialMetadata.get(canonicalPath);
    if (!initialData) {
      initialData = data;
      this.#initialMetadata.set(canonicalPath, initialData);
    }
    // Report the removal against the file's pre-batch metadata.
    this.#modifiedFiles.delete(canonicalPath);
    this.#removedFiles.set(canonicalPath, initialData);
  }
  getSize(): number {
    const fileChanges =
      this.#addedFiles.size +
      this.#modifiedFiles.size +
      this.#removedFiles.size;
    const directoryChanges =
      this.#addedDirectories.size + this.#removedDirectories.size;
    return fileChanges + directoryChanges;
  }
  getView(): ReadonlyFileSystemChanges<FileMetadata> {
    return {
      addedDirectories: this.#addedDirectories,
      removedDirectories: this.#removedDirectories,
      addedFiles: this.#addedFiles,
      modifiedFiles: this.#modifiedFiles,
      removedFiles: this.#removedFiles,
    };
  }
  getMappedView<T>(
    metadataMapFn: (metadata: FileMetadata) => T,
  ): ReadonlyFileSystemChanges<T> {
    return {
      addedDirectories: this.#addedDirectories,
      removedDirectories: this.#removedDirectories,
      addedFiles: mapIterable(this.#addedFiles, metadataMapFn),
      modifiedFiles: mapIterable(this.#modifiedFiles, metadataMapFn),
      removedFiles: mapIterable(this.#removedFiles, metadataMapFn),
    };
  }
}
// Wrap a metadata map in a lazy, re-iterable view that applies metadataMapFn
// to each value as it is consumed, without copying the underlying map.
function mapIterable<T>(
  map: Map<CanonicalPath, FileMetadata>,
  metadataMapFn: (metadata: FileMetadata) => T,
): Iterable<Readonly<[CanonicalPath, T]>> {
  return {
    *[Symbol.iterator](): Iterator<Readonly<[CanonicalPath, T]>> {
      for (const entry of map) {
        yield [entry[0], metadataMapFn(entry[1])];
      }
    },
  };
}

30
node_modules/metro-file-map/src/lib/RootPathUtils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,30 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @noformat
* @generated SignedSource<<5ecdb559fce5f5c6ed50df6e4eaebf20>>
*
* This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
* Original file: packages/metro-file-map/src/lib/RootPathUtils.js
* To regenerate, run:
* js1 build metro-ts-defs (internal) OR
* yarn run build-ts-defs (OSS)
*/
// Fast conversions between absolute paths and "normal" (root-relative,
// indirection-free) paths, precomputed against a single project root.
// See RootPathUtils.js for the definition of a "normal" path.
export declare class RootPathUtils {
  constructor(rootDir: string);
  getBasenameOfNthAncestor(n: number): string;
  // The root's path segments, split on the platform separator.
  getParts(): ReadonlyArray<string>;
  absoluteToNormal(absolutePath: string): string;
  normalToAbsolute(normalPath: string): string;
  relativeToNormal(relativePath: string): string;
  // Returns n where normalPath is the nth ancestor of the root ('' => 0,
  // '..' => 1, '../..' => 2, ...), or null if it is not an ancestor.
  getAncestorOfRootIdx(normalPath: string): null | undefined | number;
  // Joins a normal path with a relative path, collapsing '..' fragments;
  // also reports how many segments were collapsed.
  joinNormalToRelative(
    normalPath: string,
    relativePath: string,
  ): {normalPath: string; collapsedSegments: number};
  relative(from: string, to: string): string;
}

242
node_modules/metro-file-map/src/lib/RootPathUtils.js generated vendored Normal file
View File

@@ -0,0 +1,242 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true,
});
exports.RootPathUtils = void 0;
var _invariant = _interopRequireDefault(require("invariant"));
var path = _interopRequireWildcard(require("path"));
// Minified Babel interop helper for `import * as ns` over CommonJS modules.
// On first call it rebinds itself to a closure that caches results in
// WeakMaps (one per interop mode `t`) and copies own enumerable properties
// (preserving getters/setters) onto a namespace object whose `default` is
// the original module. Generated code - do not hand-edit.
function _interopRequireWildcard(e, t) {
  if ("function" == typeof WeakMap)
    var r = new WeakMap(),
      n = new WeakMap();
  return (_interopRequireWildcard = function (e, t) {
    if (!t && e && e.__esModule) return e;
    var o,
      i,
      f = { __proto__: null, default: e };
    if (null === e || ("object" != typeof e && "function" != typeof e))
      return f;
    if ((o = t ? n : r)) {
      if (o.has(e)) return o.get(e);
      o.set(e, f);
    }
    for (const t in e)
      "default" !== t &&
        {}.hasOwnProperty.call(e, t) &&
        ((i =
          (o = Object.defineProperty) &&
          Object.getOwnPropertyDescriptor(e, t)) &&
        (i.get || i.set)
          ? o(f, t, i)
          : (f[t] = e[t]));
    return f;
  })(e, t);
}
// Babel interop helper: make `.default` access work uniformly whether the
// required module is a transpiled ES module or a plain CommonJS export.
function _interopRequireDefault(e) {
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
// Path-fragment constants precomputed with the platform separator
// (shown here with the POSIX separator for illustration).
const UP_FRAGMENT_SEP = ".." + path.sep; // '../'
const SEP_UP_FRAGMENT = path.sep + ".."; // '/..'
const UP_FRAGMENT_SEP_LENGTH = UP_FRAGMENT_SEP.length;
const CURRENT_FRAGMENT = "." + path.sep; // './'
/**
 * Path utilities optimised for Metro's use case (many paths, one root).
 * Per the Flow source: a "normal" path is root-relative with no redundant
 * indirections, and the normal path of the root itself is the empty string.
 * Inputs must use system-native separators with no doubled separators.
 * Falls back to `node:path` for well-formed but unoptimised inputs.
 */
class RootPathUtils {
  #rootDir; // The project root, as passed to the constructor.
  #rootDirnames; // [rootDir, dirname(rootDir), ..., filesystem root]
  #rootParts; // rootDir split on path.sep (spurious empty tail popped).
  #rootDepth; // Number of ancestors between rootDir and the fs root.
  constructor(rootDir) {
    this.#rootDir = rootDir;
    const rootDirnames = [];
    // Walk upwards with path.dirname until it stops changing (the fs root).
    for (
      let next = rootDir, previous = null;
      previous !== next;
      previous = next, next = path.dirname(next)
    ) {
      rootDirnames.push(next);
    }
    this.#rootDirnames = rootDirnames;
    this.#rootParts = rootDir.split(path.sep);
    this.#rootDepth = rootDirnames.length - 1;
    // A filesystem root ends in a separator, leaving a spurious empty entry
    // at the end of rootParts - drop it.
    if (this.#rootDepth === 0) {
      this.#rootParts.pop();
    }
  }
  getBasenameOfNthAncestor(n) {
    return this.#rootParts[this.#rootParts.length - 1 - n];
  }
  getParts() {
    return this.#rootParts;
  }
  // Convert a well-formed absolute path to a normal (root-relative) path.
  absoluteToNormal(absolutePath) {
    let endOfMatchingPrefix = 0;
    let lastMatchingPartIdx = 0;
    // Match as many leading root segments as possible against absolutePath;
    // each matched segment must be followed by a separator or end-of-string.
    for (
      let nextPart = this.#rootParts[0], nextLength = nextPart.length;
      nextPart != null &&
      absolutePath.startsWith(nextPart, endOfMatchingPrefix) &&
      (absolutePath.length === endOfMatchingPrefix + nextLength ||
        absolutePath[endOfMatchingPrefix + nextLength] === path.sep);

    ) {
      endOfMatchingPrefix += nextLength + 1;
      nextPart = this.#rootParts[++lastMatchingPartIdx];
      nextLength = nextPart?.length;
    }
    // One implicit '../' per unmatched root segment takes us from the root
    // up to the deepest common ancestor of the root and absolutePath.
    const upIndirectionsToPrepend =
      this.#rootParts.length - lastMatchingPartIdx;
    return (
      this.#tryCollapseIndirectionsInSuffix(
        absolutePath,
        endOfMatchingPrefix,
        upIndirectionsToPrepend,
      )?.collapsedPath ?? this.#slowAbsoluteToNormal(absolutePath)
    );
  }
  // Fallback via path.relative, preserving any trailing separator.
  #slowAbsoluteToNormal(absolutePath) {
    const endsWithSep = absolutePath.endsWith(path.sep);
    const result = path.relative(this.#rootDir, absolutePath);
    return endsWithSep && !result.endsWith(path.sep)
      ? result + path.sep
      : result;
  }
  // Convert a normal path back to an absolute path, resolving any leading
  // '../' fragments against the precomputed root ancestors.
  normalToAbsolute(normalPath) {
    let left = this.#rootDir;
    let i = 0;
    let pos = 0;
    while (
      normalPath.startsWith(UP_FRAGMENT_SEP, pos) ||
      (normalPath.endsWith("..") && normalPath.length === 2 + pos)
    ) {
      // Clamp at the filesystem root: '..' above the root stays at the root.
      left = this.#rootDirnames[i === this.#rootDepth ? this.#rootDepth : ++i];
      pos += UP_FRAGMENT_SEP_LENGTH;
    }
    const right = pos === 0 ? normalPath : normalPath.slice(pos);
    if (right.length === 0) {
      return left;
    }
    if (i === this.#rootDepth) {
      // left is the filesystem root, which already ends with a separator.
      return left + right;
    }
    return left + path.sep + right;
  }
  relativeToNormal(relativePath) {
    return (
      this.#tryCollapseIndirectionsInSuffix(relativePath, 0, 0)
        ?.collapsedPath ??
      path.relative(this.#rootDir, path.join(this.#rootDir, relativePath))
    );
  }
  // '' => 0, '..' => 1, '../..' => 2, ... or null when normalPath is not an
  // ancestor of the root.
  getAncestorOfRootIdx(normalPath) {
    if (normalPath === "") {
      return 0;
    }
    if (normalPath === "..") {
      return 1;
    }
    if (normalPath.endsWith(SEP_UP_FRAGMENT)) {
      // '../..' has length 5 => 2, '../../..' has length 8 => 3, etc.
      return (normalPath.length + 1) / 3;
    }
    return null;
  }
  // Join a normal path with a relative path, collapsing any '../' fragments
  // in the result; reports how many segments were collapsed.
  joinNormalToRelative(normalPath, relativePath) {
    if (normalPath === "") {
      return {
        collapsedSegments: 0,
        normalPath: relativePath,
      };
    }
    if (relativePath === "") {
      return {
        collapsedSegments: 0,
        normalPath,
      };
    }
    const left = normalPath + path.sep;
    const rawPath = left + relativePath;
    // Only paths that are ancestors of the root can need collapsing here.
    if (normalPath === ".." || normalPath.endsWith(SEP_UP_FRAGMENT)) {
      const collapsed = this.#tryCollapseIndirectionsInSuffix(rawPath, 0, 0);
      (0, _invariant.default)(collapsed != null, "Failed to collapse");
      return {
        collapsedSegments: collapsed.collapsedSegments,
        normalPath: collapsed.collapsedPath,
      };
    }
    return {
      collapsedSegments: 0,
      normalPath: rawPath,
    };
  }
  relative(from, to) {
    return path.relative(from, to);
  }
  // Core collapse routine: given fullPath whose suffix (from
  // startOfRelativePart) may contain '../' fragments, plus a count of
  // implicit leading '..' indirections, produce a normal path. Returns null
  // when the input contains fragments this fast path does not support
  // (e.g. './'), signalling callers to use the node:path fallback.
  #tryCollapseIndirectionsInSuffix(
    fullPath,
    startOfRelativePart,
    implicitUpIndirections,
  ) {
    let totalUpIndirections = implicitUpIndirections;
    let collapsedSegments = 0;
    for (let pos = startOfRelativePart; ; pos += UP_FRAGMENT_SEP_LENGTH) {
      const nextIndirection = fullPath.indexOf(CURRENT_FRAGMENT, pos);
      if (nextIndirection === -1) {
        // No more '.'-led fragments. Cancel pending '..' indirections against
        // matching trailing segments of the root.
        while (totalUpIndirections > 0) {
          const segmentToMaybeCollapse =
            this.#rootParts[this.#rootParts.length - totalUpIndirections];
          if (
            fullPath.startsWith(segmentToMaybeCollapse, pos) &&
            (fullPath.length === segmentToMaybeCollapse.length + pos ||
              fullPath[segmentToMaybeCollapse.length + pos] === path.sep)
          ) {
            pos += segmentToMaybeCollapse.length + 1;
            collapsedSegments++;
            totalUpIndirections--;
          } else {
            break;
          }
        }
        if (pos >= fullPath.length) {
          // The whole input was consumed; result is zero or more '..' parts
          // (preserving any trailing separator via slice(pos - 1)).
          return {
            collapsedPath:
              totalUpIndirections > 0
                ? UP_FRAGMENT_SEP.repeat(totalUpIndirections - 1) +
                  ".." +
                  fullPath.slice(pos - 1)
                : "",
            collapsedSegments,
          };
        }
        const right = pos > 0 ? fullPath.slice(pos) : fullPath;
        if (
          right === ".." &&
          totalUpIndirections >= this.#rootParts.length - 1
        ) {
          // Trailing '..' clamped at the filesystem root - drop the extra
          // trailing separator from the repeated fragment.
          return {
            collapsedPath: UP_FRAGMENT_SEP.repeat(totalUpIndirections).slice(
              0,
              -1,
            ),
            collapsedSegments,
          };
        }
        if (totalUpIndirections === 0) {
          return {
            collapsedPath: right,
            collapsedSegments,
          };
        }
        return {
          collapsedPath: UP_FRAGMENT_SEP.repeat(totalUpIndirections) + right,
          collapsedSegments,
        };
      }
      // Found another '.'-led fragment: count one more pending indirection,
      // clamped at the filesystem root.
      if (totalUpIndirections < this.#rootParts.length - 1) {
        totalUpIndirections++;
      }
      // Only plain '../' fragments are supported on this fast path; anything
      // else (e.g. './' or a segment ending in '.') falls back to node:path.
      if (nextIndirection !== pos + 1 || fullPath[pos] !== ".") {
        return null;
      }
    }
  }
}
exports.RootPathUtils = RootPathUtils;

View File

@@ -0,0 +1,316 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow strict
* @format
*/
import invariant from 'invariant';
import * as path from 'path';
/**
* This module provides path utility functions - similar to `node:path` -
* optimised for Metro's use case (many paths, few roots) under assumptions
* typically safe to make within Metro - namely:
*
* - All input path separators must be system-native.
* - Double/redundant separators like '/foo//bar' are not supported.
* - All characters except separators are assumed to be valid in path segments.
*
* - A "well-formed" path is any path following the rules above.
* - A "normal" path is a root-relative well-formed path with no redundant
* indirections. Normal paths have no leading './`, and the normal path of
* the root is the empty string.
*
* Output and input paths are at least well-formed (normal where indicated by
* naming).
*
* Trailing path separators are preserved, except for fs roots in
* normalToAbsolute (fs roots always have a trailing separator), and the
* project root in absoluteToNormal and relativeToNormal (the project root is
* always the empty string, and is always a directory, so a trailing separator
* is redundant).
*
* As of Node 20, absoluteToNormal is ~8x faster than `path.relative` and
* `normalToAbsolute` is ~20x faster than `path.resolve`, benchmarked on the
* real inputs from building FB's product graph. Some well-formed inputs
* (e.g., /project/./foo/../bar), are handled but not optimised, and we fall
* back to `node:path` equivalents in those cases.
*/
const UP_FRAGMENT_SEP = '..' + path.sep;
const SEP_UP_FRAGMENT = path.sep + '..';
const UP_FRAGMENT_SEP_LENGTH = UP_FRAGMENT_SEP.length;
const CURRENT_FRAGMENT = '.' + path.sep;
export class RootPathUtils {
  // Absolute project root as given to the constructor.
  #rootDir: string;
  // rootDir plus each successive dirname up to the filesystem root, e.g.
  // ['/project/root', '/project', '/'].
  #rootDirnames: ReadonlyArray<string>;
  // rootDir split on path.sep, e.g. ['', 'project', 'root'] on POSIX.
  #rootParts: ReadonlyArray<string>;
  // Number of ancestors between rootDir and the filesystem root (0 when
  // rootDir is itself a filesystem root).
  #rootDepth: number;
  constructor(rootDir: string) {
    this.#rootDir = rootDir;
    // Walk dirname() upwards until it stops changing (i.e. we reached the
    // filesystem root), recording every ancestor along the way.
    const rootDirnames = [];
    for (
      let next = rootDir, previous = null;
      /* $FlowFixMe[invalid-compare] Error discovered during Constant Condition
       * roll out. See https://fburl.com/workplace/5whu3i34. */
      previous !== next;
      previous = next, next = path.dirname(next)
    ) {
      rootDirnames.push(next);
    }
    this.#rootDirnames = rootDirnames;
    this.#rootParts = rootDir.split(path.sep);
    this.#rootDepth = rootDirnames.length - 1;
    // If rootDir is a filesystem root (C:\ or /), it will end in a separator and
    // give a spurious empty entry at the end of rootParts.
    if (this.#rootDepth === 0) {
      this.#rootParts.pop();
    }
  }
  // Basename of the n'th ancestor of the project root (n=0 is the root
  // itself, n=1 its parent, ...).
  getBasenameOfNthAncestor(n: number): string {
    return this.#rootParts[this.#rootParts.length - 1 - n];
  }
  // The project root's path segments, as split on path.sep.
  getParts(): ReadonlyArray<string> {
    return this.#rootParts;
  }
  // absolutePath may be any well-formed absolute path.
  absoluteToNormal(absolutePath: string): string {
    // Match absolutePath segment-by-segment against the root's segments,
    // tracking how far the two agree.
    let endOfMatchingPrefix = 0;
    let lastMatchingPartIdx = 0;
    for (
      let nextPart = this.#rootParts[0], nextLength = nextPart.length;
      nextPart != null &&
      // Check that absolutePath is equal to nextPart + '/' or ends with
      // nextPart, starting from endOfMatchingPrefix.
      absolutePath.startsWith(nextPart, endOfMatchingPrefix) &&
      (absolutePath.length === endOfMatchingPrefix + nextLength ||
        absolutePath[endOfMatchingPrefix + nextLength] === path.sep);

    ) {
      // Move our matching pointer forward and load the next part.
      endOfMatchingPrefix += nextLength + 1;
      nextPart = this.#rootParts[++lastMatchingPartIdx];
      nextLength = nextPart?.length;
    }
    // If our root is /project/root and we're given /project/bar/foo.js, we
    // have matched up to '/project', and will need to return a path
    // beginning '../' (one prepended indirection, to go up from 'root').
    //
    // If we're given /project/../project2/otherroot, we have one level of
    // indirection up to prepend in the same way as above. There's another
    // explicit indirection already present in the input - we'll account for
    // that in tryCollapseIndirectionsInSuffix.
    const upIndirectionsToPrepend =
      this.#rootParts.length - lastMatchingPartIdx;
    return (
      this.#tryCollapseIndirectionsInSuffix(
        absolutePath,
        endOfMatchingPrefix,
        upIndirectionsToPrepend,
      )?.collapsedPath ?? this.#slowAbsoluteToNormal(absolutePath)
    );
  }
  // Fallback for inputs the fast path can't handle (e.g. embedded './'):
  // delegate to path.relative, preserving any trailing separator, which
  // path.relative would otherwise drop.
  #slowAbsoluteToNormal(absolutePath: string): string {
    const endsWithSep = absolutePath.endsWith(path.sep);
    const result = path.relative(this.#rootDir, absolutePath);
    return endsWithSep && !result.endsWith(path.sep)
      ? result + path.sep
      : result;
  }
  // `normalPath` is assumed to be normal (root-relative, no redundant
  // indirection), per the definition above.
  normalToAbsolute(normalPath: string): string {
    let left = this.#rootDir;
    let i = 0;
    let pos = 0;
    // Consume leading '../' fragments (or a bare trailing '..'), stepping
    // `left` up one precomputed dirname each time. Indirections are capped
    // at the filesystem root, where '..' is a no-op.
    while (
      normalPath.startsWith(UP_FRAGMENT_SEP, pos) ||
      (normalPath.endsWith('..') && normalPath.length === 2 + pos)
    ) {
      left = this.#rootDirnames[i === this.#rootDepth ? this.#rootDepth : ++i];
      pos += UP_FRAGMENT_SEP_LENGTH;
    }
    const right = pos === 0 ? normalPath : normalPath.slice(pos);
    if (right.length === 0) {
      return left;
    }
    // left may already end in a path separator only if it is a filesystem root,
    // '/' or 'X:\'.
    if (i === this.#rootDepth) {
      return left + right;
    }
    return left + path.sep + right;
  }
  // Converts a root-relative (but possibly non-normal) path to normal form,
  // falling back to node:path when the fast collapse doesn't apply.
  relativeToNormal(relativePath: string): string {
    return (
      this.#tryCollapseIndirectionsInSuffix(relativePath, 0, 0)
        ?.collapsedPath ??
      path.relative(this.#rootDir, path.join(this.#rootDir, relativePath))
    );
  }
  // If a path is a direct ancestor of the project root (or the root itself),
  // return a number with the degrees of separation, e.g. root=0, parent=1,..
  // or null otherwise.
  getAncestorOfRootIdx(normalPath: string): ?number {
    if (normalPath === '') {
      return 0;
    }
    if (normalPath === '..') {
      return 1;
    }
    // Otherwise a *normal* path is only a root ancestor if it is a sequence of
    // '../' segments followed by '..', so the length tells us the number of
    // up fragments.
    if (normalPath.endsWith(SEP_UP_FRAGMENT)) {
      return (normalPath.length + 1) / 3;
    }
    return null;
  }
  // Takes a normal and relative path, and joins them efficiently into a normal
  // path, including collapsing trailing '..' in the first part with leading
  // project root segments in the relative part.
  joinNormalToRelative(
    normalPath: string,
    relativePath: string,
  ): {normalPath: string, collapsedSegments: number} {
    if (normalPath === '') {
      return {collapsedSegments: 0, normalPath: relativePath};
    }
    if (relativePath === '') {
      return {collapsedSegments: 0, normalPath};
    }
    const left = normalPath + path.sep;
    const rawPath = left + relativePath;
    // Only a left side ending in '..' can collapse against the right side.
    if (normalPath === '..' || normalPath.endsWith(SEP_UP_FRAGMENT)) {
      const collapsed = this.#tryCollapseIndirectionsInSuffix(rawPath, 0, 0);
      invariant(collapsed != null, 'Failed to collapse');
      return {
        collapsedSegments: collapsed.collapsedSegments,
        normalPath: collapsed.collapsedPath,
      };
    }
    return {
      collapsedSegments: 0,
      normalPath: rawPath,
    };
  }
  // Plain passthrough to path.relative (no fast path).
  relative(from: string, to: string): string {
    return path.relative(from, to);
  }
  // Internal: Tries to collapse sequences like `../root/foo` for root
  // `/project/root` down to the normal 'foo'.
  #tryCollapseIndirectionsInSuffix(
    fullPath: string, // A string ending with the relative path to process
    startOfRelativePart: number, // Index of the start of part to process
    implicitUpIndirections: number, // 0=root-relative, 1=dirname(root)-relative...
  ): ?{collapsedPath: string, collapsedSegments: number} {
    let totalUpIndirections = implicitUpIndirections;
    let collapsedSegments = 0;
    // Allow any sequence of indirection fragments at the start of the
    // unmatched suffix e.g /project/[../../foo], but bail out to Node's
    // path.relative if we find a possible indirection after any later segment,
    // or on any "./" that isn't a "../".
    for (let pos = startOfRelativePart; ; pos += UP_FRAGMENT_SEP_LENGTH) {
      const nextIndirection = fullPath.indexOf(CURRENT_FRAGMENT, pos);
      if (nextIndirection === -1) {
        // If we have any indirections, they may "collapse" if a subsequent
        // segment re-enters a directory we had previously exited, e.g:
        // /project/root/../root/foo should collapse to /project/root/foo' and
        // return foo, not ../root/foo.
        //
        // We match each segment following redirections, in turn, against the
        // part of the root path they may collapse into, and break on the first
        // mismatch.
        while (totalUpIndirections > 0) {
          const segmentToMaybeCollapse =
            this.#rootParts[this.#rootParts.length - totalUpIndirections];
          if (
            fullPath.startsWith(segmentToMaybeCollapse, pos) &&
            // The following character should be either a separator or end of
            // string
            (fullPath.length === segmentToMaybeCollapse.length + pos ||
              fullPath[segmentToMaybeCollapse.length + pos] === path.sep)
          ) {
            pos += segmentToMaybeCollapse.length + 1;
            collapsedSegments++;
            totalUpIndirections--;
          } else {
            break;
          }
        }
        // After collapsing we may have no more segments remaining (following
        // '..' indirections). Ensure that we don't drop or add a trailing
        // separator in this case by taking .slice(pos-1). In any other case,
        // we know that fullPath[pos] is a separator.
        if (pos >= fullPath.length) {
          return {
            collapsedPath:
              totalUpIndirections > 0
                ? UP_FRAGMENT_SEP.repeat(totalUpIndirections - 1) +
                  '..' +
                  fullPath.slice(pos - 1)
                : '',
            collapsedSegments,
          };
        }
        const right = pos > 0 ? fullPath.slice(pos) : fullPath;
        if (
          right === '..' &&
          totalUpIndirections >= this.#rootParts.length - 1
        ) {
          // If we have no right side (or an indirection that would take us
          // below the root), just ensure we don't include a trailing separtor.
          return {
            collapsedPath: UP_FRAGMENT_SEP.repeat(totalUpIndirections).slice(
              0,
              -1,
            ),
            collapsedSegments,
          };
        }
        // Optimisation for the common case, saves a concatenation.
        if (totalUpIndirections === 0) {
          return {collapsedPath: right, collapsedSegments};
        }
        return {
          collapsedPath: UP_FRAGMENT_SEP.repeat(totalUpIndirections) + right,
          collapsedSegments,
        };
      }
      // Cap the number of indirections at the total number of root segments.
      // File systems treat '..' at the root as '.'.
      if (totalUpIndirections < this.#rootParts.length - 1) {
        totalUpIndirections++;
      }
      if (
        nextIndirection !== pos + 1 || // Fallback when ./ later in the path, or leading
        fullPath[pos] !== '.' // and for anything other than a leading ../
      ) {
        return null;
      }
    }
  }
}

174
node_modules/metro-file-map/src/lib/TreeFS.d.ts generated vendored Normal file
View File

@@ -0,0 +1,174 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @noformat
* @generated SignedSource<<65a3c4140d459a56b8c949e52b32ea1b>>
*
* This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
* Original file: packages/metro-file-map/src/lib/TreeFS.js
* To regenerate, run:
* js1 build metro-ts-defs (internal) OR
* yarn run build-ts-defs (OSS)
*/
import type {
CacheData,
FileData,
FileMetadata,
FileStats,
FileSystemListener,
LookupResult,
MutableFileSystem,
Path,
ProcessFileFunction,
} from '../flow-types';
/** A directory node maps basenames to child nodes (see class docs below). */
type DirectoryNode = Map<string, MixedNode>;
/** A file (or symlink) leaf is its metadata tuple. */
type FileNode = FileMetadata;
type MixedNode = FileNode | DirectoryNode;
/** Arguments for rehydrating a TreeFS from serialised cache data. */
type DeserializedSnapshotInput = {
  rootDir: string;
  fileSystemData: DirectoryNode;
  processFile: ProcessFileFunction;
};
/** Constructor options; `files` optionally seeds the tree. */
type TreeFSOptions = {
  rootDir: Path;
  files?: FileData;
  processFile: ProcessFileFunction;
};
/** Options controlling TreeFS.matchFiles filtering and traversal. */
type MatchFilesOptions = Readonly<{
  filter?: null | undefined | RegExp;
  filterCompareAbsolute?: boolean;
  filterComparePosix?: boolean;
  follow?: boolean;
  recursive?: boolean;
  rootDir?: null | undefined | Path;
}>;
/** Options for metadataIterator: whether to yield symlinks / node_modules. */
type MetadataIteratorOptions = Readonly<{
  includeSymlinks: boolean;
  includeNodeModules: boolean;
}>;
/**
* OVERVIEW:
*
* TreeFS is Metro's in-memory representation of the file system. It is
* structured as a tree of non-empty maps and leaves (tuples), with the root
* node representing the given `rootDir`, typically Metro's _project root_
* (not a filesystem root). Map keys are path segments, and branches outside
* the project root are accessed via `'..'`.
*
* EXAMPLE:
*
* For a root dir '/data/project', the file '/data/other/app/index.js' would
* have metadata at #rootNode.get('..').get('other').get('app').get('index.js')
*
* SERIALISATION:
*
* #rootNode is designed to be directly serialisable and directly portable (for
* a given project) between different root directories and operating systems.
*
* SYMLINKS:
*
* Symlinks are represented as nodes whose metadata contains their literal
* target. Literal targets are resolved to normal paths at runtime, and cached.
* If a symlink is encountered during traversal, we restart traversal at the
* root node targeting join(normal symlink target, remaining path suffix).
*
* NODE TYPES:
*
* - A directory (including a parent directory at '..') is represented by a
* `Map` of basenames to any other node type.
* - A file is represented by an `Array` (tuple) of metadata, of which:
* - A regular file has node[H.SYMLINK] === 0
* - A symlink has node[H.SYMLINK] === 1 or
* typeof node[H.SYMLINK] === 'string', where a string is the literal
* content of the symlink (i.e. from readlink), if known.
*
* TERMINOLOGY:
*
* - mixedPath
* A root-relative or absolute path
* - relativePath
* A root-relative path
* - normalPath
* A root-relative, normalised path (no extraneous '.' or '..'), may have a
* single trailing slash
* - canonicalPath
* A root-relative, normalised, real path (no symlinks in dirname), never has
* a trailing slash
*/
declare class TreeFS implements MutableFileSystem {
  constructor(opts: TreeFSOptions);
  /** Deep-clonable root node, suitable for writing to the cache. */
  getSerializableSnapshot(): CacheData['fileSystemData'];
  /** Rehydrates a TreeFS from data produced by getSerializableSnapshot. */
  static fromDeserializedSnapshot(args: DeserializedSnapshotInput): TreeFS;
  /** Size in bytes of the file at mixedPath, or null if absent/a directory. */
  getSize(mixedPath: Path): null | undefined | number;
  /**
   * Diffs `files` against the current tree (optionally restricted to
   * `options.subpath`): returns entries that are new/changed, and tracked
   * paths absent from `files`.
   */
  getDifference(
    files: FileData,
    options?: Readonly<{subpath?: string}>,
  ): {changedFiles: FileData; removedFiles: Set<string>};
  /** Cached SHA-1 of the file at mixedPath, or null if unknown/absent. */
  getSha1(mixedPath: Path): null | undefined | string;
  /**
   * SHA-1 of the file at mixedPath, computed via processFile if not cached.
   * May also return the file content if processing had to read it.
   */
  getOrComputeSha1(
    mixedPath: Path,
  ): Promise<null | undefined | {sha1: string; content?: Buffer}>;
  /** True if mixedPath resolves (following symlinks) to a tracked file. */
  exists(mixedPath: Path): boolean;
  /** Resolves mixedPath, following symlinks; see LookupResult. */
  lookup(mixedPath: Path): LookupResult;
  /** Absolute real paths of all regular files in the map. */
  getAllFiles(): Array<Path>;
  /** lstat-like stats for mixedPath, without following a leaf symlink. */
  linkStats(mixedPath: Path): null | undefined | FileStats;
  /**
   * Given a search context, return a list of file paths matching the query.
   * The query matches against normalized paths which start with `./`,
   * for example: `a/b.js` -> `./a/b.js`
   */
  matchFiles(opts: MatchFilesOptions): Iterable<Path>;
  /** Adds or updates one file, creating parent directories as needed. */
  addOrModify(
    mixedPath: Path,
    metadata: FileMetadata,
    changeListener?: FileSystemListener,
  ): void;
  /** Batch form of addOrModify; group entries by directory for speed. */
  bulkAddOrModify(
    addedOrModifiedFiles: FileData,
    changeListener?: FileSystemListener,
  ): void;
  /** Removes a path (recursively for directories); prunes empty parents. */
  remove(mixedPath: Path, changeListener?: FileSystemListener): void;
  /**
   * Given a start path (which need not exist), a subpath and type, and
   * optionally a 'breakOnSegment', performs the following:
   *
   * X = mixedStartPath
   * do
   *   if basename(X) === opts.breakOnSegment
   *     return null
   *   if X + subpath exists and has type opts.subpathType
   *     return {
   *       absolutePath: realpath(X + subpath)
   *       containerRelativePath: relative(mixedStartPath, X)
   *     }
   *   X = dirname(X)
   * while X !== dirname(X)
   *
   * If opts.invalidatedBy is given, collects all absolute, real paths that if
   * added or removed may invalidate this result.
   *
   * Useful for finding the closest package scope (subpath: package.json,
   * type f, breakOnSegment: node_modules) or closest potential package root
   * (subpath: node_modules/pkg, type: d) in Node.js resolution.
   */
  hierarchicalLookup(
    mixedStartPath: string,
    subpath: string,
    opts: {
      breakOnSegment: null | undefined | string;
      invalidatedBy: null | undefined | Set<string>;
      subpathType: 'f' | 'd';
    },
  ): null | undefined | {absolutePath: string; containerRelativePath: string};
  /** Iterates metadata of all files (and optionally symlinks) in the tree. */
  metadataIterator(opts: MetadataIteratorOptions): Iterator<{
    baseName: string;
    canonicalPath: string;
    metadata: FileMetadata;
  }>;
}
export default TreeFS;

881
node_modules/metro-file-map/src/lib/TreeFS.js generated vendored Normal file
View File

@@ -0,0 +1,881 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true,
});
exports.default = void 0;
var _constants = _interopRequireDefault(require("../constants"));
var _RootPathUtils = require("./RootPathUtils");
var _invariant = _interopRequireDefault(require("invariant"));
var _path = _interopRequireDefault(require("path"));
// Babel interop helper: pass ES modules through unchanged, wrap CommonJS
// exports as { default: ... } so `.default` access is uniform.
function _interopRequireDefault(e) {
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
// A directory node is represented by a Map of basenames to child nodes;
// anything else (a metadata tuple) is a file or symlink leaf.
function isDirectory(node) {
  const isDirNode = node instanceof Map;
  return isDirNode;
}
// Regular files store 0 in the SYMLINK metadata slot; symlinks store 1 or
// their literal target string.
function isRegularFile(node) {
  const symlinkField = node[_constants.default.SYMLINK];
  return symlinkField === 0;
}
class TreeFS {
#cachedNormalSymlinkTargets = new WeakMap();
#pathUtils;
#processFile;
#rootDir;
#rootNode = new Map();
// rootDir: project root all normal paths are relative to.
// files: optional initial FileData to seed the tree.
// processFile: used by getOrComputeSha1 to hash files on demand.
constructor(opts) {
  const { rootDir, files, processFile } = opts;
  this.#rootDir = rootDir;
  this.#pathUtils = new _RootPathUtils.RootPathUtils(rootDir);
  this.#processFile = processFile;
  if (files != null) {
    this.bulkAddOrModify(files);
  }
}
// Returns a deep clone of the root node for serialisation (e.g. into the
// cache). NOTE(review): #cloneTree is defined later in this file (not
// visible in this chunk).
getSerializableSnapshot() {
  return this.#cloneTree(this.#rootNode);
}
static fromDeserializedSnapshot(args) {
const { rootDir, fileSystemData, processFile } = args;
const tfs = new TreeFS({
processFile,
rootDir,
});
tfs.#rootNode = fileSystemData;
return tfs;
}
getSize(mixedPath) {
const fileMetadata = this.#getFileData(mixedPath);
return (fileMetadata && fileMetadata[_constants.default.SIZE]) ?? null;
}
// Diffs the given `files` map (canonical path -> metadata) against the
// current tree, optionally restricted to options.subpath.
//
// Returns:
//  - changedFiles: entries of `files` that are new or differ from the tree
//    (starts as a copy of `files`; matching entries are deleted).
//  - removedFiles: canonical paths tracked in the tree but absent from
//    `files`.
getDifference(files, options) {
  const changedFiles = new Map(files);
  const removedFiles = new Set();
  const subpath = options?.subpath;
  let rootNode = this.#rootNode;
  let prefix = "";
  if (subpath != null && subpath !== "") {
    const lookupResult = this.#lookupByNormalPath(subpath, {
      followLeaf: true,
    });
    // If subpath doesn't resolve to a directory, everything in `files`
    // stays "changed" and nothing is reported removed.
    if (!lookupResult.exists || !isDirectory(lookupResult.node)) {
      return {
        changedFiles,
        removedFiles,
      };
    }
    rootNode = lookupResult.node;
    prefix = lookupResult.canonicalPath;
  }
  for (const { canonicalPath, metadata } of this.#metadataIterator(
    rootNode,
    {
      includeNodeModules: true,
      includeSymlinks: true,
    },
    prefix,
  )) {
    const newMetadata = files.get(canonicalPath);
    if (newMetadata) {
      // A file <-> symlink type change is always treated as changed.
      if (isRegularFile(newMetadata) !== isRegularFile(metadata)) {
        continue;
      }
      if (
        newMetadata[_constants.default.MTIME] != null &&
        newMetadata[_constants.default.MTIME] != 0 &&
        newMetadata[_constants.default.MTIME] ===
          metadata[_constants.default.MTIME]
      ) {
        // Same non-zero mtime as the tree - treat as unchanged.
        changedFiles.delete(canonicalPath);
      } else if (
        newMetadata[_constants.default.SHA1] != null &&
        newMetadata[_constants.default.SHA1] ===
          metadata[_constants.default.SHA1] &&
        metadata[_constants.default.VISITED] === 1
      ) {
        // mtime differs but content (SHA-1) matches a previously visited
        // entry - keep the existing metadata, carrying over the new mtime.
        const updatedMetadata = [...metadata];
        updatedMetadata[_constants.default.MTIME] =
          newMetadata[_constants.default.MTIME];
        changedFiles.set(canonicalPath, updatedMetadata);
      }
    } else {
      removedFiles.add(canonicalPath);
    }
  }
  return {
    changedFiles,
    removedFiles,
  };
}
getSha1(mixedPath) {
const fileMetadata = this.#getFileData(mixedPath);
return (fileMetadata && fileMetadata[_constants.default.SHA1]) ?? null;
}
// Returns {sha1, content?} for the file at mixedPath, or null for missing
// paths/directories. If the SHA-1 isn't already cached in the metadata, the
// configured processFile function is invoked with computeSha1: true, which
// must populate metadata[SHA1] (asserted below) and may return the file's
// content if it had to read it.
async getOrComputeSha1(mixedPath) {
  const normalPath = this.#normalizePath(mixedPath);
  const result = this.#lookupByNormalPath(normalPath, {
    followLeaf: true,
  });
  if (!result.exists || isDirectory(result.node)) {
    return null;
  }
  const { canonicalPath, node: fileMetadata } = result;
  const existing = fileMetadata[_constants.default.SHA1];
  // Fast path: hash already cached from a previous processing pass.
  if (existing != null && existing.length > 0) {
    return {
      sha1: existing,
    };
  }
  const maybeContent = await this.#processFile(canonicalPath, fileMetadata, {
    computeSha1: true,
  });
  const sha1 = fileMetadata[_constants.default.SHA1];
  (0, _invariant.default)(
    sha1 != null && sha1.length > 0,
    "File processing didn't populate a SHA-1 hash for %s",
    canonicalPath,
  );
  return maybeContent
    ? {
        content: maybeContent,
        sha1,
      }
    : {
        sha1,
      };
}
exists(mixedPath) {
const result = this.#getFileData(mixedPath);
return result != null;
}
// Resolves mixedPath, following symlinks. Returns either
// {exists: false, links, missing} with the first missing real path, or
// {exists: true, links, realPath, type: 'd'|'f' [, metadata]}. `links` is
// the set of absolute symlink paths traversed in either case.
lookup(mixedPath) {
  const normalPath = this.#normalizePath(mixedPath);
  const links = new Set();
  const result = this.#lookupByNormalPath(normalPath, {
    collectLinkPaths: links,
    followLeaf: true,
  });
  if (!result.exists) {
    const { canonicalMissingPath } = result;
    return {
      exists: false,
      links,
      missing: this.#pathUtils.normalToAbsolute(canonicalMissingPath),
    };
  }
  const { canonicalPath, node } = result;
  const realPath = this.#pathUtils.normalToAbsolute(canonicalPath);
  if (isDirectory(node)) {
    return {
      exists: true,
      links,
      realPath,
      type: "d",
    };
  }
  // With followLeaf: true, a leaf symlink would have been resolved, so the
  // node here must be a regular file.
  (0, _invariant.default)(
    isRegularFile(node),
    "lookup follows symlinks, so should never return one (%s -> %s)",
    mixedPath,
    canonicalPath,
  );
  return {
    exists: true,
    links,
    realPath,
    type: "f",
    metadata: node,
  };
}
getAllFiles() {
return Array.from(
this.metadataIterator({
includeNodeModules: true,
includeSymlinks: false,
}),
({ canonicalPath }) => this.#pathUtils.normalToAbsolute(canonicalPath),
);
}
linkStats(mixedPath) {
const fileMetadata = this.#getFileData(mixedPath, {
followLeaf: false,
});
if (fileMetadata == null) {
return null;
}
const fileType = isRegularFile(fileMetadata) ? "f" : "l";
return {
fileType,
modifiedTime: fileMetadata[_constants.default.MTIME],
size: fileMetadata[_constants.default.SIZE],
};
}
// Yields absolute paths of files under opts.rootDir (default: the project
// root) whose paths match opts.filter. Matching is performed on a relative
// path prefixed './' unless filterCompareAbsolute is set; separators are
// normalised to '/' for matching when filterComparePosix is set. Symlinked
// directories are descended into only when opts.follow is true.
*matchFiles(opts) {
  const {
    filter = null,
    filterCompareAbsolute = false,
    filterComparePosix = false,
    follow = false,
    recursive = true,
    rootDir = null,
  } = opts;
  const normalRoot = rootDir == null ? "" : this.#normalizePath(rootDir);
  const contextRootResult = this.#lookupByNormalPath(normalRoot);
  if (!contextRootResult.exists) {
    return;
  }
  const {
    ancestorOfRootIdx,
    canonicalPath: rootRealPath,
    node: contextRoot,
    parentNode: contextRootParent,
  } = contextRootResult;
  if (!isDirectory(contextRoot)) {
    return;
  }
  const contextRootAbsolutePath =
    rootRealPath === ""
      ? this.#rootDir
      : _path.default.join(this.#rootDir, rootRealPath);
  const prefix = filterComparePosix ? "./" : "." + _path.default.sep;
  // Precompute the posix-converted absolute root once, outside the loop.
  const contextRootAbsolutePathForComparison =
    filterComparePosix && _path.default.sep !== "/"
      ? contextRootAbsolutePath.replaceAll(_path.default.sep, "/")
      : contextRootAbsolutePath;
  for (const relativePathForComparison of this.#pathIterator(
    contextRoot,
    contextRootParent,
    ancestorOfRootIdx,
    {
      alwaysYieldPosix: filterComparePosix,
      canonicalPathOfRoot: rootRealPath,
      follow,
      recursive,
      subtreeOnly: rootDir != null,
    },
  )) {
    if (
      filter == null ||
      filter.test(
        filterCompareAbsolute === true
          ? _path.default.join(
              contextRootAbsolutePathForComparison,
              relativePathForComparison,
            )
          : prefix + relativePathForComparison,
      )
    ) {
      // Convert back to system separators before yielding.
      const relativePath =
        filterComparePosix === true && _path.default.sep !== "/"
          ? relativePathForComparison.replaceAll("/", _path.default.sep)
          : relativePathForComparison;
      yield _path.default.join(contextRootAbsolutePath, relativePath);
    }
  }
}
// Adds or updates a single file's metadata, creating any missing parent
// directories (notifying changeListener of each). Throws if a parent
// directory entry could not be created. The final insertion is delegated
// to bulkAddOrModify with a single-entry map.
addOrModify(mixedPath, metadata, changeListener) {
  const normalPath = this.#normalizePath(mixedPath);
  const parentDirNode = this.#lookupByNormalPath(
    _path.default.dirname(normalPath),
    {
      changeListener,
      makeDirectories: true,
    },
  );
  if (!parentDirNode.exists) {
    throw new Error(
      `TreeFS: Failed to make parent directory entry for ${mixedPath}`,
    );
  }
  // Re-normalise using the parent's canonical path so the stored key is
  // canonical even when the input traversed symlinks or '..'.
  const canonicalPath = this.#normalizePath(
    parentDirNode.canonicalPath +
      _path.default.sep +
      _path.default.basename(normalPath),
  );
  this.bulkAddOrModify(new Map([[canonicalPath, metadata]]), changeListener);
}
// Adds or updates many files at once, creating directories as needed and
// notifying changeListener of directory additions, file additions and file
// modifications. The parent-directory lookup is cached per consecutive
// dirname, so callers benefit from grouping entries by directory. Throws
// if a path component already exists as a file.
bulkAddOrModify(addedOrModifiedFiles, changeListener) {
  let lastDir;
  let directoryNode;
  for (const [normalPath, metadata] of addedOrModifiedFiles) {
    const lastSepIdx = normalPath.lastIndexOf(_path.default.sep);
    const dirname = lastSepIdx === -1 ? "" : normalPath.slice(0, lastSepIdx);
    const basename =
      lastSepIdx === -1 ? normalPath : normalPath.slice(lastSepIdx + 1);
    // Only re-resolve the parent directory when it differs from the
    // previous iteration's.
    if (directoryNode == null || dirname !== lastDir) {
      const lookup = this.#lookupByNormalPath(dirname, {
        changeListener,
        followLeaf: false,
        makeDirectories: true,
      });
      if (!lookup.exists) {
        throw new Error(
          `TreeFS: Unexpected error adding ${normalPath}.\nMissing: ` +
            lookup.canonicalMissingPath,
        );
      }
      if (!isDirectory(lookup.node)) {
        throw new Error(
          `TreeFS: Could not add directory ${dirname}, adding ${normalPath}. ` +
            `${dirname} already exists in the file map as a file.`,
        );
      }
      lastDir = dirname;
      directoryNode = lookup.node;
    }
    if (changeListener != null) {
      const existingNode = directoryNode.get(basename);
      if (existingNode != null) {
        (0, _invariant.default)(
          !isDirectory(existingNode),
          "Detected addition or modification of file %s, but it is tracked as a non-empty directory",
          normalPath,
        );
        changeListener.fileModified(normalPath, existingNode, metadata);
      } else {
        changeListener.fileAdded(normalPath, metadata);
      }
    }
    directoryNode.set(basename, metadata);
  }
}
// Removes mixedPath from the tree (without following a leaf symlink).
// Non-empty directories are removed child-by-child recursively, and empty
// parent directories (other than the root) are pruned afterwards.
// Notifies changeListener of each removed file/directory. No-op if the
// path doesn't exist.
remove(mixedPath, changeListener) {
  const normalPath = this.#normalizePath(mixedPath);
  const result = this.#lookupByNormalPath(normalPath, {
    followLeaf: false,
  });
  if (!result.exists) {
    return;
  }
  const { parentNode, canonicalPath, node } = result;
  // Recurse into non-empty directories; the directory itself is removed
  // by the parent-pruning logic once it becomes empty.
  if (isDirectory(node) && node.size > 0) {
    for (const basename of node.keys()) {
      this.remove(
        canonicalPath + _path.default.sep + basename,
        changeListener,
      );
    }
    return;
  }
  if (parentNode != null) {
    if (changeListener != null) {
      if (isDirectory(node)) {
        changeListener.directoryRemoved(canonicalPath);
      } else {
        changeListener.fileRemoved(canonicalPath, node);
      }
    }
    parentNode.delete(_path.default.basename(canonicalPath));
    // Prune the parent if this removal emptied it.
    if (parentNode.size === 0 && parentNode !== this.#rootNode) {
      this.remove(_path.default.dirname(canonicalPath), changeListener);
    }
  }
}
// Core traversal: resolves a normal path against the tree, one separator-
// delimited segment at a time.
//
// - A symlink segment restarts traversal from the root at the joined
//   target path; a lazily-created `seen` set breaks symlink cycles
//   (reported as not-existing).
// - opts.followLeaf controls whether a symlink in the *final* segment is
//   resolved (true) or returned as-is (false).
// - opts.makeDirectories creates missing directory nodes along the way,
//   notifying opts.changeListener of each.
// - opts.collectAncestors accumulates {node, normalPath, segmentName,
//   ancestorOfRootIdx} for each directory passed through (used by
//   hierarchicalLookup); opts.collectLinkPaths accumulates absolute paths
//   of every symlink followed.
// - opts.start allows resuming from a known node and string offset.
#lookupByNormalPath(
  requestedNormalPath,
  opts = {
    followLeaf: true,
    makeDirectories: false,
  },
) {
  let targetNormalPath = requestedNormalPath;
  let seen;
  let fromIdx = opts.start?.pathIdx ?? 0;
  let parentNode = opts.start?.node ?? this.#rootNode;
  // Non-null while the path walked so far is still an ancestor of the
  // project root (counting '..' segments); null once we descend.
  let ancestorOfRootIdx = opts.start?.ancestorOfRootIdx ?? 0;
  const { collectAncestors, changeListener } = opts;
  // Segments before this offset were already collected as ancestors on a
  // previous pass (set when a symlink restarts traversal).
  let unseenPathFromIdx = 0;
  while (targetNormalPath.length > fromIdx) {
    const nextSepIdx = targetNormalPath.indexOf(_path.default.sep, fromIdx);
    const isLastSegment = nextSepIdx === -1;
    const segmentName = isLastSegment
      ? targetNormalPath.slice(fromIdx)
      : targetNormalPath.slice(fromIdx, nextSepIdx);
    const isUnseen = fromIdx >= unseenPathFromIdx;
    fromIdx = !isLastSegment ? nextSepIdx + 1 : targetNormalPath.length;
    if (segmentName === ".") {
      continue;
    }
    let segmentNode = parentNode.get(segmentName);
    if (segmentName === ".." && ancestorOfRootIdx != null) {
      ancestorOfRootIdx++;
    } else if (segmentNode != null) {
      ancestorOfRootIdx = null;
    }
    if (segmentNode == null) {
      // Missing segment: fail fast unless we're creating directories
      // ('..' above a missing parent is tolerated and created below).
      if (opts.makeDirectories !== true && segmentName !== "..") {
        return {
          canonicalMissingPath: isLastSegment
            ? targetNormalPath
            : targetNormalPath.slice(0, fromIdx - 1),
          exists: false,
          missingSegmentName: segmentName,
        };
      }
      segmentNode = new Map();
      if (opts.makeDirectories === true) {
        if (changeListener != null) {
          const canonicalPath = isLastSegment
            ? targetNormalPath
            : targetNormalPath.slice(0, fromIdx - 1);
          changeListener.directoryAdded(canonicalPath);
        }
        parentNode.set(segmentName, segmentNode);
      }
    }
    // Terminal cases: a trailing-separator directory, or the last segment
    // when it's a directory, a regular file, or a symlink with
    // followLeaf: false.
    if (
      (nextSepIdx === targetNormalPath.length - 1 &&
        isDirectory(segmentNode)) ||
      (isLastSegment &&
        (isDirectory(segmentNode) ||
          isRegularFile(segmentNode) ||
          opts.followLeaf === false))
    ) {
      return {
        ancestorOfRootIdx,
        canonicalPath: isLastSegment
          ? targetNormalPath
          : targetNormalPath.slice(0, -1),
        exists: true,
        node: segmentNode,
        parentNode,
      };
    }
    if (isDirectory(segmentNode)) {
      parentNode = segmentNode;
      if (collectAncestors && isUnseen) {
        const currentPath = isLastSegment
          ? targetNormalPath
          : targetNormalPath.slice(0, fromIdx - 1);
        collectAncestors.push({
          ancestorOfRootIdx,
          node: segmentNode,
          normalPath: currentPath,
          segmentName,
        });
      }
    } else {
      const currentPath = isLastSegment
        ? targetNormalPath
        : targetNormalPath.slice(0, fromIdx - 1);
      // A regular file in a non-final position means the path can't exist.
      if (isRegularFile(segmentNode)) {
        return {
          canonicalMissingPath: currentPath,
          exists: false,
          missingSegmentName: segmentName,
        };
      }
      // Symlink: join its normal target with the remaining suffix and
      // restart traversal from the root.
      const normalSymlinkTarget = this.#resolveSymlinkTargetToNormalPath(
        segmentNode,
        currentPath,
      );
      if (opts.collectLinkPaths) {
        opts.collectLinkPaths.add(
          this.#pathUtils.normalToAbsolute(currentPath),
        );
      }
      const remainingTargetPath = isLastSegment
        ? ""
        : targetNormalPath.slice(fromIdx);
      const joinedResult = this.#pathUtils.joinNormalToRelative(
        normalSymlinkTarget.normalPath,
        remainingTargetPath,
      );
      targetNormalPath = joinedResult.normalPath;
      // When the restart passes back through ancestors of the root (the
      // target was the root itself, or '..' segments collapsed), record
      // those ancestors too, outermost first.
      if (
        collectAncestors &&
        !isLastSegment &&
        (normalSymlinkTarget.ancestorOfRootIdx === 0 ||
          joinedResult.collapsedSegments > 0)
      ) {
        let node = this.#rootNode;
        let collapsedPath = "";
        const reverseAncestors = [];
        for (
          let i = 0;
          i <= joinedResult.collapsedSegments && isDirectory(node);
          i++
        ) {
          if (
            i > 0 ||
            normalSymlinkTarget.ancestorOfRootIdx === 0 ||
            joinedResult.collapsedSegments > 0
          ) {
            reverseAncestors.push({
              ancestorOfRootIdx: i,
              node,
              normalPath: collapsedPath,
              segmentName: this.#pathUtils.getBasenameOfNthAncestor(i),
            });
          }
          node = node.get("..") ?? new Map();
          collapsedPath =
            collapsedPath === ""
              ? ".."
              : collapsedPath + _path.default.sep + "..";
        }
        collectAncestors.push(...reverseAncestors.reverse());
      }
      unseenPathFromIdx = normalSymlinkTarget.startOfBasenameIdx;
      if (seen == null) {
        // Lazy initialisation, because symlinks are rare.
        seen = new Set([requestedNormalPath]);
      }
      if (seen.has(targetNormalPath)) {
        // Symlink cycle - report the path as missing.
        return {
          canonicalMissingPath: targetNormalPath,
          exists: false,
          missingSegmentName: segmentName,
        };
      }
      seen.add(targetNormalPath);
      fromIdx = 0;
      parentNode = this.#rootNode;
      ancestorOfRootIdx = 0;
    }
  }
  // Only an empty/'.'-only path falls through the loop; it denotes the
  // root itself.
  (0, _invariant.default)(
    parentNode === this.#rootNode,
    "Unexpectedly escaped traversal",
  );
  return {
    ancestorOfRootIdx: 0,
    canonicalPath: targetNormalPath,
    exists: true,
    node: this.#rootNode,
    parentNode: null,
  };
}
// Walks from mixedStartPath towards the filesystem root, returning the
// first ancestor X for which X + subpath exists with type opts.subpathType
// ('f' or 'd'), as {absolutePath, containerRelativePath}. Returns null if
// a segment named opts.breakOnSegment is hit first, or nothing matches.
// If opts.invalidatedBy is given, collects all absolute real paths whose
// addition or removal could change this result (missing candidates and
// any symlinks traversed).
hierarchicalLookup(mixedStartPath, subpath, opts) {
  const ancestorsOfInput = [];
  const normalPath = this.#normalizePath(mixedStartPath);
  const invalidatedBy = opts.invalidatedBy;
  const closestLookup = this.#lookupByNormalPath(normalPath, {
    collectAncestors: ancestorsOfInput,
    collectLinkPaths: invalidatedBy,
  });
  // First candidate: the start path itself, if it is a directory.
  if (closestLookup.exists && isDirectory(closestLookup.node)) {
    const maybeAbsolutePathMatch = this.#checkCandidateHasSubpath(
      closestLookup.canonicalPath,
      subpath,
      opts.subpathType,
      invalidatedBy,
      null,
    );
    if (maybeAbsolutePathMatch != null) {
      return {
        absolutePath: maybeAbsolutePathMatch,
        containerRelativePath: "",
      };
    }
  } else {
    if (
      invalidatedBy &&
      (!closestLookup.exists || !isDirectory(closestLookup.node))
    ) {
      invalidatedBy.add(
        this.#pathUtils.normalToAbsolute(
          closestLookup.exists
            ? closestLookup.canonicalPath
            : closestLookup.canonicalMissingPath,
        ),
      );
    }
    if (
      opts.breakOnSegment != null &&
      !closestLookup.exists &&
      closestLookup.missingSegmentName === opts.breakOnSegment
    ) {
      return null;
    }
  }
  // Determine the deepest ancestor-of-root directory reached ("common
  // root"): candidates above it are visited by walking '..' links below.
  let commonRoot = this.#rootNode;
  let commonRootDepth = 0;
  if (closestLookup.exists && closestLookup.ancestorOfRootIdx != null) {
    commonRootDepth = closestLookup.ancestorOfRootIdx;
    (0, _invariant.default)(
      isDirectory(closestLookup.node),
      "ancestors of the root must be directories",
    );
    commonRoot = closestLookup.node;
  } else {
    for (const ancestor of ancestorsOfInput) {
      if (ancestor.ancestorOfRootIdx == null) {
        break;
      }
      commonRootDepth = ancestor.ancestorOfRootIdx;
      commonRoot = ancestor.node;
    }
  }
  // Try each collected ancestor of the input, deepest first, down to (but
  // not past) the common root.
  for (
    let candidateIdx = ancestorsOfInput.length - 1;
    candidateIdx >= commonRootDepth;
    --candidateIdx
  ) {
    const candidate = ancestorsOfInput[candidateIdx];
    if (candidate.segmentName === opts.breakOnSegment) {
      return null;
    }
    const maybeAbsolutePathMatch = this.#checkCandidateHasSubpath(
      candidate.normalPath,
      subpath,
      opts.subpathType,
      invalidatedBy,
      {
        ancestorOfRootIdx: candidate.ancestorOfRootIdx,
        node: candidate.node,
        pathIdx:
          candidate.normalPath.length > 0
            ? candidate.normalPath.length + 1
            : 0,
      },
    );
    if (maybeAbsolutePathMatch != null) {
      // Each leading '../' ancestor fragment occupies 3 characters.
      let prefixLength = commonRootDepth * 3;
      for (let i = commonRootDepth; i <= candidateIdx; i++) {
        prefixLength = normalPath.indexOf(
          _path.default.sep,
          prefixLength + 1,
        );
      }
      const containerRelativePath = normalPath.slice(prefixLength + 1);
      return {
        absolutePath: maybeAbsolutePathMatch,
        containerRelativePath,
      };
    }
  }
  // Finally, walk upwards from the common root via '..' links until we run
  // out of tracked ancestor directories.
  let candidateNormalPath =
    commonRootDepth > 0 ? normalPath.slice(0, 3 * commonRootDepth - 1) : "";
  const remainingNormalPath = normalPath.slice(commonRootDepth * 3);
  let nextNode = commonRoot;
  let depthBelowCommonRoot = 0;
  while (isDirectory(nextNode)) {
    const maybeAbsolutePathMatch = this.#checkCandidateHasSubpath(
      candidateNormalPath,
      subpath,
      opts.subpathType,
      invalidatedBy,
      null,
    );
    if (maybeAbsolutePathMatch != null) {
      // Reconstruct the container-relative path from the root's real
      // segment names plus the part of the input below the common root.
      const rootDirParts = this.#pathUtils.getParts();
      const relativeParts =
        depthBelowCommonRoot > 0
          ? rootDirParts.slice(
              -(depthBelowCommonRoot + commonRootDepth),
              commonRootDepth > 0 ? -commonRootDepth : undefined,
            )
          : [];
      if (remainingNormalPath !== "") {
        relativeParts.push(remainingNormalPath);
      }
      return {
        absolutePath: maybeAbsolutePathMatch,
        containerRelativePath: relativeParts.join(_path.default.sep),
      };
    }
    depthBelowCommonRoot++;
    candidateNormalPath =
      candidateNormalPath === ""
        ? ".."
        : candidateNormalPath + _path.default.sep + "..";
    nextNode = nextNode.get("..");
  }
  return null;
}
// Returns the real absolute path of normalCandidatePath + subpath if it
// exists with the requested type ('f' or 'd'), or null. On a miss (or type
// mismatch), adds the closest existing/missing real path to invalidatedBy,
// if provided, so callers can watch for changes. `start` optionally resumes
// the lookup from a known node/offset.
#checkCandidateHasSubpath(
  normalCandidatePath,
  subpath,
  subpathType,
  invalidatedBy,
  start,
) {
  const lookupResult = this.#lookupByNormalPath(
    this.#pathUtils.joinNormalToRelative(normalCandidatePath, subpath)
      .normalPath,
    {
      collectLinkPaths: invalidatedBy,
    },
  );
  if (
    lookupResult.exists &&
    isDirectory(lookupResult.node) === (subpathType === "d")
  ) {
    return this.#pathUtils.normalToAbsolute(lookupResult.canonicalPath);
  } else if (invalidatedBy) {
    invalidatedBy.add(
      this.#pathUtils.normalToAbsolute(
        lookupResult.exists
          ? lookupResult.canonicalPath
          : lookupResult.canonicalMissingPath,
      ),
    );
  }
  return null;
}
// Public wrapper: iterate {baseName, canonicalPath, metadata} for every
// file tracked in the map, starting at the root node.
*metadataIterator(opts) {
  yield* this.#metadataIterator(this.#rootNode, opts);
}
*#metadataIterator(rootNode, opts, prefix = "") {
for (const [name, node] of rootNode) {
if (
!opts.includeNodeModules &&
isDirectory(node) &&
name === "node_modules"
) {
continue;
}
const prefixedName =
prefix === "" ? name : prefix + _path.default.sep + name;
if (isDirectory(node)) {
yield* this.#metadataIterator(node, opts, prefixedName);
} else if (isRegularFile(node) || opts.includeSymlinks) {
yield {
baseName: name,
canonicalPath: prefixedName,
metadata: node,
};
}
}
}
#normalizePath(relativeOrAbsolutePath) {
return _path.default.isAbsolute(relativeOrAbsolutePath)
? this.#pathUtils.absoluteToNormal(relativeOrAbsolutePath)
: this.#pathUtils.relativeToNormal(relativeOrAbsolutePath);
}
*#directoryNodeIterator(node, parent, ancestorOfRootIdx) {
if (ancestorOfRootIdx != null && ancestorOfRootIdx > 0 && parent) {
yield [
this.#pathUtils.getBasenameOfNthAncestor(ancestorOfRootIdx - 1),
parent,
];
}
yield* node.entries();
}
// Recursively yields the paths of file and symlink nodes under the given
// directory node, relative to the iteration root, using '/' separators
// when opts.alwaysYieldPosix is set.
//
// Symlinks are resolved via #lookupByNormalPath: links to files yield the
// link's own path; links to directories are descended into only when both
// opts.recursive and opts.follow are set, with `followedLinks` guarding
// against symlink cycles. With opts.subtreeOnly, '..' branches (paths
// outside the iteration root) are skipped.
*#pathIterator(
  iterationRootNode,
  iterationRootParentNode,
  ancestorOfRootIdx,
  opts,
  pathPrefix = "",
  followedLinks = new Set(),
) {
  const pathSep = opts.alwaysYieldPosix ? "/" : _path.default.sep;
  const prefixWithSep = pathPrefix === "" ? pathPrefix : pathPrefix + pathSep;
  for (const [name, node] of this.#directoryNodeIterator(
    iterationRootNode,
    iterationRootParentNode,
    ancestorOfRootIdx,
  )) {
    if (opts.subtreeOnly && name === "..") {
      continue;
    }
    const nodePath = prefixWithSep + name;
    if (!isDirectory(node)) {
      if (isRegularFile(node)) {
        yield nodePath;
      } else {
        // Symlink: resolve it relative to the iteration root to decide
        // whether it targets a file or a directory.
        const nodePathWithSystemSeparators =
          pathSep === _path.default.sep
            ? nodePath
            : nodePath.replaceAll(pathSep, _path.default.sep);
        const normalPathOfSymlink = _path.default.join(
          opts.canonicalPathOfRoot,
          nodePathWithSystemSeparators,
        );
        const resolved = this.#lookupByNormalPath(normalPathOfSymlink, {
          followLeaf: true,
        });
        if (!resolved.exists) {
          // Broken symlink - skip it.
          continue;
        }
        const target = resolved.node;
        if (!isDirectory(target)) {
          yield nodePath;
        } else if (
          opts.recursive &&
          opts.follow &&
          !followedLinks.has(node)
        ) {
          // Recurse into the target directory under the link's own path,
          // remembering this link to break cycles.
          yield* this.#pathIterator(
            target,
            resolved.parentNode,
            resolved.ancestorOfRootIdx,
            opts,
            nodePath,
            new Set([...followedLinks, node]),
          );
        }
      }
    } else if (opts.recursive) {
      yield* this.#pathIterator(
        node,
        iterationRootParentNode,
        ancestorOfRootIdx != null && ancestorOfRootIdx > 0
          ? ancestorOfRootIdx - 1
          : null,
        opts,
        nodePath,
        followedLinks,
      );
    }
  }
}
#resolveSymlinkTargetToNormalPath(symlinkNode, canonicalPathOfSymlink) {
const cachedResult = this.#cachedNormalSymlinkTargets.get(symlinkNode);
if (cachedResult != null) {
return cachedResult;
}
const literalSymlinkTarget = symlinkNode[_constants.default.SYMLINK];
(0, _invariant.default)(
typeof literalSymlinkTarget === "string",
"Expected symlink target to be populated.",
);
const absoluteSymlinkTarget = _path.default.resolve(
this.#rootDir,
canonicalPathOfSymlink,
"..",
literalSymlinkTarget,
);
const normalSymlinkTarget = _path.default.relative(
this.#rootDir,
absoluteSymlinkTarget,
);
const result = {
ancestorOfRootIdx:
this.#pathUtils.getAncestorOfRootIdx(normalSymlinkTarget),
normalPath: normalSymlinkTarget,
startOfBasenameIdx:
normalSymlinkTarget.lastIndexOf(_path.default.sep) + 1,
};
this.#cachedNormalSymlinkTargets.set(symlinkNode, result);
return result;
}
#getFileData(
filePath,
opts = {
followLeaf: true,
},
) {
const normalPath = this.#normalizePath(filePath);
const result = this.#lookupByNormalPath(normalPath, {
followLeaf: opts.followLeaf,
});
if (!result.exists || isDirectory(result.node)) {
return null;
}
return result.node;
}
#cloneTree(root) {
const clone = new Map();
for (const [name, node] of root) {
if (isDirectory(node)) {
clone.set(name, this.#cloneTree(node));
} else {
clone.set(name, [...node]);
}
}
return clone;
}
}
exports.default = TreeFS;

1272
node_modules/metro-file-map/src/lib/TreeFS.js.flow generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,20 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @noformat
* @generated SignedSource<<f72d8f0c4d8f513383584a02f36795ef>>
*
* This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
* Original file: packages/metro-file-map/src/lib/checkWatchmanCapabilities.js
* To regenerate, run:
* js1 build metro-ts-defs (internal) OR
* yarn run build-ts-defs (OSS)
*/
/**
 * Verifies that the `watchman` CLI available on PATH supports every
 * capability in `requiredCapabilities`. Resolves with the installed Watchman
 * version on success; rejects otherwise.
 */
declare function checkWatchmanCapabilities(
  requiredCapabilities: ReadonlyArray<string>,
): Promise<{version: string}>;
export default checkWatchmanCapabilities;

View File

@@ -0,0 +1,55 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true,
});
exports.default = checkWatchmanCapabilities;
var _child_process = require("child_process");
var _util = require("util");
// Queries the `watchman` CLI for its capabilities and throws when the binary
// is missing, the response is malformed, or any required capability is
// unsupported. Resolves with the installed Watchman version.
async function checkWatchmanCapabilities(requiredCapabilities) {
  const execFilePromise = (0, _util.promisify)(_child_process.execFile);
  let rawResponse;
  try {
    const { stdout } = await execFilePromise("watchman", [
      "list-capabilities",
      "--output-encoding=json",
      "--no-pretty",
      "--no-spawn",
    ]);
    rawResponse = stdout;
  } catch (e) {
    // ENOENT from execFile means the binary itself was not found.
    if (e?.code === "ENOENT") {
      throw new Error("Watchman is not installed or not available on PATH");
    }
    throw e;
  }
  let parsed;
  try {
    parsed = JSON.parse(rawResponse);
  } catch {
    throw new Error(
      "Failed to parse response from `watchman list-capabilities`",
    );
  }
  // Validate the response shape before trusting any field.
  const hasExpectedShape =
    parsed != null &&
    typeof parsed === "object" &&
    typeof parsed.version === "string" &&
    Array.isArray(parsed.capabilities);
  if (!hasExpectedShape) {
    throw new Error("Unexpected response from `watchman list-capabilities`");
  }
  const available = new Set(parsed.capabilities);
  const missing = requiredCapabilities.filter((cap) => !available.has(cap));
  if (missing.length > 0) {
    throw new Error(
      `The installed version of Watchman (${parsed.version}) is missing required capabilities: ${missing.join(", ")}`,
    );
  }
  return {
    version: parsed.version,
  };
}

View File

@@ -0,0 +1,68 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow strict
* @format
*/
import {execFile} from 'child_process';
import {promisify} from 'util';
/**
 * Asks the `watchman` CLI for its capability list and throws if the binary is
 * missing, the response is malformed, or any of `requiredCapabilities` is
 * unsupported. Resolves with the installed Watchman version.
 */
export default async function checkWatchmanCapabilities(
  requiredCapabilities: ReadonlyArray<string>,
): Promise<{version: string}> {
  const execFilePromise: (
    cmd: string,
    args: ReadonlyArray<string>,
  ) => Promise<{stdout: string}> = promisify(execFile);
  let rawResponse;
  try {
    const result = await execFilePromise('watchman', [
      'list-capabilities',
      '--output-encoding=json',
      '--no-pretty',
      '--no-spawn', // The client can answer this, so don't spawn a server
    ]);
    rawResponse = result.stdout;
  } catch (e) {
    // ENOENT from execFile means the watchman binary itself was not found.
    if (e?.code === 'ENOENT') {
      throw new Error('Watchman is not installed or not available on PATH');
    }
    throw e;
  }
  let parsedResponse;
  try {
    parsedResponse = JSON.parse(rawResponse) as unknown;
  } catch {
    throw new Error(
      'Failed to parse response from `watchman list-capabilities`',
    );
  }
  // Validate the response shape before trusting any field.
  if (
    parsedResponse == null ||
    typeof parsedResponse !== 'object' ||
    typeof parsedResponse.version !== 'string' ||
    !Array.isArray(parsedResponse.capabilities)
  ) {
    throw new Error('Unexpected response from `watchman list-capabilities`');
  }
  const version = parsedResponse.version;
  const capabilities = new Set(parsedResponse.capabilities);
  const missingCapabilities = requiredCapabilities.filter(
    requiredCapability => !capabilities.has(requiredCapability),
  );
  if (missingCapabilities.length > 0) {
    throw new Error(
      `The installed version of Watchman (${version}) is missing required capabilities: ${missingCapabilities.join(
        ', ',
      )}`,
    );
  }
  return {version};
}

View File

@@ -0,0 +1,14 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
*/
// Extracts the set of module specifiers referenced by the given source code.
// NOTE(review): hand-written declaration — confirm the signature matches the
// runtime implementation of lib/dependencyExtractor.
declare const dependencyExtractor: {
  extract: (code: string) => Set<string>;
};
export = dependencyExtractor;

View File

@@ -0,0 +1,20 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @noformat
* @generated SignedSource<<30b5e6d2308dde108c136f95a59e3740>>
*
* This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
* Original file: packages/metro-file-map/src/lib/normalizePathSeparatorsToPosix.js
* To regenerate, run:
* js1 build metro-ts-defs (internal) OR
* yarn run build-ts-defs (OSS)
*/
// Rewrites system path separators to posix "/" (identity on posix hosts).
declare const $$EXPORT_DEFAULT_DECLARATION$$: (filePath: string) => string;
declare type $$EXPORT_DEFAULT_DECLARATION$$ =
  typeof $$EXPORT_DEFAULT_DECLARATION$$;
export default $$EXPORT_DEFAULT_DECLARATION$$;

View File

@@ -0,0 +1,41 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true,
});
exports.default = void 0;
var path = _interopRequireWildcard(require("path"));
// Babel interop helper (generated, minified): wraps a CommonJS `require`
// result so it can be consumed as an ES-module namespace object. Rewrites
// itself on first call and caches wrapped namespaces in WeakMaps so the same
// module object always maps to the same namespace.
function _interopRequireWildcard(e, t) {
  // Caches are only created when WeakMap is available.
  if ("function" == typeof WeakMap)
    var r = new WeakMap(),
      n = new WeakMap();
  return (_interopRequireWildcard = function (e, t) {
    // Already an ES module (and interop not forced): return unchanged.
    if (!t && e && e.__esModule) return e;
    var o,
      i,
      f = { __proto__: null, default: e };
    // Primitives and null are exposed only via the `default` export.
    if (null === e || ("object" != typeof e && "function" != typeof e))
      return f;
    if ((o = t ? n : r)) {
      if (o.has(e)) return o.get(e);
      o.set(e, f);
    }
    // Copy own enumerable properties, preserving accessors where possible.
    for (const t in e)
      "default" !== t &&
        {}.hasOwnProperty.call(e, t) &&
        ((i =
          (o = Object.defineProperty) &&
          Object.getOwnPropertyDescriptor(e, t)) &&
        (i.get || i.set)
          ? o(f, t, i)
          : (f[t] = e[t]));
    return f;
  })(e, t);
}
// Branch chosen once at module load: identity on posix hosts, every
// backslash separator rewritten to "/" elsewhere (i.e. Windows).
const normalizePathSeparatorsToPosix =
  path.sep === "/"
    ? (filePath) => filePath
    : (filePath) => filePath.replace(/\\/g, "/");
var _default = (exports.default = normalizePathSeparatorsToPosix);

View File

@@ -0,0 +1,21 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow strict
* @format
*/
import * as path from 'path';
// Decide once at module load: on posix hosts paths are already posix-shaped,
// on Windows every backslash separator becomes "/".
const normalizePathSeparatorsToPosix: (filePath: string) => string =
  path.sep === '/'
    ? filePath => filePath
    : filePath => filePath.replace(/\\/g, '/');
export default normalizePathSeparatorsToPosix;

View File

@@ -0,0 +1,20 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @noformat
* @generated SignedSource<<719a82b7670f09ecb97e007293fddfc6>>
*
* This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
* Original file: packages/metro-file-map/src/lib/normalizePathSeparatorsToSystem.js
* To regenerate, run:
* js1 build metro-ts-defs (internal) OR
* yarn run build-ts-defs (OSS)
*/
// Rewrites posix "/" separators to the system separator (identity on posix).
declare const $$EXPORT_DEFAULT_DECLARATION$$: (filePath: string) => string;
declare type $$EXPORT_DEFAULT_DECLARATION$$ =
  typeof $$EXPORT_DEFAULT_DECLARATION$$;
export default $$EXPORT_DEFAULT_DECLARATION$$;

View File

@@ -0,0 +1,42 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true,
});
exports.default = void 0;
var path = _interopRequireWildcard(require("path"));
// Babel interop helper (generated, minified): wraps a CommonJS `require`
// result so it can be consumed as an ES-module namespace object. Rewrites
// itself on first call and caches wrapped namespaces in WeakMaps so the same
// module object always maps to the same namespace.
function _interopRequireWildcard(e, t) {
  // Caches are only created when WeakMap is available.
  if ("function" == typeof WeakMap)
    var r = new WeakMap(),
      n = new WeakMap();
  return (_interopRequireWildcard = function (e, t) {
    // Already an ES module (and interop not forced): return unchanged.
    if (!t && e && e.__esModule) return e;
    var o,
      i,
      f = { __proto__: null, default: e };
    // Primitives and null are exposed only via the `default` export.
    if (null === e || ("object" != typeof e && "function" != typeof e))
      return f;
    if ((o = t ? n : r)) {
      if (o.has(e)) return o.get(e);
      o.set(e, f);
    }
    // Copy own enumerable properties, preserving accessors where possible.
    for (const t in e)
      "default" !== t &&
        {}.hasOwnProperty.call(e, t) &&
        ((i =
          (o = Object.defineProperty) &&
          Object.getOwnPropertyDescriptor(e, t)) &&
        (i.get || i.set)
          ? o(f, t, i)
          : (f[t] = e[t]));
    return f;
  })(e, t);
}
// Branch chosen once at module load: identity on posix hosts, every "/"
// rewritten to the system separator elsewhere (i.e. Windows).
const normalizePathSeparatorsToSystem =
  path.sep === "/"
    ? (filePath) => filePath
    : (filePath) => filePath.replace(/\//g, path.sep);
var _default = (exports.default = normalizePathSeparatorsToSystem);

View File

@@ -0,0 +1,21 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow strict
* @format
*/
import * as path from 'path';
// Decide once at module load: identity on posix hosts; on Windows every
// posix "/" separator becomes the system separator.
const normalizePathSeparatorsToSystem: (filePath: string) => string =
  path.sep === '/'
    ? filePath => filePath
    : filePath => filePath.replace(/\//g, path.sep);
export default normalizePathSeparatorsToSystem;

View File

@@ -0,0 +1,24 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @noformat
* @oncall react_native
* @generated SignedSource<<f82cf1eeac38c409c5bf891686c2e828>>
*
* This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
* Original file: packages/metro-file-map/src/lib/rootRelativeCacheKeys.js
* To regenerate, run:
* js1 build metro-ts-defs (internal) OR
* yarn run build-ts-defs (OSS)
*/
import type {BuildParameters} from '../flow-types';
/**
 * Derives two cache keys from `buildParameters`: a hash of the project root
 * and a hash of all remaining parameters expressed relative to that root,
 * so caches remain comparable across checkout locations.
 */
declare function rootRelativeCacheKeys(buildParameters: BuildParameters): {
  rootDirHash: string;
  relativeConfigHash: string;
};
export default rootRelativeCacheKeys;

View File

@@ -0,0 +1,55 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true,
});
exports.default = rootRelativeCacheKeys;
var _normalizePathSeparatorsToPosix = _interopRequireDefault(
require("./normalizePathSeparatorsToPosix"),
);
var _RootPathUtils = require("./RootPathUtils");
var _crypto = require("crypto");
// Babel interop helper: an ES module passes through untouched; anything else
// is wrapped so it can be read via `.default`.
function _interopRequireDefault(e) {
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
// Computes two md5 cache keys for the file map: one from the posix-normalized
// project root, and one from every other build parameter made root-relative
// so caches stay comparable across checkout locations.
function rootRelativeCacheKeys(buildParameters) {
  const { rootDir, plugins, ...otherParameters } = buildParameters;
  const rootDirHash = (0, _crypto.createHash)("md5")
    .update((0, _normalizePathSeparatorsToPosix.default)(rootDir))
    .digest("hex");
  const pathUtils = new _RootPathUtils.RootPathUtils(rootDir);
  // Sort keys so the serialized component list has a stable order.
  const cacheComponents = Object.keys(otherParameters)
    .sort()
    .map((key) => {
      switch (key) {
        case "roots":
          // Roots are hashed root-relative and posix-normalized.
          return buildParameters[key].map((root) =>
            (0, _normalizePathSeparatorsToPosix.default)(
              pathUtils.absoluteToNormal(root),
            ),
          );
        case "cacheBreaker":
        case "extensions":
        case "computeSha1":
        case "enableSymlinks":
        case "forceNodeFilesystemAPI":
        case "retainAllFiles":
          return buildParameters[key] ?? null;
        case "ignorePattern":
          return buildParameters[key].toString();
        default:
          // No-op: compiled residue of Flow's `key as empty` exhaustiveness
          // check (see the .js.flow source).
          key;
          throw new Error("Unrecognised key in build parameters: " + key);
      }
    });
  for (const plugin of plugins) {
    cacheComponents.push(plugin.getCacheKey());
  }
  // JSON.stringify is deterministic here because the components are (nested)
  // arrays of primitives.
  const relativeConfigHash = (0, _crypto.createHash)("md5")
    .update(JSON.stringify(cacheComponents))
    .digest("hex");
  return {
    rootDirHash,
    relativeConfigHash,
  };
}

View File

@@ -0,0 +1,68 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow strict-local
* @format
* @oncall react_native
*/
import type {BuildParameters} from '../flow-types';
import normalizePathSeparatorsToPosix from './normalizePathSeparatorsToPosix';
import {RootPathUtils} from './RootPathUtils';
import {createHash} from 'crypto';
/**
 * Computes two md5 cache keys for the file map: a hash of the posix-normalized
 * project root, and a hash of every other build parameter made root-relative
 * so caches stay comparable across checkout locations.
 */
export default function rootRelativeCacheKeys(
  buildParameters: BuildParameters,
): {
  rootDirHash: string,
  relativeConfigHash: string,
} {
  const {rootDir, plugins, ...otherParameters} = buildParameters;
  const rootDirHash = createHash('md5')
    .update(normalizePathSeparatorsToPosix(rootDir))
    .digest('hex');
  const pathUtils = new RootPathUtils(rootDir);
  // Sort keys so the serialized component list has a stable order.
  const cacheComponents = Object.keys(otherParameters)
    .sort()
    .map(key => {
      switch (key) {
        case 'roots':
          // Roots are hashed root-relative and posix-normalized.
          return buildParameters[key].map(root =>
            normalizePathSeparatorsToPosix(pathUtils.absoluteToNormal(root)),
          );
        case 'cacheBreaker':
        case 'extensions':
        case 'computeSha1':
        case 'enableSymlinks':
        case 'forceNodeFilesystemAPI':
        case 'retainAllFiles':
          return buildParameters[key] ?? null;
        case 'ignorePattern':
          return buildParameters[key].toString();
        default:
          // Exhaustiveness check: fails to typecheck if a key is unhandled.
          key as empty;
          throw new Error('Unrecognised key in build parameters: ' + key);
      }
    });
  for (const plugin of plugins) {
    cacheComponents.push(plugin.getCacheKey());
  }
  // JSON.stringify is stable here because we only deal in (nested) arrays of
  // primitives. Use a different approach if this is expanded to include
  // objects/Sets/Maps, etc.
  const relativeConfigHash = createHash('md5')
    .update(JSON.stringify(cacheComponents))
    .digest('hex');
  return {
    rootDirHash,
    relativeConfigHash,
  };
}

23
node_modules/metro-file-map/src/lib/sorting.d.ts generated vendored Normal file
View File

@@ -0,0 +1,23 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @noformat
* @generated SignedSource<<8805bc71542c6b43e940f8c5761ff187>>
*
* This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
* Original file: packages/metro-file-map/src/lib/sorting.js
* To regenerate, run:
* js1 build metro-ts-defs (internal) OR
* yarn run build-ts-defs (OSS)
*/
// Comparator over nullable strings: null collates before any string; two
// nulls compare equal.
export declare function compareStrings(
  a: null | string,
  b: null | string,
): number;
// Combines comparators into one: the first non-zero result wins; full ties
// return 0.
export declare function chainComparators<T>(
  ...comparators: Array<(a: T, b: T) => number>
): (a: T, b: T) => number;

27
node_modules/metro-file-map/src/lib/sorting.js generated vendored Normal file
View File

@@ -0,0 +1,27 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true,
});
exports.chainComparators = chainComparators;
exports.compareStrings = compareStrings;
// Comparator over nullable strings: null collates before any string; two
// nulls compare equal; otherwise locale string order.
function compareStrings(a, b) {
  if (a != null && b != null) {
    return a.localeCompare(b);
  }
  if (a == null && b == null) {
    return 0;
  }
  return a == null ? -1 : 1;
}
// Builds a combined comparator: comparators are consulted in order and the
// first non-zero verdict wins; a full tie yields 0.
function chainComparators(...comparators) {
  return (a, b) => {
    let verdict = 0;
    for (const compare of comparators) {
      verdict = compare(a, b);
      if (verdict !== 0) {
        break;
      }
    }
    return verdict;
  };
}

35
node_modules/metro-file-map/src/lib/sorting.js.flow generated vendored Normal file
View File

@@ -0,0 +1,35 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow strict
* @format
*/
// Utilities for working with Array.prototype.sort
// Comparator over nullable strings: null collates before any string; two
// nulls compare equal; otherwise locale string order.
export function compareStrings(a: null | string, b: null | string): number {
  if (a != null && b != null) {
    return a.localeCompare(b);
  }
  if (a == null && b == null) {
    return 0;
  }
  return a == null ? -1 : 1;
}
// Builds a combined comparator: comparators are consulted in order and the
// first non-zero verdict wins; a full tie yields 0.
export function chainComparators<T>(
  ...comparators: Array<(a: T, b: T) => number>
): (a: T, b: T) => number {
  return (a, b) => {
    let verdict = 0;
    // `some` short-circuits on the first comparator that breaks the tie.
    comparators.some(compare => (verdict = compare(a, b)) !== 0);
    return verdict;
  };
}