fix: `strict` types

alexander-akait 2025-03-11 17:20:50 +03:00
parent 3c753354bf
commit c1c085d74a
25 changed files with 545 additions and 408 deletions

View File

@ -66,7 +66,7 @@ let debugId = 1000;
*/
class Chunk {
/**
* @param {string=} name of chunk being created, is optional (for subclasses)
* @param {(string | null)=} name of chunk being created, is optional (for subclasses)
* @param {boolean} backCompat enable backward-compatibility
*/
constructor(name, backCompat = true) {
@ -76,7 +76,7 @@ class Chunk {
this.ids = null;
/** @type {number} */
this.debugId = debugId++;
/** @type {string | undefined} */
/** @type {string | null | undefined} */
this.name = name;
/** @type {SortableSet<string>} */
this.idNameHints = new SortableSet();
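Note on the change above: under strict null checks, `@param {string=}` only admits `string | undefined`, so callers that pass an explicit `null` name stop type-checking; widening to `(string | null)=` keeps them valid. A minimal illustrative sketch (not part of the commit, all names made up):

/**
 * @param {(string | null)=} name optional chunk name, may be explicitly null
 * @returns {string} printable name
 */
const printableName = name => (name == null ? "<unnamed>" : name);

printableName();        // ok: parameter is optional
printableName(null);    // ok with (string | null)=, an error with string=
printableName("main");  // ok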

View File

@ -174,7 +174,7 @@ const { isSourceEqual } = require("./util/source");
* @returns {any}
*/
/** @typedef {new (...args: any[]) => Dependency} DepConstructor */
/** @typedef {new (...args: EXPECTED_ANY[]) => Dependency} DepConstructor */
/** @typedef {Record<string, Source>} CompilationAssets */
@ -220,41 +220,50 @@ const { isSourceEqual } = require("./util/source");
* @property {EntryOptions=} entryOptions
*/
/** @typedef {Record<string, any>} ExecuteModuleExports */
/**
* @typedef {object} ExecuteModuleResult
* @property {any} exports
* @property {ExecuteModuleExports} exports
* @property {boolean} cacheable
* @property {Map<string, { source: Source, info: AssetInfo }>} assets
* @property {Map<string, { source: Source, info: AssetInfo | undefined }>} assets
* @property {LazySet<string>} fileDependencies
* @property {LazySet<string>} contextDependencies
* @property {LazySet<string>} missingDependencies
* @property {LazySet<string>} buildDependencies
*/
/** @typedef {(id: string) => void} WebpackRequire */
/**
* @typedef {{ id: string | undefined, exports: any, loaded: boolean, error?: Error }} ModuleObject
*/
/**
* @typedef {{ id: string | undefined, module: ModuleObject, require: WebpackRequire }} ExecuteOptions
* @typedef {object} ExecuteModuleObject
* @property {string} [id] module id
* @property {ExecuteModuleExports} exports exports
* @property {boolean} loaded is loaded
* @property {Error} [error] error
*/
/**
* @typedef {object} ExecuteModuleArgument
* @property {Module} module
* @property {ModuleObject=} moduleObject
* @property {ExecuteModuleObject=} moduleObject
* @property {any} preparedInfo
* @property {CodeGenerationResult} codeGenerationResult
*/
/** @typedef {((id: string) => ExecuteModuleExports) & { i?: ((options: ExecuteOptions) => void)[], c?: Record<string, ExecuteModuleObject> }} WebpackRequire */
/**
* @typedef {object} ExecuteOptions
* @property {string} [id] module id
* @property {ExecuteModuleObject} module module
* @property {WebpackRequire} require require function
*/
/**
* @typedef {object} ExecuteModuleContext
* @property {Map<string, { source: Source, info: AssetInfo }>} assets
* @property {Map<string, { source: Source, info: AssetInfo | undefined }>} assets
* @property {Chunk} chunk
* @property {ChunkGraph} chunkGraph
* @property {function(string): any=} __webpack_require__
* @property {WebpackRequire=} __webpack_require__
*/
/**
@ -373,7 +382,7 @@ const { isSourceEqual } = require("./util/source");
* @property {boolean=} forToString
*/
/** @typedef {Record<string, any> & KnownCreateStatsOptionsContext} CreateStatsOptionsContext */
/** @typedef {KnownCreateStatsOptionsContext & Record<string, any>} CreateStatsOptionsContext */
/** @typedef {{ module: Module, hash: string, runtime: RuntimeSpec, runtimes: RuntimeSpec[]}} CodeGenerationJob */
@ -523,11 +532,16 @@ class Compilation {
return {
...remainingTap,
type: "async",
/**
* @param {CompilationAssets} assets assets
* @param {(err?: Error | null, result?: void) => void} callback callback
* @returns {void}
*/
fn: (assets, callback) => {
try {
fn(assets);
} catch (err) {
return callback(err);
return callback(/** @type {Error} */ (err));
}
if (processedAssets !== undefined)
processedAssets.add(this.assets);
@ -558,21 +572,33 @@ class Compilation {
}
return {
...remainingTap,
/**
* @param {CompilationAssets} assets assets
* @param {(err?: Error | null, result?: void) => void} callback callback
* @returns {void}
*/
fn: (assets, callback) => {
fn(assets, err => {
if (err) return callback(err);
if (processedAssets !== undefined)
processedAssets.add(this.assets);
const newAssets = popNewAssets(assets);
if (newAssets !== undefined) {
this.hooks.processAdditionalAssets.callAsync(
newAssets,
callback
);
return;
fn(
assets,
/**
* @param {Error} err err
* @returns {void}
*/
err => {
if (err) return callback(err);
if (processedAssets !== undefined)
processedAssets.add(this.assets);
const newAssets = popNewAssets(assets);
if (newAssets !== undefined) {
this.hooks.processAdditionalAssets.callAsync(
newAssets,
callback
);
return;
}
callback();
}
callback();
});
);
}
};
case "promise":
@ -588,6 +614,10 @@ class Compilation {
}
return {
...remainingTap,
/**
* @param {CompilationAssets} assets assets
* @returns {Promise<CompilationAssets>} result
*/
fn: assets => {
const p = fn(assets);
if (!p || !p.then) return p;
@ -619,6 +649,7 @@ class Compilation {
* @returns {FakeHook<Pick<AsyncSeriesHook<T>, "tap" | "tapAsync" | "tapPromise" | "name">>} fake hook which redirects
*/
const createProcessAssetsHook = (name, stage, getArgs, code) => {
// @ts-expect-error For better compatibility we will avoid the optional type
if (!this._backCompat && code) return;
/**
* @param {string} reason reason
@ -627,6 +658,10 @@ class Compilation {
const errorMessage =
reason => `Can't automatically convert plugin using Compilation.hooks.${name} to Compilation.hooks.processAssets because ${reason}.
BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a single Compilation.hooks.processAssets hook.`;
/**
* @param {string | (import("tapable").TapOptions & { name: string; } & ProcessAssetsAdditionalOptions)} options hook options
* @returns {import("tapable").TapOptions & { name: string; } & ProcessAssetsAdditionalOptions} modified options
*/
const getOptions = options => {
if (typeof options === "string") options = { name: options };
if (options.stage) {
@ -999,9 +1034,9 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
this.moduleMemCaches2 = undefined;
this.moduleGraph = new ModuleGraph();
/** @type {ChunkGraph} */
this.chunkGraph = undefined;
this.chunkGraph = /** @type {TODO} */ (undefined);
/** @type {CodeGenerationResults} */
this.codeGenerationResults = undefined;
this.codeGenerationResults = /** @type {TODO} */ (undefined);
/** @type {AsyncQueue<Module, Module, Module>} */
this.processDependenciesQueue = new AsyncQueue({
@ -2017,7 +2052,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
/**
* @private
* @param {Module} originModule original module
* @param {Module | null} originModule original module
* @param {Module} module module
* @param {boolean} recursive true if make it recursive, otherwise false
* @param {boolean} checkCycle true if need to check cycle, otherwise false
@ -2034,14 +2069,20 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
// Check for cycles when a build is triggered inside another build
/** @type {Set<Module> | undefined} */
let creatingModuleDuringBuildSet;
if (checkCycle && this.buildQueue.isProcessing(originModule)) {
if (
checkCycle &&
this.buildQueue.isProcessing(/** @type {Module} */ (originModule))
) {
// Track build dependency
creatingModuleDuringBuildSet =
this.creatingModuleDuringBuild.get(originModule);
creatingModuleDuringBuildSet = this.creatingModuleDuringBuild.get(
/** @type {Module} */
(originModule)
);
if (creatingModuleDuringBuildSet === undefined) {
creatingModuleDuringBuildSet = new Set();
this.creatingModuleDuringBuild.set(
originModule,
/** @type {Module} */
(originModule),
creatingModuleDuringBuildSet
);
}
@ -2123,7 +2164,9 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
factory.create(
{
contextInfo: {
issuer: originModule ? originModule.nameForCondition() : "",
issuer: originModule
? /** @type {string} */ (originModule.nameForCondition())
: "",
issuerLayer: originModule ? originModule.layer : null,
compiler: /** @type {string} */ (this.compiler.name),
...contextInfo
@ -2320,9 +2363,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
continue;
}
if (entryData.options[key] === undefined) {
entryData.options[/** @type {TODO} */ (key)] = /** @type {TODO} */ (
options[key]
);
entryData.options[key] = /** @type {TODO} */ (options[key]);
} else {
return callback(
new WebpackError(
@ -3886,7 +3927,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
/**
* This method first looks to see if a name is provided for a new chunk,
* and if a named chunk with that name already exists, reuses that chunk instead.
* @param {string=} name optional chunk name to be provided
* @param {(string | null)=} name optional chunk name to be provided
* @returns {Chunk} create a chunk (invoked during seal event)
*/
addChunk(name) {
@ -5300,6 +5341,7 @@ This prevents using hashes of each other and should be avoided.`);
err => {
if (err) return callback(err);
/** @type {ExecuteModuleExports | undefined} */
let exports;
try {
const {
@ -5314,25 +5356,30 @@ This prevents using hashes of each other and should be avoided.`);
if (cached.error) throw cached.error;
return cached.exports;
}
const moduleArgument =
/** @type {TODO} */
(moduleArgumentsById).get(id);
return __webpack_require_module__(moduleArgument, id);
const moduleArgument = moduleArgumentsById.get(id);
return __webpack_require_module__(
/** @type {ExecuteModuleArgument} */
(moduleArgument),
id
);
};
/** @type {((options: ExecuteOptions) => void)[]} */
const interceptModuleExecution = (__webpack_require__[
RuntimeGlobals.interceptModuleExecution.replace(
`${RuntimeGlobals.require}.`,
""
/** @type {"i"} */
(
RuntimeGlobals.interceptModuleExecution.replace(
`${RuntimeGlobals.require}.`,
""
)
)
] = []);
/** @type {Record<string, ModuleObject>} */
] = /** @type {NonNullable<WebpackRequire["i"]>} */ ([]));
const moduleCache = (__webpack_require__[
RuntimeGlobals.moduleCache.replace(
`${RuntimeGlobals.require}.`,
""
/** @type {"c"} */ (
RuntimeGlobals.moduleCache.replace(
`${RuntimeGlobals.require}.`,
""
)
)
] = {});
] = /** @type {NonNullable<WebpackRequire["c"]>} */ ({}));
context.__webpack_require__ = __webpack_require__;
@ -5399,13 +5446,14 @@ This prevents using hashes of each other and should be avoided.`);
}
exports = __webpack_require__(module.identifier());
} catch (execErr) {
const { message, stack, module } = /** @type {TODO} */ (
execErr
);
const { message, stack, module } =
/** @type {WebpackError} */
(execErr);
const err = new WebpackError(
`Execution of module code from module graph (${module.readableIdentifier(
this.requestShortener
)}) failed: ${message}`
`Execution of module code from module graph (${
/** @type {Module} */
(module).readableIdentifier(this.requestShortener)
}) failed: ${message}`
);
err.stack = stack;
err.module = module;
@ -5495,7 +5543,7 @@ Compilation.prototype.factorizeModule = /**
(options: FactorizeModuleOptions & { factoryResult: true }, callback: ModuleFactoryResultCallback): void;
}} */ (
function (options, callback) {
this.factorizeQueue.add(options, callback);
this.factorizeQueue.add(options, /** @type {TODO} */ (callback));
}
);
/* eslint-enable jsdoc/require-asterisk-prefix */
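Several of the changes above hinge on the reworked `WebpackRequire` typedef: a callable that also carries the interception array (`i`) and the module cache (`c`). A standalone sketch of that callable-with-properties shape, using simplified stand-in types rather than webpack's actual runtime:

/** @typedef {{ id: string, exports: Record<string, unknown>, loaded: boolean }} Mod */
/** @typedef {((id: string) => Record<string, unknown>) & { i?: ((mod: Mod) => void)[], c?: Record<string, Mod> }} Require */

/** @type {Require} */
const demoRequire = id => {
	const cache = demoRequire.c || (demoRequire.c = {});
	const cached = cache[id];
	if (cached) return cached.exports;
	/** @type {Mod} */
	const mod = (cache[id] = { id, exports: {}, loaded: false });
	// interceptors registered on `i` can observe or replace the module object
	for (const interceptor of demoRequire.i || []) interceptor(mod);
	mod.loaded = true;
	return mod.exports;
};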

View File

@ -42,7 +42,6 @@ const { isSourceEqual } = require("./util/source");
/** @typedef {import("./Chunk")} Chunk */
/** @typedef {import("./Compilation").References} References */
/** @typedef {import("./Dependency")} Dependency */
/** @typedef {import("./FileSystemInfo").FileSystemInfoEntry} FileSystemInfoEntry */
/** @typedef {import("./Module")} Module */
/** @typedef {import("./Module").BuildInfo} BuildInfo */
/** @typedef {import("./config/target").PlatformTargetProperties} PlatformTargetProperties */
@ -51,6 +50,7 @@ const { isSourceEqual } = require("./util/source");
/** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */
/** @typedef {import("./util/fs").IntermediateFileSystem} IntermediateFileSystem */
/** @typedef {import("./util/fs").OutputFileSystem} OutputFileSystem */
/** @typedef {import("./util/fs").TimeInfoEntries} TimeInfoEntries */
/** @typedef {import("./util/fs").WatchFileSystem} WatchFileSystem */
/**
@ -259,9 +259,9 @@ class Compiler {
this.modifiedFiles = undefined;
/** @type {ReadonlySet<string> | undefined} */
this.removedFiles = undefined;
/** @type {ReadonlyMap<string, FileSystemInfoEntry | "ignore" | null> | undefined} */
/** @type {TimeInfoEntries | undefined} */
this.fileTimestamps = undefined;
/** @type {ReadonlyMap<string, FileSystemInfoEntry | "ignore" | null> | undefined} */
/** @type {TimeInfoEntries | undefined} */
this.contextTimestamps = undefined;
/** @type {number | undefined} */
this.fsStartTime = undefined;

View File

@ -13,7 +13,7 @@ const WebpackError = require("./WebpackError");
class EnvironmentPlugin {
/**
* @param {(string | string[] | Record<string, any>)[]} keys keys
* @param {(string | string[] | Record<string, EXPECTED_ANY>)[]} keys keys
*/
constructor(...keys) {
if (keys.length === 1 && Array.isArray(keys[0])) {
@ -22,7 +22,9 @@ class EnvironmentPlugin {
this.defaultValues = {};
} else if (keys.length === 1 && keys[0] && typeof keys[0] === "object") {
this.keys = Object.keys(keys[0]);
this.defaultValues = /** @type {Record<string, any>} */ (keys[0]);
this.defaultValues =
/** @type {Record<string, EXPECTED_ANY>} */
(keys[0]);
} else {
this.keys = /** @type {string[]} */ (keys);
this.defaultValues = {};

View File

@ -10,6 +10,7 @@ const SortableSet = require("./util/SortableSet");
const makeSerializable = require("./util/makeSerializable");
const { forEachRuntime } = require("./util/runtime");
/** @typedef {import("./Dependency")} Dependency */
/** @typedef {import("./Dependency").RuntimeSpec} RuntimeSpec */
/** @typedef {import("./Module")} Module */
/** @typedef {import("./ModuleGraph")} ModuleGraph */
@ -294,7 +295,7 @@ class ExportsInfo {
/**
* @param {boolean=} canMangle true, if exports can still be mangled (defaults to false)
* @param {Set<string>=} excludeExports list of unaffected exports
* @param {any=} targetKey use this as key for the target
* @param {Dependency=} targetKey use this as key for the target
* @param {ModuleGraphConnection=} targetModule set this module as target
* @param {number=} priority priority
* @returns {boolean} true, if this call changed something
@ -783,7 +784,7 @@ class ExportsInfo {
}
/**
* @param {{ otherProvided: any, otherCanMangleProvide: any, otherTerminalBinding: any, exports: any }} data data
* @param {RestoreProvidedData} data data
*/
restoreProvided({
otherProvided,
@ -818,10 +819,10 @@ class ExportsInfo {
/** @typedef {Map<string, RuntimeUsageStateType>} UsedInRuntime */
/** @typedef {{ module: Module, export: string[] }} TargetItemWithoutConnection */
/** @typedef {{ module: Module, connection: ModuleGraphConnection, export: string[] | undefined }} TargetItemWithConnection */
/** @typedef {{ module: Module, connection: ModuleGraphConnection, export: string[] | undefined }} TargetItem */
/** @typedef {Map<any, { connection: ModuleGraphConnection, export: string[], priority: number }>} Target */
/** @typedef {{ connection: ModuleGraphConnection, export: string[], priority: number }} TargetItem */
/** @typedef {Map<Dependency | undefined, TargetItem>} Target */
class ExportInfo {
/**
@ -1107,7 +1108,7 @@ class ExportInfo {
}
/**
* @param {any} key the key
* @param {Dependency} key the key
* @returns {boolean} true, if something has changed
*/
unsetTarget(key) {
@ -1120,7 +1121,7 @@ class ExportInfo {
}
/**
* @param {any} key the key
* @param {Dependency} key the key
* @param {ModuleGraphConnection} connection the target module if a single one
* @param {(string[] | null)=} exportName the exported name
* @param {number=} priority priority
@ -1251,7 +1252,7 @@ class ExportInfo {
/**
* @param {ModuleGraph} moduleGraph the module graph
* @param {function(TargetItem): boolean} resolveTargetFilter filter function to further resolve target
* @param {function(TargetItemWithConnection): boolean} resolveTargetFilter filter function to further resolve target
* @returns {ExportInfo | ExportsInfo | undefined} the terminal binding export(s) info if known
*/
getTerminalBinding(moduleGraph, resolveTargetFilter = RETURNS_TRUE) {
@ -1344,8 +1345,8 @@ class ExportInfo {
/**
* @param {ModuleGraph} moduleGraph the module graph
* @param {function(TargetItem): boolean} resolveTargetFilter filter function to further resolve target
* @returns {TargetItem | undefined} the target
* @param {function(TargetItemWithConnection): boolean} resolveTargetFilter filter function to further resolve target
* @returns {TargetItemWithConnection | undefined} the target
*/
getTarget(moduleGraph, resolveTargetFilter = RETURNS_TRUE) {
const result = this._getTarget(moduleGraph, resolveTargetFilter, undefined);
@ -1355,15 +1356,15 @@ class ExportInfo {
/**
* @param {ModuleGraph} moduleGraph the module graph
* @param {function(TargetItem): boolean} resolveTargetFilter filter function to further resolve target
* @param {function(TargetItemWithConnection): boolean} resolveTargetFilter filter function to further resolve target
* @param {Set<ExportInfo> | undefined} alreadyVisited set of already visited export info to avoid circular references
* @returns {TargetItem | CIRCULAR | undefined} the target
* @returns {TargetItemWithConnection | CIRCULAR | undefined} the target
*/
_getTarget(moduleGraph, resolveTargetFilter, alreadyVisited) {
/**
* @param {TargetItem | null} inputTarget unresolved target
* @param {TargetItem | undefined | null} inputTarget unresolved target
* @param {Set<ExportInfo>} alreadyVisited set of already visited export info to avoid circular references
* @returns {TargetItem | CIRCULAR | null} resolved target
* @returns {TargetItemWithConnection | CIRCULAR | null} resolved target
*/
const resolveTarget = (inputTarget, alreadyVisited) => {
if (!inputTarget) return null;
@ -1374,7 +1375,7 @@ class ExportInfo {
export: undefined
};
}
/** @type {TargetItem} */
/** @type {TargetItemWithConnection} */
let target = {
module: inputTarget.connection.module,
connection: inputTarget.connection,
@ -1385,7 +1386,7 @@ class ExportInfo {
for (;;) {
const exportsInfo = moduleGraph.getExportsInfo(target.module);
const exportInfo = exportsInfo.getExportInfo(
/** @type {NonNullable<TargetItem["export"]>} */
/** @type {NonNullable<TargetItemWithConnection["export"]>} */
(target.export)[0]
);
if (!exportInfo) return target;
@ -1398,7 +1399,7 @@ class ExportInfo {
if (newTarget === CIRCULAR) return CIRCULAR;
if (!newTarget) return target;
if (
/** @type {NonNullable<TargetItem["export"]>} */
/** @type {NonNullable<TargetItemWithConnection["export"]>} */
(target.export).length === 1
) {
target = newTarget;
@ -1409,10 +1410,10 @@ class ExportInfo {
connection: newTarget.connection,
export: newTarget.export
? newTarget.export.concat(
/** @type {NonNullable<TargetItem["export"]>} */
/** @type {NonNullable<TargetItemWithConnection["export"]>} */
(target.export).slice(1)
)
: /** @type {NonNullable<TargetItem["export"]>} */
: /** @type {NonNullable<TargetItemWithConnection["export"]>} */
(target.export).slice(1)
};
}
@ -1453,18 +1454,20 @@ class ExportInfo {
/**
* Move the target forward as long as resolveTargetFilter is fulfilled
* @param {ModuleGraph} moduleGraph the module graph
* @param {function(TargetItem): boolean} resolveTargetFilter filter function to further resolve target
* @param {function(TargetItem): ModuleGraphConnection=} updateOriginalConnection updates the original connection instead of using the target connection
* @returns {TargetItem | undefined} the resolved target when moved
* @param {function(TargetItemWithConnection): boolean} resolveTargetFilter filter function to further resolve target
* @param {function(TargetItemWithConnection): ModuleGraphConnection=} updateOriginalConnection updates the original connection instead of using the target connection
* @returns {TargetItemWithConnection | undefined} the resolved target when moved
*/
moveTarget(moduleGraph, resolveTargetFilter, updateOriginalConnection) {
const target = this._getTarget(moduleGraph, resolveTargetFilter, undefined);
if (target === CIRCULAR) return;
if (!target) return;
const originalTarget = /** @type {TargetItem} */ (
/** @type {Target} */
(this._getMaxTarget()).values().next().value
);
const originalTarget =
/** @type {TargetItem} */
(
/** @type {Target} */
(this._getMaxTarget()).values().next().value
);
if (
originalTarget.connection === target.connection &&
originalTarget.export === target.export
@ -1478,7 +1481,9 @@ class ExportInfo {
connection: updateOriginalConnection
? updateOriginalConnection(target)
: target.connection,
export: /** @type {NonNullable<TargetItem["export"]>} */ (target.export),
export: /** @type {NonNullable<TargetItemWithConnection["export"]>} */ (
target.export
),
priority: 0
});
return target;
@ -1643,3 +1648,4 @@ class ExportInfo {
module.exports = ExportsInfo;
module.exports.ExportInfo = ExportInfo;
module.exports.UsageState = UsageState;
module.exports.RestoreProvidedData = RestoreProvidedData;
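The typedef split above separates the raw entries kept in the `Target` map (now `TargetItem`, keyed by the dependency that registered them) from the resolved shape returned by `getTarget` (`TargetItemWithConnection`). A simplified sketch, with stand-in types rather than webpack's classes, of selecting the highest-priority entry from such a map:

/** @typedef {{ connection: object, export: string[], priority: number }} Item */
/**
 * @param {Map<unknown, Item>} target map of target entries keyed by dependency
 * @returns {Item | undefined} the entry with the highest priority, if any
 */
const getMaxPriorityItem = target => {
	/** @type {Item | undefined} */
	let max;
	for (const item of target.values()) {
		if (max === undefined || item.priority > max.priority) max = item;
	}
	return max;
};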

View File

@ -73,12 +73,14 @@ const INVALID = Symbol("invalid");
* @property {string=} timestampHash
*/
/** @typedef {Set<string>} Symlinks */
/**
* @typedef {object} ContextFileSystemInfoEntry
* @property {number} safeTime
* @property {string=} timestampHash
* @property {ResolvedContextFileSystemInfoEntry=} resolved
* @property {Set<string>=} symlinks
* @property {Symlinks=} symlinks
*/
/**
@ -95,8 +97,6 @@ const INVALID = Symbol("invalid");
* @property {string} hash
*/
/** @typedef {Set<string>} Symlinks */
/**
* @typedef {object} ContextTimestampAndHash
* @property {number} safeTime
@ -123,16 +123,26 @@ const INVALID = Symbol("invalid");
* @property {Set<SnapshotOptimizationEntry> | undefined} children
*/
/** @typedef {Map<string, string | false | undefined>} ResolveResults */
/** @typedef {Set<string>} Files */
/** @typedef {Set<string>} Directories */
/** @typedef {Set<string>} Missing */
/**
* @typedef {object} ResolveDependencies
* @property {Files} files list of files
* @property {Directories} directories list of directories
* @property {Missing} missing list of missing entries
*/
/**
* @typedef {object} ResolveBuildDependenciesResult
* @property {Set<string>} files list of files
* @property {Set<string>} directories list of directories
* @property {Set<string>} missing list of missing entries
* @property {Map<string, string | false | undefined>} resolveResults stored resolve results
* @property {object} resolveDependencies dependencies of the resolving
* @property {Set<string>} resolveDependencies.files list of files
* @property {Set<string>} resolveDependencies.directories list of directories
* @property {Set<string>} resolveDependencies.missing list of missing entries
* @property {Files} files list of files
* @property {Directories} directories list of directories
* @property {Missing} missing list of missing entries
* @property {ResolveResults} resolveResults stored resolve results
* @property {ResolveDependencies} resolveDependencies dependencies of the resolving
*/
/**
@ -1031,6 +1041,10 @@ const addAll = (source, target) => {
/** @typedef {Set<string>} LoggedPaths */
/** @typedef {FileSystemInfoEntry | "ignore" | null} FileTimestamp */
/** @typedef {ContextFileSystemInfoEntry | "ignore" | null} ContextTimestamp */
/** @typedef {ResolvedContextFileSystemInfoEntry | "ignore" | null} ResolvedContextTimestamp */
/**
* Used to access information about the filesystem in a cached way
*/
@ -1127,13 +1141,13 @@ class FileSystemInfo {
false,
true
);
/** @type {StackedCacheMap<string, FileSystemInfoEntry | "ignore" | null>} */
/** @type {StackedCacheMap<string, FileTimestamp>} */
this._fileTimestamps = new StackedCacheMap();
/** @type {Map<string, string | null>} */
this._fileHashes = new Map();
/** @type {Map<string, TimestampAndHash | string>} */
this._fileTshs = new Map();
/** @type {StackedCacheMap<string, ContextFileSystemInfoEntry | "ignore" | null>} */
/** @type {StackedCacheMap<string, ContextTimestamp>} */
this._contextTimestamps = new StackedCacheMap();
/** @type {Map<string, ContextHash>} */
this._contextHashes = new Map();
@ -1184,28 +1198,34 @@ class FileSystemInfo {
processor: this._getManagedItemDirectoryInfo.bind(this)
});
const _unmanagedPaths = Array.from(unmanagedPaths);
this.unmanagedPathsWithSlash = /** @type {string[]} */ (
_unmanagedPaths.filter(p => typeof p === "string")
).map(p => join(fs, p, "_").slice(0, -1));
this.unmanagedPathsRegExps = /** @type {RegExp[]} */ (
_unmanagedPaths.filter(p => typeof p !== "string")
);
this.unmanagedPathsWithSlash =
/** @type {string[]} */
(_unmanagedPaths.filter(p => typeof p === "string")).map(p =>
join(fs, p, "_").slice(0, -1)
);
this.unmanagedPathsRegExps =
/** @type {RegExp[]} */
(_unmanagedPaths.filter(p => typeof p !== "string"));
this.managedPaths = Array.from(managedPaths);
this.managedPathsWithSlash = /** @type {string[]} */ (
this.managedPaths.filter(p => typeof p === "string")
).map(p => join(fs, p, "_").slice(0, -1));
this.managedPathsWithSlash =
/** @type {string[]} */
(this.managedPaths.filter(p => typeof p === "string")).map(p =>
join(fs, p, "_").slice(0, -1)
);
this.managedPathsRegExps = /** @type {RegExp[]} */ (
this.managedPaths.filter(p => typeof p !== "string")
);
this.managedPathsRegExps =
/** @type {RegExp[]} */
(this.managedPaths.filter(p => typeof p !== "string"));
this.immutablePaths = Array.from(immutablePaths);
this.immutablePathsWithSlash = /** @type {string[]} */ (
this.immutablePaths.filter(p => typeof p === "string")
).map(p => join(fs, p, "_").slice(0, -1));
this.immutablePathsRegExps = /** @type {RegExp[]} */ (
this.immutablePaths.filter(p => typeof p !== "string")
);
this.immutablePathsWithSlash =
/** @type {string[]} */
(this.immutablePaths.filter(p => typeof p === "string")).map(p =>
join(fs, p, "_").slice(0, -1)
);
this.immutablePathsRegExps =
/** @type {RegExp[]} */
(this.immutablePaths.filter(p => typeof p !== "string"));
this._cachedDeprecatedFileTimestamps = undefined;
this._cachedDeprecatedContextTimestamps = undefined;
@ -1310,6 +1330,7 @@ class FileSystemInfo {
}
/**
* @private
* @param {string} path path
* @param {string} reason reason
* @param {any[]} args arguments
@ -1366,7 +1387,7 @@ class FileSystemInfo {
}
/**
* @param {ReadonlyMap<string, FileSystemInfoEntry | "ignore" | null>} map timestamps
* @param {ReadonlyMap<string, FileTimestamp>} map timestamps
* @param {boolean=} immutable if 'map' is immutable and FileSystemInfo can keep referencing it
* @returns {void}
*/
@ -1376,7 +1397,7 @@ class FileSystemInfo {
}
/**
* @param {ReadonlyMap<string, FileSystemInfoEntry | "ignore" | null>} map timestamps
* @param {ReadonlyMap<string, ContextTimestamp>} map timestamps
* @param {boolean=} immutable if 'map' is immutable and FileSystemInfo can keep referencing it
* @returns {void}
*/
@ -1387,7 +1408,7 @@ class FileSystemInfo {
/**
* @param {string} path file path
* @param {function((WebpackError | null)=, (FileSystemInfoEntry | "ignore" | null)=): void} callback callback function
* @param {function((WebpackError | null)=, FileTimestamp=): void} callback callback function
* @returns {void}
*/
getFileTimestamp(path, callback) {
@ -1398,7 +1419,7 @@ class FileSystemInfo {
/**
* @param {string} path context path
* @param {function((WebpackError | null)=, (ResolvedContextFileSystemInfoEntry | "ignore" | null)=): void} callback callback function
* @param {function((WebpackError | null)=, ResolvedContextTimestamp=): void} callback callback function
* @returns {void}
*/
getContextTimestamp(path, callback) {
@ -1423,8 +1444,9 @@ class FileSystemInfo {
}
/**
* @private
* @param {string} path context path
* @param {function((WebpackError | null)=, (ContextFileSystemInfoEntry | "ignore" | null)=): void} callback callback function
* @param {function((WebpackError | null)=, ContextTimestamp=): void} callback callback function
* @returns {void}
*/
_getUnresolvedContextTimestamp(path, callback) {
@ -1468,6 +1490,7 @@ class FileSystemInfo {
}
/**
* @private
* @param {string} path context path
* @param {function((WebpackError | null)=, (ContextHash | null)=): void} callback callback function
* @returns {void}
@ -1500,6 +1523,7 @@ class FileSystemInfo {
}
/**
* @private
* @param {string} path context path
* @param {function((WebpackError | null)=, (ContextTimestampAndHash | null)=): void} callback callback function
* @returns {void}
@ -1548,24 +1572,25 @@ class FileSystemInfo {
const { resolveContext, resolveEsm, resolveCjs, resolveCjsAsChild } =
this._createBuildDependenciesResolvers();
/** @type {Set<string>} */
/** @type {Files} */
const files = new Set();
/** @type {Set<string>} */
/** @type {Symlinks} */
const fileSymlinks = new Set();
/** @type {Set<string>} */
/** @type {Directories} */
const directories = new Set();
/** @type {Set<string>} */
/** @type {Symlinks} */
const directorySymlinks = new Set();
/** @type {Set<string>} */
/** @type {Missing} */
const missing = new Set();
/** @type {Set<string>} */
/** @type {ResolveDependencies["files"]} */
const resolveFiles = new Set();
/** @type {Set<string>} */
/** @type {ResolveDependencies["directories"]} */
const resolveDirectories = new Set();
/** @type {Set<string>} */
/** @type {ResolveDependencies["missing"]} */
const resolveMissing = new Set();
/** @type {Map<string, string | false | undefined>} */
/** @type {ResolveResults} */
const resolveResults = new Map();
/** @type {Set<string>} */
const invalidResolveResults = new Set();
const resolverContext = {
fileDependencies: resolveFiles,
@ -2059,7 +2084,7 @@ class FileSystemInfo {
}
/**
* @param {Map<string, string | false>} resolveResults results from resolving
* @param {ResolveResults} resolveResults results from resolving
* @param {function((Error | null)=, boolean=): void} callback callback with true when resolveResults resolve the same way
* @returns {void}
*/
@ -2234,7 +2259,7 @@ class FileSystemInfo {
};
/**
* @param {string} path path
* @param {Set<string>} managedSet managed set
* @param {ManagedFiles} managedSet managed set
* @returns {boolean} true when managed
*/
const checkManaged = (path, managedSet) => {
@ -2285,6 +2310,7 @@ class FileSystemInfo {
* @returns {Set<string>} result
*/
const captureNonManaged = (items, managedSet) => {
/** @type {Set<string>} */
const capturedItems = new Set();
for (const path of items) {
if (!checkManaged(path, managedSet)) capturedItems.add(path);
@ -2292,7 +2318,7 @@ class FileSystemInfo {
return capturedItems;
};
/**
* @param {Set<string>} capturedFiles captured files
* @param {ManagedFiles} capturedFiles captured files
*/
const processCapturedFiles = capturedFiles => {
switch (mode) {
@ -2380,7 +2406,7 @@ class FileSystemInfo {
processCapturedFiles(captureNonManaged(files, managedFiles));
}
/**
* @param {Set<string>} capturedDirectories captured directories
* @param {ManagedContexts} capturedDirectories captured directories
*/
const processCapturedDirectories = capturedDirectories => {
switch (mode) {
@ -2485,7 +2511,7 @@ class FileSystemInfo {
jobs++;
/**
* @param {(Error | null)=} err error
* @param {(FileSystemInfoEntry | "ignore" | null)=} entry entry
* @param {FileTimestamp=} entry entry
* @returns {void}
*/
const callback = (err, entry) => {
@ -2525,7 +2551,7 @@ class FileSystemInfo {
);
}
/**
* @param {Set<string>} capturedMissing captured missing
* @param {ManagedMissing} capturedMissing captured missing
*/
const processCapturedMissing = capturedMissing => {
this._missingExistenceOptimization.optimize(snapshot, capturedMissing);
@ -2716,6 +2742,7 @@ class FileSystemInfo {
}
/**
* @private
* @param {Snapshot} snapshot the snapshot made
* @param {function((WebpackError | null)=, boolean=): void} callback callback function
* @returns {void}
@ -3019,7 +3046,7 @@ class FileSystemInfo {
jobs++;
/**
* @param {(WebpackError | null)=} err error
* @param {(ResolvedContextFileSystemInfoEntry | "ignore" | null)=} entry entry
* @param {ResolvedContextTimestamp=} entry entry
* @returns {void}
*/
const callback = (err, entry) => {
@ -3123,7 +3150,7 @@ class FileSystemInfo {
jobs++;
/**
* @param {(WebpackError | null)=} err error
* @param {(ResolvedContextFileSystemInfoEntry | "ignore" | null)=} entry entry
* @param {ResolvedContextTimestamp=} entry entry
* @returns {void}
*/
const callback = (err, entry) => {
@ -3222,8 +3249,8 @@ class FileSystemInfo {
}
/**
* @type {Processor<string, FileSystemInfoEntry>}
* @private
* @type {Processor<string, FileSystemInfoEntry>}
*/
_readFileTimestamp(path, callback) {
this.fs.stat(path, (err, _stat) => {
@ -3261,8 +3288,8 @@ class FileSystemInfo {
}
/**
* @type {Processor<string, string>}
* @private
* @type {Processor<string, string>}
*/
_readFileHash(path, callback) {
this.fs.readFile(path, (err, content) => {
@ -3297,9 +3324,9 @@ class FileSystemInfo {
}
/**
* @private
* @param {string} path path
* @param {function(WebpackError | null, TimestampAndHash=) : void} callback callback
* @private
*/
_getFileTimestampAndHash(path, callback) {
/**
@ -3349,6 +3376,7 @@ class FileSystemInfo {
}
/**
* @private
* @template T
* @template ItemType
* @param {object} options options
@ -3460,8 +3488,8 @@ class FileSystemInfo {
}
/**
* @type {Processor<string, ContextFileSystemInfoEntry>}
* @private
* @type {Processor<string, ContextFileSystemInfoEntry>}
*/
_readContextTimestamp(path, callback) {
this._readContext(
@ -3575,8 +3603,9 @@ class FileSystemInfo {
}
/**
* @private
* @param {ContextFileSystemInfoEntry} entry entry
* @param {function((WebpackError | null)=, (ResolvedContextFileSystemInfoEntry | "ignore" | null)=): void} callback callback
* @param {function((WebpackError | null)=, ResolvedContextTimestamp=): void} callback callback
* @returns {void}
*/
_resolveContextTimestamp(entry, callback) {
@ -3624,8 +3653,8 @@ class FileSystemInfo {
}
/**
* @type {Processor<string, ContextHash>}
* @private
* @type {Processor<string, ContextHash>}
*/
_readContextHash(path, callback) {
this._readContext(
@ -3694,6 +3723,7 @@ class FileSystemInfo {
}
/**
* @private
* @param {ContextHash} entry context hash
* @param {function(WebpackError | null, string=): void} callback callback
* @returns {void}
@ -3733,12 +3763,12 @@ class FileSystemInfo {
}
/**
* @type {Processor<string, ContextTimestampAndHash>}
* @private
* @type {Processor<string, ContextTimestampAndHash>}
*/
_readContextTimestampAndHash(path, callback) {
/**
* @param {ContextFileSystemInfoEntry | "ignore" | null} timestamp timestamp
* @param {ContextTimestamp} timestamp timestamp
* @param {ContextHash} hash hash
*/
const finalize = (timestamp, hash) => {
@ -3864,6 +3894,7 @@ class FileSystemInfo {
}
/**
* @private
* @param {ContextTimestampAndHash} entry entry
* @param {ProcessorCallback<ResolvedContextTimestampAndHash>} callback callback
* @returns {void}
@ -3923,8 +3954,8 @@ class FileSystemInfo {
}
/**
* @type {Processor<string, Set<string>>}
* @private
* @type {Processor<string, Set<string>>}
*/
_getManagedItemDirectoryInfo(path, callback) {
this.fs.readdir(path, (err, elements) => {
@ -3944,8 +3975,8 @@ class FileSystemInfo {
}
/**
* @type {Processor<string, string>}
* @private
* @type {Processor<string, string>}
*/
_getManagedItemInfo(path, callback) {
const dir = dirname(this.fs, path);
@ -4019,6 +4050,7 @@ class FileSystemInfo {
getDeprecatedFileTimestamps() {
if (this._cachedDeprecatedFileTimestamps !== undefined)
return this._cachedDeprecatedFileTimestamps;
/** @type {Map<string, number | null>} */
const map = new Map();
for (const [path, info] of this._fileTimestamps) {
if (info) map.set(path, typeof info === "object" ? info.safeTime : null);
@ -4029,6 +4061,7 @@ class FileSystemInfo {
getDeprecatedContextTimestamps() {
if (this._cachedDeprecatedContextTimestamps !== undefined)
return this._cachedDeprecatedContextTimestamps;
/** @type {Map<string, number | null>} */
const map = new Map();
for (const [path, info] of this._contextTimestamps) {
if (info) map.set(path, typeof info === "object" ? info.safeTime : null);

View File

@ -14,6 +14,7 @@ const Queue = require("./util/Queue");
/** @typedef {import("./Dependency").ExportSpec} ExportSpec */
/** @typedef {import("./Dependency").ExportsSpec} ExportsSpec */
/** @typedef {import("./ExportsInfo")} ExportsInfo */
/** @typedef {import("./ExportsInfo").RestoreProvidedData} RestoreProvidedData */
/** @typedef {import("./Module")} Module */
/** @typedef {import("./Module").BuildInfo} BuildInfo */
@ -400,7 +401,7 @@ class FlagDependencyExportsPlugin {
}
);
/** @type {WeakMap<Module, any>} */
/** @type {WeakMap<Module, RestoreProvidedData>} */
const providedExportsCache = new WeakMap();
compilation.hooks.rebuildModule.tap(PLUGIN_NAME, module => {
providedExportsCache.set(
@ -409,9 +410,10 @@ class FlagDependencyExportsPlugin {
);
});
compilation.hooks.finishRebuildingModule.tap(PLUGIN_NAME, module => {
moduleGraph
.getExportsInfo(module)
.restoreProvided(providedExportsCache.get(module));
moduleGraph.getExportsInfo(module).restoreProvided(
/** @type {RestoreProvidedData} */
(providedExportsCache.get(module))
);
});
});
}

View File

@ -455,7 +455,7 @@ class NormalModuleFactory extends ModuleFactory {
matchResourceData = {
resource: matchResource,
...cacheParseResource(matchResource)
.../** @type {TODO} */ (cacheParseResource(matchResource))
};
requestWithoutMatchResource = request.slice(
matchResourceMatch[0].length
@ -622,10 +622,10 @@ class NormalModuleFactory extends ModuleFactory {
] === "object" &&
settings[/** @type {keyof ModuleSettings} */ (r.type)] !== null
) {
const type = /** @type {TODO} */ (r.type);
const type = /** @type {keyof ModuleSettings} */ (r.type);
settings[type] = cachedCleverMerge(settings[type], r.value);
} else {
const type = /** @type {TODO} */ (r.type);
const type = /** @type {keyof ModuleSettings} */ (r.type);
settings[type] = r.value;
}
}
@ -753,7 +753,7 @@ class NormalModuleFactory extends ModuleFactory {
resourceData = {
resource: unresolvedResource,
data: {},
...cacheParseResource(unresolvedResource)
.../** @type {TODO} */ (cacheParseResource(unresolvedResource))
};
continueCallback();
}
@ -787,7 +787,8 @@ class NormalModuleFactory extends ModuleFactory {
data:
/** @type {ResolveRequest} */
(resolvedResourceResolveData),
...cacheParseResource(resolvedResource)
.../** @type {TODO} */
(cacheParseResource(resolvedResource))
};
}
continueCallback();

View File

@ -21,6 +21,7 @@ const {
/** @typedef {import("../Cache").Etag} Etag */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../FileSystemInfo").ResolveBuildDependenciesResult} ResolveBuildDependenciesResult */
/** @typedef {import("../FileSystemInfo").ResolveResults} ResolveResults */
/** @typedef {import("../FileSystemInfo").Snapshot} Snapshot */
/** @typedef {import("../logging/Logger").Logger} Logger */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} ObjectDeserializerContext */
@ -28,14 +29,13 @@ const {
/** @typedef {typeof import("../util/Hash")} Hash */
/** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */
/** @typedef {Map<string, string | false>} ResolveResults */
/** @typedef {Set<string>} Items */
/** @typedef {Set<string>} BuildDependencies */
/** @typedef {Map<string, PackItemInfo>} ItemInfo */
class PackContainer {
/**
* @param {object} data stored data
* @param {Pack} data stored data
* @param {string} version version identifier
* @param {Snapshot} buildSnapshot snapshot of all build dependencies
* @param {BuildDependencies} buildDependencies list of all unresolved build dependencies captured
@ -1283,7 +1283,7 @@ class PackFileCacheStrategy {
logger.timeEnd("check build dependencies");
if (buildSnapshotValid && resolveValid) {
logger.time("restore cache content metadata");
const d = packContainer.data();
const d = /** @type {TODO} */ (packContainer).data();
logger.timeEnd("restore cache content metadata");
return d;
}
@ -1379,104 +1379,110 @@ class PackFileCacheStrategy {
newBuildDependencies
).join(", ")})`
);
promise = new Promise((resolve, reject) => {
this.logger.time("resolve build dependencies");
this.fileSystemInfo.resolveBuildDependencies(
this.context,
newBuildDependencies,
(err, result) => {
this.logger.timeEnd("resolve build dependencies");
if (err) return reject(err);
promise = new Promise(
/**
* @param {(value?: undefined) => void} resolve resolve
* @param {(reason?: Error) => void} reject reject
*/
(resolve, reject) => {
this.logger.time("resolve build dependencies");
this.fileSystemInfo.resolveBuildDependencies(
this.context,
newBuildDependencies,
(err, result) => {
this.logger.timeEnd("resolve build dependencies");
if (err) return reject(err);
this.logger.time("snapshot build dependencies");
const {
files,
directories,
missing,
resolveResults,
resolveDependencies
} = /** @type {ResolveBuildDependenciesResult} */ (result);
if (this.resolveResults) {
for (const [key, value] of resolveResults) {
this.resolveResults.set(key, value);
this.logger.time("snapshot build dependencies");
const {
files,
directories,
missing,
resolveResults,
resolveDependencies
} = /** @type {ResolveBuildDependenciesResult} */ (result);
if (this.resolveResults) {
for (const [key, value] of resolveResults) {
this.resolveResults.set(key, value);
}
} else {
this.resolveResults = resolveResults;
}
} else {
this.resolveResults = resolveResults;
}
if (reportProgress) {
reportProgress(
0.6,
"snapshot build dependencies",
"resolving"
);
}
this.fileSystemInfo.createSnapshot(
undefined,
resolveDependencies.files,
resolveDependencies.directories,
resolveDependencies.missing,
this.snapshot.resolveBuildDependencies,
(err, snapshot) => {
if (err) {
this.logger.timeEnd("snapshot build dependencies");
return reject(err);
}
if (!snapshot) {
this.logger.timeEnd("snapshot build dependencies");
return reject(
new Error("Unable to snapshot resolve dependencies")
);
}
if (this.resolveBuildDependenciesSnapshot) {
this.resolveBuildDependenciesSnapshot =
this.fileSystemInfo.mergeSnapshots(
this.resolveBuildDependenciesSnapshot,
snapshot
);
} else {
this.resolveBuildDependenciesSnapshot = snapshot;
}
if (reportProgress) {
reportProgress(
0.7,
"snapshot build dependencies",
"modules"
);
}
this.fileSystemInfo.createSnapshot(
undefined,
files,
directories,
missing,
this.snapshot.buildDependencies,
(err, snapshot) => {
this.logger.timeEnd("snapshot build dependencies");
if (err) return reject(err);
if (!snapshot) {
return reject(
new Error("Unable to snapshot build dependencies")
);
}
this.logger.debug("Captured build dependencies");
if (this.buildSnapshot) {
this.buildSnapshot =
this.fileSystemInfo.mergeSnapshots(
this.buildSnapshot,
snapshot
);
} else {
this.buildSnapshot = snapshot;
}
resolve();
}
if (reportProgress) {
reportProgress(
0.6,
"snapshot build dependencies",
"resolving"
);
}
);
}
);
});
this.fileSystemInfo.createSnapshot(
undefined,
resolveDependencies.files,
resolveDependencies.directories,
resolveDependencies.missing,
this.snapshot.resolveBuildDependencies,
(err, snapshot) => {
if (err) {
this.logger.timeEnd("snapshot build dependencies");
return reject(err);
}
if (!snapshot) {
this.logger.timeEnd("snapshot build dependencies");
return reject(
new Error("Unable to snapshot resolve dependencies")
);
}
if (this.resolveBuildDependenciesSnapshot) {
this.resolveBuildDependenciesSnapshot =
this.fileSystemInfo.mergeSnapshots(
this.resolveBuildDependenciesSnapshot,
snapshot
);
} else {
this.resolveBuildDependenciesSnapshot = snapshot;
}
if (reportProgress) {
reportProgress(
0.7,
"snapshot build dependencies",
"modules"
);
}
this.fileSystemInfo.createSnapshot(
undefined,
files,
directories,
missing,
this.snapshot.buildDependencies,
(err, snapshot) => {
this.logger.timeEnd("snapshot build dependencies");
if (err) return reject(err);
if (!snapshot) {
return reject(
new Error("Unable to snapshot build dependencies")
);
}
this.logger.debug("Captured build dependencies");
if (this.buildSnapshot) {
this.buildSnapshot =
this.fileSystemInfo.mergeSnapshots(
this.buildSnapshot,
snapshot
);
} else {
this.buildSnapshot = snapshot;
}
resolve();
}
);
}
);
}
);
}
);
} else {
promise = Promise.resolve();
}

View File

@ -115,11 +115,14 @@ class ExportMode {
}
}
/** @typedef {string[]} Names */
/** @typedef {number[]} DependencyIndices */
/**
* @param {ModuleGraph} moduleGraph module graph
* @param {TODO} dependencies dependencies
* @param {TODO=} additionalDependency additional dependency
* @returns {TODO} result
* @param {Dependency=} additionalDependency additional dependency
* @returns {{ names: Names, dependencyIndices: DependencyIndices }} result
*/
const determineExportAssignments = (
moduleGraph,
@ -157,9 +160,6 @@ const determineExportAssignments = (
return { names: Array.from(names), dependencyIndices };
};
/** @typedef {string[]} Names */
/** @typedef {number[]} DependencyIndices */
/**
* @param {object} options options
* @param {Names} options.names names

View File

@ -319,9 +319,9 @@ class LazyCompilationDependencyFactory extends ModuleFactory {
* @returns {void}
*/
create(data, callback) {
const dependency = /** @type {LazyCompilationDependency} */ (
data.dependencies[0]
);
const dependency =
/** @type {LazyCompilationDependency} */
(data.dependencies[0]);
callback(null, {
module: dependency.proxyModule.originalModule
});
@ -386,7 +386,7 @@ class LazyCompilationPlugin {
(compilation, { normalModuleFactory }) => {
normalModuleFactory.hooks.module.tap(
"LazyCompilationPlugin",
(originalModule, createData, resolveData) => {
(module, createData, resolveData) => {
if (
resolveData.dependencies.every(dep =>
HMR_DEPENDENCY_TYPES.has(dep.type)
@ -407,7 +407,7 @@ class LazyCompilationPlugin {
hmrDep.request
)
);
if (!isReferringToDynamicImport) return;
if (!isReferringToDynamicImport) return module;
} else if (
!resolveData.dependencies.every(
dep =>
@ -418,21 +418,21 @@ class LazyCompilationPlugin {
(this.entries && dep.type === "entry")
)
)
return;
return module;
if (
/webpack[/\\]hot[/\\]|webpack-dev-server[/\\]client|webpack-hot-middleware[/\\]client/.test(
resolveData.request
) ||
!checkTest(this.test, originalModule)
!checkTest(this.test, module)
)
return;
const moduleInfo = backend.module(originalModule);
if (!moduleInfo) return;
return module;
const moduleInfo = backend.module(module);
if (!moduleInfo) return module;
const { client, data, active } = moduleInfo;
return new LazyCompilationProxyModule(
compiler.context,
originalModule,
module,
resolveData.request,
client,
data,

View File

@ -56,7 +56,7 @@ const JavascriptParser = require("./JavascriptParser");
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../CodeGenerationResults")} CodeGenerationResults */
/** @typedef {import("../Compilation").ChunkHashContext} ChunkHashContext */
/** @typedef {import("../Compilation").ModuleObject} ModuleObject */
/** @typedef {import("../Compilation").ExecuteModuleObject} ExecuteModuleObject */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../DependencyTemplates")} DependencyTemplates */
/** @typedef {import("../Entrypoint")} Entrypoint */
@ -250,7 +250,7 @@ class JavascriptModulesPlugin {
constructor(options = {}) {
this.options = options;
/** @type {WeakMap<Source, TODO>} */
/** @type {WeakMap<Source, { source: Source, needModule:boolean, needExports: boolean, needRequire: boolean, needThisAsExports: boolean, needStrict: boolean | undefined }>} */
this._moduleFactoryCache = new WeakMap();
}
@ -510,7 +510,7 @@ class JavascriptModulesPlugin {
);
const moduleObject =
/** @type {ModuleObject} */
/** @type {ExecuteModuleObject} */
(options.moduleObject);
try {

View File

@ -579,9 +579,9 @@ class JavascriptParser extends Parser {
});
this.sourceType = sourceType;
/** @type {ScopeInfo} */
this.scope = undefined;
this.scope = /** @type {TODO} */ (undefined);
/** @type {ParserState} */
this.state = undefined;
this.state = /** @type {TODO} */ (undefined);
/** @type {Comment[] | undefined} */
this.comments = undefined;
/** @type {Set<number> | undefined} */
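The `/** @type {TODO} */ (undefined)` assignments above follow a pattern used throughout this commit: a field is declared with a non-nullable type but is only populated later, so the initial `undefined` has to be cast past the strict check. A small self-contained sketch of the same idea (using a plain `any` cast here; webpack uses its `TODO` alias):

class Example {
	constructor() {
		/** @type {string} */
		this.name = /** @type {any} */ (undefined); // assigned for real in init()
	}
	/**
	 * @param {string} name name
	 */
	init(name) {
		this.name = name;
	}
}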

View File

@ -421,7 +421,7 @@ const normalizeName = name => {
/**
* @param {OptimizationSplitChunksCacheGroup["chunks"]} chunks the chunk filter option
* @returns {ChunkFilterFunction} the chunk filter function
* @returns {ChunkFilterFunction | undefined} the chunk filter function
*/
const normalizeChunksFilter = chunks => {
if (chunks === "initial") {
@ -650,7 +650,9 @@ module.exports = class SplitChunksPlugin {
/** @type {SplitChunksOptions} */
this.options = {
chunksFilter: normalizeChunksFilter(options.chunks || "all"),
chunksFilter:
/** @type {ChunkFilterFunction} */
(normalizeChunksFilter(options.chunks || "all")),
defaultSizeTypes,
minSize,
minSizeReduction,
@ -685,9 +687,13 @@ module.exports = class SplitChunksPlugin {
automaticNameDelimiter: options.automaticNameDelimiter || "-",
usedExports: options.usedExports || false,
fallbackCacheGroup: {
chunksFilter: normalizeChunksFilter(
fallbackCacheGroup.chunks || options.chunks || "all"
),
chunksFilter:
/** @type {ChunkFilterFunction} */
(
normalizeChunksFilter(
fallbackCacheGroup.chunks || options.chunks || "all"
)
),
minSize: mergeSizes(
normalizeSizes(fallbackCacheGroup.minSize, defaultSizeTypes),
minSize

View File

@ -201,7 +201,7 @@ const loaders = new Map();
*/
class ObjectMiddleware extends SerializerMiddleware {
/**
* @param {function(any): void} extendContext context extensions
* @param {function(ObjectSerializerContext | ObjectDeserializerContext): void} extendContext context extensions
* @param {string | Hash} hashFunction hash function to use
*/
constructor(extendContext, hashFunction = "md4") {

View File

@ -396,7 +396,9 @@ const EXTRACT_ERROR = {
object.message = error;
} else {
if (error.chunk) {
object.chunkName = error.chunk.name;
object.chunkName =
/** @type {string | undefined} */
(error.chunk.name);
object.chunkEntry = error.chunk.hasRuntime();
object.chunkInitial = error.chunk.canBeInitial();
}

View File

@ -91,10 +91,12 @@ const cachedSetProperty = (obj, property, value) => {
* @property {ByValues} byValues value depending on selector property, merged with base
*/
/** @typedef {(function(...EXPECTED_ANY): object) & { [DYNAMIC_INFO]: [DynamicFunction, object] }} DynamicFunction */
/**
* @typedef {object} ParsedObject
* @property {Map<string, ObjectParsedPropertyEntry>} static static properties (key is property name)
* @property {{ byProperty: string, fn: Function } | undefined} dynamic dynamic part
* @property {{ byProperty: string, fn: DynamicFunction } | undefined} dynamic dynamic part
*/
/** @type {WeakMap<object, ParsedObject>} */
@ -205,7 +207,9 @@ const serializeObject = (info, dynamicInfo) => {
// Setup byProperty structure
for (const entry of info.values()) {
if (entry.byProperty !== undefined) {
const byObj = (obj[entry.byProperty] = obj[entry.byProperty] || {});
const byProperty = /** @type {keyof T} */ (entry.byProperty);
const byObj = (obj[byProperty] =
obj[byProperty] || /** @type {TODO} */ ({}));
for (const byValue of entry.byValues.keys()) {
byObj[byValue] = byObj[byValue] || {};
}
@ -217,7 +221,9 @@ const serializeObject = (info, dynamicInfo) => {
}
// Fill byProperty structure
if (entry.byProperty !== undefined) {
const byObj = (obj[entry.byProperty] = obj[entry.byProperty] || {});
const byProperty = /** @type {keyof T} */ (entry.byProperty);
const byObj = (obj[byProperty] =
obj[byProperty] || /** @type {TODO} */ ({}));
for (const byValue of Object.keys(byObj)) {
const value = getFromByValues(entry.byValues, byValue);
if (value !== undefined) byObj[byValue][key] = value;
@ -225,7 +231,8 @@ const serializeObject = (info, dynamicInfo) => {
}
}
if (dynamicInfo !== undefined) {
obj[dynamicInfo.byProperty] = dynamicInfo.fn;
/** @type {TODO} */
(obj)[dynamicInfo.byProperty] = dynamicInfo.fn;
}
return obj;
};
@ -300,6 +307,7 @@ const _cleverMerge = (first, second, internalCaching = false) => {
: cleverMerge(fnInfo[1], second);
fn = fnInfo[0];
}
/** @type {DynamicFunction} */
const newFn = (...args) => {
const fnResult = fn(...args);
return internalCaching
@ -524,14 +532,12 @@ const mergeSingleValue = (a, b, internalCaching) => {
*/
const removeOperations = (obj, keysToKeepOriginalValue = []) => {
const newObj = /** @type {T} */ ({});
for (const key of Object.keys(obj)) {
const value = obj[/** @type {keyof T} */ (key)];
for (const _key of Object.keys(obj)) {
const key = /** @type {keyof T} */ (_key);
const value = obj[key];
const type = getValueType(value);
if (
type === VALUE_TYPE_OBJECT &&
keysToKeepOriginalValue.includes(/** @type {keyof T} */ (key))
) {
newObj[/** @type {keyof T} */ (key)] = value;
if (type === VALUE_TYPE_OBJECT && keysToKeepOriginalValue.includes(key)) {
newObj[key] = value;
continue;
}
switch (type) {
@ -539,25 +545,26 @@ const removeOperations = (obj, keysToKeepOriginalValue = []) => {
case VALUE_TYPE_DELETE:
break;
case VALUE_TYPE_OBJECT:
newObj[/** @type {keyof T} */ (key)] =
newObj[key] =
/** @type {T[keyof T]} */
(
removeOperations(
/** @type {TODO} */ (value),
/** @type {T} */
(value),
keysToKeepOriginalValue
)
);
break;
case VALUE_TYPE_ARRAY_EXTEND:
newObj[/** @type {keyof T} */ (key)] =
newObj[key] =
/** @type {T[keyof T]} */
(
/** @type {any[]} */
/** @type {EXPECTED_ANY[]} */
(value).filter(i => i !== "...")
);
break;
default:
newObj[/** @type {keyof T} */ (key)] = value;
newObj[key] = value;
break;
}
}
@ -597,6 +604,7 @@ const resolveByProperty = (obj, byProperty, ...values) => {
resolveByProperty(result, byProperty, ...values)
);
}
return obj;
};
module.exports.cachedSetProperty = cachedSetProperty;
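The `removeOperations` rewrite above relies on casting each string key from `Object.keys` to `keyof T` once and reusing it for typed indexing. A minimal sketch of that pattern outside webpack (all names here are illustrative):

/**
 * @template {object} T
 * @param {T} obj source object
 * @returns {Partial<T>} shallow copy built with typed keys
 */
const copyOwn = obj => {
	/** @type {Partial<T>} */
	const result = {};
	for (const _key of Object.keys(obj)) {
		const key = /** @type {keyof T} */ (_key);
		result[key] = obj[key];
	}
	return result;
};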

View File

@ -139,11 +139,13 @@ function findNewName(oldName, usedNamed1, usedNamed2, extraInfo) {
return nameWithNumber;
}
/** @typedef {Set<Scope>} ScopeSet */
/**
* @param {Scope | null} s scope
* @param {UsedNames} nameSet name set
* @param {TODO} scopeSet1 scope set 1
* @param {TODO} scopeSet2 scope set 2
* @param {ScopeSet} scopeSet1 scope set 1
* @param {ScopeSet} scopeSet2 scope set 2
*/
const addScopeSymbols = (s, nameSet, scopeSet1, scopeSet2) => {
let scope = s;
@ -197,10 +199,10 @@ const RESERVED_NAMES = new Set(
);
/**
* @param {Map<string, { usedNames: UsedNames, alreadyCheckedScopes: Set<TODO> }>} usedNamesInScopeInfo used names in scope info
* @param {Map<string, { usedNames: UsedNames, alreadyCheckedScopes: ScopeSet }>} usedNamesInScopeInfo used names in scope info
* @param {string} module module identifier
* @param {string} id export id
* @returns {{ usedNames: UsedNames, alreadyCheckedScopes: Set<TODO> }} info
* @returns {{ usedNames: UsedNames, alreadyCheckedScopes: ScopeSet }} info
*/
const getUsedNamesInScopeInfo = (usedNamesInScopeInfo, module, id) => {
const key = `${module}-${id}`;

View File

@ -152,15 +152,15 @@ const getTooSmallTypes = (size, minSize) => {
};
/**
* @template T
* @param {TODO} size size
* @template {object} T
* @param {T} size size
* @param {Set<string>} types types
* @returns {number} number of matching size types
*/
const getNumberOfMatchingSizeTypes = (size, types) => {
let i = 0;
for (const key of Object.keys(size)) {
if (size[key] !== 0 && types.has(key)) i++;
if (size[/** @type {keyof T} */ (key)] !== 0 && types.has(key)) i++;
}
return i;
};

View File

@ -7,7 +7,7 @@
const { register } = require("./serialization");
const Position = /** @type {TODO} */ (require("acorn")).Position;
const Position = require("acorn").Position;
const SourceLocation = require("acorn").SourceLocation;
const ValidationError = require("schema-utils").ValidationError;
const {
@ -23,10 +23,8 @@ const {
/** @typedef {import("acorn").Position} Position */
/** @typedef {import("../Dependency").RealDependencyLocation} RealDependencyLocation */
/** @typedef {import("../Dependency").SourcePosition} SourcePosition */
/** @typedef {import("./serialization").ObjectDeserializerContext} ObjectDeserializerContext */
/** @typedef {import("./serialization").ObjectSerializerContext} ObjectSerializerContext */
/** @typedef {ObjectSerializerContext & { writeLazy?: (value: any) => void }} WebpackObjectSerializerContext */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} ObjectDeserializerContext */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectSerializerContext} ObjectSerializerContext */
const CURRENT_MODULE = "webpack/lib/util/registerExternalSerializer";
@ -37,7 +35,7 @@ register(
new (class CachedSourceSerializer {
/**
* @param {CachedSource} source the cached source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write, writeLazy }) {
@@ -68,7 +66,7 @@ register(
new (class RawSourceSerializer {
/**
* @param {RawSource} source the raw source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
@@ -95,7 +93,7 @@ register(
new (class ConcatSourceSerializer {
/**
* @param {ConcatSource} source the concat source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
@@ -121,7 +119,7 @@ register(
new (class PrefixSourceSerializer {
/**
* @param {PrefixSource} source the prefix source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
@@ -146,7 +144,7 @@ register(
new (class ReplaceSourceSerializer {
/**
* @param {ReplaceSource} source the replace source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
@@ -196,7 +194,7 @@ register(
new (class OriginalSourceSerializer {
/**
* @param {OriginalSource} source the original source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
@@ -223,7 +221,7 @@ register(
new (class SourceLocationSerializer {
/**
* @param {SourceLocation} loc the location to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(loc, { write }) {
@@ -259,7 +257,7 @@ register(
new (class PositionSerializer {
/**
* @param {Position} pos the position to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(pos, { write }) {
@@ -287,7 +285,7 @@ register(
new (class SourceMapSourceSerializer {
/**
* @param {SourceMapSource} source the source map source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
@@ -310,10 +308,9 @@ register(
CURRENT_MODULE,
"schema-utils/ValidationError",
new (class ValidationErrorSerializer {
// TODO error should be ValidationError, but this fails the type checks
/**
* @param {TODO} error the source map source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @param {ValidationError} error the validation error to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(error, { write }) {
@@ -328,7 +325,7 @@ register(
/**
* @param {ObjectDeserializerContext} context context
* @returns {TODO} error
* @returns {ValidationError} error
*/
deserialize({ read }) {
return new ValidationError(read(), read(), read());
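Every serializer in this file follows the same shape: a `serialize` method that writes fields through `ObjectSerializerContext`, and a `deserialize` method that reads them back in the same order. A hedged sketch of that pattern for a hypothetical `Point` value type, assuming the same `register(Constructor, request, name, serializer)` argument order and the typedefs imported above:

```js
class Point {
	/**
	 * @param {number} x x coordinate
	 * @param {number} y y coordinate
	 */
	constructor(x, y) {
		this.x = x;
		this.y = y;
	}
}

register(
	Point,
	CURRENT_MODULE,
	"example/Point", // illustrative name, not a real webpack serializer
	new (class PointSerializer {
		/**
		 * @param {Point} point the point to be serialized
		 * @param {ObjectSerializerContext} context context
		 */
		serialize(point, { write }) {
			write(point.x);
			write(point.y);
		}

		/**
		 * @param {ObjectDeserializerContext} context context
		 * @returns {Point} point
		 */
		deserialize({ read }) {
			// Reads must mirror the write order above.
			return new Point(read(), read());
		}
	})()
);
```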

View File

@@ -8,8 +8,6 @@ const memoize = require("./memoize");
/** @typedef {import("../serialization/BinaryMiddleware").MEASURE_END_OPERATION_TYPE} MEASURE_END_OPERATION */
/** @typedef {import("../serialization/BinaryMiddleware").MEASURE_START_OPERATION_TYPE} MEASURE_START_OPERATION */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} ObjectDeserializerContext */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectSerializerContext} ObjectSerializerContext */
/** @typedef {import("../serialization/Serializer")} Serializer */
/** @typedef {typeof import("../util/Hash")} Hash */
/** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */
@@ -92,10 +90,7 @@ module.exports = {
return (buffersSerializer = new Serializer([
new SingleItemMiddleware(),
new (getObjectMiddleware())(context => {
if (context.write) {
/**
* @param {any} value value
*/
if ("write" in context) {
context.writeLazy = value => {
context.write(
SerializerMiddleware.createLazy(value, binaryMiddleware)
@@ -122,20 +117,12 @@ module.exports = {
return new Serializer([
new SingleItemMiddleware(),
new (getObjectMiddleware())(context => {
if (context.write) {
/**
* @param {any} value value
*/
if ("write" in context) {
context.writeLazy = value => {
context.write(
SerializerMiddleware.createLazy(value, binaryMiddleware)
);
};
/**
* @param {any} value value
* @param {object=} options lazy options
* @returns {function(): Promise<any> | any} lazy function
*/
context.writeSeparate = (value, options) => {
const lazy = SerializerMiddleware.createLazy(
value,
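Switching from `if (context.write)` to `if ("write" in context)` lets the checker narrow a union of context types, which is why the inline `@param` annotations on the assigned closures can be dropped. A minimal sketch of `in`-operator narrowing under strict checkJs; the context shapes are illustrative, not webpack's real ones:

```js
/** @typedef {{ read: () => any }} ReadContext */
/** @typedef {{ write: (value: any) => void, writeLazy?: (value: any) => void }} WriteContext */

/**
 * @param {ReadContext | WriteContext} context read or write context
 */
const attachLazyWriter = context => {
	if ("write" in context) {
		// Inside this branch `context` is narrowed to WriteContext.
		context.writeLazy = value => context.write(value);
	}
};
```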

View File

@@ -6,8 +6,7 @@
"allowJs": true,
"checkJs": true,
"noEmit": true,
"strict": false,
"noImplicitThis": true,
"strict": true,
"alwaysStrict": true,
"types": ["node"],
"esModuleInterop": true

View File

@@ -2,12 +2,12 @@
"compilerOptions": {
"target": "esnext",
"module": "esnext",
"moduleResolution": "node",
"lib": ["es2017", "dom"],
"allowJs": true,
"checkJs": true,
"noEmit": true,
"strict": true,
"moduleResolution": "node",
"types": ["node", "./module"],
"esModuleInterop": true
},

View File

@@ -7,7 +7,6 @@
"checkJs": true,
"noEmit": true,
"strict": true,
"noImplicitThis": true,
"alwaysStrict": true,
"types": ["node"],
"esModuleInterop": true

types.d.ts
View File

@@ -1041,11 +1041,11 @@ declare interface CallbackWebpack<T> {
}
type Cell<T> = undefined | T;
declare class Chunk {
constructor(name?: string, backCompat?: boolean);
constructor(name?: null | string, backCompat?: boolean);
id: null | string | number;
ids: null | ChunkId[];
debugId: number;
name?: string;
name?: null | string;
idNameHints: SortableSet<string>;
preventIntegration: boolean;
filenameTemplate?: string | ((arg0: PathData, arg1?: AssetInfo) => string);
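The widened `null | string` signatures for `Chunk` names reflect that `strictNullChecks` (part of `strict`) treats `null` and `undefined` as distinct types that must be declared before they can flow through a signature. An illustrative JSDoc sketch, not webpack code:

```js
/**
 * @param {(string | null)=} name optional name, may also be null
 * @returns {string} display name
 */
const displayName = name => (name == null ? "<anonymous>" : name);

displayName();       // ok: the parameter is optional
displayName(null);   // ok only because `null` is part of the declared type
displayName("main"); // ok
```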
@@ -2157,7 +2157,7 @@ declare class Compilation {
* This method first checks whether a name is provided for the new chunk;
* if a named chunk with that name already exists, that chunk is reused instead.
*/
addChunk(name?: string): Chunk;
addChunk(name?: null | string): Chunk;
assignDepth(module: Module): void;
assignDepths(modules: Set<Module>): void;
getDependencyReferencedExports(
@@ -2438,11 +2438,8 @@ declare class Compiler {
immutablePaths: Set<string | RegExp>;
modifiedFiles?: ReadonlySet<string>;
removedFiles?: ReadonlySet<string>;
fileTimestamps?: ReadonlyMap<string, null | FileSystemInfoEntry | "ignore">;
contextTimestamps?: ReadonlyMap<
string,
null | FileSystemInfoEntry | "ignore"
>;
fileTimestamps?: Map<string, FileSystemInfoEntry | "ignore">;
contextTimestamps?: Map<string, FileSystemInfoEntry | "ignore">;
fsStartTime?: number;
resolverFactory: ResolverFactory;
infrastructureLogger?: (
@@ -3083,6 +3080,7 @@ declare class ContextReplacementPlugin {
*/
apply(compiler: Compiler): void;
}
type ContextTimestamp = null | ContextFileSystemInfoEntry | "ignore";
declare interface ContextTimestampAndHash {
safeTime: number;
timestampHash?: string;
@@ -3090,8 +3088,8 @@ declare interface ContextTimestampAndHash {
resolved?: ResolvedContextTimestampAndHash;
symlinks?: Set<string>;
}
type CreateStatsOptionsContext = Record<string, any> &
KnownCreateStatsOptionsContext;
type CreateStatsOptionsContext = KnownCreateStatsOptionsContext &
Record<string, any>;
type CreateWriteStreamFSImplementation = FSImplementation & {
write: (...args: any[]) => any;
close?: (...args: any[]) => any;
@@ -4301,28 +4299,68 @@ declare class EvalSourceMapDevToolPlugin {
}
declare interface ExecuteModuleArgument {
module: Module;
moduleObject?: ModuleObject;
moduleObject?: ExecuteModuleObject;
preparedInfo: any;
codeGenerationResult: CodeGenerationResult;
}
declare interface ExecuteModuleContext {
assets: Map<string, { source: Source; info: AssetInfo }>;
assets: Map<string, { source: Source; info?: AssetInfo }>;
chunk: Chunk;
chunkGraph: ChunkGraph;
__webpack_require__?: (arg0: string) => any;
__webpack_require__?: WebpackRequire;
}
declare interface ExecuteModuleExports {
[index: string]: any;
}
declare interface ExecuteModuleObject {
/**
* module id
*/
id?: string;
/**
* exports
*/
exports: ExecuteModuleExports;
/**
* is loaded
*/
loaded: boolean;
/**
* error
*/
error?: Error;
}
declare interface ExecuteModuleOptions {
entryOptions?: EntryOptions;
}
declare interface ExecuteModuleResult {
exports: any;
exports: ExecuteModuleExports;
cacheable: boolean;
assets: Map<string, { source: Source; info: AssetInfo }>;
assets: Map<string, { source: Source; info?: AssetInfo }>;
fileDependencies: LazySet<string>;
contextDependencies: LazySet<string>;
missingDependencies: LazySet<string>;
buildDependencies: LazySet<string>;
}
declare interface ExecuteOptions {
/**
* module id
*/
id?: string;
/**
* module
*/
module: ExecuteModuleObject;
/**
* require function
*/
require: WebpackRequire;
}
type Experiments = ExperimentsCommon & ExperimentsExtra;
/**
@@ -4454,9 +4492,9 @@ declare abstract class ExportInfo {
runtime: RuntimeSpec
): boolean;
setUsed(newValue: UsageStateType, runtime: RuntimeSpec): boolean;
unsetTarget(key?: any): boolean;
unsetTarget(key: Dependency): boolean;
setTarget(
key: any,
key: Dependency,
connection: ModuleGraphConnection,
exportName?: null | string[],
priority?: number
@@ -4478,7 +4516,7 @@ declare abstract class ExportInfo {
setUsedName(name: string): void;
getTerminalBinding(
moduleGraph: ModuleGraph,
resolveTargetFilter?: (arg0: TargetItem) => boolean
resolveTargetFilter?: (arg0: TargetItemWithConnection) => boolean
): undefined | ExportsInfo | ExportInfo;
isReexport(): undefined | boolean;
findTarget(
@@ -4487,17 +4525,19 @@ declare abstract class ExportInfo {
): undefined | null | false | TargetItemWithoutConnection;
getTarget(
moduleGraph: ModuleGraph,
resolveTargetFilter?: (arg0: TargetItem) => boolean
): undefined | TargetItem;
resolveTargetFilter?: (arg0: TargetItemWithConnection) => boolean
): undefined | TargetItemWithConnection;
/**
* Move the target forward as long as resolveTargetFilter is fulfilled
*/
moveTarget(
moduleGraph: ModuleGraph,
resolveTargetFilter: (arg0: TargetItem) => boolean,
updateOriginalConnection?: (arg0: TargetItem) => ModuleGraphConnection
): undefined | TargetItem;
resolveTargetFilter: (arg0: TargetItemWithConnection) => boolean,
updateOriginalConnection?: (
arg0: TargetItemWithConnection
) => ModuleGraphConnection
): undefined | TargetItemWithConnection;
createNestedExportsInfo(): ExportsInfo;
getNestedExportsInfo(): undefined | ExportsInfo;
hasInfo(baseInfo: ExportInfo, runtime: RuntimeSpec): boolean;
@@ -4572,7 +4612,7 @@ declare abstract class ExportsInfo {
setUnknownExportsProvided(
canMangle?: boolean,
excludeExports?: Set<string>,
targetKey?: any,
targetKey?: Dependency,
targetModule?: ModuleGraphConnection,
priority?: number
): boolean;
@@ -4592,12 +4632,7 @@ declare abstract class ExportsInfo {
getUsedName(name: string | string[], runtime: RuntimeSpec): UsedName;
updateHash(hash: Hash, runtime: RuntimeSpec): void;
getRestoreProvidedData(): RestoreProvidedData;
restoreProvided(__0: {
otherProvided: any;
otherCanMangleProvide: any;
otherTerminalBinding: any;
exports: any;
}): void;
restoreProvided(__0: RestoreProvidedData): void;
}
declare interface ExportsSpec {
/**
@@ -5099,11 +5134,11 @@ declare abstract class FileSystemInfo {
logStatistics(): void;
clear(): void;
addFileTimestamps(
map: ReadonlyMap<string, null | FileSystemInfoEntry | "ignore">,
map: ReadonlyMap<string, FileTimestamp>,
immutable?: boolean
): void;
addContextTimestamps(
map: ReadonlyMap<string, null | FileSystemInfoEntry | "ignore">,
map: ReadonlyMap<string, ContextTimestamp>,
immutable?: boolean
): void;
getFileTimestamp(
@@ -5144,7 +5179,7 @@ declare abstract class FileSystemInfo {
) => void
): void;
checkResolveResultsValid(
resolveResults: Map<string, string | false>,
resolveResults: Map<string, undefined | string | false>,
callback: (arg0?: null | Error, arg1?: boolean) => void
): void;
createSnapshot(
@@ -5160,13 +5195,14 @@ declare abstract class FileSystemInfo {
snapshot: Snapshot,
callback: (arg0?: null | WebpackError, arg1?: boolean) => void
): void;
getDeprecatedFileTimestamps(): Map<any, any>;
getDeprecatedContextTimestamps(): Map<any, any>;
getDeprecatedFileTimestamps(): Map<string, null | number>;
getDeprecatedContextTimestamps(): Map<string, null | number>;
}
declare interface FileSystemInfoEntry {
safeTime: number;
timestamp?: number;
}
type FileTimestamp = null | FileSystemInfoEntry | "ignore";
type FilterItemTypes = string | RegExp | ((value: string) => boolean);
declare interface Flags {
[index: string]: Argument;
@@ -9214,12 +9250,6 @@ declare interface ModuleMemCachesItem {
references?: WeakMap<Dependency, Module>;
memCache: WeakTupleMap<any, any>;
}
declare interface ModuleObject {
id?: string;
exports: any;
loaded: boolean;
error?: Error;
}
/**
* Options affecting the normal modules (`NormalModuleFactory`).
@@ -12456,20 +12486,7 @@ declare interface ResolveBuildDependenciesResult {
/**
* dependencies of the resolving
*/
resolveDependencies: {
/**
* list of files
*/
files: Set<string>;
/**
* list of directories
*/
directories: Set<string>;
/**
* list of missing entries
*/
missing: Set<string>;
};
resolveDependencies: ResolveDependencies;
}
declare interface ResolveContext {
contextDependencies?: WriteOnlySet<string>;
@@ -12518,6 +12535,22 @@ declare interface ResolveData {
*/
cacheable: boolean;
}
declare interface ResolveDependencies {
/**
* list of files
*/
files: Set<string>;
/**
* list of directories
*/
directories: Set<string>;
/**
* list of missing entries
*/
missing: Set<string>;
}
/**
* Options object for resolving requests.
@@ -15216,7 +15249,7 @@ declare interface TagInfo {
data?: TagData;
next?: TagInfo;
}
declare interface TargetItem {
declare interface TargetItemWithConnection {
module: Module;
connection: ModuleGraphConnection;
export?: string[];
@@ -15793,6 +15826,12 @@ declare interface WebpackPluginInstance {
*/
apply: (compiler: Compiler) => void;
}
declare interface WebpackRequire {
(id: string): ExecuteModuleExports;
i?: ((options: ExecuteOptions) => void)[];
c?: Record<string, ExecuteModuleObject>;
}
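`WebpackRequire` is a callable interface that also carries interceptor (`i`) and module-cache (`c`) properties. A hedged sketch of a value satisfying that shape, assuming the declarations above are in scope; the runtime wiring is illustrative, not webpack's actual module execution code:

```js
/** @type {Record<string, ExecuteModuleObject>} */
const cache = {};

/** @type {WebpackRequire} */
const webpackRequire = id => {
	const cached = cache[id];
	if (cached) return cached.exports;
	/** @type {ExecuteModuleObject} */
	const moduleObject = { id, exports: {}, loaded: false };
	cache[id] = moduleObject;
	for (const handler of webpackRequire.i || []) {
		// Each interceptor receives the ExecuteOptions shape declared above.
		handler({ id, module: moduleObject, require: webpackRequire });
	}
	moduleObject.loaded = true;
	return moduleObject.exports;
};

webpackRequire.i = [];
webpackRequire.c = cache;
```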
declare interface WithId {
id: string | number;
}