mirror of https://github.com/webpack/webpack.git
refactor(types): more
This commit is contained in:
parent cc734af66e
commit 3092f7cb20
@@ -11,6 +11,7 @@ const ModuleFilenameHelpers = require("./ModuleFilenameHelpers");
 const Template = require("./Template");
 const createSchemaValidation = require("./util/create-schema-validation");

+/** @typedef {import("../declarations/plugins/BannerPlugin").BannerFunction} BannerFunction */
 /** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginArgument} BannerPluginArgument */
 /** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginOptions} BannerPluginOptions */
 /** @typedef {import("./Compiler")} Compiler */
@@ -60,7 +61,7 @@ class BannerPlugin {
 const getBanner = bannerOption;
 this.banner = this.options.raw
 ? getBanner
-: data => wrapComment(getBanner(data));
+: /** @type {BannerFunction} */ data => wrapComment(getBanner(data));
 } else {
 const banner = this.options.raw
 ? bannerOption
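
Note on the cast above: an inline JSDoc @type comment lets a project checked with checkJs assert the type of an arrow function expression. A minimal standalone sketch (illustrative names, not taken from the webpack source):

/** @typedef {(data: { filename: string }) => string} HeaderFunction */

// The parenthesized form `/** @type {T} */ (expression)` is the cast
// syntax TypeScript recognizes in checked JavaScript.
const getHeader = /** @type {HeaderFunction} */ (
	data => `/* generated for ${data.filename} */`
);

console.log(getHeader({ filename: "main.js" }));
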
@@ -143,7 +143,7 @@ class SnapshotIterable {
 let state = 0;
 /** @type {IterableIterator<string>} */
 let it;
-/** @type {(Snapshot) => (Map<string, any> | Set<string>)[]} */
+/** @type {(snapshot: Snapshot) => (Map<string, any> | Set<string>)[]} */
 let getMaps;
 /** @type {(Map<string, any> | Set<string>)[]} */
 let maps;
@@ -882,6 +882,11 @@ const getResolvedHash = entry => {
 return entry.symlinks === undefined ? entry.hash : undefined;
 };

+/**
+* @template T
+* @param {Set<T>} source source
+* @param {Set<T>} target target
+*/
 const addAll = (source, target) => {
 for (const key of source) target.add(key);
 };
@@ -1150,6 +1155,11 @@ class FileSystemInfo {
 );
 }

+/**
+* @param {string} path path
+* @param {string} reason reason
+* @param {string[]} args arguments
+*/
 _log(path, reason, ...args) {
 const key = path + reason;
 if (this._loggedPaths.has(key)) return;
@@ -1263,7 +1273,7 @@ class FileSystemInfo {

 /**
 * @param {string} path file path
-* @param {function((WebpackError | null)=, string=): void} callback callback function
+* @param {function((WebpackError | null)=, (string | null)=): void} callback callback function
 * @returns {void}
 */
 getFileHash(path, callback) {
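
The widened callback signature above lets the result argument be null as well as undefined. A simplified sketch of such a Node-style callback (hypothetical function, not the webpack implementation):

/**
 * @param {string} path file path
 * @param {function((Error | null)=, (string | null)=): void} callback callback function
 */
const getFileHashSketch = (path, callback) => {
	if (path.length === 0) return callback(new Error("path required"));
	// A null result means "no hash available" and is distinct from an error.
	callback(null, path.endsWith(".tmp") ? null : "d41d8cd9");
};

getFileHashSketch("index.js", (err, hash) => {
	if (err) throw err;
	console.log(hash); // "d41d8cd9", or null for files without a hash
});
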
@@ -1294,7 +1304,7 @@ class FileSystemInfo {

 /**
 * @param {string} path context path
-* @param {function((WebpackError | null)=, ContextHash=): void} callback callback function
+* @param {function((WebpackError | null)=, (ContextHash | null)=): void} callback callback function
 * @returns {void}
 */
 _getUnresolvedContextHash(path, callback) {
@@ -1325,7 +1335,7 @@ class FileSystemInfo {

 /**
 * @param {string} path context path
-* @param {function((WebpackError | null)=, ContextTimestampAndHash=): void} callback callback function
+* @param {function((WebpackError | null)=, (ContextTimestampAndHash | null)=): void} callback callback function
 * @returns {void}
 */
 _getUnresolvedContextTsh(path, callback) {
@@ -1396,6 +1406,10 @@ class FileSystemInfo {
 contextDependencies: resolveDirectories,
 missingDependencies: resolveMissing
 };
+/**
+* @param {string} expected expected result
+* @returns {string} expected result
+*/
 const expectedToString = expected => {
 return expected ? ` (expected ${expected})` : "";
 };
@@ -2056,6 +2070,9 @@ class FileSystemInfo {
 }
 return capturedItems;
 };
+/**
+* @param {Set<string>} capturedFiles captured files
+*/
 const processCapturedFiles = capturedFiles => {
 switch (mode) {
 case 3:
@@ -2928,7 +2945,7 @@ class FileSystemInfo {

 const hash = createHash(this._hashFunction);

-hash.update(content);
+hash.update(/** @type {string | Buffer} */ (content));

 const digest = /** @type {string} */ (hash.digest("hex"));

@@ -2992,7 +3009,7 @@ class FileSystemInfo {
 * @param {function(string, IStats, function(Error=, ItemType=): void): void} options.fromFile called when context item is a file
 * @param {function(string, IStats, function(Error=, ItemType=): void): void} options.fromDirectory called when context item is a directory
 * @param {function(string[], ItemType[]): T} options.reduce called from all context items
-* @param {function((Error | null)=, (T)=): void} callback callback
+* @param {function((Error | null)=, (T | null)=): void} callback callback
 */
 _readContext(
 {
@@ -3179,6 +3196,7 @@ class FileSystemInfo {
 * @returns {void}
 */
 _resolveContextTimestamp(entry, callback) {
+/** @type {string[]} */
 const hashes = [];
 let safeTime = 0;
 processAsyncTree(
@@ -3287,6 +3305,7 @@ class FileSystemInfo {
 * @returns {void}
 */
 _resolveContextHash(entry, callback) {
+/** @type {string[]} */
 const hashes = [];
 processAsyncTree(
 entry.symlinks,
@@ -3443,7 +3462,9 @@ class FileSystemInfo {
 * @returns {void}
 */
 _resolveContextTsh(entry, callback) {
+/** @type {string[]} */
 const hashes = [];
+/** @type {string[]} */
 const tsHashes = [];
 let safeTime = 0;
 processAsyncTree(
@@ -3561,7 +3582,7 @@ class FileSystemInfo {
 }
 let data;
 try {
-data = JSON.parse(content.toString("utf-8"));
+data = JSON.parse(/** @type {Buffer} */ (content).toString("utf-8"));
 } catch (e) {
 return callback(e);
 }
@@ -16,6 +16,7 @@ const { getEntryRuntime, mergeRuntime } = require("./util/runtime");
 /** @typedef {import("./Compilation")} Compilation */
 /** @typedef {import("./DependenciesBlock")} DependenciesBlock */
 /** @typedef {import("./Dependency")} Dependency */
+/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */
 /** @typedef {import("./Entrypoint")} Entrypoint */
 /** @typedef {import("./Module")} Module */
 /** @typedef {import("./ModuleGraph")} ModuleGraph */
@@ -39,15 +40,15 @@ const { getEntryRuntime, mergeRuntime } = require("./util/runtime");
 * @typedef {Object} ChunkGroupInfo
 * @property {ChunkGroup} chunkGroup the chunk group
 * @property {RuntimeSpec} runtime the runtimes
-* @property {ModuleSetPlus} minAvailableModules current minimal set of modules available at this point
+* @property {ModuleSetPlus | undefined} minAvailableModules current minimal set of modules available at this point
-* @property {boolean} minAvailableModulesOwned true, if minAvailableModules is owned and can be modified
+* @property {boolean | undefined} minAvailableModulesOwned true, if minAvailableModules is owned and can be modified
 * @property {ModuleSetPlus[]} availableModulesToBeMerged enqueued updates to the minimal set of available modules
 * @property {Set<Module>=} skippedItems modules that were skipped because module is already available in parent chunks (need to reconsider when minAvailableModules is shrinking)
 * @property {Set<[Module, ConnectionState]>=} skippedModuleConnections referenced modules that where skipped because they were not active in this runtime
-* @property {ModuleSetPlus} resultingAvailableModules set of modules available including modules from this chunk group
+* @property {ModuleSetPlus | undefined} resultingAvailableModules set of modules available including modules from this chunk group
-* @property {Set<ChunkGroupInfo>} children set of children chunk groups, that will be revisited when availableModules shrink
+* @property {Set<ChunkGroupInfo> | undefined} children set of children chunk groups, that will be revisited when availableModules shrink
-* @property {Set<ChunkGroupInfo>} availableSources set of chunk groups that are the source for minAvailableModules
+* @property {Set<ChunkGroupInfo> | undefined} availableSources set of chunk groups that are the source for minAvailableModules
-* @property {Set<ChunkGroupInfo>} availableChildren set of chunk groups which depend on the this chunk group as availableSource
+* @property {Set<ChunkGroupInfo> | undefined} availableChildren set of chunk groups which depend on the this chunk group as availableSource
 * @property {number} preOrderIndex next pre order index
 * @property {number} postOrderIndex next post order index
 * @property {boolean} chunkLoading has a chunk loading mechanism
@@ -199,6 +200,7 @@ const visitModules = (

 /** @type {RuntimeSpec | false} */
 let blockModulesMapRuntime = false;
+/** @type {Map<DependenciesBlock, (Module | ConnectionState)[]>} */
 let blockModulesMap;

 /**
@@ -239,7 +241,7 @@ const visitModules = (
 extractBlockModules(module, moduleGraph, runtime, blockModulesMap);
 blockModules = blockModulesMap.get(block);
 logger.timeAggregate("visitModules: prepare");
-return blockModules;
+return /** @type {(Module | ConnectionState)[]} */ (blockModules);
 }
 };

@@ -290,7 +292,7 @@ const visitModules = (
 for (const [chunkGroup, modules] of inputEntrypointsAndModules) {
 const runtime = getEntryRuntime(
 compilation,
-chunkGroup.name,
+/** @type {string} */ (chunkGroup.name),
 chunkGroup.options
 );
 /** @type {ChunkGroupInfo} */
@@ -352,7 +354,9 @@ const visitModules = (
 const { chunkGroup } = chunkGroupInfo;
 chunkGroupInfo.availableSources = new Set();
 for (const parent of chunkGroup.parentsIterable) {
-const parentChunkGroupInfo = chunkGroupInfoMap.get(parent);
+const parentChunkGroupInfo =
+/** @type {ChunkGroupInfo} */
+(chunkGroupInfoMap.get(parent));
 chunkGroupInfo.availableSources.add(parentChunkGroupInfo);
 if (parentChunkGroupInfo.availableChildren === undefined) {
 parentChunkGroupInfo.availableChildren = new Set();
@@ -399,15 +403,15 @@ const visitModules = (
 // 1. We create a chunk group with single chunk in it for this Block
 // but only once (blockChunkGroups map)
 let cgi = blockChunkGroups.get(b);
-/** @type {ChunkGroup} */
+/** @type {ChunkGroup | undefined} */
 let c;
-/** @type {Entrypoint} */
+/** @type {Entrypoint | undefined} */
 let entrypoint;
 const entryOptions = b.groupOptions && b.groupOptions.entryOptions;
 if (cgi === undefined) {
 const chunkName = (b.groupOptions && b.groupOptions.name) || b.chunkName;
 if (entryOptions) {
-cgi = namedAsyncEntrypoints.get(chunkName);
+cgi = namedAsyncEntrypoints.get(/** @type {string} */ (chunkName));
 if (!cgi) {
 entrypoint = compilation.addAsyncEntrypoint(
 entryOptions,
@@ -505,7 +509,11 @@ const visitModules = (
 c = cgi.chunkGroup;
 if (c.isInitial()) {
 compilation.errors.push(
-new AsyncDependencyToInitialChunkError(chunkName, module, b.loc)
+new AsyncDependencyToInitialChunkError(
+/** @type {string} */ (chunkName),
+module,
+b.loc
+)
 );
 c = chunkGroup;
 } else {
@@ -515,7 +523,7 @@ const visitModules = (
 }
 blockConnections.set(b, []);
 }
-blockChunkGroups.set(b, cgi);
+blockChunkGroups.set(b, /** @type {ChunkGroupInfo} */ (cgi));
 } else if (entryOptions) {
 entrypoint = /** @type {Entrypoint} */ (cgi.chunkGroup);
 } else {
@@ -536,7 +544,7 @@ const visitModules = (
 connectList = new Set();
 queueConnect.set(chunkGroupInfo, connectList);
 }
-connectList.add(cgi);
+connectList.add(/** @type {ChunkGroupInfo} */ (cgi));

 // TODO check if this really need to be done for each traversal
 // or if it is enough when it's queued when created
@@ -547,7 +555,7 @@ const visitModules = (
 module: module,
 chunk: c.chunks[0],
 chunkGroup: c,
-chunkGroupInfo: cgi
+chunkGroupInfo: /** @type {ChunkGroupInfo} */ (cgi)
 });
 } else if (entrypoint !== undefined) {
 chunkGroupInfo.chunkGroup.addAsyncEntrypoint(entrypoint);
@@ -690,7 +698,7 @@ const visitModules = (
 const processQueue = () => {
 while (queue.length) {
 statProcessedQueueItems++;
-const queueItem = queue.pop();
+const queueItem = /** @type {QueueItem} */ (queue.pop());
 module = queueItem.module;
 block = queueItem.block;
 chunk = queueItem.chunk;
@@ -1087,7 +1095,9 @@ const visitModules = (

 const processChunkGroupsForCombining = () => {
 for (const info of chunkGroupsForCombining) {
-for (const source of info.availableSources) {
+for (const source of /** @type {Set<ChunkGroupInfo>} */ (
+info.availableSources
+)) {
 if (!source.minAvailableModules) {
 chunkGroupsForCombining.delete(info);
 break;
@@ -1106,7 +1116,9 @@ const visitModules = (
 }
 };
 // combine minAvailableModules from all resultingAvailableModules
-for (const source of info.availableSources) {
+for (const source of /** @type {Set<ChunkGroupInfo>} */ (
+info.availableSources
+)) {
 const resultingAvailableModules =
 calculateResultingAvailableModules(source);
 mergeSet(resultingAvailableModules);
@@ -1126,7 +1138,9 @@ const visitModules = (
 for (const info of outdatedChunkGroupInfo) {
 // 1. Reconsider skipped items
 if (info.skippedItems !== undefined) {
-const { minAvailableModules } = info;
+const minAvailableModules =
+/** @type {ModuleSetPlus} */
+(info.minAvailableModules);
 for (const module of info.skippedItems) {
 if (
 !minAvailableModules.has(module) &&
@@ -1147,7 +1161,9 @@ const visitModules = (

 // 2. Reconsider skipped connections
 if (info.skippedModuleConnections !== undefined) {
-const { minAvailableModules } = info;
+const minAvailableModules =
+/** @type {ModuleSetPlus} */
+(info.minAvailableModules);
 for (const entry of info.skippedModuleConnections) {
 const [module, activeState] = entry;
 if (activeState === false) continue;
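
The hunks above follow one pattern: typedef properties that start out unset are declared as `| undefined`, and call sites that have already guaranteed a value narrow it back with an inline cast. A reduced sketch with a hypothetical typedef (not ChunkGroupInfo itself):

/**
 * @typedef {Object} Info
 * @property {Set<string> | undefined} availableSources filled in lazily
 */

/** @type {Info} */
const info = { availableSources: undefined };
info.availableSources = new Set(["entry"]);

// The checker still sees "Set<string> | undefined" here, so the cast
// states what the surrounding logic guarantees.
for (const source of /** @type {Set<string>} */ (info.availableSources)) {
	console.log(source);
}
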
@@ -60,14 +60,27 @@ const chunkHasJs = (chunk, chunkGraph) => {
 : false;
 };

+/**
+* @param {Module} module a module
+* @param {string} code the code
+* @returns {string} generated code for the stack
+*/
 const printGeneratedCodeForStack = (module, code) => {
 const lines = code.split("\n");
 const n = `${lines.length}`.length;
 return `\n\nGenerated code for ${module.identifier()}\n${lines
-.map((line, i, lines) => {
-const iStr = `${i + 1}`;
-return `${" ".repeat(n - iStr.length)}${iStr} | ${line}`;
-})
+.map(
+/**
+* @param {string} line the line
+* @param {number} i the index
+* @param {string[]} lines the lines
+* @returns {string} the line with line number
+*/
+(line, i, lines) => {
+const iStr = `${i + 1}`;
+return `${" ".repeat(n - iStr.length)}${iStr} | ${line}`;
+}
+)
 .join("\n")}`;
 };

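
The refactor above moves the `.map` callback behind an explicit JSDoc block so its parameters are typed without relying on contextual inference. A standalone sketch of the same technique (illustrative function name, not webpack's):

/**
 * @param {string} code the code
 * @returns {string} the code with line numbers
 */
const numberLines = code => {
	const lines = code.split("\n");
	const width = `${lines.length}`.length;
	return lines
		.map(
			/**
			 * @param {string} line the line
			 * @param {number} i the index
			 * @returns {string} the numbered line
			 */
			(line, i) => `${`${i + 1}`.padStart(width)} | ${line}`
		)
		.join("\n");
};

console.log(numberLines("const a = 1;\nconst b = 2;"));
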
@@ -1007,6 +1020,9 @@ class JavascriptModulesPlugin {
 const useRequire =
 requireFunction || interceptModuleExecution || moduleUsed;

+/**
+* @type {{startup: string[], beforeStartup: string[], header: string[], afterStartup: string[], allowInlineStartup: boolean}}
+*/
 const result = {
 header: [],
 beforeStartup: [],
@@ -68,8 +68,8 @@ class AmdLibraryPlugin extends AbstractLibraryPlugin {
 }
 }
 return {
-name: /** @type {string=} */ (name),
+name: /** @type {string} */ (name),
-amdContainer: /** @type {string=} */ (amdContainer)
+amdContainer: /** @type {string} */ (amdContainer)
 };
 }

@@ -17,6 +17,7 @@ const AbstractLibraryPlugin = require("./AbstractLibraryPlugin");
 /** @typedef {import("../../declarations/WebpackOptions").LibraryOptions} LibraryOptions */
 /** @typedef {import("../../declarations/WebpackOptions").LibraryType} LibraryType */
 /** @typedef {import("../Chunk")} Chunk */
+/** @typedef {import("../Compilation")} Compilation */
 /** @typedef {import("../Compilation").ChunkHashContext} ChunkHashContext */
 /** @typedef {import("../Compiler")} Compiler */
 /** @typedef {import("../Module")} Module */
@@ -59,6 +60,7 @@ const accessWithInit = (accessor, existingLength, initLast = false) => {
 let i = 1;

 // all properties printed so far (excluding base)
+/** @type {string[] | undefined} */
 let propsSoFar;

 // if there is existingLength, print all properties until this position as property access
@@ -142,7 +144,7 @@ class AssignLibraryPlugin extends AbstractLibraryPlugin {
 }
 }
 return {
-name: /** @type {string|string[]=} */ (name),
+name: /** @type {string | string[]} */ (name),
 export: library.export
 };
 }
@@ -173,12 +175,22 @@ class AssignLibraryPlugin extends AbstractLibraryPlugin {
 moduleGraph.addExtraReason(module, "used as library export");
 }

+/**
+* @param {Compilation} compilation the compilation
+* @returns {string[]} the prefix
+*/
 _getPrefix(compilation) {
 return this.prefix === "global"
 ? [compilation.runtimeTemplate.globalObject]
 : this.prefix;
 }

+/**
+* @param {AssignLibraryPluginParsed} options the library options
+* @param {Chunk} chunk the chunk
+* @param {Compilation} compilation the compilation
+* @returns {Array<string>} the resolved full name
+*/
 _getResolvedFullName(options, chunk, compilation) {
 const prefix = this._getPrefix(compilation);
 const fullName = options.name ? prefix.concat(options.name) : prefix;
@@ -12,6 +12,10 @@
 /** @type {WeakMap<Compiler, Set<LibraryType>>} */
 const enabledTypes = new WeakMap();

+/**
+* @param {Compiler} compiler the compiler instance
+* @returns {Set<LibraryType>} enabled types
+*/
 const getEnabledTypes = compiler => {
 let set = enabledTypes.get(compiler);
 if (set === undefined) {
@@ -59,7 +59,7 @@ class SystemLibraryPlugin extends AbstractLibraryPlugin {
 );
 }
 return {
-name: /** @type {string=} */ (name)
+name: /** @type {string} */ (name)
 };
 }

@@ -148,6 +148,10 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
 requiredExternals = externals;
 }

+/**
+* @param {string} str the string to replace
+* @returns {string} the replaced keys
+*/
 const replaceKeys = str => {
 return compilation.getPath(str, {
 chunk
@@ -178,6 +182,10 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
 );
 };

+/**
+* @param {string} type the type
+* @returns {string} external require array
+*/
 const externalsRequireArray = type => {
 return replaceKeys(
 externals
@@ -185,7 +193,9 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
 let expr;
 let request = m.request;
 if (typeof request === "object") {
-request = request[type];
+request =
+/** @type {Record<string, string | string[]>} */
+(request)[type];
 }
 if (request === undefined) {
 throw new Error(
@@ -246,6 +256,10 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {

 const { auxiliaryComment, namedDefine, names } = options;

+/**
+* @param {keyof LibraryCustomUmdCommentObject} type type
+* @returns {string} comment
+*/
 const getAuxiliaryComment = type => {
 if (auxiliaryComment) {
 if (typeof auxiliaryComment === "string")
@@ -299,7 +313,11 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
 " else\n" +
 " " +
 replaceKeys(
-accessorAccess("root", names.root || names.commonjs)
+accessorAccess(
+"root",
+/** @type {string | string[]} */ (names.root) ||
+/** @type {string} */ (names.commonjs)
+)
 ) +
 " = factory(" +
 externalsRootArray(externals) +
@@ -6,7 +6,7 @@
 "allowJs": true,
 "checkJs": true,
 "noEmit": true,
-"strict": true,
+"strict": false,
 "noImplicitThis": true,
 "alwaysStrict": true,
 "types": ["node"],
@@ -4617,7 +4617,7 @@ declare abstract class FileSystemInfo {
 ): void;
 getFileHash(
 path: string,
-callback: (arg0?: null | WebpackError, arg1?: string) => void
+callback: (arg0?: null | WebpackError, arg1?: null | string) => void
 ): void;
 getContextHash(
 path: string,