mirror of https://github.com/webpack/webpack.git
refactor(types): more
This commit is contained in:
parent cc734af66e
commit 3092f7cb20
@@ -11,6 +11,7 @@ const ModuleFilenameHelpers = require("./ModuleFilenameHelpers");
 const Template = require("./Template");
 const createSchemaValidation = require("./util/create-schema-validation");

+/** @typedef {import("../declarations/plugins/BannerPlugin").BannerFunction} BannerFunction */
 /** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginArgument} BannerPluginArgument */
 /** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginOptions} BannerPluginOptions */
 /** @typedef {import("./Compiler")} Compiler */
@@ -60,7 +61,7 @@ class BannerPlugin {
 			const getBanner = bannerOption;
 			this.banner = this.options.raw
 				? getBanner
-				: data => wrapComment(getBanner(data));
+				: /** @type {BannerFunction} */ data => wrapComment(getBanner(data));
 		} else {
 			const banner = this.options.raw
 				? bannerOption
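
Note: a minimal, self-contained sketch of the inline JSDoc annotation pattern used in the BannerPlugin hunk above — pinning down a function type that checkJs would otherwise infer loosely. The names BannerFn, wrap, and banner are hypothetical, not from webpack.

/** @typedef {(data: { hash: string, chunk: { name?: string } }) => string} BannerFn */

const wrap = str => `/*! ${str} */`;

/** @type {BannerFn} */
const banner = data => wrap(`chunk ${data.chunk.name || "main"} (${data.hash})`);

console.log(banner({ hash: "abc123", chunk: { name: "main" } }));
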
@@ -143,7 +143,7 @@ class SnapshotIterable {
 		let state = 0;
 		/** @type {IterableIterator<string>} */
 		let it;
-		/** @type {(Snapshot) => (Map<string, any> | Set<string>)[]} */
+		/** @type {(snapshot: Snapshot) => (Map<string, any> | Set<string>)[]} */
 		let getMaps;
 		/** @type {(Map<string, any> | Set<string>)[]} */
 		let maps;
@@ -882,6 +882,11 @@ const getResolvedHash = entry => {
 	return entry.symlinks === undefined ? entry.hash : undefined;
 };

+/**
+ * @template T
+ * @param {Set<T>} source source
+ * @param {Set<T>} target target
+ */
 const addAll = (source, target) => {
	for (const key of source) target.add(key);
 };
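
Note: a small sketch of the JSDoc generics added to addAll above — @template T links the two parameters, so mixing element types becomes a checkJs error. This mirrors the function from the hunk; the usage lines are illustrative.

/**
 * @template T
 * @param {Set<T>} source source set
 * @param {Set<T>} target target set that receives every element of source
 */
const addAll = (source, target) => {
	for (const key of source) target.add(key);
};

const a = new Set(["x", "y"]);
const b = new Set(["z"]);
addAll(a, b); // ok: both are Set<string>
// addAll(new Set([1]), b); // would be flagged: Set<number> vs Set<string>
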
@@ -1150,6 +1155,11 @@ class FileSystemInfo {
 		);
 	}

+	/**
+	 * @param {string} path path
+	 * @param {string} reason reason
+	 * @param {string[]} args arguments
+	 */
 	_log(path, reason, ...args) {
 		const key = path + reason;
 		if (this._loggedPaths.has(key)) return;
@@ -1263,7 +1273,7 @@ class FileSystemInfo {

 	/**
 	 * @param {string} path file path
-	 * @param {function((WebpackError | null)=, string=): void} callback callback function
+	 * @param {function((WebpackError | null)=, (string | null)=): void} callback callback function
 	 * @returns {void}
 	 */
 	getFileHash(path, callback) {
@@ -1294,7 +1304,7 @@ class FileSystemInfo {

 	/**
 	 * @param {string} path context path
-	 * @param {function((WebpackError | null)=, ContextHash=): void} callback callback function
+	 * @param {function((WebpackError | null)=, (ContextHash | null)=): void} callback callback function
 	 * @returns {void}
 	 */
 	_getUnresolvedContextHash(path, callback) {
@@ -1325,7 +1335,7 @@ class FileSystemInfo {

 	/**
 	 * @param {string} path context path
-	 * @param {function((WebpackError | null)=, ContextTimestampAndHash=): void} callback callback function
+	 * @param {function((WebpackError | null)=, (ContextTimestampAndHash | null)=): void} callback callback function
 	 * @returns {void}
 	 */
 	_getUnresolvedContextTsh(path, callback) {
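
Note: a hedged sketch of why the callback types above gain "| null" — Node-style callbacks are often invoked with an explicit null result (distinct from undefined), so the result parameter must admit null. The names getCachedHash and the cache contents are hypothetical.

/**
 * @param {string} path file path
 * @param {function((Error | null)=, (string | null)=): void} callback callback
 */
const getCachedHash = (path, callback) => {
	const cache = new Map([["/a.js", "abc123"]]);
	const entry = cache.get(path);
	// null is a valid "not available" answer, distinct from an error
	callback(null, entry === undefined ? null : entry);
};

getCachedHash("/b.js", (err, hash) => {
	if (err) throw err;
	console.log(hash); // null for /b.js
});
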
@@ -1396,6 +1406,10 @@ class FileSystemInfo {
 			contextDependencies: resolveDirectories,
 			missingDependencies: resolveMissing
 		};
+		/**
+		 * @param {string} expected expected result
+		 * @returns {string} expected result
+		 */
 		const expectedToString = expected => {
 			return expected ? ` (expected ${expected})` : "";
 		};
@@ -2056,6 +2070,9 @@ class FileSystemInfo {
 			}
 			return capturedItems;
 		};
+		/**
+		 * @param {Set<string>} capturedFiles captured files
+		 */
 		const processCapturedFiles = capturedFiles => {
 			switch (mode) {
 				case 3:
@@ -2928,7 +2945,7 @@ class FileSystemInfo {

 		const hash = createHash(this._hashFunction);

-		hash.update(content);
+		hash.update(/** @type {string | Buffer} */ (content));

 		const digest = /** @type {string} */ (hash.digest("hex"));

@@ -2992,7 +3009,7 @@ class FileSystemInfo {
 	 * @param {function(string, IStats, function(Error=, ItemType=): void): void} options.fromFile called when context item is a file
 	 * @param {function(string, IStats, function(Error=, ItemType=): void): void} options.fromDirectory called when context item is a directory
 	 * @param {function(string[], ItemType[]): T} options.reduce called from all context items
-	 * @param {function((Error | null)=, (T)=): void} callback callback
+	 * @param {function((Error | null)=, (T | null)=): void} callback callback
 	 */
 	_readContext(
 		{
@@ -3179,6 +3196,7 @@ class FileSystemInfo {
 	 * @returns {void}
 	 */
 	_resolveContextTimestamp(entry, callback) {
+		/** @type {string[]} */
 		const hashes = [];
 		let safeTime = 0;
 		processAsyncTree(
@@ -3287,6 +3305,7 @@ class FileSystemInfo {
 	 * @returns {void}
 	 */
 	_resolveContextHash(entry, callback) {
+		/** @type {string[]} */
 		const hashes = [];
 		processAsyncTree(
 			entry.symlinks,
@@ -3443,7 +3462,9 @@ class FileSystemInfo {
 	 * @returns {void}
 	 */
 	_resolveContextTsh(entry, callback) {
+		/** @type {string[]} */
 		const hashes = [];
+		/** @type {string[]} */
 		const tsHashes = [];
 		let safeTime = 0;
 		processAsyncTree(
@@ -3561,7 +3582,7 @@ class FileSystemInfo {
 			}
 			let data;
 			try {
-				data = JSON.parse(content.toString("utf-8"));
+				data = JSON.parse(/** @type {Buffer} */ (content).toString("utf-8"));
 			} catch (e) {
 				return callback(e);
 			}

@@ -16,6 +16,7 @@ const { getEntryRuntime, mergeRuntime } = require("./util/runtime");
 /** @typedef {import("./Compilation")} Compilation */
 /** @typedef {import("./DependenciesBlock")} DependenciesBlock */
 /** @typedef {import("./Dependency")} Dependency */
+/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */
 /** @typedef {import("./Entrypoint")} Entrypoint */
 /** @typedef {import("./Module")} Module */
 /** @typedef {import("./ModuleGraph")} ModuleGraph */
@@ -39,15 +40,15 @@ const { getEntryRuntime, mergeRuntime } = require("./util/runtime");
  * @typedef {Object} ChunkGroupInfo
  * @property {ChunkGroup} chunkGroup the chunk group
  * @property {RuntimeSpec} runtime the runtimes
- * @property {ModuleSetPlus} minAvailableModules current minimal set of modules available at this point
- * @property {boolean} minAvailableModulesOwned true, if minAvailableModules is owned and can be modified
+ * @property {ModuleSetPlus | undefined} minAvailableModules current minimal set of modules available at this point
+ * @property {boolean | undefined} minAvailableModulesOwned true, if minAvailableModules is owned and can be modified
  * @property {ModuleSetPlus[]} availableModulesToBeMerged enqueued updates to the minimal set of available modules
  * @property {Set<Module>=} skippedItems modules that were skipped because module is already available in parent chunks (need to reconsider when minAvailableModules is shrinking)
  * @property {Set<[Module, ConnectionState]>=} skippedModuleConnections referenced modules that where skipped because they were not active in this runtime
- * @property {ModuleSetPlus} resultingAvailableModules set of modules available including modules from this chunk group
- * @property {Set<ChunkGroupInfo>} children set of children chunk groups, that will be revisited when availableModules shrink
- * @property {Set<ChunkGroupInfo>} availableSources set of chunk groups that are the source for minAvailableModules
- * @property {Set<ChunkGroupInfo>} availableChildren set of chunk groups which depend on the this chunk group as availableSource
+ * @property {ModuleSetPlus | undefined} resultingAvailableModules set of modules available including modules from this chunk group
+ * @property {Set<ChunkGroupInfo> | undefined} children set of children chunk groups, that will be revisited when availableModules shrink
+ * @property {Set<ChunkGroupInfo> | undefined} availableSources set of chunk groups that are the source for minAvailableModules
+ * @property {Set<ChunkGroupInfo> | undefined} availableChildren set of chunk groups which depend on the this chunk group as availableSource
  * @property {number} preOrderIndex next pre order index
  * @property {number} postOrderIndex next post order index
  * @property {boolean} chunkLoading has a chunk loading mechanism
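
Note: a simplified sketch of the typedef change above — properties that are filled in lazily are declared "| undefined", and call sites that run after initialization narrow them back with a cast. The GroupInfo shape and names here are hypothetical.

/**
 * @typedef {Object} GroupInfo
 * @property {Set<string> | undefined} children filled in lazily during traversal
 */

/** @type {GroupInfo} */
const info = { children: undefined };

const ensureChildren = () => {
	if (info.children === undefined) info.children = new Set();
	return info.children;
};

ensureChildren().add("child-a");
// After the guard, a cast restates what the control flow guarantees:
const children = /** @type {Set<string>} */ (info.children);
console.log(children.size); // 1
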
@@ -199,6 +200,7 @@ const visitModules = (

 	/** @type {RuntimeSpec | false} */
 	let blockModulesMapRuntime = false;
+	/** @type {Map<DependenciesBlock, (Module | ConnectionState)[]>} */
 	let blockModulesMap;

 	/**
@@ -239,7 +241,7 @@ const visitModules = (
 			extractBlockModules(module, moduleGraph, runtime, blockModulesMap);
 			blockModules = blockModulesMap.get(block);
 			logger.timeAggregate("visitModules: prepare");
-			return blockModules;
+			return /** @type {(Module | ConnectionState)[]} */ (blockModules);
 		}
 	};

@@ -290,7 +292,7 @@ const visitModules = (
 	for (const [chunkGroup, modules] of inputEntrypointsAndModules) {
 		const runtime = getEntryRuntime(
 			compilation,
-			chunkGroup.name,
+			/** @type {string} */ (chunkGroup.name),
 			chunkGroup.options
 		);
 		/** @type {ChunkGroupInfo} */
@@ -352,7 +354,9 @@ const visitModules = (
 		const { chunkGroup } = chunkGroupInfo;
 		chunkGroupInfo.availableSources = new Set();
 		for (const parent of chunkGroup.parentsIterable) {
-			const parentChunkGroupInfo = chunkGroupInfoMap.get(parent);
+			const parentChunkGroupInfo =
+				/** @type {ChunkGroupInfo} */
+				(chunkGroupInfoMap.get(parent));
 			chunkGroupInfo.availableSources.add(parentChunkGroupInfo);
 			if (parentChunkGroupInfo.availableChildren === undefined) {
 				parentChunkGroupInfo.availableChildren = new Set();
@@ -399,15 +403,15 @@ const visitModules = (
 		// 1. We create a chunk group with single chunk in it for this Block
 		// but only once (blockChunkGroups map)
 		let cgi = blockChunkGroups.get(b);
-		/** @type {ChunkGroup} */
+		/** @type {ChunkGroup | undefined} */
 		let c;
-		/** @type {Entrypoint} */
+		/** @type {Entrypoint | undefined} */
 		let entrypoint;
 		const entryOptions = b.groupOptions && b.groupOptions.entryOptions;
 		if (cgi === undefined) {
 			const chunkName = (b.groupOptions && b.groupOptions.name) || b.chunkName;
 			if (entryOptions) {
-				cgi = namedAsyncEntrypoints.get(chunkName);
+				cgi = namedAsyncEntrypoints.get(/** @type {string} */ (chunkName));
 				if (!cgi) {
 					entrypoint = compilation.addAsyncEntrypoint(
 						entryOptions,
@@ -505,7 +509,11 @@ const visitModules = (
 				c = cgi.chunkGroup;
 				if (c.isInitial()) {
 					compilation.errors.push(
-						new AsyncDependencyToInitialChunkError(chunkName, module, b.loc)
+						new AsyncDependencyToInitialChunkError(
+							/** @type {string} */ (chunkName),
+							module,
+							b.loc
+						)
 					);
 					c = chunkGroup;
 				} else {
@@ -515,7 +523,7 @@ const visitModules = (
 				}
 				blockConnections.set(b, []);
 			}
-			blockChunkGroups.set(b, cgi);
+			blockChunkGroups.set(b, /** @type {ChunkGroupInfo} */ (cgi));
 		} else if (entryOptions) {
 			entrypoint = /** @type {Entrypoint} */ (cgi.chunkGroup);
 		} else {
@@ -536,7 +544,7 @@ const visitModules = (
 			connectList = new Set();
 			queueConnect.set(chunkGroupInfo, connectList);
 		}
-		connectList.add(cgi);
+		connectList.add(/** @type {ChunkGroupInfo} */ (cgi));

 		// TODO check if this really need to be done for each traversal
 		// or if it is enough when it's queued when created
@@ -547,7 +555,7 @@ const visitModules = (
 				module: module,
 				chunk: c.chunks[0],
 				chunkGroup: c,
-				chunkGroupInfo: cgi
+				chunkGroupInfo: /** @type {ChunkGroupInfo} */ (cgi)
 			});
 		} else if (entrypoint !== undefined) {
 			chunkGroupInfo.chunkGroup.addAsyncEntrypoint(entrypoint);
@@ -690,7 +698,7 @@ const visitModules = (
 	const processQueue = () => {
 		while (queue.length) {
 			statProcessedQueueItems++;
-			const queueItem = queue.pop();
+			const queueItem = /** @type {QueueItem} */ (queue.pop());
 			module = queueItem.module;
 			block = queueItem.block;
 			chunk = queueItem.chunk;
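
Note: a minimal sketch of the queue.pop() cast above — Array#pop() is typed T | undefined, but inside a "while (queue.length)" loop the result is provably present, so a JSDoc cast removes the undefined branch. The QueueItem shape here is hypothetical.

/** @typedef {{ id: number }} QueueItem */

/** @type {QueueItem[]} */
const queue = [{ id: 1 }, { id: 2 }];

while (queue.length) {
	// Without the cast, queueItem would be QueueItem | undefined.
	const queueItem = /** @type {QueueItem} */ (queue.pop());
	console.log(queueItem.id); // 2, then 1
}
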
@@ -1087,7 +1095,9 @@ const visitModules = (

 	const processChunkGroupsForCombining = () => {
 		for (const info of chunkGroupsForCombining) {
-			for (const source of info.availableSources) {
+			for (const source of /** @type {Set<ChunkGroupInfo>} */ (
+				info.availableSources
+			)) {
 				if (!source.minAvailableModules) {
 					chunkGroupsForCombining.delete(info);
 					break;
@@ -1106,7 +1116,9 @@ const visitModules = (
 			}
 		};
 		// combine minAvailableModules from all resultingAvailableModules
-		for (const source of info.availableSources) {
+		for (const source of /** @type {Set<ChunkGroupInfo>} */ (
+			info.availableSources
+		)) {
 			const resultingAvailableModules =
 				calculateResultingAvailableModules(source);
 			mergeSet(resultingAvailableModules);
@@ -1126,7 +1138,9 @@ const visitModules = (
 		for (const info of outdatedChunkGroupInfo) {
 			// 1. Reconsider skipped items
 			if (info.skippedItems !== undefined) {
-				const { minAvailableModules } = info;
+				const minAvailableModules =
+					/** @type {ModuleSetPlus} */
+					(info.minAvailableModules);
 				for (const module of info.skippedItems) {
 					if (
 						!minAvailableModules.has(module) &&
@@ -1147,7 +1161,9 @@ const visitModules = (

 			// 2. Reconsider skipped connections
 			if (info.skippedModuleConnections !== undefined) {
-				const { minAvailableModules } = info;
+				const minAvailableModules =
+					/** @type {ModuleSetPlus} */
+					(info.minAvailableModules);
 				for (const entry of info.skippedModuleConnections) {
 					const [module, activeState] = entry;
 					if (activeState === false) continue;
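
Note: a sketch of why the destructuring above becomes an explicit cast — once minAvailableModules is typed "| undefined", plain destructuring keeps the undefined branch, while the cast asserts it has been set by this phase. Simplified, hypothetical types.

/** @typedef {{ minAvailableModules: Set<string> | undefined }} Info */

/** @type {Info} */
const info = { minAvailableModules: new Set(["a"]) };

// const { minAvailableModules } = info; // type: Set<string> | undefined
const minAvailableModules =
	/** @type {Set<string>} */
	(info.minAvailableModules); // type: Set<string>

console.log(minAvailableModules.has("a")); // true
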
@@ -60,14 +60,27 @@ const chunkHasJs = (chunk, chunkGraph) => {
 		: false;
 };

+/**
+ * @param {Module} module a module
+ * @param {string} code the code
+ * @returns {string} generated code for the stack
+ */
 const printGeneratedCodeForStack = (module, code) => {
 	const lines = code.split("\n");
 	const n = `${lines.length}`.length;
 	return `\n\nGenerated code for ${module.identifier()}\n${lines
-		.map((line, i, lines) => {
-			const iStr = `${i + 1}`;
-			return `${" ".repeat(n - iStr.length)}${iStr} | ${line}`;
-		})
+		.map(
+			/**
+			 * @param {string} line the line
+			 * @param {number} i the index
+			 * @param {string[]} lines the lines
+			 * @returns {string} the line with line number
+			 */
+			(line, i, lines) => {
+				const iStr = `${i + 1}`;
+				return `${" ".repeat(n - iStr.length)}${iStr} | ${line}`;
+			}
+		)
 		.join("\n")}`;
 };

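
Note: a self-contained sketch of annotating an inline .map callback, as done above — a JSDoc block placed directly before the arrow function types its parameters without hoisting the callback out of the chain.

const code = "const a = 1;\nconsole.log(a);";
const lines = code.split("\n");
const n = `${lines.length}`.length;

const numbered = lines
	.map(
		/**
		 * @param {string} line the line
		 * @param {number} i the index
		 * @returns {string} the line with line number
		 */
		(line, i) => `${" ".repeat(n - `${i + 1}`.length)}${i + 1} | ${line}`
	)
	.join("\n");

console.log(numbered);
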
@@ -1007,6 +1020,9 @@ class JavascriptModulesPlugin {
 		const useRequire =
 			requireFunction || interceptModuleExecution || moduleUsed;

+		/**
+		 * @type {{startup: string[], beforeStartup: string[], header: string[], afterStartup: string[], allowInlineStartup: boolean}}
+		 */
 		const result = {
 			header: [],
 			beforeStartup: [],

@@ -68,8 +68,8 @@ class AmdLibraryPlugin extends AbstractLibraryPlugin {
 			}
 		}
 		return {
-			name: /** @type {string=} */ (name),
-			amdContainer: /** @type {string=} */ (amdContainer)
+			name: /** @type {string} */ (name),
+			amdContainer: /** @type {string} */ (amdContainer)
 		};
 	}

@@ -17,6 +17,7 @@ const AbstractLibraryPlugin = require("./AbstractLibraryPlugin");
 /** @typedef {import("../../declarations/WebpackOptions").LibraryOptions} LibraryOptions */
 /** @typedef {import("../../declarations/WebpackOptions").LibraryType} LibraryType */
 /** @typedef {import("../Chunk")} Chunk */
+/** @typedef {import("../Compilation")} Compilation */
 /** @typedef {import("../Compilation").ChunkHashContext} ChunkHashContext */
 /** @typedef {import("../Compiler")} Compiler */
 /** @typedef {import("../Module")} Module */
@@ -59,6 +60,7 @@ const accessWithInit = (accessor, existingLength, initLast = false) => {
 	let i = 1;

 	// all properties printed so far (excluding base)
+	/** @type {string[] | undefined} */
 	let propsSoFar;

 	// if there is existingLength, print all properties until this position as property access
@@ -142,7 +144,7 @@ class AssignLibraryPlugin extends AbstractLibraryPlugin {
 			}
 		}
 		return {
-			name: /** @type {string|string[]=} */ (name),
+			name: /** @type {string | string[]} */ (name),
 			export: library.export
 		};
 	}
@@ -173,12 +175,22 @@ class AssignLibraryPlugin extends AbstractLibraryPlugin {
 			moduleGraph.addExtraReason(module, "used as library export");
 	}

+	/**
+	 * @param {Compilation} compilation the compilation
+	 * @returns {string[]} the prefix
+	 */
 	_getPrefix(compilation) {
 		return this.prefix === "global"
 			? [compilation.runtimeTemplate.globalObject]
 			: this.prefix;
 	}

+	/**
+	 * @param {AssignLibraryPluginParsed} options the library options
+	 * @param {Chunk} chunk the chunk
+	 * @param {Compilation} compilation the compilation
+	 * @returns {Array<string>} the resolved full name
+	 */
 	_getResolvedFullName(options, chunk, compilation) {
 		const prefix = this._getPrefix(compilation);
 		const fullName = options.name ? prefix.concat(options.name) : prefix;

@@ -12,6 +12,10 @@
 /** @type {WeakMap<Compiler, Set<LibraryType>>} */
 const enabledTypes = new WeakMap();

+/**
+ * @param {Compiler} compiler the compiler instance
+ * @returns {Set<LibraryType>} enabled types
+ */
 const getEnabledTypes = compiler => {
 	let set = enabledTypes.get(compiler);
 	if (set === undefined) {
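
Note: a minimal sketch of the WeakMap pattern documented above — per-compiler state kept in a module-level WeakMap, so entries are garbage-collected with their compiler. The object stand-ins for Compiler and LibraryType are hypothetical.

/** @type {WeakMap<object, Set<string>>} */
const enabledTypes = new WeakMap();

/**
 * @param {object} compiler the compiler instance
 * @returns {Set<string>} enabled types
 */
const getEnabledTypes = compiler => {
	let set = enabledTypes.get(compiler);
	if (set === undefined) {
		set = new Set();
		enabledTypes.set(compiler, set);
	}
	return set;
};

const compiler = {};
getEnabledTypes(compiler).add("var");
console.log(getEnabledTypes(compiler).has("var")); // true
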
@@ -59,7 +59,7 @@ class SystemLibraryPlugin extends AbstractLibraryPlugin {
 			);
 		}
 		return {
-			name: /** @type {string=} */ (name)
+			name: /** @type {string} */ (name)
 		};
 	}

@@ -148,6 +148,10 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
 			requiredExternals = externals;
 		}

+		/**
+		 * @param {string} str the string to replace
+		 * @returns {string} the replaced keys
+		 */
 		const replaceKeys = str => {
 			return compilation.getPath(str, {
 				chunk
@@ -178,6 +182,10 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
 			);
 		};

+		/**
+		 * @param {string} type the type
+		 * @returns {string} external require array
+		 */
 		const externalsRequireArray = type => {
 			return replaceKeys(
 				externals
@@ -185,7 +193,9 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
 						let expr;
 						let request = m.request;
 						if (typeof request === "object") {
-							request = request[type];
+							request =
+								/** @type {Record<string, string | string[]>} */
+								(request)[type];
 						}
 						if (request === undefined) {
 							throw new Error(
@@ -246,6 +256,10 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {

 		const { auxiliaryComment, namedDefine, names } = options;

+		/**
+		 * @param {keyof LibraryCustomUmdCommentObject} type type
+		 * @returns {string} comment
+		 */
 		const getAuxiliaryComment = type => {
 			if (auxiliaryComment) {
 				if (typeof auxiliaryComment === "string")
@@ -299,7 +313,11 @@ class UmdLibraryPlugin extends AbstractLibraryPlugin {
 			"	else\n" +
 			"		" +
 			replaceKeys(
-				accessorAccess("root", names.root || names.commonjs)
+				accessorAccess(
+					"root",
+					/** @type {string | string[]} */ (names.root) ||
+						/** @type {string} */ (names.commonjs)
+				)
 			) +
 			" = factory(" +
 			externalsRootArray(externals) +

@@ -6,7 +6,7 @@
 		"allowJs": true,
 		"checkJs": true,
 		"noEmit": true,
-		"strict": true,
+		"strict": false,
 		"noImplicitThis": true,
 		"alwaysStrict": true,
 		"types": ["node"],
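
Note: a hedged illustration of the compiler options above — with "checkJs" on, "noImplicitThis" still applies even when "strict" is off, so functions that use this need a JSDoc @this annotation (or an arrow function). The increment/counter names are illustrative only.

/**
 * @this {{ count: number }}
 * @returns {number} incremented count
 */
function increment() {
	// Without the @this annotation, noImplicitThis flags `this` as implicit any.
	return ++this.count;
}

const counter = { count: 0, increment };
console.log(counter.increment()); // 1
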
@@ -4617,7 +4617,7 @@ declare abstract class FileSystemInfo {
 	): void;
 	getFileHash(
 		path: string,
-		callback: (arg0?: null | WebpackError, arg1?: string) => void
+		callback: (arg0?: null | WebpackError, arg1?: null | string) => void
 	): void;
 	getContextHash(
 		path: string,