mirror of https://github.com/webpack/webpack.git
Merge branch 'master' of https://github.com/webpack/webpack
commit 3fadc23117
@@ -178,6 +178,8 @@ class Compiler {
this.root = this;
/** @type {string} */
this.outputPath = "";
/** @type {Watching} */
this.watching = undefined;

/** @type {OutputFileSystem} */
this.outputFileSystem = null;
@@ -340,7 +342,8 @@ class Compiler {
this.running = true;
this.watchMode = true;
return new Watching(this, watchOptions, handler);
this.watching = new Watching(this, watchOptions, handler);
return this.watching;
}

/**
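
watch() now keeps the Watching instance it creates on compiler.watching in addition to returning it, so the active watcher can be reached from the compiler alone. A minimal sketch of what this enables (the configuration object and timing values are placeholders, not part of this commit):

const webpack = require("webpack");

const compiler = webpack({ mode: "development", entry: "./src/index.js" });
const watching = compiler.watch({ aggregateTimeout: 300 }, (err, stats) => {
	if (err) console.error(err);
});
// the same instance is now also reachable from the compiler itself,
// e.g. from a plugin or a shutdown handler that only holds the compiler
console.assert(compiler.watching === watching);
compiler.watching.close(() => {});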
@@ -461,6 +461,10 @@ class HotModuleReplacementPlugin {
ChunkGraph.setChunkGraphForChunk(hotUpdateChunk, chunkGraph);
hotUpdateChunk.id = chunkId;
hotUpdateChunk.runtime = newRuntime;
if (currentChunk) {
for (const group of currentChunk.groupsIterable)
hotUpdateChunk.addGroup(group);
}
chunkGraph.attachModules(hotUpdateChunk, newModules || []);
chunkGraph.attachRuntimeModules(
hotUpdateChunk,

@@ -313,6 +313,7 @@ class Watching {
const finalCallback = (err, compilation) => {
this.running = false;
this.compiler.running = false;
this.compiler.watching = undefined;
this.compiler.watchMode = false;
this.compiler.modifiedFiles = undefined;
this.compiler.removedFiles = undefined;

@@ -241,7 +241,7 @@ class CommonJsExportsParserPlugin {
bailoutHint(
`${base}${propertyAccess(
members
)}(...) prevents optimization as ${base} is passed as call context as ${formatLocation(
)}(...) prevents optimization as ${base} is passed as call context at ${formatLocation(
expr.loc
)}`
);

@@ -266,6 +266,7 @@ HarmonyImportSpecifierDependency.Template = class HarmonyImportSpecifierDependen
let exportExpr;
if (
connection &&
concatenationScope &&
concatenationScope.isModuleInScope(connection.module)
) {

@@ -11,6 +11,7 @@ const CommentCompilationWarning = require("../CommentCompilationWarning");
const UnsupportedFeatureWarning = require("../UnsupportedFeatureWarning");
const formatLocation = require("../formatLocation");
const EnableChunkLoadingPlugin = require("../javascript/EnableChunkLoadingPlugin");
const { equals } = require("../util/ArrayHelpers");
const {
harmonySpecifierTag
} = require("./HarmonyImportDependencyParserPlugin");
@@ -27,6 +28,13 @@ const getUrl = module => {
return pathToFileURL(module.resource).toString();
};

const DEFAULT_SYNTAX = [
"Worker",
"SharedWorker",
"navigator.serviceWorker.register()",
"Worker from worker_threads"
];

class WorkerPlugin {
constructor(chunkLoading) {
this._chunkLoading = chunkLoading;

@@ -115,6 +123,9 @@ class WorkerPlugin {
*/
const parserPlugin = (parser, parserOptions) => {
if (parserOptions.worker === false) return;
const options = !Array.isArray(parserOptions.worker)
? ["..."]
: parserOptions.worker;
const handleNewWorker = expr => {
if (expr.arguments.length === 0 || expr.arguments.length > 2)
return;
@@ -224,27 +235,40 @@ class WorkerPlugin {
return true;
}
};
parser.hooks.new.for("Worker").tap("WorkerPlugin", handleNewWorker);
parser.hooks.new
.for("SharedWorker")
.tap("WorkerPlugin", handleNewWorker);
parser.hooks.call
.for("navigator.serviceWorker.register")
.tap("WorkerPlugin", handleNewWorker);
parser.hooks.new
.for(harmonySpecifierTag)
.tap("WorkerPlugin", expr => {
const settings = /** @type {HarmonySettings} */ (parser.currentTagData);
if (
!settings ||
settings.source !== "worker_threads" ||
settings.ids.length !== 1 ||
settings.ids[0] !== "Worker"
) {
return;
const processItem = item => {
if (item.endsWith("()")) {
parser.hooks.call
.for(item.slice(0, -2))
.tap("WorkerPlugin", handleNewWorker);
} else {
const match = /^(.+?)(\(\))?\s+from\s+(.+)$/.exec(item);
if (match) {
const ids = match[1].split(".");
const call = match[2];
const source = match[3];
(call ? parser.hooks.call : parser.hooks.new)
.for(harmonySpecifierTag)
.tap("WorkerPlugin", expr => {
const settings = /** @type {HarmonySettings} */ (parser.currentTagData);
if (
!settings ||
settings.source !== source ||
!equals(settings.ids, ids)
) {
return;
}
return handleNewWorker(expr);
});
} else {
parser.hooks.new.for(item).tap("WorkerPlugin", handleNewWorker);
}
return handleNewWorker(expr);
});
}
};
for (const item of options) {
if (item === "...") {
DEFAULT_SYNTAX.forEach(processItem);
} else processItem(item);
}
};
normalModuleFactory.hooks.parser
.for("javascript/auto")
@@ -154,6 +154,9 @@ module.exports = mergeExports(fn, {
get Generator() {
return require("./Generator");
},
get HotUpdateChunk() {
return require("./HotUpdateChunk");
},
get HotModuleReplacementPlugin() {
return require("./HotModuleReplacementPlugin");
},

@@ -351,6 +354,9 @@ module.exports = mergeExports(fn, {
},
runtime: {
get GetChunkFilenameRuntimeModule() {
return require("./runtime/GetChunkFilenameRuntimeModule");
},
get LoadScriptRuntimeModule() {
return require("./runtime/LoadScriptRuntimeModule");
}

@@ -28,6 +28,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
/** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Compilation").AssetInfo} AssetInfo */
/** @typedef {import("../Compilation").PathData} PathData */
/** @typedef {import("../Compiler")} Compiler */
@@ -329,6 +330,24 @@ const checkMinSize = (sizes, minSize) => {
return true;
};

/**
* @param {SplitChunksSizes} sizes the sizes
* @param {SplitChunksSizes} minSize the min sizes
* @returns {undefined | string[]} list of size types that are below min size
*/
const getViolatingMinSizes = (sizes, minSize) => {
let list;
for (const key of Object.keys(minSize)) {
const size = sizes[key];
if (size === undefined || size === 0) continue;
if (size < minSize[key]) {
if (list === undefined) list = [key];
else list.push(key);
}
}
return list;
};

/**
* @param {SplitChunksSizes} sizes the sizes
* @returns {number} the total size
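
getViolatingMinSizes reports which size types fall below their minimum rather than a single boolean, which is what allows splitChunks minSize thresholds to be expressed per source type. A small sketch of that object form, mirroring the webassembly test configuration added later in this commit (the cache group name and the numbers are arbitrary placeholders):

module.exports = {
	optimization: {
		splitChunks: {
			cacheGroups: {
				shared: {
					name: "shared",
					minChunks: 2,
					// one threshold per source type; modules whose type stays
					// below its threshold are dropped from the group instead of
					// the whole cache group being discarded
					minSize: {
						javascript: 100,
						webassembly: 100
					}
				}
			}
		}
	}
};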
@@ -926,8 +945,10 @@ module.exports = class SplitChunksPlugin {
return entry2;
};

/** @type {Map<string, boolean>} */
const alreadyValidatedParents = new Map();
/** @type {Set<string>} */
const alreadyValidatedNames = new Set();
const alreadyReportedErrors = new Set();

// Map a list of chunks to a list of modules
// For the key the chunk "index" is used, the value is a SortableSet of modules

@@ -958,23 +979,55 @@ module.exports = class SplitChunksPlugin {
cacheGroup.key
);
// Check if the name is ok
if (!alreadyValidatedNames.has(name)) {
alreadyValidatedNames.add(name);
if (compilation.namedChunks.has(name)) {
compilation.errors.push(
new WebpackError(
"SplitChunksPlugin\n" +
`Cache group "${cacheGroup.key}" conflicts with existing chunk.\n` +
`Both have the same name "${name}".\n` +
"Use a different name for the cache group.\n" +
'HINT: You can omit "name" to automatically create a name.\n' +
"BREAKING CHANGE: webpack < 5 used to allow to use the " +
"entrypoint as splitChunk. This is no longer allowed. " +
"Remove this entrypoint and add modules to cache group's 'test' instead. " +
"If you need modules to be evaluated on startup, add them to the existing entrypoints (make them arrays). " +
"See migration guide of more info."
)
);
const existingChunk = compilation.namedChunks.get(name);
if (existingChunk) {
const parentValidationKey = `${name}|${selectedChunksKey}`;
const valid = alreadyValidatedParents.get(parentValidationKey);
if (valid === false) return;
if (valid === undefined) {
// Module can only be moved into the existing chunk if the existing chunk
// is a parent of all selected chunks
let isInAllParents = true;
/** @type {Set<ChunkGroup>} */
const queue = new Set();
for (const chunk of selectedChunks) {
for (const group of chunk.groupsIterable) {
queue.add(group);
}
}
for (const group of queue) {
if (existingChunk.isInGroup(group)) continue;
let hasParent = false;
for (const parent of group.parentsIterable) {
hasParent = true;
queue.add(parent);
}
if (!hasParent) {
isInAllParents = false;
}
}
const valid = isInAllParents;
alreadyValidatedParents.set(parentValidationKey, valid);
if (!valid) {
if (!alreadyReportedErrors.has(name)) {
alreadyReportedErrors.add(name);
compilation.errors.push(
new WebpackError(
"SplitChunksPlugin\n" +
`Cache group "${cacheGroup.key}" conflicts with existing chunk.\n` +
`Both have the same name "${name}" and existing chunk is not a parent of the selected modules.\n` +
"Use a different name for the cache group or make sure that the existing chunk is a parent (e. g. via dependsOn).\n" +
'HINT: You can omit "name" to automatically create a name.\n' +
"BREAKING CHANGE: webpack < 5 used to allow to use an entrypoint as splitChunk. " +
"This is no longer allowed when the entrypoint is not a parent of the selected modules.\n" +
"Remove this entrypoint and add modules to cache group's 'test' instead. " +
"If you need modules to be evaluated on startup, add them to the existing entrypoints (make them arrays). " +
"See migration guide of more info."
)
);
}
return;
}
}
}
// Create key for maps

@@ -1095,12 +1148,40 @@ module.exports = class SplitChunksPlugin {
logger.time("queue");

/**
* @param {ChunksInfoItem} info entry
* @param {string[]} sourceTypes source types to be removed
*/
const removeModulesWithSourceType = (info, sourceTypes) => {
for (const module of info.modules) {
const types = module.getSourceTypes();
if (sourceTypes.some(type => types.has(type))) {
info.modules.delete(module);
for (const type of types) {
info.sizes[type] -= module.size(type);
}
}
}
};

/**
* @param {ChunksInfoItem} info entry
* @returns {boolean} true, if entry become empty
*/
const removeMinSizeViolatingModules = info => {
if (!info.cacheGroup._validateSize) return false;
const violatingSizes = getViolatingMinSizes(
info.sizes,
info.cacheGroup.minSize
);
if (violatingSizes === undefined) return false;
removeModulesWithSourceType(info, violatingSizes);
return info.modules.size === 0;
};

// Filter items were size < minSize
for (const [key, info] of chunksInfoMap) {
if (
info.cacheGroup._validateSize &&
!checkMinSize(info.sizes, info.cacheGroup.minSize)
) {
if (removeMinSizeViolatingModules(info)) {
chunksInfoMap.delete(key);
}
}

@@ -1263,7 +1344,21 @@ module.exports = class SplitChunksPlugin {
}
}
}
if (!checkMinSize(chunkSizes, item.cacheGroup.minRemainingSize)) {
const violatingSizes = getViolatingMinSizes(
chunkSizes,
item.cacheGroup.minRemainingSize
);
if (violatingSizes !== undefined) {
const oldModulesSize = item.modules.size;
removeModulesWithSourceType(item, violatingSizes);
if (
item.modules.size > 0 &&
item.modules.size !== oldModulesSize
) {
// queue this item again to be processed again
// without violating modules
chunksInfoMap.set(bestEntryKey, item);
}
continue;
}
}

@@ -1289,13 +1384,6 @@ module.exports = class SplitChunksPlugin {
}
if (chunkName) {
newChunk.chunkReason += ` (name: ${chunkName})`;
// If the chosen name is already an entry point we remove the entry point
const entrypoint = compilation.entrypoints.get(chunkName);
if (entrypoint) {
compilation.entrypoints.delete(chunkName);
entrypoint.remove();
chunkGraph.disconnectEntries(newChunk);
}
}
if (item.cacheGroup.filename) {
newChunk.filenameTemplate = item.cacheGroup.filename;

@@ -1379,11 +1467,9 @@ module.exports = class SplitChunksPlugin {
chunksInfoMap.delete(key);
continue;
}
if (
info.cacheGroup._validateSize &&
!checkMinSize(info.sizes, info.cacheGroup.minSize)
) {
if (removeMinSizeViolatingModules(info)) {
chunksInfoMap.delete(key);
continue;
}
}
}
@@ -134,20 +134,24 @@ exports.arrayToSetDeprecation = (set, name) => {
};
return fn;
};
let indexerDefined = 0;
const defineIndexGetter = index => {
Object.defineProperty(set, index, {
get: createIndexGetter(index),
set(value) {
throw new Error(
`${name} was changed from Array to Set (indexing Array with write is not possible)`
);
}
});
};
defineIndexGetter(0);
let indexerDefined = 1;
Object.defineProperty(set, "length", {
get() {
dLength();
const length = this.size;
for (indexerDefined; indexerDefined < length; indexerDefined++) {
Object.defineProperty(set, indexerDefined, {
get: createIndexGetter(indexerDefined),
set(value) {
throw new Error(
`${name} was changed from Array to Set (indexing Array with write is not possible)`
);
}
});
for (indexerDefined; indexerDefined < length + 1; indexerDefined++) {
defineIndexGetter(indexerDefined);
}
return length;
},
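
The rewrite replaces the eager per-index Object.defineProperty block with a defineIndexGetter helper: index 0 is defined up front and further numeric getters are added lazily when the length getter runs. A rough sketch of the webpack 4 style array access this keeps working (the require path is assumed, it is not shown in this diff):

const { arrayToSetDeprecation } = require("webpack/lib/util/deprecation"); // path assumed

const files = new Set(["main.js", "main.js.map"]);
arrayToSetDeprecation(files, "Chunk.files");

// length and numeric indexing still work on the Set (with a deprecation message)
console.log(files.length); // 2
console.log(files[0]); // "main.js"

// writing by index is rejected, because a Set cannot be assigned into by position
try {
	files[0] = "other.js";
} catch (err) {
	console.log(err.message);
}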
@@ -11,34 +11,61 @@ const validate = require("schema-utils");
const DID_YOU_MEAN = {
rules: "module.rules",
loaders: "module.rules or module.rules.*.use",
query: "module.rules.*.options (BREAKING CHANGE since webpack 5)",
noParse: "module.noParse",
filename: "output.filename or module.rules.*.generator.filename",
file: "output.filename",
jsonpFunction: "output.chunkLoadingGlobal",
chunkCallbackName: "output.chunkLoadingGlobal",
hotUpdateFunction: "output.hotUpdateGlobal",
chunkFilename: "output.chunkFilename",
chunkfilename: "output.chunkFilename",
ecmaVersion: "output.environment",
ecmaversion: "output.environment",
ecma: "output.environment",
ecmaVersion:
"output.environment (output.ecmaVersion was a temporary configuration option during webpack 5 beta)",
ecmaversion:
"output.environment (output.ecmaVersion was a temporary configuration option during webpack 5 beta)",
ecma:
"output.environment (output.ecmaVersion was a temporary configuration option during webpack 5 beta)",
path: "output.path",
pathinfo: "output.pathinfo",
pathInfo: "output.pathinfo",
jsonpFunction: "output.chunkLoadingGlobal (BREAKING CHANGE since webpack 5)",
chunkCallbackName:
"output.chunkLoadingGlobal (BREAKING CHANGE since webpack 5)",
jsonpScriptType: "output.scriptType (BREAKING CHANGE since webpack 5)",
hotUpdateFunction: "output.hotUpdateGlobal (BREAKING CHANGE since webpack 5)",
splitChunks: "optimization.splitChunks",
immutablePaths: "snapshot.immutablePaths",
managedPaths: "snapshot.managedPaths",
maxModules: "stats.modulesSpace",
maxModules: "stats.modulesSpace (BREAKING CHANGE since webpack 5)",
hashedModuleIds:
'optimization.moduleIds: "hashed" (BREAKING CHANGE since webpack 5)',
namedChunks:
'optimization.chunkIds: "named" (BREAKING CHANGE since webpack 5)',
namedModules:
'optimization.moduleIds: "named" (BREAKING CHANGE since webpack 5)',
occurrenceOrder:
'optimization.chunkIds: "size" and optimization.moduleIds: "size" (BREAKING CHANGE since webpack 5)',
automaticNamePrefix:
"optimization.splitChunks.[cacheGroups.*].idHint (BREAKING CHANGE since webpack 5)",
noEmitOnErrors:
"optimization.emitOnErrors (BREAKING CHANGE since webpack 5: logic is inverted to avoid negative flags)"
"optimization.emitOnErrors (BREAKING CHANGE since webpack 5: logic is inverted to avoid negative flags)",
Buffer:
"to use the ProvidePlugin to process the Buffer variable to modules as polyfill\n" +
"BREAKING CHANGE: webpack 5 no longer provided Node.js polyfills by default.\n" +
"Note: if you are using 'node.Buffer: false', you can just remove that as this is the default behavior now.\n" +
"To provide a polyfill to modules use:\n" +
'new ProvidePlugin({ Buffer: ["buffer", "Buffer"] }) and npm install buffer.',
process:
"to use the ProvidePlugin to process the process variable to modules as polyfill\n" +
"BREAKING CHANGE: webpack 5 no longer provided Node.js polyfills by default.\n" +
"Note: if you are using 'node.process: false', you can just remove that as this is the default behavior now.\n" +
"To provide a polyfill to modules use:\n" +
'new ProvidePlugin({ process: "process" }) and npm install buffer.'
};

const REMOVED = {
concord:
"BREAKING CHANGE: resolve.concord has been removed and is no longer avaiable.",
devtoolLineToLine:
"BREAKING CHANGE: output.devtoolLineToLine has been removed and is no longer avaiable."
};
/* cSpell:enable */
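
The new Buffer and process hints point at ProvidePlugin, since webpack 5 no longer injects Node.js polyfills automatically. A sketch of the configuration those messages suggest (it assumes the buffer and process packages are installed as dependencies):

const webpack = require("webpack");

module.exports = {
	plugins: [
		new webpack.ProvidePlugin({
			// map the free variables to npm polyfill packages
			Buffer: ["buffer", "Buffer"],
			process: "process"
		})
	]
};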
@@ -84,6 +111,15 @@ const validateSchema = (schema, options) => {
}?`;
}

if (
Object.prototype.hasOwnProperty.call(
REMOVED,
params.additionalProperty
)
) {
return `${formattedError}\n${REMOVED[params.additionalProperty]}?`;
}

if (!error.dataPath) {
if (params.additionalProperty === "debug") {
return (

@@ -517,6 +517,23 @@ describe("Compiler", () => {
});
});
});
it("should set compiler.watching correctly", function (done) {
const compiler = webpack({
context: __dirname,
mode: "production",
entry: "./c",
output: {
path: "/directory",
filename: "bundle.js"
}
});
compiler.outputFileSystem = createFsFromVolume(new Volume());
const watching = compiler.watch({}, (err, stats) => {
if (err) return done(err);
done();
});
expect(compiler.watching).toBe(watching);
});
it("should watch again correctly after first closed watch", function (done) {
const compiler = webpack({
context: __dirname,

@@ -498,7 +498,7 @@ describe("Validation", () => {
- configuration.output has an unknown property 'ecmaVersion'. These properties are valid:
object { assetModuleFilename?, auxiliaryComment?, charset?, chunkFilename?, chunkFormat?, chunkLoadTimeout?, chunkLoading?, chunkLoadingGlobal?, compareBeforeEmit?, crossOriginLoading?, devtoolFallbackModuleFilenameTemplate?, devtoolModuleFilenameTemplate?, devtoolNamespace?, enabledChunkLoadingTypes?, enabledLibraryTypes?, enabledWasmLoadingTypes?, environment?, filename?, globalObject?, hashDigest?, hashDigestLength?, hashFunction?, hashSalt?, hotUpdateChunkFilename?, hotUpdateGlobal?, hotUpdateMainFilename?, iife?, importFunctionName?, importMetaName?, library?, libraryExport?, libraryTarget?, module?, path?, pathinfo?, publicPath?, scriptType?, sourceMapFilename?, sourcePrefix?, strictModuleExceptionHandling?, umdNamedDefine?, uniqueName?, wasmLoading?, webassemblyModuleFilename?, workerChunkLoading?, workerWasmLoading? }
-> Options affecting the output of the compilation. \`output\` options tell webpack how to write the compiled files to disk.
Did you mean output.environment?"
Did you mean output.environment (output.ecmaVersion was a temporary configuration option during webpack 5 beta)?"
`)
);
@@ -0,0 +1,5 @@
export default "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";

@@ -0,0 +1,2 @@
import "./big-module";
import "./wasm.wat";

@@ -0,0 +1,2 @@
import "./big-module";
import "./wasm.wat";

@@ -0,0 +1,4 @@
it("should", async () => {
import("./chunk1");
import("./chunk2");
});

@@ -0,0 +1,5 @@
module.exports = {
findBundle: function (i, options) {
return ["test.js", "main.js"];
}
};

@@ -0,0 +1,9 @@
(module
(func $add (export "add") (param $p0 i32) (param $p1 i32) (result i32)
(i32.add
(get_local $p0)
(get_local $p1)
)
)
)
@@ -0,0 +1,33 @@
/** @type {import("../../../../").Configuration} */
module.exports = {
entry: "./index",
module: {
rules: [
{
test: /\.wat$/,
loader: "wast-loader",
type: "webassembly/async"
}
]
},
output: {
filename: "[name].js"
},
optimization: {
splitChunks: {
cacheGroups: {
test: {
name: "test",
minChunks: 2,
minSize: {
javascript: 100,
webassembly: 100
}
}
}
}
},
experiments: {
asyncWebAssembly: true
}
};

@@ -0,0 +1,3 @@
export { default as moduleA } from "./moduleA";
export { default as moduleB } from "./moduleB";
export { default as moduleC } from "./moduleC";

@@ -0,0 +1,15 @@
import fs from "fs";
import path from "path";

it("should place the module correctly", async () => {
const { moduleA, moduleB, moduleC } = await import("./chunk");
expect(fs.readFileSync(path.resolve(__dirname, "a.js"), "utf-8")).toContain(
moduleA
);
expect(fs.readFileSync(path.resolve(__dirname, "b.js"), "utf-8")).toContain(
moduleB
);
expect(
fs.readFileSync(path.resolve(__dirname, "runtime.js"), "utf-8")
).toContain(moduleC);
});

@@ -0,0 +1 @@
export default "This is module a";

@@ -0,0 +1 @@
export default "This is module b";

@@ -0,0 +1 @@
export default "This is module c";

@@ -0,0 +1,5 @@
module.exports = {
findBundle: function (i, options) {
return ["runtime.js", "a.js", "b.js"];
}
};

@@ -0,0 +1,38 @@
/** @type {import("../../../../").Configuration} */
module.exports = {
entry: {
a: {
import: "./entryA",
runtime: "runtime"
},
b: {
import: "./entryB",
dependOn: "a"
}
},
output: {
filename: "[name].js"
},
optimization: {
chunkIds: "named",
splitChunks: {
cacheGroups: {
a: {
test: /moduleA/,
name: "a",
enforce: true
},
b: {
test: /moduleB/,
name: "b",
enforce: true
},
c: {
test: /moduleC/,
name: "runtime",
enforce: true
}
}
}
}
};
@@ -0,0 +1,25 @@
import Worker from "web-worker";

it("should allow to create a WebWorker", async () => {
const worker = new Worker(new URL("./worker.js", import.meta.url), {
type: "module"
});
worker.postMessage("ok");
const result = await new Promise(resolve => {
worker.onmessage = event => {
resolve(event.data);
};
});
expect(result).toBe("data: OK, thanks");
});

it("should allow to share chunks", async () => {
const promise = import("./module");
const script = document.head._children[0];
const src = script.src;
const file = src.slice(src.lastIndexOf("/"));
__non_webpack_require__(`./${file}`);
script.onload();
const { upper } = await promise;
expect(upper("ok")).toBe("OK");
});

@@ -0,0 +1,3 @@
export function upper(str) {
return str.toUpperCase();
}

@@ -0,0 +1 @@
export default Worker;

@@ -0,0 +1,5 @@
module.exports = {
findBundle: function(i, options) {
return ["main.js"];
}
};

@@ -0,0 +1,5 @@
var supportsWorker = require("../../../helpers/supportsWorker");

module.exports = function (config) {
return supportsWorker();
};

@@ -0,0 +1,16 @@
module.exports = {
output: {
filename: "[name].js"
},
target: "web",
module: {
rules: [
{
test: /\.[cm]?js$/,
parser: {
worker: ["default from web-worker", "..."]
}
}
]
}
};

@@ -0,0 +1,4 @@
onmessage = async event => {
const { upper } = await import("./module");
postMessage(`data: ${upper(event.data)}, thanks`);
};
@@ -1618,6 +1618,7 @@ declare class Compiler {
parentCompilation: Compilation;
root: Compiler;
outputPath: string;
watching: Watching;
outputFileSystem: OutputFileSystem;
intermediateFileSystem: InputFileSystem &
OutputFileSystem &

@@ -3510,6 +3511,23 @@ declare class Generator {
updateHash(hash: Hash, __1: UpdateHashContextGenerator): void;
static byType(map?: any): ByTypeGenerator;
}
declare class GetChunkFilenameRuntimeModule extends RuntimeModule {
constructor(
contentType: string,
name: string,
global: string,
getFilenameForChunk: (
arg0: Chunk
) => string | ((arg0: PathData, arg1: AssetInfo) => string),
allChunks: boolean
);
contentType: string;
global: string;
getFilenameForChunk: (
arg0: Chunk
) => string | ((arg0: PathData, arg1: AssetInfo) => string);
allChunks: boolean;
}
declare interface GroupConfig<T, R> {
getKeys: (arg0: T) => string[];
createGroup: (arg0: string, arg1: (T | R)[], arg2: T[]) => R;

@@ -3581,6 +3599,9 @@ declare class HotModuleReplacementPlugin {
apply(compiler: Compiler): void;
static getParserHooks(parser: JavascriptParser): HMRJavascriptParserHooks;
}
declare class HotUpdateChunk extends Chunk {
constructor();
}
declare class HttpUriPlugin {
constructor();

@@ -9961,7 +9982,7 @@ declare namespace exports {
};
}
export namespace runtime {
export { LoadScriptRuntimeModule };
export { GetChunkFilenameRuntimeModule, LoadScriptRuntimeModule };
}
export namespace prefetch {
export { ChunkPrefetchPreloadPlugin };

@@ -10144,6 +10165,7 @@ declare namespace exports {
ExternalModule,
ExternalsPlugin,
Generator,
HotUpdateChunk,
HotModuleReplacementPlugin,
IgnorePlugin,
JavascriptModulesPlugin,