Merge pull request #9928 from webpack/perf/minor

Many small performance improvements
This commit is contained in:
Tobias Koppers 2019-11-04 21:30:13 +01:00 committed by GitHub
commit f05669fab3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 496 additions and 221 deletions

View File

@ -1180,6 +1180,10 @@ export interface OutputOptions {
* Number of milliseconds before chunk request expires
*/
chunkLoadTimeout?: number;
/**
* Check if to be emitted file already exists and have the same content before writing to output filesystem
*/
compareBeforeEmit?: boolean;
/**
* This option enables cross-origin loading of chunks.
*/

View File

@ -481,7 +481,8 @@ class Compilation {
this.inputFileSystem = compiler.inputFileSystem;
this.fileSystemInfo = new FileSystemInfo(this.inputFileSystem, {
managedPaths: compiler.managedPaths,
immutablePaths: compiler.immutablePaths
immutablePaths: compiler.immutablePaths,
logger: this.getLogger("webpack.FileSystemInfo")
});
if (compiler.fileTimestamps) {
this.fileSystemInfo.addFileTimestamps(compiler.fileTimestamps);
@ -619,6 +620,8 @@ class Compilation {
this._rebuildingModules = new Map();
/** @type {Set<string>} */
this.emittedAssets = new Set();
/** @type {Set<string>} */
this.comparedForEmitAssets = new Set();
/** @type {LazySet<string>} */
this.fileDependencies = new LazySet();
/** @type {LazySet<string>} */

View File

@ -413,7 +413,7 @@ class Compiler {
asyncLib.forEachLimit(
compilation.getAssets(),
15,
({ name: file, source }, callback) => {
({ name: file, source, info }, callback) => {
let targetFile = file;
const queryStringIdx = targetFile.indexOf("?");
if (queryStringIdx >= 0) {
@ -443,6 +443,120 @@ class Compiler {
this._assetEmittingSourceCache.set(source, cacheEntry);
}
/**
* get the binary (Buffer) content from the Source
* @returns {Buffer} content for the source
*/
const getContent = () => {
if (typeof source.buffer === "function") {
return source.buffer();
} else {
const bufferOrString = source.source();
if (Buffer.isBuffer(bufferOrString)) {
return bufferOrString;
} else {
return Buffer.from(bufferOrString, "utf8");
}
}
};
const alreadyWritten = () => {
// cache the information that the Source has already been written to that location
if (targetFileGeneration === undefined) {
const newGeneration = 1;
this._assetEmittingWrittenFiles.set(targetPath, newGeneration);
cacheEntry.writtenTo.set(targetPath, newGeneration);
} else {
cacheEntry.writtenTo.set(targetPath, targetFileGeneration);
}
callback();
};
/**
* Write the file to output file system
* @param {Buffer} content content to be written
* @returns {void}
*/
const doWrite = content => {
this.outputFileSystem.writeFile(targetPath, content, err => {
if (err) return callback(err);
// information marker that the asset has been emitted
compilation.emittedAssets.add(file);
// cache the information that the Source has been written to that location
const newGeneration =
targetFileGeneration === undefined
? 1
: targetFileGeneration + 1;
cacheEntry.writtenTo.set(targetPath, newGeneration);
this._assetEmittingWrittenFiles.set(targetPath, newGeneration);
this.hooks.assetEmitted.callAsync(
file,
{
content,
source,
outputPath,
compilation,
targetPath
},
callback
);
});
};
const updateWithReplacementSource = size => {
// Create a replacement resource which only allows to ask for size
// This allows to GC all memory allocated by the Source
// (except when the Source is stored in any other cache)
if (!cacheEntry.sizeOnlySource) {
cacheEntry.sizeOnlySource = new SizeOnlySource(size);
}
compilation.updateAsset(file, cacheEntry.sizeOnlySource, {
size
});
};
const processExistingFile = stats => {
// skip emitting if it's already there and an immutable file
if (info.immutable) {
updateWithReplacementSource(stats.size);
return alreadyWritten();
}
const content = getContent();
updateWithReplacementSource(content.length);
// if it exists and content on disk matches content
// skip writing the same content again
// (to keep mtime and don't trigger watchers)
// for a fast negative match file size is compared first
if (content.length === stats.size) {
compilation.comparedForEmitAssets.add(file);
return this.outputFileSystem.readFile(
targetPath,
(err, existingContent) => {
if (err || !content.equals(existingContent)) {
return doWrite(content);
} else {
return alreadyWritten();
}
}
);
}
return doWrite(content);
};
const processMissingFile = () => {
const content = getContent();
updateWithReplacementSource(content.length);
return doWrite(content);
};
// if the target file has already been written
if (targetFileGeneration !== undefined) {
// check if the Source has been written to this target file
@ -458,59 +572,28 @@ class Compiler {
return callback();
}
}
// TODO webpack 5: if info.immutable check if file already exists in output
// skip emitting if it's already there
// get the binary (Buffer) content from the Source
/** @type {Buffer} */
let content;
if (typeof source.buffer === "function") {
content = source.buffer();
} else {
const bufferOrString = source.source();
if (Buffer.isBuffer(bufferOrString)) {
content = bufferOrString;
} else {
content = Buffer.from(bufferOrString, "utf8");
if (!info.immutable) {
// We wrote to this file before, which very likely had different content
// skip comparing and assume content is different for performance
// This case happens often during watch mode.
return processMissingFile();
}
}
// Create a replacement resource which only allows to ask for size
// This allows to GC all memory allocated by the Source
// (except when the Source is stored in any other cache)
cacheEntry.sizeOnlySource = new SizeOnlySource(content.length);
compilation.updateAsset(file, cacheEntry.sizeOnlySource, {
size: content.length
});
if (this.options.output.compareBeforeEmit) {
this.outputFileSystem.stat(targetPath, (err, stats) => {
const exists = !err && stats.isFile();
// Write the file to output file system
this.outputFileSystem.writeFile(targetPath, content, err => {
if (err) return callback(err);
// information marker that the asset has been emitted
compilation.emittedAssets.add(file);
// cache the information that the Source has been written to that location
const newGeneration =
targetFileGeneration === undefined
? 1
: targetFileGeneration + 1;
cacheEntry.writtenTo.set(targetPath, newGeneration);
this._assetEmittingWrittenFiles.set(targetPath, newGeneration);
this.hooks.assetEmitted.callAsync(
file,
{
content,
source,
outputPath,
compilation,
targetPath
},
callback
);
});
if (exists) {
processExistingFile(stats);
} else {
processMissingFile();
}
});
} else {
processMissingFile();
}
};
if (targetFile.match(/\/|\\/)) {

View File

@ -12,6 +12,7 @@ const createHash = require("./util/createHash");
const { join, dirname, relative } = require("./util/fs");
/** @typedef {import("./WebpackError")} WebpackError */
/** @typedef {import("./logging/Logger").Logger} Logger */
/** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */
const resolveContext = resolve.create({
@ -174,9 +175,13 @@ class FileSystemInfo {
* @param {Object} options options
* @param {Iterable<string>=} options.managedPaths paths that are only managed by a package manager
* @param {Iterable<string>=} options.immutablePaths paths that are immutable
* @param {Logger=} options.logger logger used to log invalid snapshots
*/
constructor(fs, { managedPaths = [], immutablePaths = [] } = {}) {
constructor(fs, { managedPaths = [], immutablePaths = [], logger } = {}) {
this.fs = fs;
this.logger = logger;
this._remainingLogs = logger ? 40 : 0;
this._loggedPaths = logger ? new Set() : undefined;
/** @type {WeakMap<Snapshot, boolean | (function(WebpackError=, boolean=): void)[]>} */
this._snapshotCache = new WeakMap();
/** @type {Map<string, FileSystemInfoEntry | null>} */
@ -224,6 +229,17 @@ class FileSystemInfo {
);
}
_log(path, reason) {
if (this._loggedPaths.has(path)) return;
this._loggedPaths.add(path);
this.logger.debug(`${path} invalidated because ${reason}`);
if (--this._remainingLogs === 0) {
this.logger.debug(
"Logging limit has been reached and no futher logging will be emitted by FileSystemInfo"
);
}
}
/**
* @param {Map<string, FileSystemInfoEntry | null>} map timestamps
* @returns {void}
@ -906,8 +922,7 @@ class FileSystemInfo {
}
return;
}
const callbacks = [callback];
this._snapshotCache.set(snapshot, callbacks);
let callbacks;
const {
startTime,
fileTimestamps,
@ -921,56 +936,110 @@ class FileSystemInfo {
const jobDone = () => {
if (--jobs === 0) {
this._snapshotCache.set(snapshot, true);
for (const callback of callbacks) callback(null, true);
callback(null, true);
}
};
const invalid = () => {
if (jobs > 0) {
jobs = NaN;
// large negative number instead of NaN or something else to keep jobs to stay a SMI (v8)
jobs = -100000000;
this._snapshotCache.set(snapshot, false);
for (const callback of callbacks) callback(null, false);
callback(null, false);
}
};
const checkHash = (current, snap) => {
const invalidWithError = (path, err) => {
if (this._remainingLogs > 0) {
this._log(path, `error occured: ${err}`);
}
invalid();
};
/**
* @param {string} path file path
* @param {string} current current hash
* @param {string} snap snapshot hash
* @returns {boolean} true, if ok
*/
const checkHash = (path, current, snap) => {
if (snap === "error") {
// If there was an error while snapshotting (i. e. EBUSY)
// we can't compare further data and assume it's invalid
if (this._remainingLogs > 0) {
this._log(path, `there was an error during snapshotting`);
}
return false;
}
return current === snap;
if (current !== snap) {
// If hash differ it's invalid
if (this._remainingLogs > 0) {
this._log(path, `hashes differ (${current} != ${snap})`);
}
return false;
}
return true;
};
/**
* @param {string} path file path
* @param {FileSystemInfoEntry} current current entry
* @param {FileSystemInfoEntry | "error"} snap entry from snapshot
* @returns {boolean} true, if ok
*/
const checkExistance = (current, snap) => {
const checkExistance = (path, current, snap) => {
if (snap === "error") {
// If there was an error while snapshotting (i. e. EBUSY)
// we can't compare further data and assume it's invalid
return false;
}
return !current === !snap;
};
/**
* @param {FileSystemInfoEntry} current current entry
* @param {FileSystemInfoEntry | "error"} snap entry from snapshot
* @returns {boolean} true, if ok
*/
const checkFile = (current, snap) => {
if (snap === "error") {
// If there was an error while snapshotting (i. e. EBUSY)
// we can't compare further data and assume it's invalid
return false;
}
if (current && current.safeTime > startTime) {
// If a change happened after starting reading the item
// this may no longer be valid
if (this._remainingLogs > 0) {
this._log(path, `there was an error during snapshotting`);
}
return false;
}
if (!current !== !snap) {
// If existence of item differs
// it's invalid
if (this._remainingLogs > 0) {
this._log(
path,
current ? "it didn't exist before" : "it does no longer exist"
);
}
return false;
}
return true;
};
/**
* @param {string} path file path
* @param {FileSystemInfoEntry} current current entry
* @param {FileSystemInfoEntry | "error"} snap entry from snapshot
* @returns {boolean} true, if ok
*/
const checkFile = (path, current, snap) => {
if (snap === "error") {
// If there was an error while snapshotting (i. e. EBUSY)
// we can't compare further data and assume it's invalid
if (this._remainingLogs > 0) {
this._log(path, `there was an error during snapshotting`);
}
return false;
}
if (current && current.safeTime > startTime) {
// If a change happened after starting reading the item
// this may no longer be valid
if (this._remainingLogs > 0) {
this._log(
path,
`it may have changed (${current.safeTime}) after the start time of the snapshot (${startTime})`
);
}
return false;
}
if (!current !== !snap) {
// If existence of item differs
// it's invalid
if (this._remainingLogs > 0) {
this._log(
path,
current ? "it didn't exist before" : "it does no longer exist"
);
}
return false;
}
if (current) {
@ -981,6 +1050,12 @@ class FileSystemInfo {
) {
// If we have a timestamp (it was a file or symlink) and it differs from current timestamp
// it's invalid
if (this._remainingLogs > 0) {
this._log(
path,
`timestamps differ (${current.timestamp} != ${snap.timestamp})`
);
}
return false;
}
if (
@ -989,6 +1064,12 @@ class FileSystemInfo {
) {
// If we have a timestampHash (it was a directory) and it differs from current timestampHash
// it's invalid
if (this._remainingLogs > 0) {
this._log(
path,
`timestamps hashes differ (${current.timestampHash} != ${snap.timestampHash})`
);
}
return false;
}
}
@ -998,14 +1079,15 @@ class FileSystemInfo {
for (const [path, ts] of fileTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkFile(cache, ts)) {
if (!checkFile(path, cache, ts)) {
invalid();
return;
}
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkFile(entry, ts)) {
if (err) return invalidWithError(path, err);
if (!checkFile(path, entry, ts)) {
invalid();
} else {
jobDone();
@ -1018,14 +1100,15 @@ class FileSystemInfo {
for (const [path, hash] of fileHashes) {
const cache = this._fileHashes.get(path);
if (cache !== undefined) {
if (!checkHash(cache, hash)) {
if (!checkHash(path, cache, hash)) {
invalid();
return;
}
} else {
jobs++;
this.fileHashQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkHash(entry, hash)) {
if (err) return invalidWithError(path, err);
if (!checkHash(path, entry, hash)) {
invalid();
} else {
jobDone();
@ -1038,14 +1121,15 @@ class FileSystemInfo {
for (const [path, ts] of contextTimestamps) {
const cache = this._contextTimestamps.get(path);
if (cache !== undefined) {
if (!checkFile(cache, ts)) {
if (!checkFile(path, cache, ts)) {
invalid();
return;
}
} else {
jobs++;
this.contextTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkFile(entry, ts)) {
if (err) return invalidWithError(path, err);
if (!checkFile(path, entry, ts)) {
invalid();
} else {
jobDone();
@ -1058,14 +1142,15 @@ class FileSystemInfo {
for (const [path, hash] of contextHashes) {
const cache = this._contextHashes.get(path);
if (cache !== undefined) {
if (!checkHash(cache, hash)) {
if (!checkHash(path, cache, hash)) {
invalid();
return;
}
} else {
jobs++;
this.contextHashQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkHash(entry, hash)) {
if (err) return invalidWithError(path, err);
if (!checkHash(path, entry, hash)) {
invalid();
} else {
jobDone();
@ -1078,14 +1163,15 @@ class FileSystemInfo {
for (const [path, ts] of missingTimestamps) {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (!checkExistance(cache, ts)) {
if (!checkExistance(path, cache, ts)) {
invalid();
return;
}
} else {
jobs++;
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkExistance(entry, ts)) {
if (err) return invalidWithError(path, err);
if (!checkExistance(path, entry, ts)) {
invalid();
} else {
jobDone();
@ -1098,14 +1184,15 @@ class FileSystemInfo {
for (const [path, info] of managedItemInfo) {
const cache = this._managedItems.get(path);
if (cache !== undefined) {
if (!checkHash(cache, info)) {
if (!checkHash(path, cache, info)) {
invalid();
return;
}
} else {
jobs++;
this.managedItemQueue.add(path, (err, entry) => {
if (err) return invalid();
if (!checkHash(entry, info)) {
if (err) return invalidWithError(path, err);
if (!checkHash(path, entry, info)) {
invalid();
} else {
jobDone();
@ -1115,6 +1202,16 @@ class FileSystemInfo {
}
}
jobDone();
// if there was an async action
// try to join multiple concurrent request for this snapshot
if (jobs > 0) {
callbacks = [callback];
callback = (err, result) => {
for (const callback of callbacks) callback(err, result);
};
this._snapshotCache.set(snapshot, callbacks);
}
}
_readFileTimestamp(path, callback) {

View File

@ -232,6 +232,7 @@ class WebpackOptionsDefaulter extends OptionsDefaulter {
this.set("output.webassemblyModuleFilename", "[hash].module.wasm");
this.set("output.library", "");
this.set("output.publicPath", "");
this.set("output.compareBeforeEmit", true);
this.set("output.hotUpdateFunction", "make", options => {
return Template.toIdentifier(
"webpackHotUpdate" + Template.toIdentifier(options.output.library)

View File

@ -5,6 +5,8 @@
"use strict";
const LazySet = require("../util/LazySet");
/** @typedef {import("enhanced-resolve/lib/Resolver")} Resolver */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../FileSystemInfo")} FileSystemInfo */
@ -14,16 +16,16 @@
/**
* @typedef {Object} CacheEntry
* @property {Object} result
* @property {Set<string>} fileDependencies
* @property {Set<string>} contextDependencies
* @property {Set<string>} missingDependencies
* @property {LazySet<string>} fileDependencies
* @property {LazySet<string>} contextDependencies
* @property {LazySet<string>} missingDependencies
* @property {Snapshot} snapshot
*/
/**
* @template T
* @param {Set<T> | LazySet<T>} set set to add items to
* @param {Set<T>} otherSet set to add items from
* @param {Set<T> | LazySet<T>} otherSet set to add items from
* @returns {void}
*/
const addAllToSet = (set, otherSet) => {
@ -68,15 +70,17 @@ class ResolverCachePlugin {
compiler.hooks.thisCompilation.tap("ResolverCachePlugin", compilation => {
fileSystemInfo = compilation.fileSystemInfo;
compilation.hooks.finishModules.tap("ResolverCachePlugin", () => {
const logger = compilation.getLogger("webpack.ResolverCachePlugin");
logger.debug(
`${Math.round(
(100 * realResolves) / (realResolves + cachedResolves)
)}% really resolved (${realResolves} real resolves, ${cachedResolves} cached, ${cacheInvalidResolves} cached but invalid)`
);
realResolves = 0;
cachedResolves = 0;
cacheInvalidResolves = 0;
if (realResolves + cachedResolves > 0) {
const logger = compilation.getLogger("webpack.ResolverCachePlugin");
logger.debug(
`${Math.round(
(100 * realResolves) / (realResolves + cachedResolves)
)}% really resolved (${realResolves} real resolves, ${cachedResolves} cached, ${cacheInvalidResolves} cached but invalid)`
);
realResolves = 0;
cachedResolves = 0;
cacheInvalidResolves = 0;
}
});
});
/**
@ -104,15 +108,13 @@ class ResolverCachePlugin {
const newResolveContext = {
...resolveContext,
stack: new Set(),
missingDependencies: new Set(),
fileDependencies: new Set(),
contextDependencies: new Set()
missingDependencies: new LazySet(),
fileDependencies: new LazySet(),
contextDependencies: new LazySet()
};
const propagate = key => {
if (resolveContext[key]) {
for (const dep of newResolveContext[key]) {
resolveContext[key].add(dep);
}
addAllToSet(resolveContext[key], newResolveContext[key]);
}
};
const resolveTime = Date.now();
@ -129,10 +131,6 @@ class ResolverCachePlugin {
const fileDependencies = newResolveContext.fileDependencies;
const contextDependencies = newResolveContext.contextDependencies;
const missingDependencies = newResolveContext.missingDependencies;
// TODO remove this when enhanced-resolve supports fileDependencies
if (result && result.path) {
fileDependencies.add(result.path);
}
fileSystemInfo.createSnapshot(
resolveTime,
fileDependencies,

View File

@ -10,6 +10,8 @@ const SerializerMiddleware = require("./SerializerMiddleware");
/** @typedef {import("./types").BufferSerializableType} BufferSerializableType */
/** @typedef {import("./types").PrimitiveSerializableType} PrimitiveSerializableType */
const SERIALIZED_INFO = Symbol("serialized info");
/*
Format:
@ -86,6 +88,8 @@ const identifyNumber = n => {
*/
class BinaryMiddleware extends SerializerMiddleware {
_handleFunctionSerialization(fn, context) {
const serializedInfo = fn[SERIALIZED_INFO];
if (serializedInfo) return serializedInfo;
return memorize(() => {
const r = fn();
if (r instanceof Promise)
@ -96,12 +100,14 @@ class BinaryMiddleware extends SerializerMiddleware {
}
_handleFunctionDeserialization(fn, context) {
return memorize(() => {
const result = memorize(() => {
const r = fn();
if (r instanceof Promise)
return r.then(data => this.deserialize(data, context));
return this.deserialize(r, context);
});
result[SERIALIZED_INFO] = fn;
return result;
}
/**

View File

@ -18,6 +18,8 @@ const SetObjectSerializer = require("./SetObjectSerializer");
/** @typedef {new (...params: any[]) => any} Constructor */
const SERIALIZED_INFO = Symbol("serialized info");
/*
Format:
@ -224,6 +226,8 @@ class ObjectMiddleware extends SerializerMiddleware {
}
_handleFunctionSerialization(fn, context) {
const serializedInfo = fn[SERIALIZED_INFO];
if (serializedInfo) return serializedInfo;
return memorize(() => {
const r = fn();
@ -235,7 +239,7 @@ class ObjectMiddleware extends SerializerMiddleware {
}
_handleFunctionDeserialization(fn, context) {
return memorize(() => {
const result = memorize(() => {
const r = fn();
if (r instanceof Promise)
@ -243,6 +247,8 @@ class ObjectMiddleware extends SerializerMiddleware {
return this.deserialize(r, context)[0];
});
result[SERIALIZED_INFO] = fn;
return result;
}
/**
@ -452,22 +458,44 @@ class ObjectMiddleware extends SerializerMiddleware {
objectTypeLookup.set(currentPosTypeLookup++, serializer);
}
const item = serializer.deserialize(ctx);
const end1 = read();
try {
const item = serializer.deserialize(ctx);
const end1 = read();
if (end1 !== ESCAPE) {
throw new Error("Expected end of object");
if (end1 !== ESCAPE) {
throw new Error("Expected end of object");
}
const end2 = read();
if (end2 !== ESCAPE_END_OBJECT) {
throw new Error("Expected end of object");
}
addReferenceable(item);
return item;
} catch (err) {
// As this is only for error handling, we omit creating a Map for
// faster access to this information, as this would affect performance
// in the good case
let serializerEntry;
for (const entry of serializers) {
if (entry[1].serializer === serializer) {
serializerEntry = entry;
break;
}
}
const name = !serializerEntry
? "unknown"
: !serializerEntry[1].request
? serializerEntry[0].name
: serializerEntry[1].name
? `${serializerEntry[1].request} ${serializerEntry[1].name}`
: serializerEntry[1].request;
err.message += `\n(during deserialization of ${name})`;
throw err;
}
const end2 = read();
if (end2 !== ESCAPE_END_OBJECT) {
throw new Error("Expected end of object");
}
addReferenceable(item);
return item;
}
} else if (typeof item === "string") {
if (item !== "") {

View File

@ -488,6 +488,9 @@ const SIMPLE_EXTRACTORS = {
compareIds
);
object.emitted = compilation.emittedAssets.has(asset.name);
object.comparedForEmit = compilation.comparedForEmitAssets.has(
asset.name
);
object.info = asset.info;
},
ids: (
@ -939,7 +942,12 @@ const FILTER = {
if (excluded) return false;
},
"!cachedAssets": (asset, { compilation }) => {
if (!compilation.emittedAssets.has(asset.name)) return false;
if (
!compilation.emittedAssets.has(asset.name) &&
!compilation.comparedForEmitAssets.has(asset.name)
) {
return false;
}
}
},
"compilation.modules": ({

View File

@ -155,6 +155,8 @@ const SIMPLE_PRINTERS = {
) => ((isOverSizeLimit ? yellow(formatSize(size)) : formatSize(size))),
"asset.emitted": (emitted, { green, formatFlag }) =>
emitted ? green(formatFlag("emitted")) : undefined,
"asset.comparedForEmit": (comparedForEmit, { yellow, formatFlag }) =>
comparedForEmit ? yellow(formatFlag("compared for emit")) : undefined,
"asset.isOverSizeLimit": (isOverSizeLimit, { yellow, formatFlag }) =>
isOverSizeLimit ? yellow(formatFlag("big")) : undefined,
@ -548,6 +550,7 @@ const PREFERED_ORDERS = {
"chunks",
"auxiliaryChunks",
"emitted",
"comparedForEmit",
"info",
"isOverSizeLimit",
"chunkNames",
@ -1133,7 +1136,11 @@ class DefaultStatsPrinterPlugin {
[elementsMap.chunks, elementsMap.auxiliaryChunks]
.filter(Boolean)
.join(" "),
[elementsMap.emitted, elementsMap.info]
[
elementsMap.emitted,
elementsMap.comparedForEmit,
elementsMap.info
]
.filter(Boolean)
.join(" "),
elementsMap.isOverSizeLimit || "",

View File

@ -5,6 +5,8 @@
"use strict";
const makeSerializable = require("./makeSerializable.js");
/**
* @template T
* @param {Set<T>} targetSet set where items should be added
@ -174,6 +176,17 @@ class LazySet {
get [Symbol.toStringTag]() {
return "LazySet";
}
serialize({ write }) {
if (this._needMerge) this._merge();
write(this._set);
}
static deserialize({ read }) {
return new LazySet(read());
}
}
makeSerializable(LazySet, "webpack/lib/util/LazySet");
module.exports = LazySet;

View File

@ -17,6 +17,8 @@ const path = require("path");
* @typedef {Object} OutputFileSystem
* @property {function(string, Buffer|string, Callback): void} writeFile
* @property {function(string, Callback): void} mkdir
* @property {function(string, StatsCallback): void} stat
* @property {function(string, BufferCallback): void} readFile
* @property {(function(string, string): string)=} join
* @property {(function(string, string): string)=} relative
* @property {(function(string): string)=} dirname

View File

@ -129,6 +129,7 @@ module.exports = {
NormalModule: () => require("../NormalModule"),
RawModule: () => require("../RawModule"),
UnsupportedFeatureWarning: () => require("../UnsupportedFeatureWarning"),
"util/LazySet": () => require("../util/LazySet"),
WebpackError: () => require("../WebpackError"),
"util/registerExternalSerializer": () => {

View File

@ -1123,6 +1123,10 @@
"description": "Number of milliseconds before chunk request expires",
"type": "number"
},
"compareBeforeEmit": {
"description": "Check if to be emitted file already exists and have the same content before writing to output filesystem",
"type": "boolean"
},
"crossOriginLoading": {
"description": "This option enables cross-origin loading of chunks.",
"enum": [false, "anonymous", "use-credentials"]

View File

@ -40,6 +40,9 @@ describe("Compiler (caching)", () => {
logs.writeFile.push(name, content);
files[name] = content.toString("utf-8");
callback();
},
stat(path, callback) {
callback(new Error("ENOENT"));
}
};
c.hooks.compilation.tap(

View File

@ -39,6 +39,9 @@ describe("Compiler", () => {
logs.writeFile.push(name, content);
files[name] = content.toString("utf-8");
callback();
},
stat(path, callback) {
callback(new Error("ENOENT"));
}
};
c.hooks.compilation.tap(

View File

@ -85,6 +85,9 @@ const defaults = {
},
writeFile(file, content, callback) {
callback();
},
stat(file, callback) {
callback(new Error("ENOENT"));
}
}
};

View File

@ -184,6 +184,7 @@ describe("Stats", () => {
"chunkNames": Array [
"chunkB",
],
"comparedForEmit": false,
"emitted": true,
"info": Object {
"size": 111,
@ -198,6 +199,7 @@ describe("Stats", () => {
"chunkNames": Array [
"entryA",
],
"comparedForEmit": false,
"emitted": true,
"info": Object {
"size": 198,
@ -212,6 +214,7 @@ describe("Stats", () => {
"chunkNames": Array [
"entryB",
],
"comparedForEmit": false,
"emitted": true,
"info": Object {
"size": 2085,

View File

@ -2,6 +2,8 @@
const path = require("path");
const fs = require("graceful-fs");
const mkdirp = require("mkdirp");
const rimraf = require("rimraf");
const captureStdio = require("./helpers/captureStdio");
let webpack;
@ -46,6 +48,9 @@ describe("StatsTestCases", () => {
tests.forEach(testName => {
it("should print correct stats for " + testName, done => {
jest.setTimeout(30000);
const outputDirectory = path.join(outputBase, testName);
rimraf.sync(outputDirectory);
mkdirp.sync(outputDirectory);
let options = {
mode: "development",
entry: "./index",
@ -59,8 +64,7 @@ describe("StatsTestCases", () => {
(Array.isArray(options) ? options : [options]).forEach(options => {
if (!options.context) options.context = path.join(base, testName);
if (!options.output) options.output = options.output || {};
if (!options.output.path)
options.output.path = path.join(outputBase, testName);
if (!options.output.path) options.output.path = outputDirectory;
if (!options.plugins) options.plugins = [];
if (!options.optimization) options.optimization = {};
if (options.optimization.minimize === undefined)

View File

@ -1,61 +1,61 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`StatsTestCases should print correct stats for aggressive-splitting-entry 1`] = `
"Hash: d89b2fa88cb132dd2f3ed89b2fa88cb132dd2f3e
"Hash: 21902312560238912c7e45ec1b9983d1c74bd8c5
Child fitting:
Hash: d89b2fa88cb132dd2f3e
Hash: 21902312560238912c7e
Time: Xms
Built at: 1970-04-20 12:42:42
PublicPath: (none)
Asset Size
35bda92cf5ea05aff412.js 1.91 KiB [emitted] [immutable]
3e4894c38178f8515f2a.js 1.91 KiB [emitted] [immutable]
9fbc06f33314445dd40e.js 13 KiB [emitted] [immutable]
dc9eb8f6a8dc6829a3ce.js 1.08 KiB [emitted] [immutable]
Entrypoint main = 35bda92cf5ea05aff412.js 3e4894c38178f8515f2a.js 9fbc06f33314445dd40e.js
chunk 9fbc06f33314445dd40e.js 1.87 KiB (javascript) 6.5 KiB (runtime) [entry] [rendered]
Asset Size
fitting-07302ed1a3c05122e3f9.js 13 KiB [emitted] [immutable]
fitting-35bda92cf5ea05aff412.js 1.91 KiB [emitted] [immutable]
fitting-3e4894c38178f8515f2a.js 1.91 KiB [emitted] [immutable]
fitting-dc9eb8f6a8dc6829a3ce.js 1.08 KiB [emitted] [immutable]
Entrypoint main = fitting-35bda92cf5ea05aff412.js fitting-3e4894c38178f8515f2a.js fitting-07302ed1a3c05122e3f9.js
chunk fitting-07302ed1a3c05122e3f9.js 1.87 KiB (javascript) 6.51 KiB (runtime) [entry] [rendered]
> ./index main
./e.js 899 bytes [built]
./f.js 900 bytes [built]
./index.js 111 bytes [built]
+ 7 hidden chunk modules
chunk 3e4894c38178f8515f2a.js 1.76 KiB [initial] [rendered] [recorded] aggressive splitted
chunk fitting-3e4894c38178f8515f2a.js 1.76 KiB [initial] [rendered] [recorded] aggressive splitted
> ./index main
./c.js 899 bytes [built]
./d.js 899 bytes [built]
chunk 35bda92cf5ea05aff412.js 1.76 KiB [initial] [rendered] [recorded] aggressive splitted
chunk fitting-35bda92cf5ea05aff412.js 1.76 KiB [initial] [rendered] [recorded] aggressive splitted
> ./index main
./a.js 899 bytes [built]
./b.js 899 bytes [built]
chunk dc9eb8f6a8dc6829a3ce.js 916 bytes [rendered]
chunk fitting-dc9eb8f6a8dc6829a3ce.js 916 bytes [rendered]
> ./g ./index.js 7:0-13
./g.js 916 bytes [built]
Child content-change:
Hash: d89b2fa88cb132dd2f3e
Hash: 45ec1b9983d1c74bd8c5
Time: Xms
Built at: 1970-04-20 12:42:42
PublicPath: (none)
Asset Size
35bda92cf5ea05aff412.js 1.91 KiB [emitted] [immutable]
3e4894c38178f8515f2a.js 1.91 KiB [emitted] [immutable]
9fbc06f33314445dd40e.js 13 KiB [emitted] [immutable]
dc9eb8f6a8dc6829a3ce.js 1.08 KiB [emitted] [immutable]
Entrypoint main = 35bda92cf5ea05aff412.js 3e4894c38178f8515f2a.js 9fbc06f33314445dd40e.js
chunk 9fbc06f33314445dd40e.js 1.87 KiB (javascript) 6.5 KiB (runtime) [entry] [rendered]
Asset Size
content-change-35bda92cf5ea05aff412.js 1.91 KiB [emitted] [immutable]
content-change-3e4894c38178f8515f2a.js 1.91 KiB [emitted] [immutable]
content-change-d60032b193fdb4bf4c38.js 13 KiB [emitted] [immutable]
content-change-dc9eb8f6a8dc6829a3ce.js 1.08 KiB [emitted] [immutable]
Entrypoint main = content-change-35bda92cf5ea05aff412.js content-change-3e4894c38178f8515f2a.js content-change-d60032b193fdb4bf4c38.js
chunk content-change-d60032b193fdb4bf4c38.js 1.87 KiB (javascript) 6.51 KiB (runtime) [entry] [rendered]
> ./index main
./e.js 899 bytes [built]
./f.js 900 bytes [built]
./index.js 111 bytes [built]
+ 7 hidden chunk modules
chunk 3e4894c38178f8515f2a.js 1.76 KiB [initial] [rendered] [recorded] aggressive splitted
chunk content-change-3e4894c38178f8515f2a.js 1.76 KiB [initial] [rendered] [recorded] aggressive splitted
> ./index main
./c.js 899 bytes [built]
./d.js 899 bytes [built]
chunk 35bda92cf5ea05aff412.js 1.76 KiB [initial] [rendered] [recorded] aggressive splitted
chunk content-change-35bda92cf5ea05aff412.js 1.76 KiB [initial] [rendered] [recorded] aggressive splitted
> ./index main
./a.js 899 bytes [built]
./b.js 899 bytes [built]
chunk dc9eb8f6a8dc6829a3ce.js 916 bytes [rendered]
chunk content-change-dc9eb8f6a8dc6829a3ce.js 916 bytes [rendered]
> ./g ./index.js 7:0-13
./g.js 916 bytes [built]"
`;
@ -758,9 +758,9 @@ Child undefined:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle0.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle0.js
WARNING in Terser Plugin: Dropping unused function someRemoteUnUsedFunction1 [./a.js:3,0]
@ -788,51 +788,51 @@ Child Terser:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle1.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle1.js
Child /Terser/:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle2.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle2.js
Child warnings => true:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle3.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle3.js
Child [Terser]:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle4.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle4.js
Child [/Terser/]:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle5.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle5.js
Child [warnings => true]:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle6.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle6.js
Child should not filter:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle7.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle7.js
WARNING in Terser Plugin: Dropping unused function someRemoteUnUsedFunction1 [./a.js:3,0]
@ -860,9 +860,9 @@ Child /should not filter/:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle8.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle8.js
WARNING in Terser Plugin: Dropping unused function someRemoteUnUsedFunction1 [./a.js:3,0]
@ -890,9 +890,9 @@ Child warnings => false:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle9.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle9.js
WARNING in Terser Plugin: Dropping unused function someRemoteUnUsedFunction1 [./a.js:3,0]
@ -920,9 +920,9 @@ Child [should not filter]:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle10.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle10.js
WARNING in Terser Plugin: Dropping unused function someRemoteUnUsedFunction1 [./a.js:3,0]
@ -950,9 +950,9 @@ Child [/should not filter/]:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle11.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle11.js
WARNING in Terser Plugin: Dropping unused function someRemoteUnUsedFunction1 [./a.js:3,0]
@ -980,9 +980,9 @@ Child [warnings => false]:
Hash: e865ee1cdbd25d5b868d
Time: Xms
Built at: 1970-04-20 12:42:42
Asset Size
bundle.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle.js
Asset Size
bundle12.js 1010 bytes [emitted] [name: main]
Entrypoint main = bundle12.js
WARNING in Terser Plugin: Dropping unused function someRemoteUnUsedFunction1 [./a.js:3,0]
@ -2427,23 +2427,23 @@ Entrypoint e2 = runtime~e2.js e2.js"
exports[`StatsTestCases should print correct stats for runtime-chunk-integration 1`] = `
"Child base:
Asset Size
505.js 1.22 KiB [emitted]
main1.js 556 bytes [emitted] [name: main1]
runtime.js 9.74 KiB [emitted] [name: runtime]
Entrypoint main1 = runtime.js main1.js
Asset Size
without-505.js 1.22 KiB [emitted]
without-main1.js 556 bytes [emitted] [name: main1]
without-runtime.js 9.75 KiB [emitted] [name: runtime]
Entrypoint main1 = without-runtime.js without-main1.js
./main1.js 66 bytes [built]
./b.js 20 bytes [built]
./c.js 20 bytes [built]
./d.js 20 bytes [built]
+ 6 hidden modules
Child manifest is named entry:
Asset Size
505.js 1.22 KiB [emitted]
main1.js 556 bytes [emitted] [name: main1]
manifest.js 9.87 KiB [emitted] [name: manifest]
Entrypoint main1 = manifest.js main1.js
Entrypoint manifest = manifest.js
Asset Size
with-505.js 1.22 KiB [emitted]
with-main1.js 556 bytes [emitted] [name: main1]
with-manifest.js 9.88 KiB [emitted] [name: manifest]
Entrypoint main1 = with-manifest.js with-main1.js
Entrypoint manifest = with-manifest.js
./main1.js 66 bytes [built]
./f.js 20 bytes [built]
./b.js 20 bytes [built]

View File

@ -5,8 +5,8 @@ module.exports = ["fitting", "content-change"].map(type => ({
cache: true, // AggressiveSplittingPlugin rebuilds multiple times, we need to cache the assets
entry: "./index",
output: {
filename: "[chunkhash].js",
chunkFilename: "[chunkhash].js"
filename: `${type}-[chunkhash].js`,
chunkFilename: `${type}-[chunkhash].js`
},
plugins: [
new webpack.optimize.AggressiveSplittingPlugin({

View File

@ -2,9 +2,6 @@ const TerserPlugin = require("terser-webpack-plugin");
const baseConfig = {
mode: "production",
entry: "./index",
output: {
filename: "bundle.js"
},
optimization: {
minimize: true,
minimizer: [
@ -46,8 +43,12 @@ module.exports = [
["should not filter"],
[/should not filter/],
[warnings => false]
].map(filter => ({
].map((filter, idx) => ({
...baseConfig,
name: Array.isArray(filter) ? `[${filter}]` : `${filter}`,
stats: { ...baseConfig.stats, warningsFilter: filter }
output: { filename: `bundle${idx}.js` },
stats: {
...baseConfig.stats,
warningsFilter: filter
}
}));

View File

@ -3,9 +3,6 @@ const { MinChunkSizePlugin } = require("../../../").optimize;
const baseConfig = {
mode: "production",
target: "web",
output: {
filename: "[name].js"
},
stats: {
hash: false,
timings: false,
@ -20,6 +17,9 @@ const baseConfig = {
const withoutNamedEntry = {
...baseConfig,
output: {
filename: "without-[name].js"
},
name: "base",
entry: {
main1: "./main1"
@ -31,6 +31,9 @@ const withoutNamedEntry = {
const withNamedEntry = {
...baseConfig,
output: {
filename: "with-[name].js"
},
name: "manifest is named entry",
entry: {
main1: "./main1",