mirror of https://github.com/webpack/webpack.git
Merge pull request #11166 from webpack/http2/defaults
Http2 defaults for splitChunks
commit 020b8d6997
@@ -1372,6 +1372,10 @@ export interface OptimizationSplitChunksOptions {
   chunks?:
     | ("initial" | "async" | "all")
     | ((chunk: import("../lib/Chunk")) => boolean);
+  /**
+   * Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.
+   */
+  enforceSizeThreshold?: OptimizationSplitChunksSizes;
   /**
    * Options for modules not selected by any other cache group.
    */
@@ -1465,6 +1469,10 @@ export interface OptimizationSplitChunksCacheGroup {
    * Ignore minimum size, minimum chunks and maximum requests and always create chunks for this cache group.
    */
   enforce?: boolean;
+  /**
+   * Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.
+   */
+  enforceSizeThreshold?: OptimizationSplitChunksSizes;
   /**
    * Sets the template for the filename for created chunks.
    */
@@ -580,8 +580,9 @@ const applyOptimizationDefaults = (
   D(splitChunks, "hidePathInfo", production);
   D(splitChunks, "chunks", "async");
   D(splitChunks, "minChunks", 1);
-  F(splitChunks, "minSize", () => (production ? 30000 : 10000));
+  F(splitChunks, "minSize", () => (production ? 20000 : 10000));
   F(splitChunks, "minRemainingSize", () => (development ? 0 : undefined));
+  F(splitChunks, "enforceSizeThreshold", () => (production ? 50000 : 30000));
   F(splitChunks, "maxAsyncRequests", () => (production ? 30 : Infinity));
   F(splitChunks, "maxInitialRequests", () => (production ? 30 : Infinity));
   D(splitChunks, "automaticNameDelimiter", "-");
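For orientation, here is what these production defaults amount to when written out as an explicit user configuration. This is a sketch only (webpack applies these values automatically); the option values come straight from the hunk above, the surrounding config skeleton is assumed boilerplate.

// webpack.config.js — sketch only; the production defaults after this change,
// listed explicitly for illustration.
module.exports = {
  mode: "production",
  optimization: {
    splitChunks: {
      chunks: "async",
      minChunks: 1,
      minSize: 20000,
      // once a cache group reaches this size, splitting is enforced and
      // minRemainingSize / maxAsyncRequests / maxInitialRequests are ignored
      enforceSizeThreshold: 50000,
      // generous request limits suit HTTP/2, where parallel requests are cheap
      maxAsyncRequests: 30,
      maxInitialRequests: 30
    }
  }
};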
@@ -58,6 +58,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {boolean=} enforce
  * @property {SplitChunksSizes} minSize
  * @property {SplitChunksSizes} minRemainingSize
+ * @property {SplitChunksSizes} enforceSizeThreshold
  * @property {SplitChunksSizes} maxAsyncSize
  * @property {SplitChunksSizes} maxInitialSize
  * @property {number=} minChunks
@@ -75,10 +76,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {number=} priority
  * @property {GetName=} getName
  * @property {ChunkFilterFunction=} chunksFilter
- * @property {boolean=} enforce
  * @property {SplitChunksSizes} minSize
  * @property {SplitChunksSizes} minRemainingSize
- * @property {SplitChunksSizes} minSizeForMaxSize
+ * @property {SplitChunksSizes} enforceSizeThreshold
  * @property {SplitChunksSizes} maxAsyncSize
  * @property {SplitChunksSizes} maxInitialSize
  * @property {number=} minChunks
@@ -88,6 +88,10 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {string=} idHint
  * @property {string} automaticNameDelimiter
  * @property {boolean=} reuseExistingChunk
+ * @property {boolean} _validateSize
+ * @property {boolean} _validateRemainingSize
+ * @property {SplitChunksSizes} _minSizeForMaxSize
+ * @property {boolean} _conditionalEnforce
  */

 /**
@@ -124,6 +128,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {ChunkFilterFunction} chunksFilter
  * @property {SplitChunksSizes} minSize
  * @property {SplitChunksSizes} minRemainingSize
+ * @property {SplitChunksSizes} enforceSizeThreshold
  * @property {SplitChunksSizes} maxInitialSize
  * @property {SplitChunksSizes} maxAsyncSize
  * @property {number} minChunks
@@ -141,8 +146,8 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @typedef {Object} ChunksInfoItem
  * @property {SortableSet<Module>} modules
  * @property {CacheGroup} cacheGroup
+ * @property {number} cacheGroupIndex
  * @property {string} name
- * @property {boolean} validateSize
  * @property {Record<string, number>} sizes
  * @property {Set<Chunk>} chunks
  * @property {Set<Chunk>} reuseableChunks
@@ -220,12 +225,15 @@ const compareEntries = (a, b) => {
   const bSizeReduce = totalSize(b.sizes) * (b.chunks.size - 1);
   const diffSizeReduce = aSizeReduce - bSizeReduce;
   if (diffSizeReduce) return diffSizeReduce;
-  // 4. by number of modules (to be able to compare by identifier)
+  // 4. by cache group index
+  const indexDiff = a.cacheGroupIndex - b.cacheGroupIndex;
+  if (indexDiff) return indexDiff;
+  // 5. by number of modules (to be able to compare by identifier)
   const modulesA = a.modules;
   const modulesB = b.modules;
   const diff = modulesA.size - modulesB.size;
   if (diff) return diff;
-  // 5. by module identifiers
+  // 6. by module identifiers
   modulesA.sort();
   modulesB.sort();
   return compareModuleIterables(modulesA, modulesB);
@@ -507,6 +515,7 @@ const createCacheGroupSource = (options, key) => {
     enforce: options.enforce,
     minSize: normalizeSizes(options.minSize),
     minRemainingSize: mergeSizes(options.minRemainingSize, options.minSize),
+    enforceSizeThreshold: normalizeSizes(options.enforceSizeThreshold),
     maxAsyncSize: mergeSizes(options.maxAsyncSize, options.maxSize),
     maxInitialSize: mergeSizes(options.maxInitialSize, options.maxSize),
     minChunks: options.minChunks,
@@ -531,6 +540,7 @@ module.exports = class SplitChunksPlugin {
       chunksFilter: normalizeChunksFilter(options.chunks || "all"),
       minSize: normalizeSizes(options.minSize),
       minRemainingSize: mergeSizes(options.minRemainingSize, options.minSize),
+      enforceSizeThreshold: normalizeSizes(options.enforceSizeThreshold),
       maxAsyncSize: mergeSizes(options.maxAsyncSize, options.maxSize),
       maxInitialSize: mergeSizes(options.maxInitialSize, options.maxSize),
       minChunks: options.minChunks || 1,
@@ -714,6 +724,7 @@ module.exports = class SplitChunksPlugin {

     /**
      * @param {CacheGroup} cacheGroup the current cache group
+     * @param {number} cacheGroupIndex the index of the cache group of ordering
      * @param {Chunk[]} selectedChunks chunks selected for this module
      * @param {bigint} selectedChunksKey a key of selectedChunks
      * @param {Module} module the current module
@@ -721,6 +732,7 @@ module.exports = class SplitChunksPlugin {
      */
     const addModuleToChunksInfoMap = (
       cacheGroup,
+      cacheGroupIndex,
       selectedChunks,
       selectedChunksKey,
       module
@@ -771,10 +783,8 @@ module.exports = class SplitChunksPlugin {
             compareModulesByIdentifier
           ),
           cacheGroup,
+          cacheGroupIndex,
           name,
-          validateSize:
-            hasNonZeroSizes(cacheGroup.minSize) ||
-            hasNonZeroSizes(cacheGroup.minRemainingSize),
           sizes: {},
           chunks: new Set(),
           reuseableChunks: new Set(),
@@ -783,10 +793,8 @@ module.exports = class SplitChunksPlugin {
         );
       }
       info.modules.add(module);
-      if (info.validateSize) {
-        for (const type of module.getSourceTypes()) {
-          info.sizes[type] = (info.sizes[type] || 0) + module.size(type);
-        }
+      for (const type of module.getSourceTypes()) {
+        info.sizes[type] = (info.sizes[type] || 0) + module.size(type);
       }
       if (!info.chunksKeys.has(selectedChunksKey)) {
         info.chunksKeys.add(selectedChunksKey);
@@ -823,29 +831,35 @@ module.exports = class SplitChunksPlugin {
         combinationsCache.set(chunksKey, combs);
       }

+      let cacheGroupIndex = 0;
       for (const cacheGroupSource of cacheGroups) {
         /** @type {CacheGroup} */
         let cacheGroup = this._cacheGroupCache.get(cacheGroupSource);
         if (cacheGroup === undefined) {
+          const minSize = mergeSizes(
+            cacheGroupSource.minSize,
+            cacheGroupSource.enforce ? undefined : this.options.minSize
+          );
+          const minRemainingSize = mergeSizes(
+            cacheGroupSource.minRemainingSize,
+            cacheGroupSource.enforce
+              ? undefined
+              : this.options.minRemainingSize
+          );
+          const enforceSizeThreshold = mergeSizes(
+            cacheGroupSource.enforceSizeThreshold,
+            cacheGroupSource.enforce
+              ? undefined
+              : this.options.enforceSizeThreshold
+          );
           cacheGroup = {
             key: cacheGroupSource.key,
             priority: cacheGroupSource.priority || 0,
             chunksFilter:
               cacheGroupSource.chunksFilter || this.options.chunksFilter,
-            minSize: mergeSizes(
-              cacheGroupSource.minSize,
-              cacheGroupSource.enforce ? undefined : this.options.minSize
-            ),
-            minRemainingSize: mergeSizes(
-              cacheGroupSource.minRemainingSize,
-              cacheGroupSource.enforce
-                ? undefined
-                : this.options.minRemainingSize
-            ),
-            minSizeForMaxSize: mergeSizes(
-              cacheGroupSource.minSize,
-              this.options.minSize
-            ),
+            minSize,
+            minRemainingSize,
+            enforceSizeThreshold,
             maxAsyncSize: mergeSizes(
               cacheGroupSource.maxAsyncSize,
               cacheGroupSource.enforce
@@ -892,7 +906,14 @@ module.exports = class SplitChunksPlugin {
               cacheGroupSource.idHint !== undefined
                 ? cacheGroupSource.idHint
                 : cacheGroupSource.key,
-            reuseExistingChunk: cacheGroupSource.reuseExistingChunk
+            reuseExistingChunk: cacheGroupSource.reuseExistingChunk,
+            _validateSize: hasNonZeroSizes(minSize),
+            _validateRemainingSize: hasNonZeroSizes(minRemainingSize),
+            _minSizeForMaxSize: mergeSizes(
+              cacheGroupSource.minSize,
+              this.options.minSize
+            ),
+            _conditionalEnforce: hasNonZeroSizes(enforceSizeThreshold)
           };
         }
         // For all combination of chunk selection
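A rough illustration of the resolution logic above: when a cache group sets enforce: true, the global minSize, minRemainingSize and enforceSizeThreshold are not merged in as fallbacks, so the group is constrained only by sizes it declares itself. The helpers below are simplified stand-ins assumed for the sketch; the real mergeSizes and hasNonZeroSizes in SplitChunksPlugin.js are not shown in this diff.

// Sketch only — simplified stand-ins for mergeSizes/hasNonZeroSizes, to show why
// `enforce: true` ends up with an empty enforceSizeThreshold (and therefore
// _conditionalEnforce === false, since such a group is enforced unconditionally).
const mergeSizes = (...records) =>
  // earlier (more specific) records win over later fallbacks
  Object.assign({}, ...records.filter(Boolean).reverse());
const hasNonZeroSizes = sizes => Object.values(sizes).some(s => s > 0);

const globalOptions = { enforceSizeThreshold: { javascript: 50000 } };
const enforcedGroup = { enforce: true, enforceSizeThreshold: undefined };

const enforceSizeThreshold = mergeSizes(
  enforcedGroup.enforceSizeThreshold,
  enforcedGroup.enforce ? undefined : globalOptions.enforceSizeThreshold
);
console.log(hasNonZeroSizes(enforceSizeThreshold)); // false -> no conditional enforcement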
@@ -910,11 +931,13 @@ module.exports = class SplitChunksPlugin {

             addModuleToChunksInfoMap(
               cacheGroup,
+              cacheGroupIndex,
               selectedChunks,
               selectedChunksKey,
               module
             );
           }
+          cacheGroupIndex++;
         }
       }
@@ -923,13 +946,12 @@ module.exports = class SplitChunksPlugin {
     logger.time("queue");

     // Filter items were size < minSize
-    for (const pair of chunksInfoMap) {
-      const info = pair[1];
+    for (const [key, info] of chunksInfoMap) {
       if (
-        info.validateSize &&
+        info.cacheGroup._validateSize &&
         !checkMinSize(info.sizes, info.cacheGroup.minSize)
       ) {
-        chunksInfoMap.delete(pair[0]);
+        chunksInfoMap.delete(key);
       }
     }
@@ -1021,18 +1043,20 @@ module.exports = class SplitChunksPlugin {
       // TODO check if this check is really needed, shouldn't chunks always be non-empty?
       if (item.chunks.size === 0 && !isExistingChunk) continue;

+      const enforced =
+        item.cacheGroup._conditionalEnforce &&
+        checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);
+
+      const usedChunks = new Set(item.chunks);
+
       // Check if maxRequests condition can be fulfilled
-      // TODO try to avoid creating a new array here
-      const usedChunks = Array.from(item.chunks);
-
-      let validChunks = usedChunks;
-
       if (
-        Number.isFinite(item.cacheGroup.maxInitialRequests) ||
-        Number.isFinite(item.cacheGroup.maxAsyncRequests)
+        !enforced &&
+        (Number.isFinite(item.cacheGroup.maxInitialRequests) ||
+          Number.isFinite(item.cacheGroup.maxAsyncRequests))
       ) {
-        validChunks = validChunks.filter(chunk => {
-          // respect max requests when not enforced
+        for (const chunk of usedChunks) {
+          // respect max requests
           const maxRequests = chunk.isOnlyInitial()
             ? item.cacheGroup.maxInitialRequests
             : chunk.canBeInitial()
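The enforced flag computed above is what makes enforceSizeThreshold conditional: only when the accumulated sizes of a candidate item clear the threshold are the request limits (and, further down, the minRemainingSize check) skipped. A rough sketch of that decision, with a simplified checkMinSize assumed for illustration — the real helper lives elsewhere in SplitChunksPlugin.js and is not part of this diff.

// Sketch only — assumed simplification of the threshold check behind `enforced`.
const checkMinSize = (sizes, minSize) =>
  Object.keys(minSize).every(type => (sizes[type] || 0) >= minSize[type]);

const item = {
  sizes: { javascript: 61000 },
  cacheGroup: {
    _conditionalEnforce: true,
    enforceSizeThreshold: { javascript: 50000 } // production default from this PR
  }
};

const enforced =
  item.cacheGroup._conditionalEnforce &&
  checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);
console.log(enforced); // true -> maxAsyncRequests/maxInitialRequests checks are skipped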
@@ -1041,27 +1065,34 @@ module.exports = class SplitChunksPlugin {
                 item.cacheGroup.maxAsyncRequests
               )
             : item.cacheGroup.maxAsyncRequests;
-          return (
-            !isFinite(maxRequests) || getRequests(chunk) < maxRequests
-          );
-        });
+          if (
+            isFinite(maxRequests) &&
+            getRequests(chunk) >= maxRequests
+          ) {
+            usedChunks.delete(chunk);
+          }
+        }
       }

-      validChunks = validChunks.filter(chunk => {
+      outer: for (const chunk of usedChunks) {
         for (const module of item.modules) {
-          if (chunkGraph.isModuleInChunk(module, chunk)) return true;
+          if (chunkGraph.isModuleInChunk(module, chunk)) continue outer;
         }
-        return false;
-      });
+        usedChunks.delete(chunk);
+      }

-      if (validChunks.length < usedChunks.length) {
-        if (isExistingChunk) validChunks.push(newChunk);
-        if (validChunks.length >= item.cacheGroup.minChunks) {
+      // Were some (invalid) chunks removed from usedChunks?
+      // => readd all modules to the queue, as things could have been changed
+      if (usedChunks.size < item.chunks.size) {
+        if (isExistingChunk) usedChunks.add(newChunk);
+        if (usedChunks.size >= item.cacheGroup.minChunks) {
+          const chunksArr = Array.from(usedChunks);
           for (const module of item.modules) {
             addModuleToChunksInfoMap(
               item.cacheGroup,
-              validChunks,
-              getKey(validChunks),
+              item.cacheGroupIndex,
+              chunksArr,
+              getKey(usedChunks),
               module
             );
           }
@@ -1071,13 +1102,19 @@ module.exports = class SplitChunksPlugin {

       // Validate minRemainingSize constraint when a single chunk is left over
       if (
-        validChunks.length === 1 &&
-        hasNonZeroSizes(item.cacheGroup.minRemainingSize)
+        !enforced &&
+        item.cacheGroup._validateRemainingSize &&
+        usedChunks.size === 1
       ) {
-        const chunk = validChunks[0];
-        const chunkSizes = { ...chunkGraph.getChunkModulesSizes(chunk) };
-        for (const key of Object.keys(item.sizes)) {
-          chunkSizes[key] -= item.sizes[key];
+        const [chunk] = usedChunks;
+        let chunkSizes = Object.create(null);
+        for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
+          if (!item.modules.has(module)) {
+            for (const type of module.getSourceTypes()) {
+              chunkSizes[type] =
+                (chunkSizes[type] || 0) + module.size(type);
+            }
+          }
         }
         if (!checkMinSize(chunkSizes, item.cacheGroup.minRemainingSize)) {
           continue;
@@ -1148,7 +1185,7 @@ module.exports = class SplitChunksPlugin {
           minSize: oldMaxSizeSettings
             ? combineSizes(
                 oldMaxSizeSettings.minSize,
-                item.cacheGroup.minSizeForMaxSize,
+                item.cacheGroup._minSizeForMaxSize,
                 Math.max
               )
             : item.cacheGroup.minSize,
@@ -1175,38 +1212,31 @@ module.exports = class SplitChunksPlugin {

       // remove all modules from other entries and update size
       for (const [key, info] of chunksInfoMap) {
-        if (isOverlap(info.chunks, item.chunks)) {
-          if (info.validateSize) {
-            // update modules and total size
-            // may remove it from the map when < minSize
-            let updated = false;
-            for (const module of item.modules) {
-              if (info.modules.has(module)) {
-                // remove module
-                info.modules.delete(module);
-                // update size
-                for (const key of module.getSourceTypes()) {
-                  info.sizes[key] -= module.size(key);
-                }
-                updated = true;
-              }
-            }
-            if (updated) {
-              if (info.modules.size === 0) {
-                chunksInfoMap.delete(key);
-                continue;
-              }
-              if (!checkMinSize(info.sizes, info.cacheGroup.minSize)) {
-                chunksInfoMap.delete(key);
-              }
-            }
-          } else {
-            // only update the modules
-            for (const module of item.modules) {
+        if (isOverlap(info.chunks, usedChunks)) {
+          // update modules and total size
+          // may remove it from the map when < minSize
+          let updated = false;
+          for (const module of item.modules) {
+            if (info.modules.has(module)) {
+              // remove module
               info.modules.delete(module);
+              // update size
+              for (const key of module.getSourceTypes()) {
+                info.sizes[key] -= module.size(key);
+              }
+              updated = true;
             }
+          }
+          if (updated) {
             if (info.modules.size === 0) {
               chunksInfoMap.delete(key);
+              continue;
+            }
+            if (
+              info.cacheGroup._validateSize &&
+              !checkMinSize(info.sizes, info.cacheGroup.minSize)
+            ) {
+              chunksInfoMap.delete(key);
             }
           }
         }
@@ -1257,6 +1257,14 @@
         "description": "Ignore minimum size, minimum chunks and maximum requests and always create chunks for this cache group.",
         "type": "boolean"
       },
+      "enforceSizeThreshold": {
+        "description": "Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.",
+        "oneOf": [
+          {
+            "$ref": "#/definitions/OptimizationSplitChunksSizes"
+          }
+        ]
+      },
       "filename": {
         "description": "Sets the template for the filename for created chunks.",
         "anyOf": [
@@ -1464,6 +1472,14 @@
         }
       ]
     },
+    "enforceSizeThreshold": {
+      "description": "Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.",
+      "oneOf": [
+        {
+          "$ref": "#/definitions/OptimizationSplitChunksSizes"
+        }
+      ]
+    },
     "fallbackCacheGroup": {
       "description": "Options for modules not selected by any other cache group.",
       "type": "object",
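Together, the two schema additions mean enforceSizeThreshold is accepted both on optimization.splitChunks itself and on individual cache groups. A hypothetical configuration exercising both placements; the numbers are illustrative, not defaults.

// Sketch only — illustrative values showing where the schema now accepts the option.
module.exports = {
  optimization: {
    splitChunks: {
      enforceSizeThreshold: 50000, // applies to all cache groups unless overridden
      cacheGroups: {
        vendors: {
          test: /[\\/]node_modules[\\/]/,
          // per-group override; like the other size options it maps to
          // OptimizationSplitChunksSizes in the schema
          enforceSizeThreshold: 100000
        }
      }
    }
  }
};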
@@ -184,6 +184,7 @@ describe("Defaults", () => {
       },
     },
     "chunks": "async",
+    "enforceSizeThreshold": 30000,
     "hidePathInfo": false,
     "maxAsyncRequests": Infinity,
     "maxInitialRequests": Infinity,
@@ -379,15 +380,17 @@ describe("Defaults", () => {
   +   "noEmitOnErrors": true,
   +   "nodeEnv": "production",
   @@ ... @@
+  -   "enforceSizeThreshold": 30000,
   -   "hidePathInfo": false,
   -   "maxAsyncRequests": Infinity,
   -   "maxInitialRequests": Infinity,
+  +   "enforceSizeThreshold": 50000,
   +   "hidePathInfo": true,
   +   "maxAsyncRequests": 30,
   +   "maxInitialRequests": 30,
   @@ ... @@
   -   "minSize": 10000,
-  +   "minSize": 30000,
+  +   "minSize": 20000,
   @@ ... @@
   -   "usedExports": false,
   +   "usedExports": true,
@@ -432,15 +435,17 @@ describe("Defaults", () => {
   +   "noEmitOnErrors": true,
   +   "nodeEnv": "production",
   @@ ... @@
+  -   "enforceSizeThreshold": 30000,
   -   "hidePathInfo": false,
   -   "maxAsyncRequests": Infinity,
   -   "maxInitialRequests": Infinity,
+  +   "enforceSizeThreshold": 50000,
   +   "hidePathInfo": true,
   +   "maxAsyncRequests": 30,
   +   "maxInitialRequests": 30,
   @@ ... @@
   -   "minSize": 10000,
-  +   "minSize": 30000,
+  +   "minSize": 20000,
   @@ ... @@
   -   "usedExports": false,
   +   "usedExports": true,
@@ -956,6 +961,7 @@ describe("Defaults", () => {
   -     },
   -   },
   -   "chunks": "async",
+  -   "enforceSizeThreshold": 30000,
   -   "hidePathInfo": false,
   -   "maxAsyncRequests": Infinity,
   -   "maxInitialRequests": Infinity,
@@ -461,7 +461,7 @@ describe("Validation", () => {
       test: ...
     }
   }.
-  object { <key>: false | RegExp | string | function | object { automaticNameDelimiter?, chunks?, enforce?, filename?, idHint?, maxAsyncRequests?, maxAsyncSize?, maxInitialRequests?, maxInitialSize?, maxSize?, minChunks?, minRemainingSize?, minSize?, name?, priority?, reuseExistingChunk?, test?, type? } }
+  object { <key>: false | RegExp | string | function | object { automaticNameDelimiter?, chunks?, enforce?, enforceSizeThreshold?, filename?, idHint?, maxAsyncRequests?, maxAsyncSize?, maxInitialRequests?, maxInitialSize?, maxSize?, minChunks?, minRemainingSize?, minSize?, name?, priority?, reuseExistingChunk?, test?, type? } }
   -> Assign modules to a cache group (modules from different cache groups are tried to keep in separate chunks, default categories: 'default', 'defaultVendors')."
   `)
 );
@@ -715,7 +715,7 @@ describe("Validation", () => {
   expect(msg).toMatchInlineSnapshot(`
   "Invalid configuration object. Webpack has been initialized using a configuration object that does not match the API schema.
    - configuration.optimization.splitChunks has an unknown property 'automaticNamePrefix'. These properties are valid:
-     object { automaticNameDelimiter?, cacheGroups?, chunks?, fallbackCacheGroup?, filename?, hidePathInfo?, maxAsyncRequests?, maxAsyncSize?, maxInitialRequests?, maxInitialSize?, maxSize?, minChunks?, minRemainingSize?, minSize?, name? }
+     object { automaticNameDelimiter?, cacheGroups?, chunks?, enforceSizeThreshold?, fallbackCacheGroup?, filename?, hidePathInfo?, maxAsyncRequests?, maxAsyncSize?, maxInitialRequests?, maxInitialSize?, maxSize?, minChunks?, minRemainingSize?, minSize?, name? }
      -> Options object for splitting chunks into smaller chunks."
   `)
 );
@@ -1532,6 +1532,19 @@ Object {
     "multiple": false,
     "simpleType": "string",
   },
+  "optimization-split-chunks-enforce-size-threshold": Object {
+    "configs": Array [
+      Object {
+        "description": "Size of the javascript part of the chunk.",
+        "multiple": false,
+        "path": "optimization.splitChunks.enforceSizeThreshold",
+        "type": "number",
+      },
+    ],
+    "description": "Size of the javascript part of the chunk.",
+    "multiple": false,
+    "simpleType": "number",
+  },
   "optimization-split-chunks-fallback-cache-group-automatic-name-delimiter": Object {
     "configs": Array [
       Object {
@@ -3413,9 +3413,12 @@ chunk a.js (a) 12 bytes (javascript) 3.87 KiB (runtime) ={282}= [entry] [rendered]

 exports[`StatsTestCases should print correct stats for split-chunks-keep-remaining-size 1`] = `
 "Entrypoint main = default/main.js
-chunk default/main.js (main) 147 bytes (javascript) 5.69 KiB (runtime) >{334}< >{383}< >{794}< >{821}< [entry] [rendered]
+chunk default/async-d.js (async-d) 58 bytes <{179}> ={782}= [rendered]
+    > ./d ./index.js 4:0-47
+    ./d.js 58 bytes [built]
+chunk default/main.js (main) 196 bytes (javascript) 5.7 KiB (runtime) >{31}< >{334}< >{383}< >{782}< >{794}< >{821}< [entry] [rendered]
     > ./ main
-    ./index.js 147 bytes [built]
+    ./index.js 196 bytes [built]
     + 9 hidden chunk modules
 chunk default/async-b.js (async-b) 39 bytes <{179}> ={821}= [rendered]
     > ./b ./index.js 2:0-47
@@ -3423,6 +3426,10 @@ chunk default/async-b.js (async-b) 39 bytes <{179}> ={821}= [rendered]
 chunk default/async-c.js (async-c) 39 bytes <{179}> ={821}= [rendered]
     > ./c ./index.js 3:0-47
     ./c.js 39 bytes [built]
+chunk default/782.js (id hint: vendors) 204 bytes <{179}> ={31}= [rendered] split chunk (cache group: defaultVendors)
+    > ./d ./index.js 4:0-47
+    ./node_modules/shared.js?3 102 bytes [built]
+    ./node_modules/shared.js?4 102 bytes [built]
 chunk default/async-a.js (async-a) 141 bytes <{179}> [rendered]
     > ./a ./index.js 1:0-47
     ./a.js 39 bytes [built]
@@ -0,0 +1,3 @@
+import "shared?3";
+import "shared?4";
+export default "d";
@@ -1,3 +1,4 @@
 import(/* webpackChunkName: "async-a" */ "./a");
 import(/* webpackChunkName: "async-b" */ "./b");
 import(/* webpackChunkName: "async-c" */ "./c");
+import(/* webpackChunkName: "async-d" */ "./d");
@@ -21,7 +21,8 @@ module.exports = {
   },
   optimization: {
     splitChunks: {
-      minSize: 100
+      minSize: 100,
+      enforceSizeThreshold: 200
     }
   },
   stats
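Why this fixture produces the extra vendors chunk seen in the stats snapshot above: the two shared modules pulled in by ./d total 204 bytes, which clears the 200-byte enforceSizeThreshold, so the split is enforced even though only the 58-byte ./d.js would remain in async-d — a remainder the minRemainingSize check (falling back to minSize: 100 here) would otherwise reject. A back-of-the-envelope sketch, reusing the simplified checkMinSize assumed earlier:

// Sketch only — the arithmetic behind the split-chunks-keep-remaining-size case.
const checkMinSize = (sizes, min) =>
  Object.keys(min).every(type => (sizes[type] || 0) >= min[type]);

const vendorGroupSize = { javascript: 102 + 102 }; // shared.js?3 + shared.js?4
const remainingInAsyncD = { javascript: 58 };      // ./d.js
const enforceSizeThreshold = { javascript: 200 };  // from this fixture
const minRemainingSize = { javascript: 100 };      // falls back to minSize: 100

const enforced = checkMinSize(vendorGroupSize, enforceSizeThreshold);  // true
const remainderOk = checkMinSize(remainingInAsyncD, minRemainingSize); // false
console.log({ enforced, remainderOk });
// Without enforcement the remainder check would block the split; with
// enforceSizeThreshold: 200 it happens anyway, giving the default/782.js
// vendors chunk in the snapshot.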
@@ -430,6 +430,7 @@ declare interface CacheGroupSource {
   enforce?: boolean;
   minSize: Record<string, number>;
   minRemainingSize: Record<string, number>;
+  enforceSizeThreshold: Record<string, number>;
   maxAsyncSize: Record<string, number>;
   maxInitialSize: Record<string, number>;
   minChunks?: number;
@@ -4789,6 +4790,11 @@ declare interface OptimizationSplitChunksCacheGroup {
    */
   enforce?: boolean;

+  /**
+   * Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.
+   */
+  enforceSizeThreshold?: OptimizationSplitChunksSizes;
+
   /**
    * Sets the template for the filename for created chunks.
    */
@@ -4891,6 +4897,11 @@ declare interface OptimizationSplitChunksOptions {
    */
   chunks?: "initial" | "async" | "all" | ((chunk: Chunk) => boolean);

+  /**
+   * Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.
+   */
+  enforceSizeThreshold?: OptimizationSplitChunksSizes;
+
   /**
    * Options for modules not selected by any other cache group.
    */
@@ -7368,6 +7379,7 @@ declare interface SplitChunksOptions {
   chunksFilter: (chunk: Chunk) => boolean;
   minSize: Record<string, number>;
   minRemainingSize: Record<string, number>;
+  enforceSizeThreshold: Record<string, number>;
   maxInitialSize: Record<string, number>;
   maxAsyncSize: Record<string, number>;
   minChunks: number;