mirror of https://github.com/webpack/webpack.git
Merge pull request #11166 from webpack/http2/defaults

Http2 defaults for splitChunks
		
commit 020b8d6997
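Editorial summary, not part of the commit: the branch name suggests HTTP/2-oriented defaults, where many smaller requests are cheap. The diff below lowers the production minSize default from 30000 to 20000 bytes and introduces a new enforceSizeThreshold option (50000 in production, 30000 in development): once a cache group reaches that size, splitting is enforced and minRemainingSize, maxAsyncRequests and maxInitialRequests are ignored. A minimal sketch of how a user configuration could set the option explicitly; the option name and semantics come from this diff, and the values simply mirror the new production defaults:

// webpack.config.js (illustrative sketch, not code from this commit)
module.exports = {
	mode: "production",
	optimization: {
		splitChunks: {
			chunks: "all",
			minSize: 20000, // new production default (was 30000)
			// Above this size, splitting is enforced and minRemainingSize,
			// maxAsyncRequests and maxInitialRequests are ignored.
			enforceSizeThreshold: 50000 // new production default
		}
	}
};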
@@ -1372,6 +1372,10 @@ export interface OptimizationSplitChunksOptions {
	chunks?:
		| ("initial" | "async" | "all")
		| ((chunk: import("../lib/Chunk")) => boolean);
	/**
	 * Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.
	 */
	enforceSizeThreshold?: OptimizationSplitChunksSizes;
	/**
	 * Options for modules not selected by any other cache group.
	 */
@@ -1465,6 +1469,10 @@ export interface OptimizationSplitChunksCacheGroup {
	 * Ignore minimum size, minimum chunks and maximum requests and always create chunks for this cache group.
	 */
	enforce?: boolean;
	/**
	 * Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.
	 */
	enforceSizeThreshold?: OptimizationSplitChunksSizes;
	/**
	 * Sets the template for the filename for created chunks.
	 */

@@ -580,8 +580,9 @@ const applyOptimizationDefaults = (
		D(splitChunks, "hidePathInfo", production);
		D(splitChunks, "chunks", "async");
		D(splitChunks, "minChunks", 1);
		F(splitChunks, "minSize", () => (production ? 30000 : 10000));
		F(splitChunks, "minSize", () => (production ? 20000 : 10000));
		F(splitChunks, "minRemainingSize", () => (development ? 0 : undefined));
		F(splitChunks, "enforceSizeThreshold", () => (production ? 50000 : 30000));
		F(splitChunks, "maxAsyncRequests", () => (production ? 30 : Infinity));
		F(splitChunks, "maxInitialRequests", () => (production ? 30 : Infinity));
		D(splitChunks, "automaticNameDelimiter", "-");

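In the applyOptimizationDefaults hunk above, D assigns a plain default and F a lazily computed one (judging by how they are used in this defaults module). The hunk therefore resolves to roughly the following effective splitChunks defaults per mode; this is an illustrative summary, not code from the commit:

// Rough summary of the defaults set above (illustrative sketch).
const splitChunksDefaults = ({ production, development }) => ({
	hidePathInfo: production,
	chunks: "async",
	minChunks: 1,
	minSize: production ? 20000 : 10000, // lowered from 30000 in production
	minRemainingSize: development ? 0 : undefined,
	enforceSizeThreshold: production ? 50000 : 30000, // new option
	maxAsyncRequests: production ? 30 : Infinity,
	maxInitialRequests: production ? 30 : Infinity,
	automaticNameDelimiter: "-"
});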
@@ -58,6 +58,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 * @property {boolean=} enforce
 * @property {SplitChunksSizes} minSize
 * @property {SplitChunksSizes} minRemainingSize
 * @property {SplitChunksSizes} enforceSizeThreshold
 * @property {SplitChunksSizes} maxAsyncSize
 * @property {SplitChunksSizes} maxInitialSize
 * @property {number=} minChunks
@@ -75,10 +76,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 * @property {number=} priority
 * @property {GetName=} getName
 * @property {ChunkFilterFunction=} chunksFilter
 * @property {boolean=} enforce
 * @property {SplitChunksSizes} minSize
 * @property {SplitChunksSizes} minRemainingSize
 * @property {SplitChunksSizes} minSizeForMaxSize
 * @property {SplitChunksSizes} enforceSizeThreshold
 * @property {SplitChunksSizes} maxAsyncSize
 * @property {SplitChunksSizes} maxInitialSize
 * @property {number=} minChunks
@@ -88,6 +88,10 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 * @property {string=} idHint
 * @property {string} automaticNameDelimiter
 * @property {boolean=} reuseExistingChunk
 * @property {boolean} _validateSize
 * @property {boolean} _validateRemainingSize
 * @property {SplitChunksSizes} _minSizeForMaxSize
 * @property {boolean} _conditionalEnforce
 */

/**
@@ -124,6 +128,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 * @property {ChunkFilterFunction} chunksFilter
 * @property {SplitChunksSizes} minSize
 * @property {SplitChunksSizes} minRemainingSize
 * @property {SplitChunksSizes} enforceSizeThreshold
 * @property {SplitChunksSizes} maxInitialSize
 * @property {SplitChunksSizes} maxAsyncSize
 * @property {number} minChunks
@@ -141,8 +146,8 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 * @typedef {Object} ChunksInfoItem
 * @property {SortableSet<Module>} modules
 * @property {CacheGroup} cacheGroup
 * @property {number} cacheGroupIndex
 * @property {string} name
 * @property {boolean} validateSize
 * @property {Record<string, number>} sizes
 * @property {Set<Chunk>} chunks
 * @property {Set<Chunk>} reuseableChunks
@@ -220,12 +225,15 @@ const compareEntries = (a, b) => {
	const bSizeReduce = totalSize(b.sizes) * (b.chunks.size - 1);
	const diffSizeReduce = aSizeReduce - bSizeReduce;
	if (diffSizeReduce) return diffSizeReduce;
	// 4. by number of modules (to be able to compare by identifier)
	// 4. by cache group index
	const indexDiff = a.cacheGroupIndex - b.cacheGroupIndex;
	if (indexDiff) return indexDiff;
	// 5. by number of modules (to be able to compare by identifier)
	const modulesA = a.modules;
	const modulesB = b.modules;
	const diff = modulesA.size - modulesB.size;
	if (diff) return diff;
	// 5. by module identifiers
	// 6. by module identifiers
	modulesA.sort();
	modulesB.sort();
	return compareModuleIterables(modulesA, modulesB);
@@ -507,6 +515,7 @@ const createCacheGroupSource = (options, key) => {
		enforce: options.enforce,
		minSize: normalizeSizes(options.minSize),
		minRemainingSize: mergeSizes(options.minRemainingSize, options.minSize),
		enforceSizeThreshold: normalizeSizes(options.enforceSizeThreshold),
		maxAsyncSize: mergeSizes(options.maxAsyncSize, options.maxSize),
		maxInitialSize: mergeSizes(options.maxInitialSize, options.maxSize),
		minChunks: options.minChunks,
@@ -531,6 +540,7 @@ module.exports = class SplitChunksPlugin {
			chunksFilter: normalizeChunksFilter(options.chunks || "all"),
			minSize: normalizeSizes(options.minSize),
			minRemainingSize: mergeSizes(options.minRemainingSize, options.minSize),
			enforceSizeThreshold: normalizeSizes(options.enforceSizeThreshold),
			maxAsyncSize: mergeSizes(options.maxAsyncSize, options.maxSize),
			maxInitialSize: mergeSizes(options.maxInitialSize, options.maxSize),
			minChunks: options.minChunks || 1,
@@ -714,6 +724,7 @@ module.exports = class SplitChunksPlugin {

					/**
					 * @param {CacheGroup} cacheGroup the current cache group
					 * @param {number} cacheGroupIndex the index of the cache group for ordering
					 * @param {Chunk[]} selectedChunks chunks selected for this module
					 * @param {bigint} selectedChunksKey a key of selectedChunks
					 * @param {Module} module the current module
@@ -721,6 +732,7 @@ module.exports = class SplitChunksPlugin {
					 */
					const addModuleToChunksInfoMap = (
						cacheGroup,
						cacheGroupIndex,
						selectedChunks,
						selectedChunksKey,
						module
@@ -771,10 +783,8 @@ module.exports = class SplitChunksPlugin {
										compareModulesByIdentifier
									),
									cacheGroup,
									cacheGroupIndex,
									name,
									validateSize:
										hasNonZeroSizes(cacheGroup.minSize) ||
										hasNonZeroSizes(cacheGroup.minRemainingSize),
									sizes: {},
									chunks: new Set(),
									reuseableChunks: new Set(),
@@ -783,11 +793,9 @@ module.exports = class SplitChunksPlugin {
							);
						}
						info.modules.add(module);
						if (info.validateSize) {
						for (const type of module.getSourceTypes()) {
							info.sizes[type] = (info.sizes[type] || 0) + module.size(type);
						}
						}
						if (!info.chunksKeys.has(selectedChunksKey)) {
							info.chunksKeys.add(selectedChunksKey);
							for (const chunk of selectedChunks) {
@@ -823,29 +831,35 @@ module.exports = class SplitChunksPlugin {
							combinationsCache.set(chunksKey, combs);
						}

						let cacheGroupIndex = 0;
						for (const cacheGroupSource of cacheGroups) {
							/** @type {CacheGroup} */
							let cacheGroup = this._cacheGroupCache.get(cacheGroupSource);
							if (cacheGroup === undefined) {
								const minSize = mergeSizes(
									cacheGroupSource.minSize,
									cacheGroupSource.enforce ? undefined : this.options.minSize
								);
								const minRemainingSize = mergeSizes(
									cacheGroupSource.minRemainingSize,
									cacheGroupSource.enforce
										? undefined
										: this.options.minRemainingSize
								);
								const enforceSizeThreshold = mergeSizes(
									cacheGroupSource.enforceSizeThreshold,
									cacheGroupSource.enforce
										? undefined
										: this.options.enforceSizeThreshold
								);
								cacheGroup = {
									key: cacheGroupSource.key,
									priority: cacheGroupSource.priority || 0,
									chunksFilter:
										cacheGroupSource.chunksFilter || this.options.chunksFilter,
									minSize: mergeSizes(
										cacheGroupSource.minSize,
										cacheGroupSource.enforce ? undefined : this.options.minSize
									),
									minRemainingSize: mergeSizes(
										cacheGroupSource.minRemainingSize,
										cacheGroupSource.enforce
											? undefined
											: this.options.minRemainingSize
									),
									minSizeForMaxSize: mergeSizes(
										cacheGroupSource.minSize,
										this.options.minSize
									),
									minSize,
									minRemainingSize,
									enforceSizeThreshold,
									maxAsyncSize: mergeSizes(
										cacheGroupSource.maxAsyncSize,
										cacheGroupSource.enforce
@@ -892,7 +906,14 @@ module.exports = class SplitChunksPlugin {
										cacheGroupSource.idHint !== undefined
											? cacheGroupSource.idHint
											: cacheGroupSource.key,
									reuseExistingChunk: cacheGroupSource.reuseExistingChunk
									reuseExistingChunk: cacheGroupSource.reuseExistingChunk,
									_validateSize: hasNonZeroSizes(minSize),
									_validateRemainingSize: hasNonZeroSizes(minRemainingSize),
									_minSizeForMaxSize: mergeSizes(
										cacheGroupSource.minSize,
										this.options.minSize
									),
									_conditionalEnforce: hasNonZeroSizes(enforceSizeThreshold)
								};
							}
							// For all combinations of chunk selection
@@ -910,11 +931,13 @@ module.exports = class SplitChunksPlugin {

								addModuleToChunksInfoMap(
									cacheGroup,
									cacheGroupIndex,
									selectedChunks,
									selectedChunksKey,
									module
								);
							}
							cacheGroupIndex++;
						}
					}

@@ -923,13 +946,12 @@ module.exports = class SplitChunksPlugin {
					logger.time("queue");

					// Filter items where size < minSize
					for (const pair of chunksInfoMap) {
						const info = pair[1];
					for (const [key, info] of chunksInfoMap) {
						if (
							info.validateSize &&
							info.cacheGroup._validateSize &&
							!checkMinSize(info.sizes, info.cacheGroup.minSize)
						) {
							chunksInfoMap.delete(pair[0]);
							chunksInfoMap.delete(key);
						}
					}

@@ -1021,18 +1043,20 @@ module.exports = class SplitChunksPlugin {
						// TODO check if this check is really needed, shouldn't chunks always be non-empty?
						if (item.chunks.size === 0 && !isExistingChunk) continue;

						const enforced =
							item.cacheGroup._conditionalEnforce &&
							checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);

						const usedChunks = new Set(item.chunks);

						// Check if maxRequests condition can be fulfilled
						// TODO try to avoid creating a new array here
						const usedChunks = Array.from(item.chunks);

						let validChunks = usedChunks;

						if (
							Number.isFinite(item.cacheGroup.maxInitialRequests) ||
							Number.isFinite(item.cacheGroup.maxAsyncRequests)
							!enforced &&
							(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
								Number.isFinite(item.cacheGroup.maxAsyncRequests))
						) {
							validChunks = validChunks.filter(chunk => {
								// respect max requests when not enforced
							for (const chunk of usedChunks) {
								// respect max requests
								const maxRequests = chunk.isOnlyInitial()
									? item.cacheGroup.maxInitialRequests
									: chunk.canBeInitial()
@@ -1041,27 +1065,34 @@ module.exports = class SplitChunksPlugin {
											item.cacheGroup.maxAsyncRequests
									  )
									: item.cacheGroup.maxAsyncRequests;
								return (
									!isFinite(maxRequests) || getRequests(chunk) < maxRequests
								);
							});
								if (
									isFinite(maxRequests) &&
									getRequests(chunk) >= maxRequests
								) {
									usedChunks.delete(chunk);
								}
							}
						}

						validChunks = validChunks.filter(chunk => {
						outer: for (const chunk of usedChunks) {
							for (const module of item.modules) {
								if (chunkGraph.isModuleInChunk(module, chunk)) return true;
								if (chunkGraph.isModuleInChunk(module, chunk)) continue outer;
							}
							usedChunks.delete(chunk);
						}
							return false;
						});

						if (validChunks.length < usedChunks.length) {
							if (isExistingChunk) validChunks.push(newChunk);
							if (validChunks.length >= item.cacheGroup.minChunks) {
						// Were some (invalid) chunks removed from usedChunks?
						// => re-add all modules to the queue, as things could have changed
						if (usedChunks.size < item.chunks.size) {
							if (isExistingChunk) usedChunks.add(newChunk);
							if (usedChunks.size >= item.cacheGroup.minChunks) {
								const chunksArr = Array.from(usedChunks);
								for (const module of item.modules) {
									addModuleToChunksInfoMap(
										item.cacheGroup,
										validChunks,
										getKey(validChunks),
										item.cacheGroupIndex,
										chunksArr,
										getKey(usedChunks),
										module
									);
								}
@@ -1071,13 +1102,19 @@ module.exports = class SplitChunksPlugin {

						// Validate minRemainingSize constraint when a single chunk is left over
						if (
							validChunks.length === 1 &&
							hasNonZeroSizes(item.cacheGroup.minRemainingSize)
							!enforced &&
							item.cacheGroup._validateRemainingSize &&
							usedChunks.size === 1
						) {
							const chunk = validChunks[0];
							const chunkSizes = { ...chunkGraph.getChunkModulesSizes(chunk) };
							for (const key of Object.keys(item.sizes)) {
								chunkSizes[key] -= item.sizes[key];
							const [chunk] = usedChunks;
							let chunkSizes = Object.create(null);
							for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
								if (!item.modules.has(module)) {
									for (const type of module.getSourceTypes()) {
										chunkSizes[type] =
											(chunkSizes[type] || 0) + module.size(type);
									}
								}
							}
							if (!checkMinSize(chunkSizes, item.cacheGroup.minRemainingSize)) {
								continue;
@@ -1148,7 +1185,7 @@ module.exports = class SplitChunksPlugin {
								minSize: oldMaxSizeSettings
									? combineSizes(
											oldMaxSizeSettings.minSize,
											item.cacheGroup.minSizeForMaxSize,
											item.cacheGroup._minSizeForMaxSize,
											Math.max
									  )
									: item.cacheGroup.minSize,
@@ -1175,8 +1212,7 @@ module.exports = class SplitChunksPlugin {

						// remove all modules from other entries and update size
						for (const [key, info] of chunksInfoMap) {
							if (isOverlap(info.chunks, item.chunks)) {
								if (info.validateSize) {
							if (isOverlap(info.chunks, usedChunks)) {
								// update modules and total size
								// may remove it from the map when < minSize
								let updated = false;
@@ -1196,16 +1232,10 @@ module.exports = class SplitChunksPlugin {
										chunksInfoMap.delete(key);
										continue;
									}
										if (!checkMinSize(info.sizes, info.cacheGroup.minSize)) {
											chunksInfoMap.delete(key);
										}
									}
								} else {
									// only update the modules
									for (const module of item.modules) {
										info.modules.delete(module);
									}
									if (info.modules.size === 0) {
									if (
										info.cacheGroup._validateSize &&
										!checkMinSize(info.sizes, info.cacheGroup.minSize)
									) {
										chunksInfoMap.delete(key);
									}
								}

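The plugin hunks above pre-compute _conditionalEnforce per cache group (true when a non-zero enforceSizeThreshold is configured) and then derive an enforced flag per chunks-info item that short-circuits the request-limit and minRemainingSize checks. A simplified, self-contained sketch of that decision follows; the checkMinSize helper here is a stand-in that assumes every size type named in the threshold must be reached, which only approximates the plugin's real per-source-type check:

// Simplified stand-in for the plugin's per-source-type size check
// (assumption: every size type named in the threshold must reach its minimum).
const checkMinSize = (sizes, minSize) => {
	for (const type of Object.keys(minSize)) {
		if ((sizes[type] || 0) < minSize[type]) return false;
	}
	return true;
};

// Mirrors the `enforced` computation added in the hunks above.
const isEnforced = (cacheGroup, sizes) =>
	cacheGroup._conditionalEnforce &&
	checkMinSize(sizes, cacheGroup.enforceSizeThreshold);

// With the new production default of 50000 bytes:
console.log(
	isEnforced(
		{ _conditionalEnforce: true, enforceSizeThreshold: { javascript: 50000 } },
		{ javascript: 120000 }
	)
); // true: maxAsyncRequests/maxInitialRequests and minRemainingSize are skipped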
@@ -1257,6 +1257,14 @@
          "description": "Ignore minimum size, minimum chunks and maximum requests and always create chunks for this cache group.",
          "type": "boolean"
        },
        "enforceSizeThreshold": {
          "description": "Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.",
          "oneOf": [
            {
              "$ref": "#/definitions/OptimizationSplitChunksSizes"
            }
          ]
        },
        "filename": {
          "description": "Sets the template for the filename for created chunks.",
          "anyOf": [
@@ -1464,6 +1472,14 @@
            }
          ]
        },
        "enforceSizeThreshold": {
          "description": "Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.",
          "oneOf": [
            {
              "$ref": "#/definitions/OptimizationSplitChunksSizes"
            }
          ]
        },
        "fallbackCacheGroup": {
          "description": "Options for modules not selected by any other cache group.",
          "type": "object",

@@ -184,6 +184,7 @@ describe("Defaults", () => {
		        },
		      },
		      "chunks": "async",
		      "enforceSizeThreshold": 30000,
		      "hidePathInfo": false,
		      "maxAsyncRequests": Infinity,
		      "maxInitialRequests": Infinity,
@@ -379,15 +380,17 @@ describe("Defaults", () => {
		+     "noEmitOnErrors": true,
		+     "nodeEnv": "production",
		@@ ... @@
		-       "enforceSizeThreshold": 30000,
		-       "hidePathInfo": false,
		-       "maxAsyncRequests": Infinity,
		-       "maxInitialRequests": Infinity,
		+       "enforceSizeThreshold": 50000,
		+       "hidePathInfo": true,
		+       "maxAsyncRequests": 30,
		+       "maxInitialRequests": 30,
		@@ ... @@
		-       "minSize": 10000,
		+       "minSize": 30000,
		+       "minSize": 20000,
		@@ ... @@
		-     "usedExports": false,
		+     "usedExports": true,
@@ -432,15 +435,17 @@ describe("Defaults", () => {
		+     "noEmitOnErrors": true,
		+     "nodeEnv": "production",
		@@ ... @@
		-       "enforceSizeThreshold": 30000,
		-       "hidePathInfo": false,
		-       "maxAsyncRequests": Infinity,
		-       "maxInitialRequests": Infinity,
		+       "enforceSizeThreshold": 50000,
		+       "hidePathInfo": true,
		+       "maxAsyncRequests": 30,
		+       "maxInitialRequests": 30,
		@@ ... @@
		-       "minSize": 10000,
		+       "minSize": 30000,
		+       "minSize": 20000,
		@@ ... @@
		-     "usedExports": false,
		+     "usedExports": true,
@@ -956,6 +961,7 @@ describe("Defaults", () => {
			-         },
			-       },
			-       "chunks": "async",
			-       "enforceSizeThreshold": 30000,
			-       "hidePathInfo": false,
			-       "maxAsyncRequests": Infinity,
			-       "maxInitialRequests": Infinity,

@@ -461,7 +461,7 @@ describe("Validation", () => {
			       test: ...
			     }
			   }.
			   object { <key>: false | RegExp | string | function | object { automaticNameDelimiter?, chunks?, enforce?, filename?, idHint?, maxAsyncRequests?, maxAsyncSize?, maxInitialRequests?, maxInitialSize?, maxSize?, minChunks?, minRemainingSize?, minSize?, name?, priority?, reuseExistingChunk?, test?, type? } }
			   object { <key>: false | RegExp | string | function | object { automaticNameDelimiter?, chunks?, enforce?, enforceSizeThreshold?, filename?, idHint?, maxAsyncRequests?, maxAsyncSize?, maxInitialRequests?, maxInitialSize?, maxSize?, minChunks?, minRemainingSize?, minSize?, name?, priority?, reuseExistingChunk?, test?, type? } }
			   -> Assign modules to a cache group (modules from different cache groups are tried to keep in separate chunks, default categories: 'default', 'defaultVendors')."
		`)
	);
@@ -715,7 +715,7 @@ describe("Validation", () => {
				expect(msg).toMatchInlineSnapshot(`
			"Invalid configuration object. Webpack has been initialized using a configuration object that does not match the API schema.
			 - configuration.optimization.splitChunks has an unknown property 'automaticNamePrefix'. These properties are valid:
			   object { automaticNameDelimiter?, cacheGroups?, chunks?, fallbackCacheGroup?, filename?, hidePathInfo?, maxAsyncRequests?, maxAsyncSize?, maxInitialRequests?, maxInitialSize?, maxSize?, minChunks?, minRemainingSize?, minSize?, name? }
			   object { automaticNameDelimiter?, cacheGroups?, chunks?, enforceSizeThreshold?, fallbackCacheGroup?, filename?, hidePathInfo?, maxAsyncRequests?, maxAsyncSize?, maxInitialRequests?, maxInitialSize?, maxSize?, minChunks?, minRemainingSize?, minSize?, name? }
			   -> Options object for splitting chunks into smaller chunks."
		`)
		);

@@ -1532,6 +1532,19 @@ Object {
    "multiple": false,
    "simpleType": "string",
  },
  "optimization-split-chunks-enforce-size-threshold": Object {
    "configs": Array [
      Object {
        "description": "Size of the javascript part of the chunk.",
        "multiple": false,
        "path": "optimization.splitChunks.enforceSizeThreshold",
        "type": "number",
      },
    ],
    "description": "Size of the javascript part of the chunk.",
    "multiple": false,
    "simpleType": "number",
  },
  "optimization-split-chunks-fallback-cache-group-automatic-name-delimiter": Object {
    "configs": Array [
      Object {

@@ -3413,9 +3413,12 @@ chunk a.js (a) 12 bytes (javascript) 3.87 KiB (runtime) ={282}= [entry] [rendered]

exports[`StatsTestCases should print correct stats for split-chunks-keep-remaining-size 1`] = `
"Entrypoint main = default/main.js
chunk default/main.js (main) 147 bytes (javascript) 5.69 KiB (runtime) >{334}< >{383}< >{794}< >{821}< [entry] [rendered]
chunk default/async-d.js (async-d) 58 bytes <{179}> ={782}= [rendered]
    > ./d ./index.js 4:0-47
 ./d.js 58 bytes [built]
chunk default/main.js (main) 196 bytes (javascript) 5.7 KiB (runtime) >{31}< >{334}< >{383}< >{782}< >{794}< >{821}< [entry] [rendered]
    > ./ main
 ./index.js 147 bytes [built]
 ./index.js 196 bytes [built]
     + 9 hidden chunk modules
chunk default/async-b.js (async-b) 39 bytes <{179}> ={821}= [rendered]
    > ./b ./index.js 2:0-47
@@ -3423,6 +3426,10 @@ chunk default/async-b.js (async-b) 39 bytes <{179}> ={821}= [rendered]
chunk default/async-c.js (async-c) 39 bytes <{179}> ={821}= [rendered]
    > ./c ./index.js 3:0-47
 ./c.js 39 bytes [built]
chunk default/782.js (id hint: vendors) 204 bytes <{179}> ={31}= [rendered] split chunk (cache group: defaultVendors)
    > ./d ./index.js 4:0-47
 ./node_modules/shared.js?3 102 bytes [built]
 ./node_modules/shared.js?4 102 bytes [built]
chunk default/async-a.js (async-a) 141 bytes <{179}> [rendered]
    > ./a ./index.js 1:0-47
 ./a.js 39 bytes [built]

@@ -0,0 +1,3 @@
import "shared?3";
import "shared?4";
export default "d";

@@ -1,3 +1,4 @@
import(/* webpackChunkName: "async-a" */ "./a");
import(/* webpackChunkName: "async-b" */ "./b");
import(/* webpackChunkName: "async-c" */ "./c");
import(/* webpackChunkName: "async-d" */ "./d");

@@ -21,7 +21,8 @@ module.exports = {
	},
	optimization: {
		splitChunks: {
			minSize: 100
			minSize: 100,
			enforceSizeThreshold: 200
		}
	},
	stats

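Editorial note on the test fixture above: it appears designed to exercise the new option directly. With minSize: 100 and enforceSizeThreshold: 200, the two shared vendor modules imported by the new async ./d module total 204 bytes (2 x 102) and cross the threshold, so the defaultVendors group is split into default/782.js in the stats snapshot, even though the remaining ./d.js (58 bytes) is below minSize and would otherwise have been held back by the minRemainingSize check that this split-chunks-keep-remaining-size case covers.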
@@ -430,6 +430,7 @@ declare interface CacheGroupSource {
	enforce?: boolean;
	minSize: Record<string, number>;
	minRemainingSize: Record<string, number>;
	enforceSizeThreshold: Record<string, number>;
	maxAsyncSize: Record<string, number>;
	maxInitialSize: Record<string, number>;
	minChunks?: number;
@@ -4789,6 +4790,11 @@ declare interface OptimizationSplitChunksCacheGroup {
	 */
	enforce?: boolean;

	/**
	 * Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.
	 */
	enforceSizeThreshold?: OptimizationSplitChunksSizes;

	/**
	 * Sets the template for the filename for created chunks.
	 */
@@ -4891,6 +4897,11 @@ declare interface OptimizationSplitChunksOptions {
	 */
	chunks?: "initial" | "async" | "all" | ((chunk: Chunk) => boolean);

	/**
	 * Size threshold at which splitting is enforced and other restrictions (minRemainingSize, maxAsyncRequests, maxInitialRequests) are ignored.
	 */
	enforceSizeThreshold?: OptimizationSplitChunksSizes;

	/**
	 * Options for modules not selected by any other cache group.
	 */
@@ -7368,6 +7379,7 @@ declare interface SplitChunksOptions {
	chunksFilter: (chunk: Chunk) => boolean;
	minSize: Record<string, number>;
	minRemainingSize: Record<string, number>;
	enforceSizeThreshold: Record<string, number>;
	maxInitialSize: Record<string, number>;
	maxAsyncSize: Record<string, number>;
	minChunks: number;