Merge pull request #14436 from webpack/revert/chunk-combinations

Revert "track chunk combinations for modules"
Tobias Koppers, 2021-10-08 15:39:01 +02:00 (committed by GitHub)
commit d106f00db5
6 changed files with 255 additions and 373 deletions

View File

@@ -1,187 +0,0 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const SortableSet = require("./util/SortableSet");
/** @typedef {import("./Chunk")} Chunk */
/**
* @template T
* @param {SortableSet<T>} set the set
* @returns {T[]} set as array
*/
const getArray = set => {
return Array.from(set);
};
let debugId = 1;
class ChunkCombination {
constructor() {
this.debugId = debugId++;
this.size = 0;
/**
* (do not modify)
* @type {SortableSet<Chunk>}
*/
this._chunks = new SortableSet();
/** @type {ChunkCombination} */
this._parent = undefined;
this._lastChunk = undefined;
/** @type {WeakMap<Chunk, ChunkCombination>} */
this._addMap = new WeakMap();
/** @type {WeakMap<Chunk, ChunkCombination>} */
this._removeCache = new WeakMap();
}
/**
* @returns {Iterable<Chunk>} iterable of chunks
*/
get chunksIterable() {
return this._chunks;
}
/**
* @param {Chunk} chunk chunk to add
* @returns {ChunkCombination} new chunk combination
*/
with(chunk) {
if (this._chunks.has(chunk)) return this;
let next = this._addMap.get(chunk);
if (next !== undefined) return next;
// chunks must be inserted in debugId order to maintain the order-independent identity of ChunkCombination
if (!this._parent || this._lastChunk.debugId < chunk.debugId) {
next = new ChunkCombination();
for (const chunk of this._chunks) {
next._chunks.add(chunk);
}
next._chunks.add(chunk);
next._removeCache.set(chunk, this);
next.size = this.size + 1;
next._parent = this;
next._lastChunk = chunk;
} else {
next = this._parent.with(chunk).with(this._lastChunk);
}
this._addMap.set(chunk, next);
return next;
}
/**
* @param {Chunk} chunk chunk to remove
* @returns {ChunkCombination} new chunk combination
*/
without(chunk) {
if (!this._chunks.has(chunk)) return this;
let next = this._removeCache.get(chunk);
if (next !== undefined) return next;
const stack = [this._lastChunk];
let current = this._parent;
while (current._lastChunk !== chunk) {
stack.push(current._lastChunk);
current = current._parent;
}
next = current._parent;
while (stack.length) next = next.with(stack.pop());
this._removeCache.set(chunk, next);
return next;
}
withAll(other) {
if (other.size === 0) return this;
if (this.size === 0) return other;
const stack = [];
/** @type {ChunkCombination} */
let current = this;
for (;;) {
if (current._lastChunk.debugId < other._lastChunk.debugId) {
stack.push(other._lastChunk);
other = other._parent;
if (other.size === 0) {
while (stack.length) current = current.with(stack.pop());
return current;
}
} else {
stack.push(current._lastChunk);
current = current._parent;
if (current.size === 0) {
while (stack.length) other = other.with(stack.pop());
return other;
}
}
}
}
hasSharedChunks(other) {
if (this.size > other.size) {
const chunks = this._chunks;
for (const chunk of other._chunks) {
if (chunks.has(chunk)) return true;
}
} else {
const chunks = other._chunks;
for (const chunk of this._chunks) {
if (chunks.has(chunk)) return true;
}
}
return false;
}
/**
* @param {ChunkCombination} other other combination
* @returns {boolean} true, when other is a subset of this combination
*/
isSubset(other) {
// TODO: This could be more efficient when using the debugId order of the combinations
/** @type {ChunkCombination} */
let current = this;
let otherSize = other.size;
let currentSize = current.size;
if (otherSize === 0) return true;
for (;;) {
if (currentSize === 0) return false;
if (otherSize === 1) {
if (currentSize === 1) {
return current._lastChunk === other._lastChunk;
} else {
return current._chunks.has(other._lastChunk);
}
}
if (otherSize * 8 < currentSize) {
// go for the Set access when current >> other
const chunks = current._chunks;
for (const item of other._chunks) {
if (!chunks.has(item)) return false;
}
return true;
}
const otherId = other._lastChunk.debugId;
// skip over nodes in current that have higher ids
while (otherId < current._lastChunk.debugId) {
current = current._parent;
currentSize--;
if (currentSize === 0) return false;
}
if (otherId > current._lastChunk.debugId) {
return false;
}
other = other._parent;
otherSize--;
if (otherSize === 0) return true;
current = current._parent;
currentSize--;
}
}
getChunks() {
return this._chunks.getFromUnorderedCache(getArray);
}
}
ChunkCombination.empty = new ChunkCombination();
module.exports = ChunkCombination;

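For context, the deleted class above implements a persistent, structurally shared chunk set: with and without never mutate, results are cached, and chunks are kept internally in debugId order, so the same set of chunks always resolves to the same object regardless of insertion order. A minimal sketch of that identity invariant (illustration only; the plain objects stand in for Chunk instances, since only a unique, stable debugId is read here):

const ChunkCombination = require("./ChunkCombination");

// Stand-ins for two Chunk instances; only debugId is needed for this sketch.
const chunkA = { debugId: 1000 };
const chunkB = { debugId: 1001 };

// Insertion order does not matter: both chains resolve to the same cached
// ChunkCombination instance, so combinations can be compared and used as
// Map/Set keys by reference.
const ab = ChunkCombination.empty.with(chunkA).with(chunkB);
const ba = ChunkCombination.empty.with(chunkB).with(chunkA);
console.log(ab === ba); // true

// Removing a chunk yields the cached smaller combination again.
console.log(ab.without(chunkB) === ChunkCombination.empty.with(chunkA)); // true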
View File

@@ -6,7 +6,6 @@
"use strict";
const util = require("util");
const ChunkCombination = require("./ChunkCombination");
const Entrypoint = require("./Entrypoint");
const ModuleGraphConnection = require("./ModuleGraphConnection");
const { first } = require("./util/SetHelpers");
@@ -41,8 +40,6 @@ const {
/** @type {ReadonlySet<string>} */
const EMPTY_SET = new Set();
const EMPTY_RUNTIME_SPEC_SET = new RuntimeSpecSet();
const ZERO_BIG_INT = BigInt(0);
const compareModuleIterables = compareIterables(compareModulesByIdentifier);
@@ -180,7 +177,8 @@ const isAvailableChunk = (a, b) => {
class ChunkGraphModule {
constructor() {
this.chunkCombination = ChunkCombination.empty;
/** @type {SortableSet<Chunk>} */
this.chunks = new SortableSet();
/** @type {Set<Chunk> | undefined} */
this.entryInChunks = undefined;
/** @type {Set<Chunk> | undefined} */
@@ -303,7 +301,7 @@ class ChunkGraph {
connectChunkAndModule(chunk, module) {
const cgm = this._getChunkGraphModule(module);
const cgc = this._getChunkGraphChunk(chunk);
cgm.chunkCombination = cgm.chunkCombination.with(chunk);
cgm.chunks.add(chunk);
cgc.modules.add(module);
}
@@ -316,7 +314,7 @@
const cgm = this._getChunkGraphModule(module);
const cgc = this._getChunkGraphChunk(chunk);
cgc.modules.delete(module);
cgm.chunkCombination = cgm.chunkCombination.without(chunk);
cgm.chunks.delete(chunk);
}
/**
@@ -327,7 +325,7 @@
const cgc = this._getChunkGraphChunk(chunk);
for (const module of cgc.modules) {
const cgm = this._getChunkGraphModule(module);
cgm.chunkCombination = cgm.chunkCombination.without(chunk);
cgm.chunks.delete(chunk);
}
cgc.modules.clear();
chunk.disconnectFromGroups();
@@ -394,13 +392,13 @@ class ChunkGraph {
const oldCgm = this._getChunkGraphModule(oldModule);
const newCgm = this._getChunkGraphModule(newModule);
for (const chunk of oldCgm.chunkCombination._chunks) {
for (const chunk of oldCgm.chunks) {
const cgc = this._getChunkGraphChunk(chunk);
cgc.modules.delete(oldModule);
cgc.modules.add(newModule);
newCgm.chunkCombination = newCgm.chunkCombination.with(chunk);
newCgm.chunks.add(chunk);
}
oldCgm.chunkCombination = ChunkCombination.empty;
oldCgm.chunks.clear();
if (oldCgm.entryInChunks !== undefined) {
if (newCgm.entryInChunks === undefined) {
@@ -487,22 +485,13 @@ class ChunkGraph {
return cgm.entryInChunks !== undefined;
}
/**
* @param {Module} module the module
* @returns {ChunkCombination} chunk combination (do not modify)
*/
getModuleChunkCombination(module) {
const cgm = this._getChunkGraphModule(module);
return cgm.chunkCombination;
}
/**
* @param {Module} module the module
* @returns {Iterable<Chunk>} iterable of chunks (do not modify)
*/
getModuleChunksIterable(module) {
const cgm = this._getChunkGraphModule(module);
return cgm.chunkCombination._chunks;
return cgm.chunks;
}
/**
@@ -512,9 +501,8 @@
*/
getOrderedModuleChunksIterable(module, sortFn) {
const cgm = this._getChunkGraphModule(module);
const chunks = cgm.chunkCombination._chunks;
chunks.sortWith(sortFn);
return chunks;
cgm.chunks.sortWith(sortFn);
return cgm.chunks;
}
/**
@@ -523,7 +511,7 @@
*/
getModuleChunks(module) {
const cgm = this._getChunkGraphModule(module);
return cgm.chunkCombination.getChunks();
return cgm.chunks.getFromCache(getArray);
}
/**
@@ -532,7 +520,7 @@
*/
getNumberOfModuleChunks(module) {
const cgm = this._getChunkGraphModule(module);
return cgm.chunkCombination.size;
return cgm.chunks.size;
}
/**
@@ -541,10 +529,7 @@
*/
getModuleRuntimes(module) {
const cgm = this._getChunkGraphModule(module);
if (cgm.chunkCombination.size === 0) return EMPTY_RUNTIME_SPEC_SET;
return cgm.chunkCombination._chunks.getFromUnorderedCache(
getModuleRuntimes
);
return cgm.chunks.getFromUnorderedCache(getModuleRuntimes);
}
/**
@@ -908,7 +893,8 @@ class ChunkGraph {
// Merge runtime
chunkA.runtime = mergeRuntime(chunkA.runtime, chunkB.runtime);
for (const module of this.getChunkModulesIterable(chunkB)) {
// getChunkModules is used here to create a clone, because disconnectChunkAndModule modifies the set being iterated
for (const module of this.getChunkModules(chunkB)) {
this.disconnectChunkAndModule(chunkB, module);
this.connectChunkAndModule(chunkA, module);
}

View File

@@ -3633,7 +3633,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
this.moduleGraph.removeConnection(dep);
if (this.chunkGraph) {
for (const chunk of this.chunkGraph.getModuleChunksIterable(
for (const chunk of this.chunkGraph.getModuleChunks(
originalModule
)) {
this.patchChunksAfterReasonRemoval(originalModule, chunk);

View File

@@ -5,10 +5,11 @@
"use strict";
const ChunkCombination = require("../ChunkCombination");
const Chunk = require("../Chunk");
const { STAGE_ADVANCED } = require("../OptimizationStages");
const WebpackError = require("../WebpackError");
const { requestToId } = require("../ids/IdHelpers");
const { isSubset } = require("../util/SetHelpers");
const SortableSet = require("../util/SortableSet");
const {
compareModulesByIdentifier,
@@ -25,7 +26,6 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */
/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */
/** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Compilation").AssetInfo} AssetInfo */
@@ -155,9 +155,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
* @property {number} cacheGroupIndex
* @property {string} name
* @property {Record<string, number>} sizes
* @property {ChunkCombination} chunks
* @property {Set<Chunk>} chunks
* @property {Set<Chunk>} reuseableChunks
* @property {Set<ChunkCombination>} chunkCombinations
* @property {Set<bigint | Chunk>} chunksKeys
*/
const defaultGetName = /** @type {GetName} */ (() => {});
@@ -204,6 +204,19 @@ const mapObject = (obj, fn) => {
return newObj;
};
/**
* @template T
* @param {Set<T>} a set
* @param {Set<T>} b other set
* @returns {boolean} true if at least one item of a is in b
*/
const isOverlap = (a, b) => {
for (const item of a) {
if (b.has(item)) return true;
}
return false;
};
const compareModuleIterables = compareIterables(compareModulesByIdentifier);
/**
@@ -756,132 +769,207 @@ module.exports = class SplitChunksPlugin {
logger.time("prepare");
const chunkGraph = compilation.chunkGraph;
const moduleGraph = compilation.moduleGraph;
const getChunkCombinationsInGraph = memoize(() => {
/** @type {Set<ChunkCombination>} */
const chunkCombinationsInGraph = new Set();
for (const module of compilation.modules) {
const chunkCombination =
chunkGraph.getModuleChunkCombination(module);
chunkCombinationsInGraph.add(chunkCombination);
// Give each selected chunk an index (to create strings from chunks)
/** @type {Map<Chunk, bigint>} */
const chunkIndexMap = new Map();
const ZERO = BigInt("0");
const ONE = BigInt("1");
let index = ONE;
for (const chunk of chunks) {
chunkIndexMap.set(chunk, index);
index = index << ONE;
}
return chunkCombinationsInGraph;
/**
* @param {Iterable<Chunk>} chunks list of chunks
* @returns {bigint | Chunk} key of the chunks
*/
const getKey = chunks => {
const iterator = chunks[Symbol.iterator]();
let result = iterator.next();
if (result.done) return ZERO;
const first = result.value;
result = iterator.next();
if (result.done) return first;
let key =
chunkIndexMap.get(first) | chunkIndexMap.get(result.value);
while (!(result = iterator.next()).done) {
key = key | chunkIndexMap.get(result.value);
}
return key;
};
const keyToString = key => {
if (typeof key === "bigint") return key.toString(16);
return chunkIndexMap.get(key).toString(16);
};
const getChunkSetsInGraph = memoize(() => {
/** @type {Map<bigint, Set<Chunk>>} */
const chunkSetsInGraph = new Map();
/** @type {Set<Chunk>} */
const singleChunkSets = new Set();
for (const module of compilation.modules) {
const chunks = chunkGraph.getModuleChunksIterable(module);
const chunksKey = getKey(chunks);
if (typeof chunksKey === "bigint") {
if (!chunkSetsInGraph.has(chunksKey)) {
chunkSetsInGraph.set(chunksKey, new Set(chunks));
}
} else {
singleChunkSets.add(chunksKey);
}
}
return { chunkSetsInGraph, singleChunkSets };
});
/**
* @param {Module} module the module
* @returns {Iterable<ChunkCombination>} groups of chunks with equal exports
* @returns {Iterable<Chunk[]>} groups of chunks with equal exports
*/
const groupChunksByExports = module => {
const exportsInfo = moduleGraph.getExportsInfo(module);
const groupedByUsedExports = new Map();
for (const chunk of chunkGraph.getModuleChunksIterable(module)) {
const key = exportsInfo.getUsageKey(chunk.runtime);
const combination =
groupedByUsedExports.get(key) || ChunkCombination.empty;
groupedByUsedExports.set(key, combination.with(chunk));
const list = groupedByUsedExports.get(key);
if (list !== undefined) {
list.push(chunk);
} else {
groupedByUsedExports.set(key, [chunk]);
}
}
return groupedByUsedExports.values();
};
/** @type {Map<Module, Iterable<ChunkCombination>>} */
/** @type {Map<Module, Iterable<Chunk[]>>} */
const groupedByExportsMap = new Map();
const getExportsChunkCombinationsInGraph = memoize(() => {
/** @type {Set<ChunkCombination>} */
const chunkCombinationsInGraph = new Set();
const getExportsChunkSetsInGraph = memoize(() => {
/** @type {Map<bigint, Set<Chunk>>} */
const chunkSetsInGraph = new Map();
/** @type {Set<Chunk>} */
const singleChunkSets = new Set();
for (const module of compilation.modules) {
const groupedChunks = Array.from(groupChunksByExports(module));
groupedByExportsMap.set(module, groupedChunks);
for (const chunkCombination of groupedChunks) {
chunkCombinationsInGraph.add(chunkCombination);
for (const chunks of groupedChunks) {
if (chunks.length === 1) {
singleChunkSets.add(chunks[0]);
} else {
const chunksKey = /** @type {bigint} */ (getKey(chunks));
if (!chunkSetsInGraph.has(chunksKey)) {
chunkSetsInGraph.set(chunksKey, new Set(chunks));
}
}
return chunkCombinationsInGraph;
}
}
return { chunkSetsInGraph, singleChunkSets };
});
// group these sets of chunks by count
// to allow checking fewer sets via isSubset
// (only smaller sets can be subsets)
const groupChunkCombinationsByCount = chunkCombinations => {
/** @type {Map<number, ChunkCombination[]>} */
const chunkCombinationsByCount = new Map();
for (const chunksSet of chunkCombinations) {
const groupChunkSetsByCount = chunkSets => {
/** @type {Map<number, Array<Set<Chunk>>>} */
const chunkSetsByCount = new Map();
for (const chunksSet of chunkSets) {
const count = chunksSet.size;
let array = chunkCombinationsByCount.get(count);
let array = chunkSetsByCount.get(count);
if (array === undefined) {
array = [];
chunkCombinationsByCount.set(count, array);
chunkSetsByCount.set(count, array);
}
array.push(chunksSet);
}
return chunkCombinationsByCount;
return chunkSetsByCount;
};
const getChunkCombinationsByCount = memoize(() =>
groupChunkCombinationsByCount(getChunkCombinationsInGraph())
const getChunkSetsByCount = memoize(() =>
groupChunkSetsByCount(
getChunkSetsInGraph().chunkSetsInGraph.values()
)
);
const getExportsChunkCombinationsByCount = memoize(() =>
groupChunkCombinationsByCount(getExportsChunkCombinationsInGraph())
const getExportsChunkSetsByCount = memoize(() =>
groupChunkSetsByCount(
getExportsChunkSetsInGraph().chunkSetsInGraph.values()
)
);
/**
* Create a list of possible combinations
* @param {Map<number, ChunkCombination[]>} chunkCombinationsByCount by count
* @returns {function(ChunkCombination): ChunkCombination[]} get combinations function
*/
const createGetCombinations = chunkCombinationsByCount => {
/** @type {Map<ChunkCombination, ChunkCombination[]>} */
// Create a list of possible combinations
const createGetCombinations = (
chunkSets,
singleChunkSets,
chunkSetsByCount
) => {
/** @type {Map<bigint | Chunk, (Set<Chunk> | Chunk)[]>} */
const combinationsCache = new Map();
/**
* @param {ChunkCombination} chunkCombination chunkCombination
* @returns {ChunkCombination[]} combinations
*/
return chunkCombination => {
const cacheEntry = combinationsCache.get(chunkCombination);
return key => {
const cacheEntry = combinationsCache.get(key);
if (cacheEntry !== undefined) return cacheEntry;
if (chunkCombination.size === 1) {
const result = [chunkCombination];
combinationsCache.set(chunkCombination, result);
if (key instanceof Chunk) {
const result = [key];
combinationsCache.set(key, result);
return result;
}
/** @type {ChunkCombination[]} */
const array = [chunkCombination];
for (const [count, setArray] of chunkCombinationsByCount) {
const chunksSet = chunkSets.get(key);
/** @type {(Set<Chunk> | Chunk)[]} */
const array = [chunksSet];
for (const [count, setArray] of chunkSetsByCount) {
// "equal" is not needed because they would have been merge in the first step
if (count < chunkCombination.size) {
if (count < chunksSet.size) {
for (const set of setArray) {
if (chunkCombination.isSubset(set)) {
if (isSubset(chunksSet, set)) {
array.push(set);
}
}
}
}
combinationsCache.set(chunkCombination, array);
for (const chunk of singleChunkSets) {
if (chunksSet.has(chunk)) {
array.push(chunk);
}
}
combinationsCache.set(key, array);
return array;
};
};
const getCombinationsFactory = memoize(() => {
return createGetCombinations(getChunkCombinationsByCount());
const { chunkSetsInGraph, singleChunkSets } = getChunkSetsInGraph();
return createGetCombinations(
chunkSetsInGraph,
singleChunkSets,
getChunkSetsByCount()
);
});
const getCombinations = key => getCombinationsFactory()(key);
const getExportsCombinationsFactory = memoize(() => {
return createGetCombinations(getExportsChunkCombinationsByCount());
const { chunkSetsInGraph, singleChunkSets } =
getExportsChunkSetsInGraph();
return createGetCombinations(
chunkSetsInGraph,
singleChunkSets,
getExportsChunkSetsByCount()
);
});
const getExportsCombinations = key =>
getExportsCombinationsFactory()(key);
/** @type {WeakMap<ChunkCombination, WeakMap<ChunkFilterFunction, ChunkCombination>>} */
/**
* @typedef {Object} SelectedChunksResult
* @property {Chunk[]} chunks the list of chunks
* @property {bigint | Chunk} key a key of the list
*/
/** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFunction, SelectedChunksResult>>} */
const selectedChunksCacheByChunksSet = new WeakMap();
/**
* get chunks by applying the filter function to the list
* get list and key by applying the filter function to the list
* It is cached for performance reasons
* @param {ChunkCombination} chunks list of chunks
* @param {Set<Chunk> | Chunk} chunks list of chunks
* @param {ChunkFilterFunction} chunkFilter filter function for chunks
* @returns {ChunkCombination} selected chunks
* @returns {SelectedChunksResult} list and key
*/
const getSelectedChunks = (chunks, chunkFilter) => {
let entry = selectedChunksCacheByChunksSet.get(chunks);
@@ -889,16 +977,22 @@ module.exports = class SplitChunksPlugin {
entry = new WeakMap();
selectedChunksCacheByChunksSet.set(chunks, entry);
}
/** @type {ChunkCombination} */
/** @type {SelectedChunksResult} */
let entry2 = entry.get(chunkFilter);
if (entry2 === undefined) {
/** @type {ChunkCombination} */
let selectedChunks = ChunkCombination.empty;
for (const chunk of chunks.chunksIterable) {
if (chunkFilter(chunk))
selectedChunks = selectedChunks.with(chunk);
/** @type {Chunk[]} */
const selectedChunks = [];
if (chunks instanceof Chunk) {
if (chunkFilter(chunks)) selectedChunks.push(chunks);
} else {
for (const chunk of chunks) {
if (chunkFilter(chunk)) selectedChunks.push(chunk);
}
entry2 = selectedChunks;
}
entry2 = {
chunks: selectedChunks,
key: getKey(selectedChunks)
};
entry.set(chunkFilter, entry2);
}
return entry2;
@@ -917,7 +1011,8 @@ module.exports = class SplitChunksPlugin {
/**
* @param {CacheGroup} cacheGroup the current cache group
* @param {number} cacheGroupIndex the index of the cache group of ordering
* @param {ChunkCombination} selectedChunks chunks selected for this module
* @param {Chunk[]} selectedChunks chunks selected for this module
* @param {bigint | Chunk} selectedChunksKey a key of selectedChunks
* @param {Module} module the current module
* @returns {void}
*/
@@ -925,20 +1020,25 @@ module.exports = class SplitChunksPlugin {
cacheGroup,
cacheGroupIndex,
selectedChunks,
selectedChunksKey,
module
) => {
// Break if minimum number of chunks is not reached
if (selectedChunks.size < cacheGroup.minChunks) return;
if (selectedChunks.length < cacheGroup.minChunks) return;
// Determine name for split chunk
const name = cacheGroup.getName(
module,
selectedChunks.getChunks(),
selectedChunks,
cacheGroup.key
);
// Check if the name is ok
const existingChunk = compilation.namedChunks.get(name);
if (existingChunk) {
const parentValidationKey = `${name}|${selectedChunks.debugId}`;
const parentValidationKey = `${name}|${
typeof selectedChunksKey === "bigint"
? selectedChunksKey
: selectedChunksKey.debugId
}`;
const valid = alreadyValidatedParents.get(parentValidationKey);
if (valid === false) return;
if (valid === undefined) {
@@ -947,7 +1047,7 @@ module.exports = class SplitChunksPlugin {
let isInAllParents = true;
/** @type {Set<ChunkGroup>} */
const queue = new Set();
for (const chunk of selectedChunks.chunksIterable) {
for (const chunk of selectedChunks) {
for (const group of chunk.groupsIterable) {
queue.add(group);
}
@@ -993,7 +1093,9 @@ module.exports = class SplitChunksPlugin {
// This automatically merges equal names
const key =
cacheGroup.key +
(name ? ` name:${name}` : ` chunks:${selectedChunks.debugId}`);
(name
? ` name:${name}`
: ` chunks:${keyToString(selectedChunksKey)}`);
// Add module to maps
let info = chunksInfoMap.get(key);
if (info === undefined) {
@@ -1008,9 +1110,9 @@ module.exports = class SplitChunksPlugin {
cacheGroupIndex,
name,
sizes: {},
chunks: ChunkCombination.empty,
chunks: new Set(),
reuseableChunks: new Set(),
chunkCombinations: new Set()
chunksKeys: new Set()
})
);
}
@@ -1021,10 +1123,12 @@ module.exports = class SplitChunksPlugin {
info.sizes[type] = (info.sizes[type] || 0) + module.size(type);
}
}
const oldChunksKeysSize = info.chunkCombinations.size;
info.chunkCombinations.add(selectedChunks);
if (oldChunksKeysSize !== info.chunkCombinations.size) {
info.chunks = info.chunks.withAll(selectedChunks);
const oldChunksKeysSize = info.chunksKeys.size;
info.chunksKeys.add(selectedChunksKey);
if (oldChunksKeysSize !== info.chunksKeys.size) {
for (const chunk of selectedChunks) {
info.chunks.add(chunk);
}
}
};
@@ -1045,56 +1149,50 @@ module.exports = class SplitChunksPlugin {
continue;
}
const chunkCombination =
chunkGraph.getModuleChunkCombination(module);
// Prepare some values (usedExports = false)
const getCombs = memoize(() => {
const chunks = chunkGraph.getModuleChunksIterable(module);
const chunksKey = getKey(chunks);
return getCombinations(chunksKey);
});
// Prepare some values (usedExports = true)
const getCombsByUsedExports = memoize(() => {
// fill the groupedByExportsMap
getExportsChunkSetsInGraph();
/** @type {Set<Set<Chunk> | Chunk>} */
const set = new Set();
const groupedByUsedExports = groupedByExportsMap.get(module);
for (const chunks of groupedByUsedExports) {
const chunksKey = getKey(chunks);
for (const comb of getExportsCombinations(chunksKey))
set.add(comb);
}
return set;
});
let cacheGroupIndex = 0;
for (const cacheGroupSource of cacheGroups) {
const cacheGroup = this._getCacheGroup(cacheGroupSource);
// Break if minimum number of chunks is not reached
if (chunkCombination.size < cacheGroup.minChunks) continue;
/** @type {Iterable<ChunkCombination>} */
let combs;
if (cacheGroup.usedExports) {
// fill the groupedByExportsMap
getExportsChunkCombinationsInGraph();
/** @type {Set<ChunkCombination>} */
const set = new Set();
const groupedByUsedExports = groupedByExportsMap.get(module);
for (const chunkCombination of groupedByUsedExports) {
const preSelectedChunks = getSelectedChunks(
chunkCombination,
cacheGroup.chunksFilter
);
// Break if minimum number of chunks is not reached
if (preSelectedChunks.size < cacheGroup.minChunks) continue;
for (const comb of getExportsCombinations(preSelectedChunks))
set.add(comb);
}
combs = set;
} else {
const preSelectedChunks = getSelectedChunks(
chunkCombination,
cacheGroup.chunksFilter
);
// Break if minimum number of chunks is not reached
if (preSelectedChunks.size < cacheGroup.minChunks) continue;
combs = getCombinations(preSelectedChunks);
}
const combs = cacheGroup.usedExports
? getCombsByUsedExports()
: getCombs();
// For all combination of chunk selection
for (const selectedChunks of combs) {
for (const chunkCombination of combs) {
// Break if minimum number of chunks is not reached
const count = chunkCombination.size;
const count =
chunkCombination instanceof Chunk ? 1 : chunkCombination.size;
if (count < cacheGroup.minChunks) continue;
// Select chunks by configuration
const { chunks: selectedChunks, key: selectedChunksKey } =
getSelectedChunks(chunkCombination, cacheGroup.chunksFilter);
addModuleToChunksInfoMap(
cacheGroup,
cacheGroupIndex,
selectedChunks,
selectedChunksKey,
module
);
}
@@ -1186,12 +1284,12 @@ module.exports = class SplitChunksPlugin {
const chunkByName = compilation.namedChunks.get(chunkName);
if (chunkByName !== undefined) {
newChunk = chunkByName;
const newChunks = item.chunks.without(newChunk);
isExistingChunk = newChunks !== item.chunks;
if (isExistingChunk) item.chunks = newChunks;
const oldSize = item.chunks.size;
item.chunks.delete(newChunk);
isExistingChunk = item.chunks.size !== oldSize;
}
} else if (item.cacheGroup.reuseExistingChunk) {
outer: for (const chunk of item.chunks.chunksIterable) {
outer: for (const chunk of item.chunks) {
if (
chunkGraph.getNumberOfChunkModules(chunk) !==
item.modules.size
@@ -1225,7 +1323,7 @@ module.exports = class SplitChunksPlugin {
}
}
if (newChunk) {
item.chunks = item.chunks.without(newChunk);
item.chunks.delete(newChunk);
chunkName = undefined;
isExistingChunk = true;
isReusedWithAllModules = true;
@@ -1236,7 +1334,7 @@ module.exports = class SplitChunksPlugin {
item.cacheGroup._conditionalEnforce &&
checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);
let usedChunks = item.chunks;
const usedChunks = new Set(item.chunks);
// Check if maxRequests condition can be fulfilled
if (
@@ -1244,7 +1342,7 @@
(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
Number.isFinite(item.cacheGroup.maxAsyncRequests))
) {
for (const chunk of usedChunks.chunksIterable) {
for (const chunk of usedChunks) {
// respect max requests
const maxRequests = chunk.isOnlyInitial()
? item.cacheGroup.maxInitialRequests
@@ -1258,28 +1356,30 @@
isFinite(maxRequests) &&
getRequests(chunk) >= maxRequests
) {
usedChunks = usedChunks.without(chunk);
usedChunks.delete(chunk);
}
}
}
outer: for (const chunk of usedChunks.chunksIterable) {
outer: for (const chunk of usedChunks) {
for (const module of item.modules) {
if (chunkGraph.isModuleInChunk(module, chunk)) continue outer;
}
usedChunks = usedChunks.without(chunk);
usedChunks.delete(chunk);
}
// Were some (invalid) chunks removed from usedChunks?
// => readd all modules to the queue, as things could have been changed
if (usedChunks !== item.chunks) {
if (isExistingChunk) usedChunks = usedChunks.with(newChunk);
if (usedChunks.size < item.chunks.size) {
if (isExistingChunk) usedChunks.add(newChunk);
if (usedChunks.size >= item.cacheGroup.minChunks) {
const chunksArr = Array.from(usedChunks);
for (const module of item.modules) {
addModuleToChunksInfoMap(
item.cacheGroup,
item.cacheGroupIndex,
usedChunks,
chunksArr,
getKey(usedChunks),
module
);
}
@@ -1293,7 +1393,7 @@
item.cacheGroup._validateRemainingSize &&
usedChunks.size === 1
) {
const [chunk] = usedChunks.chunksIterable;
const [chunk] = usedChunks;
let chunkSizes = Object.create(null);
for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
if (!item.modules.has(module)) {
@@ -1327,7 +1427,7 @@
newChunk = compilation.addChunk(chunkName);
}
// Walk through all chunks
for (const chunk of usedChunks.chunksIterable) {
for (const chunk of usedChunks) {
// Add graph connections for split chunk
chunk.split(newChunk);
}
@@ -1357,14 +1457,14 @@
// Add module to new chunk
chunkGraph.connectChunkAndModule(newChunk, module);
// Remove module from used chunks
for (const chunk of usedChunks.chunksIterable) {
for (const chunk of usedChunks) {
chunkGraph.disconnectChunkAndModule(chunk, module);
}
}
} else {
// Remove all modules from used chunks
for (const module of item.modules) {
for (const chunk of usedChunks.chunksIterable) {
for (const chunk of usedChunks) {
chunkGraph.disconnectChunkAndModule(chunk, module);
}
}
@@ -1406,7 +1506,7 @@
// remove all modules from other entries and update size
for (const [key, info] of chunksInfoMap) {
if (info.chunks.hasSharedChunks(usedChunks)) {
if (isOverlap(info.chunks, usedChunks)) {
// update modules and total size
// may remove it from the map when < minSize
let updated = false;

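For reference, the restored SplitChunksPlugin code above keys a module's chunk set with a bigint instead of a ChunkCombination: every selected chunk owns one bit, a multi-chunk set is keyed by OR-ing its members' bits (so equal sets get equal keys and are deduplicated), and a single chunk is used directly as its own key to avoid allocating bigints in the common case. A stripped-down sketch of the keying idea (illustration only; plain objects stand in for Chunk instances, and bigint literals replace the BigInt("1") calls used above):

// Each chunk is assigned a distinct power-of-two bit.
const a = {}, b = {}, c = {};
const chunkIndexMap = new Map([[a, 1n], [b, 2n], [c, 4n]]);

// The key of a chunk set is the bitwise OR of its members' bits, so two
// sets share a key exactly when they contain the same chunks.
const keyOf = chunks => {
  let key = 0n;
  for (const chunk of chunks) key |= chunkIndexMap.get(chunk);
  return key;
};

console.log(keyOf([a, b]) === keyOf([b, a])); // true: same set, same key
console.log(keyOf([a, b, c]).toString(16)); // "7", as keyToString above would print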
View File

@@ -5,7 +5,6 @@
"use strict";
const WebpackError = require("../WebpackError");
const { someInIterable } = require("../util/IterableHelpers");
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../Module")} Module */
@@ -43,11 +42,7 @@ const getInitialModuleChains = (
for (const connection of moduleGraph.getIncomingConnections(head)) {
const newHead = connection.originModule;
if (newHead) {
if (
!someInIterable(chunkGraph.getModuleChunksIterable(newHead), c =>
c.canBeInitial()
)
)
if (!chunkGraph.getModuleChunks(newHead).some(c => c.canBeInitial()))
continue;
final = false;
if (alreadyReferencedModules.has(newHead)) continue;

types.d.ts (vendored): 12 changed lines
View File

@@ -757,17 +757,6 @@ declare class Chunk {
filterFn?: (c: Chunk, chunkGraph: ChunkGraph) => boolean
): Record<string | number, Record<string, (string | number)[]>>;
}
declare abstract class ChunkCombination {
debugId: number;
size: number;
readonly chunksIterable: Iterable<Chunk>;
with(chunk: Chunk): ChunkCombination;
without(chunk: Chunk): ChunkCombination;
withAll(other?: any): any;
hasSharedChunks(other?: any): boolean;
isSubset(other: ChunkCombination): boolean;
getChunks(): Chunk[];
}
declare class ChunkGraph {
constructor(moduleGraph: ModuleGraph, hashFunction?: string | typeof Hash);
moduleGraph: ModuleGraph;
@@ -785,7 +774,6 @@ declare class ChunkGraph {
isModuleInChunk(module: Module, chunk: Chunk): boolean;
isModuleInChunkGroup(module: Module, chunkGroup: ChunkGroup): boolean;
isEntryModule(module: Module): boolean;
getModuleChunkCombination(module: Module): ChunkCombination;
getModuleChunksIterable(module: Module): Iterable<Chunk>;
getOrderedModuleChunksIterable(
module: Module,