Merge pull request #14436 from webpack/revert/chunk-combinations

Revert "track chunk combinations for modules"
Tobias Koppers authored on 2021-10-08 15:39:01 +02:00, committed by GitHub
Commit d106f00db5
6 changed files with 255 additions and 373 deletions


@@ -1,187 +0,0 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const SortableSet = require("./util/SortableSet");

/** @typedef {import("./Chunk")} Chunk */

/**
 * @template T
 * @param {SortableSet<T>} set the set
 * @returns {T[]} set as array
 */
const getArray = set => {
	return Array.from(set);
};

let debugId = 1;

class ChunkCombination {
	constructor() {
		this.debugId = debugId++;
		this.size = 0;
		/**
		 * (do not modify)
		 * @type {SortableSet<Chunk>}
		 */
		this._chunks = new SortableSet();
		/** @type {ChunkCombination} */
		this._parent = undefined;
		this._lastChunk = undefined;
		/** @type {WeakMap<Chunk, ChunkCombination>} */
		this._addMap = new WeakMap();
		/** @type {WeakMap<Chunk, ChunkCombination>} */
		this._removeCache = new WeakMap();
	}

	/**
	 * @returns {Iterable<Chunk>} iterable of chunks
	 */
	get chunksIterable() {
		return this._chunks;
	}

	/**
	 * @param {Chunk} chunk chunk to add
	 * @returns {ChunkCombination} new chunk combination
	 */
	with(chunk) {
		if (this._chunks.has(chunk)) return this;
		let next = this._addMap.get(chunk);
		if (next !== undefined) return next;
		// must insert chunks in order to maintain order-independent identity of ChunkCombination
		if (!this._parent || this._lastChunk.debugId < chunk.debugId) {
			next = new ChunkCombination();
			for (const chunk of this._chunks) {
				next._chunks.add(chunk);
			}
			next._chunks.add(chunk);
			next._removeCache.set(chunk, this);
			next.size = this.size + 1;
			next._parent = this;
			next._lastChunk = chunk;
		} else {
			next = this._parent.with(chunk).with(this._lastChunk);
		}
		this._addMap.set(chunk, next);
		return next;
	}

	/**
	 * @param {Chunk} chunk chunk to remove
	 * @returns {ChunkCombination} new chunk combination
	 */
	without(chunk) {
		if (!this._chunks.has(chunk)) return this;
		let next = this._removeCache.get(chunk);
		if (next !== undefined) return next;
		const stack = [this._lastChunk];
		let current = this._parent;
		while (current._lastChunk !== chunk) {
			stack.push(current._lastChunk);
			current = current._parent;
		}
		next = current._parent;
		while (stack.length) next = next.with(stack.pop());
		this._removeCache.set(chunk, next);
		return next;
	}

	withAll(other) {
		if (other.size === 0) return this;
		if (this.size === 0) return other;
		const stack = [];
		/** @type {ChunkCombination} */
		let current = this;
		for (;;) {
			if (current._lastChunk.debugId < other._lastChunk.debugId) {
				stack.push(other._lastChunk);
				other = other._parent;
				if (other.size === 0) {
					while (stack.length) current = current.with(stack.pop());
					return current;
				}
			} else {
				stack.push(current._lastChunk);
				current = current._parent;
				if (current.size === 0) {
					while (stack.length) other = other.with(stack.pop());
					return other;
				}
			}
		}
	}

	hasSharedChunks(other) {
		if (this.size > other.size) {
			const chunks = this._chunks;
			for (const chunk of other._chunks) {
				if (chunks.has(chunk)) return true;
			}
		} else {
			const chunks = other._chunks;
			for (const chunk of this._chunks) {
				if (chunks.has(chunk)) return true;
			}
		}
		return false;
	}

	/**
	 * @param {ChunkCombination} other other combination
	 * @returns {boolean} true, when other is a subset of this combination
	 */
	isSubset(other) {
		// TODO: This could be more efficient when using the debugId order of the combinations
		/** @type {ChunkCombination} */
		let current = this;
		let otherSize = other.size;
		let currentSize = current.size;
		if (otherSize === 0) return true;
		for (;;) {
			if (currentSize === 0) return false;
			if (otherSize === 1) {
				if (currentSize === 1) {
					return current._lastChunk === other._lastChunk;
				} else {
					return current._chunks.has(other._lastChunk);
				}
			}
			if (otherSize * 8 < currentSize) {
				// go for the Set access when current >> other
				const chunks = current._chunks;
				for (const item of other._chunks) {
					if (!chunks.has(item)) return false;
				}
				return true;
			}
			const otherId = other._lastChunk.debugId;
			// skip over nodes in current that have higher ids
			while (otherId < current._lastChunk.debugId) {
				current = current._parent;
				currentSize--;
				if (currentSize === 0) return false;
			}
			if (otherId > current._lastChunk.debugId) {
				return false;
			}
			other = other._parent;
			otherSize--;
			if (otherSize === 0) return true;
			current = current._parent;
			currentSize--;
		}
	}

	getChunks() {
		return this._chunks.getFromUnorderedCache(getArray);
	}
}

ChunkCombination.empty = new ChunkCombination();

module.exports = ChunkCombination;
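Because with()/without() memoize their results and chunks are always appended in debugId order, equal chunk sets resolve to the same ChunkCombination instance. A minimal sketch of that identity sharing, assuming a pre-revert checkout of webpack (the require path and the chunk stand-ins below are illustrative, not real Chunk instances):

// ChunkCombination only existed before this revert; the path assumes webpack's source tree.
const ChunkCombination = require("webpack/lib/ChunkCombination");

// Stand-ins for Chunk objects – only a numeric debugId is needed for the ordering logic.
const chunkA = { debugId: 1000 };
const chunkB = { debugId: 1001 };

const viaAB = ChunkCombination.empty.with(chunkA).with(chunkB);
const viaBA = ChunkCombination.empty.with(chunkB).with(chunkA);

console.log(viaAB === viaBA); // true – both insertion orders normalize to one shared instance
console.log(viaAB.size); // 2
console.log(viaAB.without(chunkB) === ChunkCombination.empty.with(chunkA)); // true – cached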


@@ -6,7 +6,6 @@
 "use strict";
 const util = require("util");
-const ChunkCombination = require("./ChunkCombination");
 const Entrypoint = require("./Entrypoint");
 const ModuleGraphConnection = require("./ModuleGraphConnection");
 const { first } = require("./util/SetHelpers");
@@ -41,8 +40,6 @@ const {
 /** @type {ReadonlySet<string>} */
 const EMPTY_SET = new Set();
-const EMPTY_RUNTIME_SPEC_SET = new RuntimeSpecSet();
 const ZERO_BIG_INT = BigInt(0);
 const compareModuleIterables = compareIterables(compareModulesByIdentifier);
@@ -180,7 +177,8 @@ const isAvailableChunk = (a, b) => {
 class ChunkGraphModule {
 	constructor() {
-		this.chunkCombination = ChunkCombination.empty;
+		/** @type {SortableSet<Chunk>} */
+		this.chunks = new SortableSet();
 		/** @type {Set<Chunk> | undefined} */
 		this.entryInChunks = undefined;
 		/** @type {Set<Chunk> | undefined} */
@@ -303,7 +301,7 @@
 	connectChunkAndModule(chunk, module) {
 		const cgm = this._getChunkGraphModule(module);
 		const cgc = this._getChunkGraphChunk(chunk);
-		cgm.chunkCombination = cgm.chunkCombination.with(chunk);
+		cgm.chunks.add(chunk);
 		cgc.modules.add(module);
 	}
@@ -316,7 +314,7 @@
 		const cgm = this._getChunkGraphModule(module);
 		const cgc = this._getChunkGraphChunk(chunk);
 		cgc.modules.delete(module);
-		cgm.chunkCombination = cgm.chunkCombination.without(chunk);
+		cgm.chunks.delete(chunk);
 	}
 	/**
@@ -327,7 +325,7 @@
 		const cgc = this._getChunkGraphChunk(chunk);
 		for (const module of cgc.modules) {
 			const cgm = this._getChunkGraphModule(module);
-			cgm.chunkCombination = cgm.chunkCombination.without(chunk);
+			cgm.chunks.delete(chunk);
 		}
 		cgc.modules.clear();
 		chunk.disconnectFromGroups();
@@ -394,13 +392,13 @@
 		const oldCgm = this._getChunkGraphModule(oldModule);
 		const newCgm = this._getChunkGraphModule(newModule);
-		for (const chunk of oldCgm.chunkCombination._chunks) {
+		for (const chunk of oldCgm.chunks) {
 			const cgc = this._getChunkGraphChunk(chunk);
 			cgc.modules.delete(oldModule);
 			cgc.modules.add(newModule);
-			newCgm.chunkCombination = newCgm.chunkCombination.with(chunk);
+			newCgm.chunks.add(chunk);
 		}
-		oldCgm.chunkCombination = ChunkCombination.empty;
+		oldCgm.chunks.clear();
 		if (oldCgm.entryInChunks !== undefined) {
 			if (newCgm.entryInChunks === undefined) {
@@ -487,22 +485,13 @@
 		return cgm.entryInChunks !== undefined;
 	}
-	/**
-	 * @param {Module} module the module
-	 * @returns {ChunkCombination} chunk combination (do not modify)
-	 */
-	getModuleChunkCombination(module) {
-		const cgm = this._getChunkGraphModule(module);
-		return cgm.chunkCombination;
-	}
 	/**
 	 * @param {Module} module the module
 	 * @returns {Iterable<Chunk>} iterable of chunks (do not modify)
 	 */
 	getModuleChunksIterable(module) {
 		const cgm = this._getChunkGraphModule(module);
-		return cgm.chunkCombination._chunks;
+		return cgm.chunks;
 	}
 	/**
@@ -512,9 +501,8 @@
 	 */
 	getOrderedModuleChunksIterable(module, sortFn) {
 		const cgm = this._getChunkGraphModule(module);
-		const chunks = cgm.chunkCombination._chunks;
-		chunks.sortWith(sortFn);
-		return chunks;
+		cgm.chunks.sortWith(sortFn);
+		return cgm.chunks;
 	}
 	/**
@@ -523,7 +511,7 @@
 	 */
 	getModuleChunks(module) {
 		const cgm = this._getChunkGraphModule(module);
-		return cgm.chunkCombination.getChunks();
+		return cgm.chunks.getFromCache(getArray);
 	}
 	/**
@@ -532,7 +520,7 @@
 	 */
 	getNumberOfModuleChunks(module) {
 		const cgm = this._getChunkGraphModule(module);
-		return cgm.chunkCombination.size;
+		return cgm.chunks.size;
 	}
 	/**
@@ -541,10 +529,7 @@
 	 */
 	getModuleRuntimes(module) {
 		const cgm = this._getChunkGraphModule(module);
-		if (cgm.chunkCombination.size === 0) return EMPTY_RUNTIME_SPEC_SET;
-		return cgm.chunkCombination._chunks.getFromUnorderedCache(
-			getModuleRuntimes
-		);
+		return cgm.chunks.getFromUnorderedCache(getModuleRuntimes);
	}
 	/**
@@ -908,7 +893,8 @@
 		// Merge runtime
 		chunkA.runtime = mergeRuntime(chunkA.runtime, chunkB.runtime);
-		for (const module of this.getChunkModulesIterable(chunkB)) {
+		// getChunkModules is used here to create a clone, because disconnectChunkAndModule modifies
+		for (const module of this.getChunkModules(chunkB)) {
 			this.disconnectChunkAndModule(chunkB, module);
 			this.connectChunkAndModule(chunkA, module);
 		}
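For plugin authors, the revert leaves the public ChunkGraph accessors untouched; only the internal storage and the extra getModuleChunkCombination() method go away. A hypothetical plugin using the surviving API (the plugin name and hook choice below are made up for illustration):

// Hypothetical example – logs which chunks each module ended up in.
class LogModuleChunksPlugin {
	apply(compiler) {
		compiler.hooks.compilation.tap("LogModuleChunksPlugin", compilation => {
			compilation.hooks.afterOptimizeChunkModules.tap(
				"LogModuleChunksPlugin",
				(chunks, modules) => {
					const chunkGraph = compilation.chunkGraph;
					for (const module of modules) {
						if (chunkGraph.getNumberOfModuleChunks(module) === 0) continue;
						const names = Array.from(
							chunkGraph.getModuleChunksIterable(module),
							chunk => chunk.name || chunk.id
						);
						console.log(`${module.identifier()} -> ${names.join(", ")}`);
					}
				}
			);
		});
	}
}

module.exports = LogModuleChunksPlugin;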


@@ -3633,7 +3633,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
 		this.moduleGraph.removeConnection(dep);
 		if (this.chunkGraph) {
-			for (const chunk of this.chunkGraph.getModuleChunksIterable(
+			for (const chunk of this.chunkGraph.getModuleChunks(
 				originalModule
 			)) {
 				this.patchChunksAfterReasonRemoval(originalModule, chunk);


@ -5,10 +5,11 @@
"use strict"; "use strict";
const ChunkCombination = require("../ChunkCombination"); const Chunk = require("../Chunk");
const { STAGE_ADVANCED } = require("../OptimizationStages"); const { STAGE_ADVANCED } = require("../OptimizationStages");
const WebpackError = require("../WebpackError"); const WebpackError = require("../WebpackError");
const { requestToId } = require("../ids/IdHelpers"); const { requestToId } = require("../ids/IdHelpers");
const { isSubset } = require("../util/SetHelpers");
const SortableSet = require("../util/SortableSet"); const SortableSet = require("../util/SortableSet");
const { const {
compareModulesByIdentifier, compareModulesByIdentifier,
@ -25,7 +26,6 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */ /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */
/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */ /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */
/** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */ /** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGraph")} ChunkGraph */ /** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../ChunkGroup")} ChunkGroup */ /** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Compilation").AssetInfo} AssetInfo */ /** @typedef {import("../Compilation").AssetInfo} AssetInfo */
@ -155,9 +155,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
* @property {number} cacheGroupIndex * @property {number} cacheGroupIndex
* @property {string} name * @property {string} name
* @property {Record<string, number>} sizes * @property {Record<string, number>} sizes
* @property {ChunkCombination} chunks * @property {Set<Chunk>} chunks
* @property {Set<Chunk>} reuseableChunks * @property {Set<Chunk>} reuseableChunks
* @property {Set<ChunkCombination>} chunkCombinations * @property {Set<bigint | Chunk>} chunksKeys
*/ */
const defaultGetName = /** @type {GetName} */ (() => {}); const defaultGetName = /** @type {GetName} */ (() => {});
@ -204,6 +204,19 @@ const mapObject = (obj, fn) => {
return newObj; return newObj;
}; };
/**
* @template T
* @param {Set<T>} a set
* @param {Set<T>} b other set
* @returns {boolean} true if at least one item of a is in b
*/
const isOverlap = (a, b) => {
for (const item of a) {
if (b.has(item)) return true;
}
return false;
};
const compareModuleIterables = compareIterables(compareModulesByIdentifier); const compareModuleIterables = compareIterables(compareModulesByIdentifier);
/** /**
@ -756,132 +769,207 @@ module.exports = class SplitChunksPlugin {
logger.time("prepare"); logger.time("prepare");
const chunkGraph = compilation.chunkGraph; const chunkGraph = compilation.chunkGraph;
const moduleGraph = compilation.moduleGraph; const moduleGraph = compilation.moduleGraph;
// Give each selected chunk an index (to create strings from chunks)
const getChunkCombinationsInGraph = memoize(() => { /** @type {Map<Chunk, bigint>} */
/** @type {Set<ChunkCombination>} */ const chunkIndexMap = new Map();
const chunkCombinationsInGraph = new Set(); const ZERO = BigInt("0");
for (const module of compilation.modules) { const ONE = BigInt("1");
const chunkCombination = let index = ONE;
chunkGraph.getModuleChunkCombination(module); for (const chunk of chunks) {
chunkCombinationsInGraph.add(chunkCombination); chunkIndexMap.set(chunk, index);
index = index << ONE;
}
/**
* @param {Iterable<Chunk>} chunks list of chunks
* @returns {bigint | Chunk} key of the chunks
*/
const getKey = chunks => {
const iterator = chunks[Symbol.iterator]();
let result = iterator.next();
if (result.done) return ZERO;
const first = result.value;
result = iterator.next();
if (result.done) return first;
let key =
chunkIndexMap.get(first) | chunkIndexMap.get(result.value);
while (!(result = iterator.next()).done) {
key = key | chunkIndexMap.get(result.value);
} }
return chunkCombinationsInGraph; return key;
};
const keyToString = key => {
if (typeof key === "bigint") return key.toString(16);
return chunkIndexMap.get(key).toString(16);
};
const getChunkSetsInGraph = memoize(() => {
/** @type {Map<bigint, Set<Chunk>>} */
const chunkSetsInGraph = new Map();
/** @type {Set<Chunk>} */
const singleChunkSets = new Set();
for (const module of compilation.modules) {
const chunks = chunkGraph.getModuleChunksIterable(module);
const chunksKey = getKey(chunks);
if (typeof chunksKey === "bigint") {
if (!chunkSetsInGraph.has(chunksKey)) {
chunkSetsInGraph.set(chunksKey, new Set(chunks));
}
} else {
singleChunkSets.add(chunksKey);
}
}
return { chunkSetsInGraph, singleChunkSets };
}); });
/** /**
* @param {Module} module the module * @param {Module} module the module
* @returns {Iterable<ChunkCombination>} groups of chunks with equal exports * @returns {Iterable<Chunk[]>} groups of chunks with equal exports
*/ */
const groupChunksByExports = module => { const groupChunksByExports = module => {
const exportsInfo = moduleGraph.getExportsInfo(module); const exportsInfo = moduleGraph.getExportsInfo(module);
const groupedByUsedExports = new Map(); const groupedByUsedExports = new Map();
for (const chunk of chunkGraph.getModuleChunksIterable(module)) { for (const chunk of chunkGraph.getModuleChunksIterable(module)) {
const key = exportsInfo.getUsageKey(chunk.runtime); const key = exportsInfo.getUsageKey(chunk.runtime);
const combination = const list = groupedByUsedExports.get(key);
groupedByUsedExports.get(key) || ChunkCombination.empty; if (list !== undefined) {
groupedByUsedExports.set(key, combination.with(chunk)); list.push(chunk);
} else {
groupedByUsedExports.set(key, [chunk]);
}
} }
return groupedByUsedExports.values(); return groupedByUsedExports.values();
}; };
/** @type {Map<Module, Iterable<ChunkCombination>>} */ /** @type {Map<Module, Iterable<Chunk[]>>} */
const groupedByExportsMap = new Map(); const groupedByExportsMap = new Map();
const getExportsChunkCombinationsInGraph = memoize(() => { const getExportsChunkSetsInGraph = memoize(() => {
/** @type {Set<ChunkCombination>} */ /** @type {Map<bigint, Set<Chunk>>} */
const chunkCombinationsInGraph = new Set(); const chunkSetsInGraph = new Map();
/** @type {Set<Chunk>} */
const singleChunkSets = new Set();
for (const module of compilation.modules) { for (const module of compilation.modules) {
const groupedChunks = Array.from(groupChunksByExports(module)); const groupedChunks = Array.from(groupChunksByExports(module));
groupedByExportsMap.set(module, groupedChunks); groupedByExportsMap.set(module, groupedChunks);
for (const chunkCombination of groupedChunks) { for (const chunks of groupedChunks) {
chunkCombinationsInGraph.add(chunkCombination); if (chunks.length === 1) {
singleChunkSets.add(chunks[0]);
} else {
const chunksKey = /** @type {bigint} */ (getKey(chunks));
if (!chunkSetsInGraph.has(chunksKey)) {
chunkSetsInGraph.set(chunksKey, new Set(chunks));
}
}
} }
} }
return chunkCombinationsInGraph; return { chunkSetsInGraph, singleChunkSets };
}); });
// group these set of chunks by count // group these set of chunks by count
// to allow to check less sets via isSubset // to allow to check less sets via isSubset
// (only smaller sets can be subset) // (only smaller sets can be subset)
const groupChunkCombinationsByCount = chunkCombinations => { const groupChunkSetsByCount = chunkSets => {
/** @type {Map<number, ChunkCombination[]>} */ /** @type {Map<number, Array<Set<Chunk>>>} */
const chunkCombinationsByCount = new Map(); const chunkSetsByCount = new Map();
for (const chunksSet of chunkCombinations) { for (const chunksSet of chunkSets) {
const count = chunksSet.size; const count = chunksSet.size;
let array = chunkCombinationsByCount.get(count); let array = chunkSetsByCount.get(count);
if (array === undefined) { if (array === undefined) {
array = []; array = [];
chunkCombinationsByCount.set(count, array); chunkSetsByCount.set(count, array);
} }
array.push(chunksSet); array.push(chunksSet);
} }
return chunkCombinationsByCount; return chunkSetsByCount;
}; };
const getChunkCombinationsByCount = memoize(() => const getChunkSetsByCount = memoize(() =>
groupChunkCombinationsByCount(getChunkCombinationsInGraph()) groupChunkSetsByCount(
getChunkSetsInGraph().chunkSetsInGraph.values()
)
); );
const getExportsChunkCombinationsByCount = memoize(() => const getExportsChunkSetsByCount = memoize(() =>
groupChunkCombinationsByCount(getExportsChunkCombinationsInGraph()) groupChunkSetsByCount(
getExportsChunkSetsInGraph().chunkSetsInGraph.values()
)
); );
/** // Create a list of possible combinations
* Create a list of possible combinations const createGetCombinations = (
* @param {Map<number, ChunkCombination[]>} chunkCombinationsByCount by count chunkSets,
* @returns {function(ChunkCombination): ChunkCombination[]} get combinations function singleChunkSets,
*/ chunkSetsByCount
const createGetCombinations = chunkCombinationsByCount => { ) => {
/** @type {Map<ChunkCombination, ChunkCombination[]>} */ /** @type {Map<bigint | Chunk, (Set<Chunk> | Chunk)[]>} */
const combinationsCache = new Map(); const combinationsCache = new Map();
/** return key => {
* @param {ChunkCombination} chunkCombination chunkCombination const cacheEntry = combinationsCache.get(key);
* @returns {ChunkCombination[]} combinations
*/
return chunkCombination => {
const cacheEntry = combinationsCache.get(chunkCombination);
if (cacheEntry !== undefined) return cacheEntry; if (cacheEntry !== undefined) return cacheEntry;
if (chunkCombination.size === 1) { if (key instanceof Chunk) {
const result = [chunkCombination]; const result = [key];
combinationsCache.set(chunkCombination, result); combinationsCache.set(key, result);
return result; return result;
} }
/** @type {ChunkCombination[]} */ const chunksSet = chunkSets.get(key);
const array = [chunkCombination]; /** @type {(Set<Chunk> | Chunk)[]} */
for (const [count, setArray] of chunkCombinationsByCount) { const array = [chunksSet];
for (const [count, setArray] of chunkSetsByCount) {
// "equal" is not needed because they would have been merge in the first step // "equal" is not needed because they would have been merge in the first step
if (count < chunkCombination.size) { if (count < chunksSet.size) {
for (const set of setArray) { for (const set of setArray) {
if (chunkCombination.isSubset(set)) { if (isSubset(chunksSet, set)) {
array.push(set); array.push(set);
} }
} }
} }
} }
combinationsCache.set(chunkCombination, array); for (const chunk of singleChunkSets) {
if (chunksSet.has(chunk)) {
array.push(chunk);
}
}
combinationsCache.set(key, array);
return array; return array;
}; };
}; };
const getCombinationsFactory = memoize(() => { const getCombinationsFactory = memoize(() => {
return createGetCombinations(getChunkCombinationsByCount()); const { chunkSetsInGraph, singleChunkSets } = getChunkSetsInGraph();
return createGetCombinations(
chunkSetsInGraph,
singleChunkSets,
getChunkSetsByCount()
);
}); });
const getCombinations = key => getCombinationsFactory()(key); const getCombinations = key => getCombinationsFactory()(key);
const getExportsCombinationsFactory = memoize(() => { const getExportsCombinationsFactory = memoize(() => {
return createGetCombinations(getExportsChunkCombinationsByCount()); const { chunkSetsInGraph, singleChunkSets } =
getExportsChunkSetsInGraph();
return createGetCombinations(
chunkSetsInGraph,
singleChunkSets,
getExportsChunkSetsByCount()
);
}); });
const getExportsCombinations = key => const getExportsCombinations = key =>
getExportsCombinationsFactory()(key); getExportsCombinationsFactory()(key);
/** @type {WeakMap<ChunkCombination, WeakMap<ChunkFilterFunction, ChunkCombination>>} */ /**
* @typedef {Object} SelectedChunksResult
* @property {Chunk[]} chunks the list of chunks
* @property {bigint | Chunk} key a key of the list
*/
/** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFunction, SelectedChunksResult>>} */
const selectedChunksCacheByChunksSet = new WeakMap(); const selectedChunksCacheByChunksSet = new WeakMap();
/** /**
* get chunks by applying the filter function to the list * get list and key by applying the filter function to the list
* It is cached for performance reasons * It is cached for performance reasons
* @param {ChunkCombination} chunks list of chunks * @param {Set<Chunk> | Chunk} chunks list of chunks
* @param {ChunkFilterFunction} chunkFilter filter function for chunks * @param {ChunkFilterFunction} chunkFilter filter function for chunks
* @returns {ChunkCombination} selected chunks * @returns {SelectedChunksResult} list and key
*/ */
const getSelectedChunks = (chunks, chunkFilter) => { const getSelectedChunks = (chunks, chunkFilter) => {
let entry = selectedChunksCacheByChunksSet.get(chunks); let entry = selectedChunksCacheByChunksSet.get(chunks);
@ -889,16 +977,22 @@ module.exports = class SplitChunksPlugin {
entry = new WeakMap(); entry = new WeakMap();
selectedChunksCacheByChunksSet.set(chunks, entry); selectedChunksCacheByChunksSet.set(chunks, entry);
} }
/** @type {ChunkCombination} */ /** @type {SelectedChunksResult} */
let entry2 = entry.get(chunkFilter); let entry2 = entry.get(chunkFilter);
if (entry2 === undefined) { if (entry2 === undefined) {
/** @type {ChunkCombination} */ /** @type {Chunk[]} */
let selectedChunks = ChunkCombination.empty; const selectedChunks = [];
for (const chunk of chunks.chunksIterable) { if (chunks instanceof Chunk) {
if (chunkFilter(chunk)) if (chunkFilter(chunks)) selectedChunks.push(chunks);
selectedChunks = selectedChunks.with(chunk); } else {
for (const chunk of chunks) {
if (chunkFilter(chunk)) selectedChunks.push(chunk);
}
} }
entry2 = selectedChunks; entry2 = {
chunks: selectedChunks,
key: getKey(selectedChunks)
};
entry.set(chunkFilter, entry2); entry.set(chunkFilter, entry2);
} }
return entry2; return entry2;
@ -917,7 +1011,8 @@ module.exports = class SplitChunksPlugin {
/** /**
* @param {CacheGroup} cacheGroup the current cache group * @param {CacheGroup} cacheGroup the current cache group
* @param {number} cacheGroupIndex the index of the cache group of ordering * @param {number} cacheGroupIndex the index of the cache group of ordering
* @param {ChunkCombination} selectedChunks chunks selected for this module * @param {Chunk[]} selectedChunks chunks selected for this module
* @param {bigint | Chunk} selectedChunksKey a key of selectedChunks
* @param {Module} module the current module * @param {Module} module the current module
* @returns {void} * @returns {void}
*/ */
@ -925,20 +1020,25 @@ module.exports = class SplitChunksPlugin {
cacheGroup, cacheGroup,
cacheGroupIndex, cacheGroupIndex,
selectedChunks, selectedChunks,
selectedChunksKey,
module module
) => { ) => {
// Break if minimum number of chunks is not reached // Break if minimum number of chunks is not reached
if (selectedChunks.size < cacheGroup.minChunks) return; if (selectedChunks.length < cacheGroup.minChunks) return;
// Determine name for split chunk // Determine name for split chunk
const name = cacheGroup.getName( const name = cacheGroup.getName(
module, module,
selectedChunks.getChunks(), selectedChunks,
cacheGroup.key cacheGroup.key
); );
// Check if the name is ok // Check if the name is ok
const existingChunk = compilation.namedChunks.get(name); const existingChunk = compilation.namedChunks.get(name);
if (existingChunk) { if (existingChunk) {
const parentValidationKey = `${name}|${selectedChunks.debugId}`; const parentValidationKey = `${name}|${
typeof selectedChunksKey === "bigint"
? selectedChunksKey
: selectedChunksKey.debugId
}`;
const valid = alreadyValidatedParents.get(parentValidationKey); const valid = alreadyValidatedParents.get(parentValidationKey);
if (valid === false) return; if (valid === false) return;
if (valid === undefined) { if (valid === undefined) {
@ -947,7 +1047,7 @@ module.exports = class SplitChunksPlugin {
let isInAllParents = true; let isInAllParents = true;
/** @type {Set<ChunkGroup>} */ /** @type {Set<ChunkGroup>} */
const queue = new Set(); const queue = new Set();
for (const chunk of selectedChunks.chunksIterable) { for (const chunk of selectedChunks) {
for (const group of chunk.groupsIterable) { for (const group of chunk.groupsIterable) {
queue.add(group); queue.add(group);
} }
@ -993,7 +1093,9 @@ module.exports = class SplitChunksPlugin {
// This automatically merges equal names // This automatically merges equal names
const key = const key =
cacheGroup.key + cacheGroup.key +
(name ? ` name:${name}` : ` chunks:${selectedChunks.debugId}`); (name
? ` name:${name}`
: ` chunks:${keyToString(selectedChunksKey)}`);
// Add module to maps // Add module to maps
let info = chunksInfoMap.get(key); let info = chunksInfoMap.get(key);
if (info === undefined) { if (info === undefined) {
@ -1008,9 +1110,9 @@ module.exports = class SplitChunksPlugin {
cacheGroupIndex, cacheGroupIndex,
name, name,
sizes: {}, sizes: {},
chunks: ChunkCombination.empty, chunks: new Set(),
reuseableChunks: new Set(), reuseableChunks: new Set(),
chunkCombinations: new Set() chunksKeys: new Set()
}) })
); );
} }
@ -1021,10 +1123,12 @@ module.exports = class SplitChunksPlugin {
info.sizes[type] = (info.sizes[type] || 0) + module.size(type); info.sizes[type] = (info.sizes[type] || 0) + module.size(type);
} }
} }
const oldChunksKeysSize = info.chunkCombinations.size; const oldChunksKeysSize = info.chunksKeys.size;
info.chunkCombinations.add(selectedChunks); info.chunksKeys.add(selectedChunksKey);
if (oldChunksKeysSize !== info.chunkCombinations.size) { if (oldChunksKeysSize !== info.chunksKeys.size) {
info.chunks = info.chunks.withAll(selectedChunks); for (const chunk of selectedChunks) {
info.chunks.add(chunk);
}
} }
}; };
@ -1045,56 +1149,50 @@ module.exports = class SplitChunksPlugin {
continue; continue;
} }
const chunkCombination = // Prepare some values (usedExports = false)
chunkGraph.getModuleChunkCombination(module); const getCombs = memoize(() => {
const chunks = chunkGraph.getModuleChunksIterable(module);
const chunksKey = getKey(chunks);
return getCombinations(chunksKey);
});
// Prepare some values (usedExports = true)
const getCombsByUsedExports = memoize(() => {
// fill the groupedByExportsMap
getExportsChunkSetsInGraph();
/** @type {Set<Set<Chunk> | Chunk>} */
const set = new Set();
const groupedByUsedExports = groupedByExportsMap.get(module);
for (const chunks of groupedByUsedExports) {
const chunksKey = getKey(chunks);
for (const comb of getExportsCombinations(chunksKey))
set.add(comb);
}
return set;
});
let cacheGroupIndex = 0; let cacheGroupIndex = 0;
for (const cacheGroupSource of cacheGroups) { for (const cacheGroupSource of cacheGroups) {
const cacheGroup = this._getCacheGroup(cacheGroupSource); const cacheGroup = this._getCacheGroup(cacheGroupSource);
// Break if minimum number of chunks is not reached const combs = cacheGroup.usedExports
if (chunkCombination.size < cacheGroup.minChunks) continue; ? getCombsByUsedExports()
: getCombs();
/** @type {Iterable<ChunkCombination>} */
let combs;
if (cacheGroup.usedExports) {
// fill the groupedByExportsMap
getExportsChunkCombinationsInGraph();
/** @type {Set<ChunkCombination>} */
const set = new Set();
const groupedByUsedExports = groupedByExportsMap.get(module);
for (const chunkCombination of groupedByUsedExports) {
const preSelectedChunks = getSelectedChunks(
chunkCombination,
cacheGroup.chunksFilter
);
// Break if minimum number of chunks is not reached
if (preSelectedChunks.size < cacheGroup.minChunks) continue;
for (const comb of getExportsCombinations(preSelectedChunks))
set.add(comb);
}
combs = set;
} else {
const preSelectedChunks = getSelectedChunks(
chunkCombination,
cacheGroup.chunksFilter
);
// Break if minimum number of chunks is not reached
if (preSelectedChunks.size < cacheGroup.minChunks) continue;
combs = getCombinations(preSelectedChunks);
}
// For all combination of chunk selection // For all combination of chunk selection
for (const selectedChunks of combs) { for (const chunkCombination of combs) {
// Break if minimum number of chunks is not reached // Break if minimum number of chunks is not reached
const count = chunkCombination.size; const count =
chunkCombination instanceof Chunk ? 1 : chunkCombination.size;
if (count < cacheGroup.minChunks) continue; if (count < cacheGroup.minChunks) continue;
// Select chunks by configuration
const { chunks: selectedChunks, key: selectedChunksKey } =
getSelectedChunks(chunkCombination, cacheGroup.chunksFilter);
addModuleToChunksInfoMap( addModuleToChunksInfoMap(
cacheGroup, cacheGroup,
cacheGroupIndex, cacheGroupIndex,
selectedChunks, selectedChunks,
selectedChunksKey,
module module
); );
} }
@ -1186,12 +1284,12 @@ module.exports = class SplitChunksPlugin {
const chunkByName = compilation.namedChunks.get(chunkName); const chunkByName = compilation.namedChunks.get(chunkName);
if (chunkByName !== undefined) { if (chunkByName !== undefined) {
newChunk = chunkByName; newChunk = chunkByName;
const newChunks = item.chunks.without(newChunk); const oldSize = item.chunks.size;
isExistingChunk = newChunks !== item.chunks; item.chunks.delete(newChunk);
if (isExistingChunk) item.chunks = newChunks; isExistingChunk = item.chunks.size !== oldSize;
} }
} else if (item.cacheGroup.reuseExistingChunk) { } else if (item.cacheGroup.reuseExistingChunk) {
outer: for (const chunk of item.chunks.chunksIterable) { outer: for (const chunk of item.chunks) {
if ( if (
chunkGraph.getNumberOfChunkModules(chunk) !== chunkGraph.getNumberOfChunkModules(chunk) !==
item.modules.size item.modules.size
@ -1225,7 +1323,7 @@ module.exports = class SplitChunksPlugin {
} }
} }
if (newChunk) { if (newChunk) {
item.chunks = item.chunks.without(newChunk); item.chunks.delete(newChunk);
chunkName = undefined; chunkName = undefined;
isExistingChunk = true; isExistingChunk = true;
isReusedWithAllModules = true; isReusedWithAllModules = true;
@ -1236,7 +1334,7 @@ module.exports = class SplitChunksPlugin {
item.cacheGroup._conditionalEnforce && item.cacheGroup._conditionalEnforce &&
checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold); checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);
let usedChunks = item.chunks; const usedChunks = new Set(item.chunks);
// Check if maxRequests condition can be fulfilled // Check if maxRequests condition can be fulfilled
if ( if (
@ -1244,7 +1342,7 @@ module.exports = class SplitChunksPlugin {
(Number.isFinite(item.cacheGroup.maxInitialRequests) || (Number.isFinite(item.cacheGroup.maxInitialRequests) ||
Number.isFinite(item.cacheGroup.maxAsyncRequests)) Number.isFinite(item.cacheGroup.maxAsyncRequests))
) { ) {
for (const chunk of usedChunks.chunksIterable) { for (const chunk of usedChunks) {
// respect max requests // respect max requests
const maxRequests = chunk.isOnlyInitial() const maxRequests = chunk.isOnlyInitial()
? item.cacheGroup.maxInitialRequests ? item.cacheGroup.maxInitialRequests
@ -1258,28 +1356,30 @@ module.exports = class SplitChunksPlugin {
isFinite(maxRequests) && isFinite(maxRequests) &&
getRequests(chunk) >= maxRequests getRequests(chunk) >= maxRequests
) { ) {
usedChunks = usedChunks.without(chunk); usedChunks.delete(chunk);
} }
} }
} }
outer: for (const chunk of usedChunks.chunksIterable) { outer: for (const chunk of usedChunks) {
for (const module of item.modules) { for (const module of item.modules) {
if (chunkGraph.isModuleInChunk(module, chunk)) continue outer; if (chunkGraph.isModuleInChunk(module, chunk)) continue outer;
} }
usedChunks = usedChunks.without(chunk); usedChunks.delete(chunk);
} }
// Were some (invalid) chunks removed from usedChunks? // Were some (invalid) chunks removed from usedChunks?
// => readd all modules to the queue, as things could have been changed // => readd all modules to the queue, as things could have been changed
if (usedChunks !== item.chunks) { if (usedChunks.size < item.chunks.size) {
if (isExistingChunk) usedChunks = usedChunks.with(newChunk); if (isExistingChunk) usedChunks.add(newChunk);
if (usedChunks.size >= item.cacheGroup.minChunks) { if (usedChunks.size >= item.cacheGroup.minChunks) {
const chunksArr = Array.from(usedChunks);
for (const module of item.modules) { for (const module of item.modules) {
addModuleToChunksInfoMap( addModuleToChunksInfoMap(
item.cacheGroup, item.cacheGroup,
item.cacheGroupIndex, item.cacheGroupIndex,
usedChunks, chunksArr,
getKey(usedChunks),
module module
); );
} }
@ -1293,7 +1393,7 @@ module.exports = class SplitChunksPlugin {
item.cacheGroup._validateRemainingSize && item.cacheGroup._validateRemainingSize &&
usedChunks.size === 1 usedChunks.size === 1
) { ) {
const [chunk] = usedChunks.chunksIterable; const [chunk] = usedChunks;
let chunkSizes = Object.create(null); let chunkSizes = Object.create(null);
for (const module of chunkGraph.getChunkModulesIterable(chunk)) { for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
if (!item.modules.has(module)) { if (!item.modules.has(module)) {
@ -1327,7 +1427,7 @@ module.exports = class SplitChunksPlugin {
newChunk = compilation.addChunk(chunkName); newChunk = compilation.addChunk(chunkName);
} }
// Walk through all chunks // Walk through all chunks
for (const chunk of usedChunks.chunksIterable) { for (const chunk of usedChunks) {
// Add graph connections for splitted chunk // Add graph connections for splitted chunk
chunk.split(newChunk); chunk.split(newChunk);
} }
@ -1357,14 +1457,14 @@ module.exports = class SplitChunksPlugin {
// Add module to new chunk // Add module to new chunk
chunkGraph.connectChunkAndModule(newChunk, module); chunkGraph.connectChunkAndModule(newChunk, module);
// Remove module from used chunks // Remove module from used chunks
for (const chunk of usedChunks.chunksIterable) { for (const chunk of usedChunks) {
chunkGraph.disconnectChunkAndModule(chunk, module); chunkGraph.disconnectChunkAndModule(chunk, module);
} }
} }
} else { } else {
// Remove all modules from used chunks // Remove all modules from used chunks
for (const module of item.modules) { for (const module of item.modules) {
for (const chunk of usedChunks.chunksIterable) { for (const chunk of usedChunks) {
chunkGraph.disconnectChunkAndModule(chunk, module); chunkGraph.disconnectChunkAndModule(chunk, module);
} }
} }
@ -1406,7 +1506,7 @@ module.exports = class SplitChunksPlugin {
// remove all modules from other entries and update size // remove all modules from other entries and update size
for (const [key, info] of chunksInfoMap) { for (const [key, info] of chunksInfoMap) {
if (info.chunks.hasSharedChunks(usedChunks)) { if (isOverlap(info.chunks, usedChunks)) {
// update modules and total size // update modules and total size
// may remove it from the map when < minSize // may remove it from the map when < minSize
let updated = false; let updated = false;
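The revert reinstates bigint chunk-set keys in SplitChunksPlugin: each selected chunk gets a distinct power-of-two index, and a set of chunks is keyed by OR-ing those indices (single-chunk sets short-circuit to the Chunk object itself). A standalone sketch of that keying scheme, with made-up chunk stand-ins:

// Stand-ins for Chunk objects; only object identity matters for the Map lookups.
const chunks = [{ name: "main" }, { name: "vendors" }, { name: "runtime" }];

// Assign each chunk a unique power-of-two bigint index.
const chunkIndexMap = new Map();
let index = 1n;
for (const chunk of chunks) {
	chunkIndexMap.set(chunk, index);
	index <<= 1n;
}

// OR the indices together: the key is identical for the same set of chunks,
// independent of iteration order, and cheap to compare or use as a Map key.
const keyOf = iterable => {
	let key = 0n;
	for (const chunk of iterable) key |= chunkIndexMap.get(chunk);
	return key;
};

console.log(keyOf([chunks[0], chunks[2]]).toString(16)); // "5"
console.log(keyOf([chunks[2], chunks[0]]).toString(16)); // "5" – order-independent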


@@ -5,7 +5,6 @@
 "use strict";
 const WebpackError = require("../WebpackError");
-const { someInIterable } = require("../util/IterableHelpers");
 /** @typedef {import("../ChunkGraph")} ChunkGraph */
 /** @typedef {import("../Module")} Module */
@@ -43,11 +42,7 @@ const getInitialModuleChains = (
 		for (const connection of moduleGraph.getIncomingConnections(head)) {
 			const newHead = connection.originModule;
 			if (newHead) {
-				if (
-					!someInIterable(chunkGraph.getModuleChunksIterable(newHead), c =>
-						c.canBeInitial()
-					)
-				)
+				if (!chunkGraph.getModuleChunks(newHead).some(c => c.canBeInitial()))
 					continue;
 				final = false;
 				if (alreadyReferencedModules.has(newHead)) continue;

types.d.ts (vendored)

@@ -757,17 +757,6 @@ declare class Chunk {
 		filterFn?: (c: Chunk, chunkGraph: ChunkGraph) => boolean
 	): Record<string | number, Record<string, (string | number)[]>>;
 }
-declare abstract class ChunkCombination {
-	debugId: number;
-	size: number;
-	readonly chunksIterable: Iterable<Chunk>;
-	with(chunk: Chunk): ChunkCombination;
-	without(chunk: Chunk): ChunkCombination;
-	withAll(other?: any): any;
-	hasSharedChunks(other?: any): boolean;
-	isSubset(other: ChunkCombination): boolean;
-	getChunks(): Chunk[];
-}
 declare class ChunkGraph {
 	constructor(moduleGraph: ModuleGraph, hashFunction?: string | typeof Hash);
 	moduleGraph: ModuleGraph;
@@ -785,7 +774,6 @@ declare class ChunkGraph {
 	isModuleInChunk(module: Module, chunk: Chunk): boolean;
 	isModuleInChunkGroup(module: Module, chunkGroup: ChunkGroup): boolean;
 	isEntryModule(module: Module): boolean;
-	getModuleChunkCombination(module: Module): ChunkCombination;
 	getModuleChunksIterable(module: Module): Iterable<Chunk>;
 	getOrderedModuleChunksIterable(
 		module: Module,