mirror of https://github.com/webpack/webpack.git
Merge pull request #14358 from webpack/bugfix/shared-cache
commit 0c3fab62ab

lib/Compilation.js
@@ -78,7 +78,7 @@ const {
 	createFakeHook
 } = require("./util/deprecation");
 const processAsyncTree = require("./util/processAsyncTree");
-const { getRuntimeKey, RuntimeSpecMap } = require("./util/runtime");
+const { getRuntimeKey } = require("./util/runtime");
 const { isSourceEqual } = require("./util/source");
 
 /** @template T @typedef {import("tapable").AsArray<T>} AsArray<T> */
@@ -2178,7 +2178,39 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
 		let statNew = 0;
 		let statChanged = 0;
 		let statUnchanged = 0;
+		let statReferencesChanged = 0;
 		let statWithoutHash = 0;
+
+		const computeReferences = module => {
+			/** @type {WeakMap<Dependency, Module>} */
+			let references = undefined;
+			for (const connection of moduleGraph.getOutgoingConnections(module)) {
+				const d = connection.dependency;
+				const m = connection.module;
+				if (!d || !m || unsafeCacheDependencies.has(d)) continue;
+				if (references === undefined) references = new WeakMap();
+				references.set(d, m);
+			}
+			return references;
+		};
+
+		/**
+		 * @param {Module} module the module
+		 * @param {WeakMap<Dependency, Module>} references references
+		 * @returns {boolean} true, when the references differ
+		 */
+		const compareReferences = (module, references) => {
+			if (references === undefined) return true;
+			for (const connection of moduleGraph.getOutgoingConnections(module)) {
+				const d = connection.dependency;
+				if (!d) continue;
+				const entry = references.get(d);
+				if (entry === undefined) continue;
+				if (entry !== connection.module) return false;
+			}
+			return true;
+		};
+
 		for (const module of modules) {
 			const hash = module.buildInfo && module.buildInfo.hash;
 			if (typeof hash === "string") {
@@ -2188,27 +2220,33 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
 					const memCache = new WeakTupleMap();
 					moduleMemCacheCache.set(module, {
 						hash: hash,
+						references: computeReferences(module),
 						memCache
 					});
 					moduleMemCaches.set(module, memCache);
 					affectedModules.add(module);
 					statNew++;
-				} else if (cachedMemCache.hash === hash) {
-					// keep the old mem cache
-					moduleMemCaches.set(module, cachedMemCache.memCache);
-					statUnchanged++;
-				} else {
+				} else if (cachedMemCache.hash !== hash) {
 					// use a new one
 					const memCache = new WeakTupleMap();
-					moduleMemCacheCache.set(module, {
-						hash: hash,
-						memCache
-					});
 					moduleMemCaches.set(module, memCache);
 					affectedModules.add(module);
+					cachedMemCache.hash = hash;
+					cachedMemCache.references = computeReferences(module);
+					cachedMemCache.memCache = memCache;
 					statChanged++;
+				} else if (!compareReferences(module, cachedMemCache.references)) {
+					// use a new one
+					const memCache = new WeakTupleMap();
+					moduleMemCaches.set(module, memCache);
+					affectedModules.add(module);
+					cachedMemCache.references = computeReferences(module);
+					cachedMemCache.memCache = memCache;
+					statReferencesChanged++;
+				} else {
+					// keep the old mem cache
+					moduleMemCaches.set(module, cachedMemCache.memCache);
+					statUnchanged++;
 				}
 			} else {
 				infectedModules.add(module);
@@ -2261,7 +2299,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
 					affectedModules.add(referencingModule);
 				}
 				const memCache = new WeakTupleMap();
-				const cache = moduleMemCacheCache.get(module);
+				const cache = moduleMemCacheCache.get(referencingModule);
 				cache.memCache = memCache;
 				moduleMemCaches.set(referencingModule, memCache);
 			}
@@ -2275,7 +2313,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
 					infectedModules.size
 				} infected of ${
 					this.modules.size
-				}) modules flagged as affected (${statNew} new modules, ${statChanged} changed, ${statUnchanged} unchanged, ${statWithoutHash} without hash)`
+				}) modules flagged as affected (${statNew} new modules, ${statChanged} changed, ${statReferencesChanged} references changed, ${statUnchanged} unchanged, ${statWithoutHash} without hash)`
 			);
 		}
 
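
Note: taken together, the Compilation.js hunks above tighten the rule for reusing a module's memoization cache: besides an unchanged build hash, the module's dependency-to-resolved-module references must also be unchanged, otherwise the entry is rebuilt and counted as "references changed". Below is a minimal standalone sketch of that rule, using plain objects and Maps instead of webpack's module graph, WeakTupleMap and unsafe-cache bookkeeping; every name in it is illustrative, not webpack API.

// Sketch only: a cache entry records the module's hash plus a snapshot of its
// dependency -> resolved-module references; the memo cache is reused only
// while both stay the same.
const computeReferences = module => {
	const references = new Map();
	for (const { dependency, module: resolved } of module.outgoingConnections) {
		if (dependency && resolved) references.set(dependency, resolved);
	}
	return references;
};

const compareReferences = (module, references) => {
	for (const { dependency, module: resolved } of module.outgoingConnections) {
		if (!dependency) continue;
		const expected = references.get(dependency);
		if (expected !== undefined && expected !== resolved) return false;
	}
	return true;
};

const getMemCache = (store, module) => {
	const entry = store.get(module.identifier);
	if (
		entry !== undefined &&
		entry.hash === module.hash &&
		compareReferences(module, entry.references)
	) {
		return entry.memCache; // hash and references unchanged: safe to reuse
	}
	const memCache = new Map(); // stand-in for webpack's WeakTupleMap
	store.set(module.identifier, {
		hash: module.hash,
		references: computeReferences(module),
		memCache
	});
	return memCache;
};

// Usage: the second lookup reuses the cache; the third does not, because the
// same dependency now resolves to a different module.
const store = new Map();
const dep = { request: "./b" };
const moduleA = {
	identifier: "a.js",
	hash: "h1",
	outgoingConnections: [{ dependency: dep, module: { identifier: "b.js" } }]
};
const first = getMemCache(store, moduleA);
console.log(getMemCache(store, moduleA) === first); // true
moduleA.outgoingConnections = [{ dependency: dep, module: { identifier: "c.js" } }];
console.log(getMemCache(store, moduleA) === first); // false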
@@ -3095,16 +3133,11 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
 					// modules with async blocks depend on the chunk graph and can't be cached that way
 					module.blocks.length === 0 &&
 					moduleMemCaches.get(module);
-				/** @type {RuntimeSpecMap<Set<string>>} */
-				const moduleRuntimeRequirementsMemCache =
-					memCache &&
-					memCache.provide(
-						"moduleRuntimeRequirements",
-						() => new RuntimeSpecMap()
-					);
 				for (const runtime of chunkGraph.getModuleRuntimes(module)) {
-					if (moduleRuntimeRequirementsMemCache) {
-						const cached = moduleRuntimeRequirementsMemCache.get(runtime);
+					if (memCache) {
+						const cached = memCache.get(
+							`moduleRuntimeRequirements-${getRuntimeKey(runtime)}`
+						);
 						if (cached !== undefined) {
 							if (cached !== null) {
 								chunkGraph.addModuleRuntimeRequirements(
@@ -3125,8 +3158,11 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
 					} else if (additionalModuleRuntimeRequirements.isUsed()) {
 						set = new Set();
 					} else {
-						if (moduleRuntimeRequirementsMemCache) {
-							moduleRuntimeRequirementsMemCache.set(runtime, null);
+						if (memCache) {
+							memCache.set(
+								`moduleRuntimeRequirements-${getRuntimeKey(runtime)}`,
+								null
+							);
 						}
 						continue;
 					}
@@ -3137,12 +3173,18 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
 						if (hook !== undefined) hook.call(module, set, context);
 					}
 					if (set.size === 0) {
-						if (moduleRuntimeRequirementsMemCache) {
-							moduleRuntimeRequirementsMemCache.set(runtime, null);
+						if (memCache) {
+							memCache.set(
+								`moduleRuntimeRequirements-${getRuntimeKey(runtime)}`,
+								null
+							);
 						}
 					} else {
-						if (moduleRuntimeRequirementsMemCache) {
-							moduleRuntimeRequirementsMemCache.set(runtime, set);
+						if (memCache) {
+							memCache.set(
+								`moduleRuntimeRequirements-${getRuntimeKey(runtime)}`,
+								set
+							);
 							chunkGraph.addModuleRuntimeRequirements(
 								module,
 								runtime,
@@ -3555,13 +3597,9 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
 				// modules with async blocks depend on the chunk graph and can't be cached that way
 				module.blocks.length === 0 &&
 				moduleMemCaches.get(module);
-			/** @type {RuntimeSpecMap<string>} */
-			const moduleHashesMemCache =
-				memCache &&
-				memCache.provide("moduleHashes", () => new RuntimeSpecMap());
 			for (const runtime of chunkGraph.getModuleRuntimes(module)) {
-				if (moduleHashesMemCache) {
-					const digest = moduleHashesMemCache.get(runtime);
+				if (memCache) {
+					const digest = memCache.get(`moduleHash-${getRuntimeKey(runtime)}`);
 					if (digest !== undefined) {
 						chunkGraph.setModuleHashes(
 							module,
@@ -3583,8 +3621,8 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
 					hashDigest,
 					hashDigestLength
 				);
-				if (moduleHashesMemCache) {
-					moduleHashesMemCache.set(runtime, digest);
+				if (memCache) {
+					memCache.set(`moduleHash-${getRuntimeKey(runtime)}`, digest);
 				}
 			}
 		}

lib/Compiler.js
@@ -40,6 +40,7 @@ const { isSourceEqual } = require("./util/source");
 /** @typedef {import("../declarations/WebpackOptions").WebpackOptionsNormalized} WebpackOptions */
 /** @typedef {import("../declarations/WebpackOptions").WebpackPluginInstance} WebpackPluginInstance */
 /** @typedef {import("./Chunk")} Chunk */
+/** @typedef {import("./Dependency")} Dependency */
 /** @typedef {import("./FileSystemInfo").FileSystemInfoEntry} FileSystemInfoEntry */
 /** @typedef {import("./Module")} Module */
 /** @typedef {import("./util/WeakTupleMap")} WeakTupleMap */
@@ -248,7 +249,7 @@ class Compiler {
 
 		this.cache = new Cache();
 
-		/** @type {WeakMap<Module, { hash: string, memCache: WeakTupleMap }> | undefined} */
+		/** @type {WeakMap<Module, { hash: string, references: WeakMap<Dependency, Module>, memCache: WeakTupleMap }> | undefined} */
 		this.moduleMemCaches = undefined;
 
 		this.compilerPath = "";

types.d.ts
@@ -1909,7 +1909,11 @@ declare class Compiler {
 	cache: Cache;
 	moduleMemCaches?: WeakMap<
 		Module,
-		{ hash: string; memCache: WeakTupleMap<any, any> }
+		{
+			hash: string;
+			references: WeakMap<Dependency, Module>;
+			memCache: WeakTupleMap<any, any>;
+		}
 	>;
 	compilerPath: string;
 	running: boolean;
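
Note: the runtime-requirements and module-hash hunks in lib/Compilation.js also stop keeping a nested RuntimeSpecMap obtained via memCache.provide and instead store each per-runtime value directly on the module's memCache under a "<topic>-<runtimeKey>" string key, with null recording "computed, nothing to store". A rough sketch of that keying pattern with a plain Map follows; getRuntimeKey and getOrCompute here are simplified stand-ins, not webpack's implementations.

// Sketch only: flat string keys on a per-module memo cache instead of a nested
// per-runtime map. `undefined` means "not computed yet"; `null` is cached as a
// negative result ("computed, nothing to add").
const getRuntimeKey = runtime =>
	Array.isArray(runtime) ? [...runtime].sort().join("+") : String(runtime);

const getOrCompute = (memCache, topic, runtime, compute) => {
	const key = `${topic}-${getRuntimeKey(runtime)}`;
	const cached = memCache.get(key);
	if (cached !== undefined) return cached; // may be null
	const value = compute(); // return null to cache "nothing to do"
	memCache.set(key, value);
	return value;
};

// Usage: the second call is served from the cache.
const memCache = new Map();
const a = getOrCompute(memCache, "moduleRuntimeRequirements", "main", () =>
	new Set(["__webpack_require__"])
);
const b = getOrCompute(memCache, "moduleRuntimeRequirements", "main", () => new Set());
console.log(a === b); // true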