mirror of https://github.com/webpack/webpack.git
use the full hash on modules that need the full hash of the compilation
use getFullHash runtime module for updating the hash in HMR
parent 54a90430a2
commit 50ec72a6ca
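Roughly, the pieces introduced here fit together as in the sketch below. This is an illustrative sketch only, not the exact emitted code: the hash literal is a placeholder, and it assumes RuntimeGlobals.getFullHash resolves to the __webpack_require__.h runtime function.

// What GetFullHashRuntimeModule emits into the runtime code (roughly):
__webpack_require__.h = () => "0123456789abcdef0123"; // placeholder full compilation hash

// What APIPlugin turns __webpack_hash__ into inside user code:
var hash = __webpack_require__.h();

// Because the runtime module is registered as a "full hash" module, it is rendered
// again with the new compilation hash into each hot update chunk, so the HMR runtime
// can read the current hash through $getFullHash$() instead of tracking it itself.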
@@ -115,8 +115,10 @@ class APIPlugin {
		compilation.hooks.runtimeRequirementInTree
			.for(RuntimeGlobals.getFullHash)
			.tap("APIPlugin", chunk => {
				compilation.addRuntimeModule(chunk, new GetFullHashRuntimeModule());
			.tap("APIPlugin", (chunk, set) => {
				const module = new GetFullHashRuntimeModule();
				compilation.addRuntimeModule(chunk, module);
				compilation.chunkGraph.addFullHashModuleToChunk(chunk, module);
				return true;
			});

@@ -184,6 +184,8 @@ class ChunkGraphChunk {
		this.entryModules = new Map();
		/** @type {SortableSet<RuntimeModule>} */
		this.runtimeModules = new SortableSet();
		/** @type {Set<RuntimeModule> | undefined} */
		this.fullHashModules = undefined;
		/** @type {Set<string> | undefined} */
		this.runtimeRequirements = undefined;
		/** @type {Set<string>} */
@@ -346,6 +348,19 @@ class ChunkGraph {
		}
	}

	/**
	 * @param {Chunk} chunk the chunk
	 * @param {Iterable<RuntimeModule>} modules the modules that require a full hash
	 * @returns {void}
	 */
	attachFullHashModules(chunk, modules) {
		const cgc = this._getChunkGraphChunk(chunk);
		if (cgc.fullHashModules === undefined) cgc.fullHashModules = new Set();
		for (const module of modules) {
			cgc.fullHashModules.add(module);
		}
	}

	/**
	 * @param {Module} oldModule the replaced module
	 * @param {Module} newModule the replacing module
@@ -394,6 +409,13 @@ class ChunkGraph {
				cgc.runtimeModules.delete(/** @type {RuntimeModule} */ (oldModule));
				cgc.runtimeModules.add(/** @type {RuntimeModule} */ (newModule));
				newCgm.runtimeInChunks.add(chunk);
				if (
					cgc.fullHashModules !== undefined &&
					cgc.fullHashModules.has(/** @type {RuntimeModule} */ (oldModule))
				) {
					cgc.fullHashModules.delete(/** @type {RuntimeModule} */ (oldModule));
					cgc.fullHashModules.add(/** @type {RuntimeModule} */ (newModule));
				}
			}
			oldCgm.runtimeInChunks = undefined;
		}
@@ -886,6 +908,17 @@ class ChunkGraph {
		cgc.runtimeModules.add(module);
	}

	/**
	 * @param {Chunk} chunk the new chunk
	 * @param {RuntimeModule} module the module that requires a full hash
	 * @returns {void}
	 */
	addFullHashModuleToChunk(chunk, module) {
		const cgc = this._getChunkGraphChunk(chunk);
		if (cgc.fullHashModules === undefined) cgc.fullHashModules = new Set();
		cgc.fullHashModules.add(module);
	}

	/**
	 * @param {Chunk} chunk the new chunk
	 * @param {Module} module the entry module
@@ -1038,6 +1071,15 @@ class ChunkGraph {
		return array;
	}

	/**
	 * @param {Chunk} chunk the chunk
	 * @returns {Iterable<RuntimeModule> | undefined} iterable of modules (do not modify)
	 */
	getChunkFullHashModulesIterable(chunk) {
		const cgc = this._getChunkGraphChunk(chunk);
		return cgc.fullHashModules;
	}

	/** @typedef {[Module, Entrypoint | undefined]} EntryModuleWithChunkGroup */

	/**

@@ -2316,6 +2316,7 @@ class Compilation {
			return byId(a, b);
		});
		this.logger.timeEnd("hashing: sort chunks");
		const fullHashChunks = new Set();
		for (let i = 0; i < chunks.length; i++) {
			const chunk = chunks[i];
			// Last minute module hash generation for modules that depend on chunk hashes
@@ -2347,10 +2348,20 @@
					moduleGraph: this.moduleGraph,
					runtimeTemplate: this.runtimeTemplate
				});
				chunk.hash = /** @type {string} */ (chunkHash.digest(hashDigest));
				hash.update(chunk.hash);
				const chunkHashDigest = /** @type {string} */ (chunkHash.digest(
					hashDigest
				));
				hash.update(chunkHashDigest);
				chunk.hash = chunkHashDigest;
				chunk.renderedHash = chunk.hash.substr(0, hashDigestLength);
				this.hooks.contentHash.call(chunk);
				const fullHashModules = chunkGraph.getChunkFullHashModulesIterable(
					chunk
				);
				if (fullHashModules) {
					fullHashChunks.add(chunk);
				} else {
					this.hooks.contentHash.call(chunk);
				}
			} catch (err) {
				this.errors.push(new ChunkRenderError(chunk, "", err));
			}
@@ -2362,6 +2373,32 @@
		this.fullHash = /** @type {string} */ (hash.digest(hashDigest));
		this.hash = this.fullHash.substr(0, hashDigestLength);
		this.logger.timeEnd("hashing: hash digest");

		this.logger.time("hashing: process full hash modules");
		for (const chunk of fullHashChunks) {
			for (const module of chunkGraph.getChunkFullHashModulesIterable(chunk)) {
				const moduleHash = createHash(hashFunction);
				module.updateHash(moduleHash, chunkGraph);
				const moduleHashDigest = /** @type {string} */ (moduleHash.digest(
					hashDigest
				));
				chunkGraph.setModuleHashes(
					module,
					moduleHashDigest,
					moduleHashDigest.substr(0, hashDigestLength)
				);
			}
			const chunkHash = createHash(hashFunction);
			chunkHash.update(chunk.hash);
			chunkHash.update(this.hash);
			const chunkHashDigest = /** @type {string} */ (chunkHash.digest(
				hashDigest
			));
			chunk.hash = chunkHashDigest;
			chunk.renderedHash = chunk.hash.substr(0, hashDigestLength);
			this.hooks.contentHash.call(chunk);
		}
		this.logger.timeEnd("hashing: process full hash modules");
	}

	/**

@@ -301,7 +301,6 @@ class HotModuleReplacementPlugin {
				}
			}
			const hotUpdateMainContent = {
				h: compilation.hash,
				c: [],
				r: [],
				m: undefined
@@ -320,6 +319,14 @@
			const newRuntimeModules = Array.from(
				chunkGraph.getChunkRuntimeModulesIterable(currentChunk)
			).filter(module => updatedModules.has(module));
			const fullHashModules = chunkGraph.getChunkFullHashModulesIterable(
				currentChunk
			);
			const newFullHashModules =
				fullHashModules &&
				Array.from(fullHashModules).filter(module =>
					updatedModules.has(module)
				);
			/** @type {Set<number|string>} */
			const allModules = new Set();
			for (const module of chunkGraph.getChunkModulesIterable(
@@ -339,6 +346,12 @@
				hotUpdateChunk,
				newRuntimeModules
			);
			if (newFullHashModules) {
				chunkGraph.attachFullHashModules(
					hotUpdateChunk,
					newFullHashModules
				);
			}
			hotUpdateChunk.removedModules = removedModules;
			const renderManifest = compilation.getRenderManifest({
				chunk: hotUpdateChunk,
@@ -401,7 +414,6 @@
			(chunk, runtimeRequirements) => {
				runtimeRequirements.add(RuntimeGlobals.hmrDownloadManifest);
				runtimeRequirements.add(RuntimeGlobals.hmrDownloadUpdateHandlers);
				runtimeRequirements.add(RuntimeGlobals.getFullHash);
				runtimeRequirements.add(RuntimeGlobals.interceptModuleExecution);
				runtimeRequirements.add(RuntimeGlobals.moduleCache);
				compilation.addRuntimeModule(

@@ -5,7 +5,6 @@
"use strict";

var $getFullHash$ = undefined;
var $interceptModuleExecution$ = undefined;
var $moduleCache$ = undefined;
// eslint-disable-next-line no-unused-vars
@@ -16,7 +15,6 @@ var $hmrInvalidateModuleHandlers$ = undefined;
var __webpack_require__ = undefined;

module.exports = function () {
	var currentHash = $getFullHash$();
	var currentModuleData = {};
	var installedModules = $moduleCache$;
@@ -33,15 +31,10 @@ module.exports = function () {

	// The update info
	var currentUpdateApplyHandlers;
	var currentUpdateNewHash;
	var queuedInvalidatedModules;

	$hmrModuleData$ = currentModuleData;

	$getFullHash$ = function () {
		return currentHash;
	};

	$interceptModuleExecution$.push(function (options) {
		var module = options.module;
		var require = createRequire(options.require, options.id);
@@ -251,7 +244,6 @@ module.exports = function () {

	setStatus("prepare");

	currentUpdateNewHash = update.h;
	var updatedModules = [];
	blockingPromises = [];
	currentUpdateApplyHandlers = [];
@@ -328,11 +320,6 @@ module.exports = function () {
	// Now in "apply" phase
	setStatus("apply");

	if (currentUpdateNewHash !== undefined) {
		currentHash = currentUpdateNewHash;
		currentUpdateNewHash = undefined;
	}

	var error;
	var reportError = function (err) {
		if (!error) error = err;

@@ -337,6 +337,16 @@ class JavascriptModulesPlugin {
				hash.update(chunkGraph.getModuleHash(m));
			}
		}
		const runtimeModules = chunkGraph.getOrderedChunkModulesIterableBySourceType(
			chunk,
			"runtime",
			compareModulesByIdentifier
		);
		if (runtimeModules) {
			for (const m of runtimeModules) {
				hash.update(chunkGraph.getModuleHash(m));
			}
		}
		if (hotUpdateChunk) {
			hash.update(JSON.stringify(hotUpdateChunk.removedModules));
		}

@@ -0,0 +1,21 @@
import "./module";

const getFile = name =>
	__non_webpack_require__("fs").readFileSync(
		__non_webpack_require__("path").join(__dirname, name),
		"utf-8"
	);

it("should generate the main file and change full hash on update", done => {
	const hash1 = __webpack_hash__;
	expect(getFile("bundle.js")).toContain(hash1);
	module.hot.accept("./module", () => {
		const hash2 = __webpack_hash__;
		expect(hash1).toBeTypeOf("string");
		expect(hash2).not.toBe(hash1);
		expect(getFile("bundle.js")).toContain(hash2);
		expect(getFile("bundle.js")).not.toContain(hash1);
		done();
	});
	NEXT(require("../../update")(done));
});

@@ -0,0 +1 @@
import("./thing");

@@ -0,0 +1,3 @@
export default 1;
---
export default 2;

@@ -0,0 +1,5 @@
module.exports = {
	node: {
		__dirname: false
	}
};
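These new files follow what appears to be the hot test-case layout used by webpack's test suite: the --- separator in the three-line module marks the content applied on the next update step, and NEXT(require("../../update")(done)) asks the harness to apply that step. The accept handler then checks that __webpack_hash__ and the emitted bundle.js contain the new full hash and no longer contain the old one.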