mirror of https://github.com/webpack/webpack.git
calculate real content hash for assets after asset processing
parent 43433e9379
commit b929d4cdce
@@ -1323,6 +1323,10 @@ export interface Optimization {
* Figure out which exports are provided by modules to generate more efficient code.
*/
providedExports?: boolean;
/**
* Use real [contenthash] based on final content of the assets.
*/
realContentHash?: boolean;
/**
* Removes modules from chunks when these modules are already included in all parents.
*/
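For illustration, a minimal configuration sketch (not from this diff) showing how the new option would typically be used together with a hashed filename — assuming the webpack 5 option layout defined in the schema above and the production default added further below:

// webpack.config.js — illustrative only
module.exports = {
	mode: "production",
	output: {
		// [contenthash] placeholders are what the new plugin rewrites
		filename: "[name].[contenthash].js"
	},
	optimization: {
		// recompute [contenthash] from the final (minimized) asset content
		realContentHash: true
	}
};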
@@ -175,6 +175,10 @@ const { getRuntimeKey } = require("./util/runtime");
/**
* @typedef {Object} AssetInfo
* @property {boolean=} immutable true, if the asset can be long term cached forever (contains a hash)
* @property {string | string[]=} fullhash the value(s) of the full hash used for this asset
* @property {string | string[]=} chunkhash the value(s) of the chunk hash used for this asset
* @property {string | string[]=} modulehash the value(s) of the module hash used for this asset
* @property {string | string[]=} contenthash the value(s) of the content hash used for this asset
* @property {number=} size size in bytes, only set after asset has been emitted
* @property {boolean=} development true, when asset is only used for development and doesn't count towards user-facing assets
* @property {boolean=} hotModuleReplacement true, when asset ships data for updating an existing application (HMR)
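As a rough illustration (hypothetical values, not from this diff), an entry stored in assetsInfo for a hashed bundle could look like this; the hash fields may be a single string or an array when several placeholder lengths were used:

// hypothetical AssetInfo value stored for "main.abc123.js"
const info = {
	immutable: true,
	contenthash: ["abc123de9f00", "abc123"],
	related: { sourceMap: "main.abc123.js.map" },
	size: 1024
};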
@@ -731,6 +735,8 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
this.assets = {};
/** @type {Map<string, AssetInfo>} */
this.assetsInfo = new Map();
/** @type {Map<string, Map<string, Set<string>>>} */
this._assetsRelatedIn = new Map();
/** @type {WebpackError[]} */
this.errors = [];
/** @type {WebpackError[]} */
@@ -2701,15 +2707,67 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
)
);
this.assets[file] = source;
this.assetsInfo.set(file, assetInfo);
this._setAssetInfo(file, assetInfo);
return;
}
const oldInfo = this.assetsInfo.get(file);
this.assetsInfo.set(file, Object.assign({}, oldInfo, assetInfo));
const newInfo = Object.assign({}, oldInfo, assetInfo);
this._setAssetInfo(file, newInfo, oldInfo);
return;
}
this.assets[file] = source;
this.assetsInfo.set(file, assetInfo);
this._setAssetInfo(file, assetInfo, undefined);
}

_setAssetInfo(file, newInfo, oldInfo = this.assetsInfo.get(file)) {
if (newInfo === undefined) {
this.assetsInfo.delete(file);
} else {
this.assetsInfo.set(file, newInfo);
}
const oldRelated = oldInfo && oldInfo.related;
const newRelated = newInfo && newInfo.related;
if (oldRelated) {
for (const key of Object.keys(oldRelated)) {
const remove = name => {
const relatedIn = this._assetsRelatedIn.get(name);
if (relatedIn === undefined) return;
const entry = relatedIn.get(key);
if (entry === undefined) return;
entry.delete(file);
if (entry.size !== 0) return;
relatedIn.delete(key);
if (relatedIn.size === 0) this._assetsRelatedIn.delete(name);
};
const entry = oldRelated[key];
if (Array.isArray(entry)) {
entry.forEach(remove);
} else if (entry) {
remove(entry);
}
}
}
if (newRelated) {
for (const key of Object.keys(newRelated)) {
const add = name => {
let relatedIn = this._assetsRelatedIn.get(name);
if (relatedIn === undefined) {
this._assetsRelatedIn.set(name, (relatedIn = new Map()));
}
let entry = relatedIn.get(key);
if (entry === undefined) {
relatedIn.set(key, (entry = new Set()));
}
entry.add(file);
};
const entry = newRelated[key];
if (Array.isArray(entry)) {
entry.forEach(add);
} else if (entry) {
add(entry);
}
}
}
}

/**
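To illustrate the bookkeeping above (hypothetical values, not from this diff): _assetsRelatedIn is a reverse index from a related file back to the assets that list it under a given relation key, so renameAsset and deleteAsset can find referencing assets without scanning every asset:

// shape: Map<relatedFile, Map<relationKey, Set<referencingFile>>>
// e.g. after emitting "main.js" with assetInfo.related = { sourceMap: "main.js.map" }:
new Map([
	["main.js.map", new Map([["sourceMap", new Set(["main.js"])]])]
]);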
@@ -2735,16 +2793,73 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
if (assetInfoUpdateOrFunction !== undefined) {
const oldInfo = this.assetsInfo.get(file) || EMPTY_ASSET_INFO;
if (typeof assetInfoUpdateOrFunction === "function") {
this.assetsInfo.set(file, assetInfoUpdateOrFunction(oldInfo));
this._setAssetInfo(file, assetInfoUpdateOrFunction(oldInfo), oldInfo);
} else {
this.assetsInfo.set(
this._setAssetInfo(
file,
cachedCleverMerge(oldInfo, assetInfoUpdateOrFunction)
cachedCleverMerge(oldInfo, assetInfoUpdateOrFunction),
oldInfo
);
}
}
}

renameAsset(file, newFile) {
const source = this.assets[file];
if (!source) {
throw new Error(
`Called Compilation.renameAsset for not existing filename ${file}`
);
}
if (this.assets[newFile]) {
if (!isSourceEqual(this.assets[file], source)) {
this.errors.push(
new WebpackError(
`Conflict: Called Compilation.renameAsset for already existing filename ${newFile} with different content`
)
);
}
}
const assetInfo = this.assetsInfo.get(file);
// Update related in all other assets
const relatedInInfo = this._assetsRelatedIn.get(file);
if (relatedInInfo) {
for (const [key, assets] of relatedInInfo) {
for (const name of assets) {
const info = this.assetsInfo.get(name);
if (!info) continue;
const related = info.related;
if (!related) continue;
const entry = related[key];
let newEntry;
if (Array.isArray(entry)) {
newEntry = entry.map(x => (x === file ? newFile : x));
} else if (entry === file) {
newEntry = newFile;
} else continue;
this.assetsInfo.set(name, {
...info,
related: {
...related,
[key]: newEntry
}
});
}
}
}
this._setAssetInfo(file, undefined, assetInfo);
this._setAssetInfo(newFile, assetInfo);
delete this.assets[file];
this.assets[newFile] = source;
for (const chunk of this.chunks) {
const size = chunk.files.size;
chunk.files.delete(file);
if (size !== chunk.files.size) {
chunk.files.add(newFile);
}
}
}

/**
* @param {string} file file name
*/
@@ -2754,35 +2869,19 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
}
delete this.assets[file];
const assetInfo = this.assetsInfo.get(file);
this.assetsInfo.delete(file);
this._setAssetInfo(file, undefined, assetInfo);
const related = assetInfo && assetInfo.related;
if (related) {
for (const key of Object.keys(related)) {
const checkUsedAndDelete = file => {
// That's not super efficient, but let's assume the number of assets
// is not super large and we are not deleting a lot of files
// It could be refactored to track parents in emit/updateAsset
// to make it easier to access this info in O(1)
for (const assetInfo of this.assetsInfo.values()) {
if (!assetInfo) continue;
const related = assetInfo.related;
if (!related) continue;
const items = related[key];
if (!items) continue;
if (
Array.isArray(items)
? items.some(item => item === file)
: items === file
) {
return;
}
if (!this._assetsRelatedIn.has(file)) {
this.deleteAsset(file);
}
this.deleteAsset(file);
};
const items = related[key];
if (Array.isArray(items)) {
for (const item of items) checkUsedAndDelete(item);
} else {
items.forEach(checkUsedAndDelete);
} else if (items) {
checkUsedAndDelete(items);
}
}
@@ -3225,6 +3324,11 @@ Compilation.PROCESS_ASSETS_STAGE_DEV_TOOLING = 2000;
*/
Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_TRANSFER = 3000;

/**
* Optimize the hashes of the assets, e. g. by generating real hashes of the asset content.
*/
Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_HASH = 3500;

/**
* Analyse existing assets.
*/
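A plugin can tap the new stage through the usual processAssets hook; a minimal sketch (plugin name hypothetical), mirroring how RealContentHashPlugin registers itself further below:

compilation.hooks.processAssets.tap(
	{
		name: "MyHashAwarePlugin", // hypothetical plugin name
		stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_HASH
	},
	() => {
		// at this point assets can still be renamed or rewritten based on their final content
	}
);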
@@ -24,6 +24,7 @@ const schema = require("../schemas/plugins/SourceMapDevToolPlugin.json");
/** @typedef {import("./Cache").Etag} Etag */
/** @typedef {import("./CacheFacade").ItemCacheFacade} ItemCacheFacade */
/** @typedef {import("./Chunk")} Chunk */
/** @typedef {import("./Compilation").AssetInfo} AssetInfo */
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("./Module")} Module */
/** @typedef {import("./util/Hash")} Hash */

@@ -31,6 +32,7 @@ const schema = require("../schemas/plugins/SourceMapDevToolPlugin.json");
/**
* @typedef {object} SourceMapTask
* @property {Source} asset
* @property {AssetInfo} assetInfo
* @property {(string | Module)[]} modules
* @property {string} source
* @property {string} file

@@ -38,16 +40,33 @@ const schema = require("../schemas/plugins/SourceMapDevToolPlugin.json");
* @property {ItemCacheFacade} cacheItem cache item
*/

/**
* Escapes regular expression metacharacters
* @param {string} str String to quote
* @returns {string} Escaped string
*/
const quoteMeta = str => {
return str.replace(/[-[\]\\/{}()*+?.^$|]/g, "\\$&");
};

/**
* Creating {@link SourceMapTask} for given file
* @param {string} file current compiled file
* @param {Source} asset the asset
* @param {AssetInfo} assetInfo the asset info
* @param {SourceMapDevToolPluginOptions} options source map options
* @param {Compilation} compilation compilation instance
* @param {ItemCacheFacade} cacheItem cache item
* @returns {SourceMapTask | undefined} created task instance or `undefined`
*/
const getTaskForFile = (file, asset, options, compilation, cacheItem) => {
const getTaskForFile = (
file,
asset,
assetInfo,
options,
compilation,
cacheItem
) => {
let source;
/** @type {SourceMap} */
let sourceMap;

@@ -77,6 +96,7 @@ const getTaskForFile = (file, asset, options, compilation, cacheItem) => {
file,
asset,
source,
assetInfo,
sourceMap,
modules,
cacheItem

@@ -244,6 +264,7 @@ class SourceMapDevToolPlugin {
const task = getTaskForFile(
file,
asset.source,
asset.info,
options,
compilation,
cacheItem

@@ -373,6 +394,24 @@ class SourceMapDevToolPlugin {
}
sourceMap.sourceRoot = options.sourceRoot || "";
sourceMap.file = file;
const usesContentHash =
sourceMapFilename &&
/\[contenthash(:\w+)?\]/.test(sourceMapFilename);

// If SourceMap and asset uses contenthash, avoid a circular dependency by hiding hash in `file`
if (usesContentHash && task.assetInfo.contenthash) {
const contenthash = task.assetInfo.contenthash;
let pattern;
if (Array.isArray(contenthash)) {
pattern = contenthash.map(quoteMeta).join("|");
} else {
pattern = quoteMeta(contenthash);
}
sourceMap.file = sourceMap.file.replace(
new RegExp(pattern, "g"),
m => "x".repeat(m.length)
);
}

/** @type {string | false} */
let currentSourceMappingURLComment = sourceMappingURLComment;

@@ -388,6 +427,11 @@ class SourceMapDevToolPlugin {
const sourceMapString = JSON.stringify(sourceMap);
if (sourceMapFilename) {
let filename = file;
const sourceMapContentHash =
usesContentHash &&
/** @type {string} */ (createHash("md4")
.update(sourceMapString)
.digest("hex"));
const pathParams = {
chunk,
filename: options.fileContext

@@ -397,11 +441,12 @@ class SourceMapDevToolPlugin {
`/${filename}`
)
: filename,
contentHash: /** @type {string} */ (createHash("md4")
.update(sourceMapString)
.digest("hex"))
contentHash: sourceMapContentHash
};
let sourceMapFile = compilation.getPath(
const {
path: sourceMapFile,
info: sourceMapInfo
} = compilation.getPathWithInfo(
sourceMapFilename,
pathParams
);

@@ -433,6 +478,7 @@ class SourceMapDevToolPlugin {
// Add source map file to compilation assets and chunk files
const sourceMapAsset = new RawSource(sourceMapString);
const sourceMapAssetInfo = {
...sourceMapInfo,
development: true
};
assets[sourceMapFile] = sourceMapAsset;
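To make the circular-dependency workaround above concrete, a small sketch (hash value hypothetical): every occurrence of the asset's content hash inside sourceMap.file is masked with a same-length placeholder before the map is stringified and hashed, so the source map's own hash does not depend on the asset's final [contenthash]:

const quoteMeta = str => str.replace(/[-[\]\\/{}()*+?.^$|]/g, "\\$&");
const contenthash = "75500a81ed65abaee6f7"; // hypothetical value
let file = "75500a81ed65abaee6f7-bundle.js";
file = file.replace(new RegExp(quoteMeta(contenthash), "g"), m =>
	"x".repeat(m.length)
);
// file === "xxxxxxxxxxxxxxxxxxxx-bundle.js"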
@@ -29,18 +29,29 @@ const prepareId = id => {
return id.replace(/(^[.-]|[^a-zA-Z0-9_-])+/g, "_");
};

const hashLength = (replacer, handler, assetInfo) => {
const hashLength = (replacer, handler, assetInfo, hashName) => {
const fn = (match, arg, input) => {
if (assetInfo) assetInfo.immutable = true;
let result;
const length = arg && parseInt(arg, 10);

if (length && handler) {
return handler(length);
result = handler(length);
} else {
const hash = replacer(match, arg, input);

result = length ? hash.slice(0, length) : hash;
}

const hash = replacer(match, arg, input);

return length ? hash.slice(0, length) : hash;
if (assetInfo) {
assetInfo.immutable = true;
if (Array.isArray(assetInfo[hashName])) {
assetInfo[hashName] = [...assetInfo[hashName], result];
} else if (assetInfo[hashName]) {
assetInfo[hashName] = [assetInfo[hashName], result];
} else {
assetInfo[hashName] = result;
}
}
return result;
};

return fn;

@@ -146,7 +157,8 @@ const replacePathVariables = (path, data, assetInfo) => {
const hashReplacer = hashLength(
replacer(data.hash),
data.hashWithLength,
assetInfo
assetInfo,
"fullhash"
);

replacements.set("fullhash", hashReplacer);

@@ -180,7 +192,8 @@ const replacePathVariables = (path, data, assetInfo) => {
const chunkhashReplacer = hashLength(
replacer(chunk instanceof Chunk ? chunk.renderedHash : chunk.hash),
"hashWithLength" in chunk ? chunk.hashWithLength : undefined,
assetInfo
assetInfo,
"chunkhash"
);
const contenthashReplacer = hashLength(
replacer(

@@ -193,7 +206,8 @@ const replacePathVariables = (path, data, assetInfo) => {
("contentHashWithLength" in chunk && chunk.contentHashWithLength
? chunk.contentHashWithLength[contentHashType]
: undefined),
assetInfo
assetInfo,
"contenthash"
);

replacements.set("id", idReplacer);

@@ -228,12 +242,14 @@ const replacePathVariables = (path, data, assetInfo) => {
: module.hash
),
"hashWithLength" in module ? module.hashWithLength : undefined,
assetInfo
assetInfo,
"modulehash"
);
const contentHashReplacer = hashLength(
replacer(data.contentHash),
undefined,
assetInfo
assetInfo,
"contenthash"
);

replacements.set("id", idReplacer);
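The effect of the added hashName parameter, sketched with hypothetical values (not from this diff): every placeholder expansion is recorded on the asset info, and repeated placeholders of the same kind accumulate into an array — which is what later lets RealContentHashPlugin find all hash strings belonging to a file name such as "[contenthash]-[contenthash:6].js":

// standalone sketch of the accumulation logic in hashLength() above
const assetInfo = {};
const record = (info, hashName, value) => {
	info.immutable = true;
	if (Array.isArray(info[hashName])) info[hashName] = [...info[hashName], value];
	else if (info[hashName]) info[hashName] = [info[hashName], value];
	else info[hashName] = value;
};
record(assetInfo, "contenthash", "083fc9a26eb2c22e0ab1"); // [contenthash]
record(assetInfo, "contenthash", "083fc9"); // [contenthash:6]
// assetInfo.contenthash === ["083fc9a26eb2c22e0ab1", "083fc9"]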
@@ -407,6 +407,13 @@ class WebpackOptionsApply extends OptionsApply {
const NoEmitOnErrorsPlugin = require("./NoEmitOnErrorsPlugin");
new NoEmitOnErrorsPlugin().apply(compiler);
}
if (options.optimization.realContentHash) {
const RealContentHashPlugin = require("./optimize/RealContentHashPlugin");
new RealContentHashPlugin({
hashFunction: options.output.hashFunction,
hashDigest: options.output.hashDigest
}).apply(compiler);
}
if (options.optimization.checkWasmTypes) {
const WasmFinalizeExportsPlugin = require("./wasm/WasmFinalizeExportsPlugin");
new WasmFinalizeExportsPlugin().apply(compiler);
@@ -674,6 +674,7 @@ const applyOptimizationDefaults = (
D(optimization, "checkWasmTypes", production);
D(optimization, "mangleWasmImports", false);
D(optimization, "portableRecords", records);
D(optimization, "realContentHash", production);
D(optimization, "minimize", production);
A(optimization, "minimizer", () => [
{
@@ -0,0 +1,203 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/

"use strict";

const { RawSource } = require("webpack-sources");
const Compilation = require("../Compilation");
const { compareSelect, compareStrings } = require("../util/comparators");
const createHash = require("../util/createHash");

/** @typedef {import("../Compiler")} Compiler */

const addToList = (itemOrItems, list) => {
if (Array.isArray(itemOrItems)) {
for (const item of itemOrItems) {
list.add(item);
}
} else if (itemOrItems) {
list.add(itemOrItems);
}
};

/**
* Escapes regular expression metacharacters
* @param {string} str String to quote
* @returns {string} Escaped string
*/
const quoteMeta = str => {
return str.replace(/[-[\]\\/{}()*+?.^$|]/g, "\\$&");
};

class RealContentHashPlugin {
constructor({ hashFunction, hashDigest }) {
this._hashFunction = hashFunction;
this._hashDigest = hashDigest;
}

/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap("RealContentHashPlugin", compilation => {
compilation.hooks.processAssets.tap(
{
name: "RealContentHashPlugin",
stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_HASH
},
() => {
const assets = compilation.getAssets();
const assetsWithInfo = [];
const hashToAssets = new Map();
for (const { source, info, name } of assets) {
const content = source.source();
/** @type {Set<string>} */
const hashes = new Set();
addToList(info.contenthash, hashes);
const data = {
name,
info,
source,
content,
newContent: undefined,
hasOwnHash: false,
contentHash: undefined,
referencedHashes: new Set(),
hashes
};
assetsWithInfo.push(data);
for (const hash of hashes) {
const list = hashToAssets.get(hash);
if (list === undefined) {
hashToAssets.set(hash, [data]);
} else {
list.push(data);
}
}
}
const hashRegExp = new RegExp(
Array.from(hashToAssets.keys(), quoteMeta).join("|"),
"g"
);
for (const asset of assetsWithInfo) {
const { content, referencedHashes, hashes } = asset;
if (Buffer.isBuffer(content)) continue;
const inContent = content.match(hashRegExp);
if (inContent) {
for (const hash of inContent) {
if (hashes.has(hash)) {
asset.hasOwnHash = true;
continue;
}
referencedHashes.add(hash);
}
}
}
const getDependencies = hash => {
const assets = hashToAssets.get(hash);
const hashes = new Set();
for (const { referencedHashes } of assets) {
for (const hash of referencedHashes) {
hashes.add(hash);
}
}
return hashes;
};
const hashInfo = hash => {
const assets = hashToAssets.get(hash);
return `${hash} (${Array.from(assets, a => a.name)})`;
};
const hashesInOrder = new Set();
for (const hash of hashToAssets.keys()) {
const add = (hash, stack) => {
const deps = getDependencies(hash);
stack.add(hash);
for (const dep of deps) {
if (hashesInOrder.has(dep)) continue;
if (stack.has(dep)) {
throw new Error(
`Circular hash dependency ${Array.from(
stack,
hashInfo
).join(" -> ")} -> ${hashInfo(dep)}`
);
}
add(dep, stack);
}
hashesInOrder.add(hash);
stack.delete(hash);
};
if (hashesInOrder.has(hash)) continue;
add(hash, new Set());
}
const hashToNewHash = new Map();
const computeNewContent = (asset, includeOwn) => {
if (asset.newContent !== undefined) return;
if (
asset.hasOwnHash ||
Array.from(asset.referencedHashes).some(hash =>
hashToNewHash.has(hash)
)
) {
asset.newContent = asset.content.replace(hashRegExp, hash => {
if (!includeOwn && asset.hashes.has(hash)) {
return "";
}
return hashToNewHash.get(hash) || hash;
});
}
};
const comparator = compareSelect(a => a.name, compareStrings);
for (const oldHash of hashesInOrder) {
const assets = hashToAssets.get(oldHash);
assets.sort(comparator);
const hash = createHash(this._hashFunction);
for (const asset of assets) {
computeNewContent(asset);
hash.update(asset.newContent || asset.content);
}
const digest = hash.digest(this._hashDigest);
const newHash = digest.slice(0, oldHash.length);
if (oldHash !== newHash) {
hashToNewHash.set(oldHash, newHash);
}
}
for (const asset of assetsWithInfo) {
// recomputed content with it's own hash
if (asset.hasOwnHash) {
asset.newContent = undefined;
computeNewContent(asset, true);
}
const newName = asset.name.replace(
hashRegExp,
hash => hashToNewHash.get(hash) || hash
);

const infoUpdate = {};
const hash = asset.info.contenthash;
infoUpdate.contenthash = Array.isArray(hash)
? hash.map(hash => hashToNewHash.get(hash) || hash)
: hashToNewHash.get(hash) || hash;

if (asset.newContent !== undefined) {
const source = new RawSource(asset.newContent);
compilation.updateAsset(asset.name, source, infoUpdate);
} else {
compilation.updateAsset(asset.name, asset.source, infoUpdate);
}

if (asset.name !== newName) {
compilation.renameAsset(asset.name, newName);
}
}
}
);
});
}
}

module.exports = RealContentHashPlugin;
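When optimization.realContentHash is not enabled by the defaults (e.g. in development), the plugin could presumably also be applied by hand with the same constructor options that WebpackOptionsApply passes above — a sketch, using the hash settings that appear elsewhere in this diff:

// webpack.config.js — illustrative manual application
const RealContentHashPlugin = require("webpack/lib/optimize/RealContentHashPlugin");

module.exports = {
	// ...
	plugins: [
		new RealContentHashPlugin({
			hashFunction: "md4",
			hashDigest: "hex"
		})
	]
};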
@@ -1174,6 +1174,10 @@
"description": "Figure out which exports are provided by modules to generate more efficient code.",
"type": "boolean"
},
"realContentHash": {
"description": "Use real [contenthash] based on final content of the assets.",
"type": "boolean"
},
"removeAvailableModules": {
"description": "Removes modules from chunks when these modules are already included in all parents.",
"type": "boolean"
@@ -11,6 +11,7 @@ const FakeDocument = require("./helpers/FakeDocument");

const webpack = require("..");
const prepareOptions = require("./helpers/prepareOptions");
const { parseResource } = require("../lib/util/identifier");

const casesPath = path.join(__dirname, "configCases");
const categories = fs.readdirSync(casesPath).map(cat => {

@@ -74,7 +75,9 @@ const describeCases = config => {
});
testConfig = {
findBundle: function (i, options) {
const ext = path.extname(options.output.filename);
const ext = path.extname(
parseResource(options.output.filename).path
);
if (
fs.existsSync(
path.join(options.output.path, "bundle" + i + ext)
@@ -2519,6 +2519,46 @@ LOG from ModuleConcatenationPlugin
"
`;

exports[`StatsTestCases should print correct stats for real-content-hash 1`] = `
"Hash: 8981c4ae0cf7803cd21e5c98fdf5155243d58453
Child
Hash: 8981c4ae0cf7803cd21e
Time: X ms
Built at: 1970-04-20 12:42:42
asset 7382fad5b015914e0811.jpg 5.89 KiB [emitted] [immutable] (auxiliary name: main)
asset 75500a81ed65abaee6f7-75500a.js 268 bytes [emitted] [immutable] [minimized] (name: main)
sourceMap 75500a81ed65abaee6f7-75500a.js.map 366 bytes [emitted] [dev]
asset 7d9d0a742118d8263796-7d9d0a.js 253 bytes [emitted] [immutable] [minimized] (name: lazy)
sourceMap 7d9d0a742118d8263796-7d9d0a.js.map 331 bytes [emitted] [dev]
asset 89a353e9c515885abd8e.png 14.6 KiB [emitted] [immutable] (auxiliary name: lazy)
asset ac0e4e032b46a3050268-ac0e4e.js 2.18 KiB [emitted] [immutable] [minimized] (name: runtime~main)
sourceMap ac0e4e032b46a3050268-ac0e4e.js.map 12.7 KiB [emitted] [dev]
Entrypoint main = ac0e4e032b46a3050268-ac0e4e.js 75500a81ed65abaee6f7-75500a.js (7108e55c0e1e5ffc1429-7108e5.js.map 7257cc3b3c344e9bbeb9-7257cc.js.map 7382fad5b015914e0811.jpg)
./a/index.js 150 bytes [built]
./a/file.jpg 42 bytes (javascript) 5.89 KiB (asset) [built]
./a/lazy.js + 1 modules 106 bytes [built]
./a/file.png 42 bytes (javascript) 14.6 KiB (asset) [built]
+ 8 hidden modules
Child
Hash: 5c98fdf5155243d58453
Time: X ms
Built at: 1970-04-20 12:42:42
asset 137042fbf41dcdaddad9-137042.js 2.18 KiB [emitted] [immutable] [minimized] (name: runtime~main)
sourceMap 137042fbf41dcdaddad9-137042.js.map 12.7 KiB [emitted] [dev]
asset 7382fad5b015914e0811.jpg 5.89 KiB [emitted] [immutable] (auxiliary name: main)
asset 75500a81ed65abaee6f7-75500a.js 268 bytes [emitted] [immutable] [minimized] (name: main)
sourceMap 75500a81ed65abaee6f7-75500a.js.map 323 bytes [emitted] [dev]
asset 7d9d0a742118d8263796-7d9d0a.js 253 bytes [emitted] [immutable] [minimized] (name: lazy)
sourceMap 7d9d0a742118d8263796-7d9d0a.js.map 327 bytes [emitted] [dev]
asset 89a353e9c515885abd8e.png 14.6 KiB [emitted] [immutable] (auxiliary name: lazy)
Entrypoint main = 137042fbf41dcdaddad9-137042.js 75500a81ed65abaee6f7-75500a.js (61c15c3165b3503e09ec-61c15c.js.map 7382fad5b015914e0811.jpg e6992d2757cf11c89646-e6992d.js.map)
./b/index.js 109 bytes [built]
./b/file.jpg 42 bytes (javascript) 5.89 KiB (asset) [built]
./b/lazy.js + 1 modules 102 bytes [built]
./b/file.png 42 bytes (javascript) 14.6 KiB (asset) [built]
+ 8 hidden modules"
`;

exports[`StatsTestCases should print correct stats for related-assets 1`] = `
"Child default:
asset default-chunk_js.css 73 bytes [emitted] 3 related assets
@@ -1,6 +1,8 @@
it("should contain contenthash as query parameter and path", function() {
it("should contain contenthash as query parameter and path", function () {
var fs = require("fs");
var source = fs.readFileSync(__filename, "utf-8");
var match = /sourceMappingURL\s*=.*-([A-Fa-f0-9]{32})\.map\?([A-Fa-f0-9]{32})-([A-Fa-f0-9]{32})/.exec(source);
var match = /sourceMappingURL\s*=.*-([A-Fa-f0-9]{32})\.map\?([A-Fa-f0-9]{32})-([A-Fa-f0-9]{6})/.exec(
source
);
expect(match.length).toBe(4);
});
@@ -7,6 +7,8 @@ module.exports = {
},
devtool: "source-map",
output: {
sourceMapFilename: "[file]-[contenthash].map?[contenthash]-[contenthash]"
filename: "bundle0.js?[contenthash]",
sourceMapFilename:
"[file]-[contenthash].map?[contenthash]-[contenthash:6][query]"
}
};
Binary file not shown. (new image, 5.9 KiB)
Binary file not shown. (new image, 15 KiB)

@@ -0,0 +1,4 @@
// Comment 2019
const inlinedVariable = "test";
console.log(inlinedVariable, require("./file.jpg"));
import(/* webpackChunkName: "lazy" */ "./lazy");

@@ -0,0 +1,3 @@
import test from "./module";
import url from "./file.png";
console.log(test, url);

@@ -0,0 +1 @@
export default 40 + 2;

Binary file not shown. (new image, 5.9 KiB)
Binary file not shown. (new image, 15 KiB)

@@ -0,0 +1,3 @@
// Comment 2020
console.log("test", require("./file.jpg"));
import(/* webpackChunkName: "lazy" */ "./lazy");

@@ -0,0 +1,3 @@
import test from "./module";
import url from "./file.png";
console.log(test, url);

@@ -0,0 +1 @@
export default 42;
@@ -0,0 +1,13 @@
module.exports = {
validate(stats) {
const a = stats.stats[0].toJson({
assets: true
});
const b = stats.stats[1].toJson({
assets: true
});
expect(Object.keys(a.assetsByChunkName).length).toBe(3);
expect(a.assetsByChunkName.main).toEqual(b.assetsByChunkName.main);
expect(a.assetsByChunkName.lazy).toEqual(b.assetsByChunkName.lazy);
}
};
@@ -0,0 +1,45 @@
const path = require("path");

const base = {
mode: "production",
entry: "./index",
devtool: "source-map",
module: {
rules: [
{
test: /\.(png|jpg)$/,
type: "asset/resource"
}
]
},
optimization: {
runtimeChunk: true,
minimize: true
},
stats: {
relatedAssets: true
},
experiments: {
asset: true
}
};

/** @type {import("../../../").Configuration[]} */
module.exports = [
{
...base,
context: path.resolve(__dirname, "a"),
output: {
path: path.resolve(__dirname, "../../js/stats/real-content-hash/a"),
filename: "[contenthash]-[contenthash:6].js"
}
},
{
...base,
context: path.resolve(__dirname, "b"),
output: {
path: path.resolve(__dirname, "../../js/stats/real-content-hash/b"),
filename: "[contenthash]-[contenthash:6].js"
}
}
];
@@ -214,6 +214,26 @@ declare interface AssetInfo {
*/
immutable?: boolean;

/**
* the value(s) of the full hash used for this asset
*/
fullhash?: LibraryExport;

/**
* the value(s) of the chunk hash used for this asset
*/
chunkhash?: LibraryExport;

/**
* the value(s) of the module hash used for this asset
*/
modulehash?: LibraryExport;

/**
* the value(s) of the content hash used for this asset
*/
contenthash?: LibraryExport;

/**
* size in bytes, only set after asset has been emitted
*/
@@ -1319,6 +1339,7 @@ declare class Compilation {
newSourceOrFunction: Source | ((arg0: Source) => Source),
assetInfoUpdateOrFunction?: AssetInfo | ((arg0: AssetInfo) => AssetInfo)
): void;
renameAsset(file?: any, newFile?: any): void;
deleteAsset(file: string): void;
getAssets(): Readonly<Asset>[];
getAsset(name: string): Readonly<Asset>;
@@ -1415,6 +1436,11 @@ declare class Compilation {
*/
static PROCESS_ASSETS_STAGE_OPTIMIZE_TRANSFER: number;

/**
* Optimize the hashes of the assets, e. g. by generating real hashes of the asset content.
*/
static PROCESS_ASSETS_STAGE_OPTIMIZE_HASH: number;

/**
* Analyse existing assets.
*/
@@ -5155,6 +5181,11 @@ declare interface Optimization {
*/
providedExports?: boolean;

/**
* Use real [contenthash] based on final content of the assets.
*/
realContentHash?: boolean;

/**
* Removes modules from chunks when these modules are already included in all parents.
*/