mirror of https://github.com/webpack/webpack.git
Merge pull request #11956 from webpack/feature/asset-processing
Allow processing assets that are added during processAssets
commit 2aacd90fa2
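A note on usage: the new `additionalAssets` tap option means a processAssets callback is invoked again for assets that later taps emit. A minimal sketch of a consuming plugin (the plugin name and loop body are illustrative, not part of this PR; only the option itself comes from this change):

const { Compilation } = require("webpack");

class ExampleAdditionalAssetsPlugin {
	apply(compiler) {
		compiler.hooks.compilation.tap("ExampleAdditionalAssetsPlugin", compilation => {
			compilation.hooks.processAssets.tap(
				{
					name: "ExampleAdditionalAssetsPlugin",
					stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE,
					additionalAssets: true
				},
				assets => {
					// runs for the assets present at this stage, and runs again
					// for any assets emitted by later processAssets taps
					for (const name of Object.keys(assets)) {
						// inspect or rewrite compilation.getAsset(name) here
					}
				}
			);
		});
	}
}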
lib/Compilation.js:

@@ -289,10 +289,122 @@ class Compilation {
 	 */
 	constructor(compiler) {
 		const getNormalModuleLoader = () => deprecatedNormalModuleLoaderHook(this);
-		/** @type {AsyncSeriesHook<[CompilationAssets]>} */
+		/** @typedef {{ additionalAssets?: true | Function }} ProcessAssetsAdditionalOptions */
+		/** @type {AsyncSeriesHook<[CompilationAssets], ProcessAssetsAdditionalOptions>} */
 		const processAssetsHook = new AsyncSeriesHook(["assets"]);
+
+		let savedAssets = new Set();
+		const popNewAssets = assets => {
+			let newAssets = undefined;
+			for (const file of Object.keys(assets)) {
+				if (savedAssets.has(file)) continue;
+				if (newAssets === undefined) {
+					newAssets = Object.create(null);
+				}
+				newAssets[file] = assets[file];
+				savedAssets.add(file);
+			}
+			return newAssets;
+		};
+		processAssetsHook.intercept({
+			name: "Compilation",
+			call: () => {
+				savedAssets.clear();
+			},
+			register: tap => {
+				const { type, name } = tap;
+				const { fn, additionalAssets, ...remainingTap } = tap;
+				const additionalAssetsFn =
+					additionalAssets === true ? fn : additionalAssets;
+				let processedAssets = undefined;
+				switch (type) {
+					case "sync":
+						if (additionalAssetsFn) {
+							this.hooks.processAdditionalAssets.tap(name, assets => {
+								if (processedAssets === this.assets) additionalAssetsFn(assets);
+							});
+						}
+						return {
+							...remainingTap,
+							type: "async",
+							fn: (assets, callback) => {
+								try {
+									fn(assets);
+								} catch (e) {
+									return callback(e);
+								}
+								processedAssets = this.assets;
+								const newAssets = popNewAssets(assets);
+								if (newAssets !== undefined) {
+									this.hooks.processAdditionalAssets.callAsync(
+										newAssets,
+										callback
+									);
+									return;
+								}
+								callback();
+							}
+						};
+					case "async":
+						if (additionalAssetsFn) {
+							this.hooks.processAdditionalAssets.tapAsync(
+								name,
+								(assets, callback) => {
+									if (processedAssets === this.assets)
+										return additionalAssetsFn(assets, callback);
+									callback();
+								}
+							);
+						}
+						return {
+							...remainingTap,
+							fn: (assets, callback) => {
+								fn(assets, err => {
+									if (err) return callback(err);
+									processedAssets = this.assets;
+									const newAssets = popNewAssets(assets);
+									if (newAssets !== undefined) {
+										this.hooks.processAdditionalAssets.callAsync(
+											newAssets,
+											callback
+										);
+										return;
+									}
+									callback();
+								});
+							}
+						};
+					case "promise":
+						if (additionalAssetsFn) {
+							this.hooks.processAdditionalAssets.tapPromise(name, assets => {
+								if (processedAssets === this.assets)
+									return additionalAssetsFn(assets);
+								return Promise.resolve();
+							});
+						}
+						return {
+							...remainingTap,
+							fn: assets => {
+								const p = fn(assets);
+								if (!p || !p.then) return p;
+								return p.then(() => {
+									processedAssets = this.assets;
+									const newAssets = popNewAssets(assets);
+									if (newAssets !== undefined) {
+										return this.hooks.processAdditionalAssets.promise(
+											newAssets
+										);
+									}
+								});
+							}
+						};
+				}
+			}
+		});
+
 		/** @type {SyncHook<[CompilationAssets]>} */
 		const afterProcessAssetsHook = new SyncHook(["assets"]);
 
 		/**
 		 * @template T
 		 * @param {string} name name of the hook
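The wrapping above is plain tapable API: an interceptor's register function receives every tap and may return a replacement, which is how Compilation transparently rewraps sync processAssets taps as async ones. A standalone sketch of that mechanism (hook and tap names are illustrative, not from the PR):

const { AsyncSeriesHook } = require("tapable");

const hook = new AsyncSeriesHook(["assets"]);
hook.intercept({
	register: tap => ({
		...tap,
		// replace the original fn with a wrapper; tapable calls this
		// wrapper instead of the function that was registered
		fn: (...args) => {
			console.log(`running tap: ${tap.name}`);
			return tap.fn(...args);
		}
	})
});
hook.tap("example", assets => {});
hook.callAsync({}, err => {
	if (err) throw err;
});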
@@ -544,6 +656,8 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
 
 			processAssets: processAssetsHook,
 			afterProcessAssets: afterProcessAssetsHook,
+			/** @type {AsyncSeriesHook<[CompilationAssets]>} */
+			processAdditionalAssets: new AsyncSeriesHook(["assets"]),
 			/** @type {SyncBailHook<[], boolean>} */
 			needAdditionalSeal: new SyncBailHook([]),
 
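Plugins may also tap the new processAdditionalAssets hook directly; it receives only the assets that appeared since the previous processAssets tap finished, never the full asset record. A minimal sketch (tap name illustrative):

compilation.hooks.processAdditionalAssets.tap("ExamplePlugin", assets => {
	for (const name of Object.keys(assets)) {
		// only assets newly emitted by processAssets taps in this seal
		// show up here
	}
});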
@@ -3439,6 +3553,8 @@ Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE = 100;
 
 /**
  * Optimize the count of existing assets, e. g. by merging them.
+ * Only assets of the same type should be merged.
+ * For assets of different types see PROCESS_ASSETS_STAGE_OPTIMIZE_INLINE.
  */
 Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_COUNT = 200;
 
@@ -3453,17 +3569,23 @@ Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_COMPATIBILITY = 300;
 Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_SIZE = 400;
 
 /**
- * Summarize the list of existing assets.
- * When creating new assets from this they should be fully optimized.
+ * Add development tooling to assets, e. g. by extracting a SourceMap.
+ */
+Compilation.PROCESS_ASSETS_STAGE_DEV_TOOLING = 500;
+
+/**
+ * Optimize the count of existing assets, e. g. by inlining assets into other assets.
+ * Only assets of different types should be inlined.
+ * For assets of the same type see PROCESS_ASSETS_STAGE_OPTIMIZE_COUNT.
+ */
+Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_INLINE = 700;
+
+/**
+ * Summarize the list of existing assets
  * e. g. creating an assets manifest of Service Workers.
  */
 Compilation.PROCESS_ASSETS_STAGE_SUMMARIZE = 1000;
 
-/**
- * Add development tooling to assets, e. g. by extracting a SourceMap.
- */
-Compilation.PROCESS_ASSETS_STAGE_DEV_TOOLING = 2000;
-
 /**
  * Optimize the hashes of the assets, e. g. by generating real hashes of the asset content.
  */
lib/Compiler.js:

@@ -152,9 +152,9 @@ class Compiler {
 			beforeCompile: new AsyncSeriesHook(["params"]),
 			/** @type {SyncHook<[CompilationParams]>} */
 			compile: new SyncHook(["params"]),
-			/** @type {AsyncParallelHook<[Compilation], Module>} */
+			/** @type {AsyncParallelHook<[Compilation]>} */
 			make: new AsyncParallelHook(["compilation"]),
-			/** @type {AsyncParallelHook<[Compilation], Module>} */
+			/** @type {AsyncParallelHook<[Compilation]>} */
 			finishMake: new AsyncSeriesHook(["compilation"]),
 			/** @type {AsyncSeriesHook<[Compilation]>} */
 			afterCompile: new AsyncSeriesHook(["compilation"]),
lib/SourceMapDevToolPlugin.js:

@@ -162,7 +162,8 @@ class SourceMapDevToolPlugin {
 		compilation.hooks.processAssets.tapAsync(
 			{
 				name: "SourceMapDevToolPlugin",
-				stage: Compilation.PROCESS_ASSETS_STAGE_DEV_TOOLING
+				stage: Compilation.PROCESS_ASSETS_STAGE_DEV_TOOLING,
+				additionalAssets: true
 			},
 			(assets, callback) => {
 				const chunkGraph = compilation.chunkGraph;
lib/index.js:

@@ -355,6 +355,9 @@ module.exports = mergeExports(fn, {
 		get ModuleConcatenationPlugin() {
 			return require("./optimize/ModuleConcatenationPlugin");
 		},
+		get RealContentHashPlugin() {
+			return require("./optimize/RealContentHashPlugin");
+		},
 		get RuntimeChunkPlugin() {
 			return require("./optimize/RuntimeChunkPlugin");
 		},
lib/optimize/RealContentHashPlugin.js:

@@ -5,12 +5,15 @@
 
 "use strict";
 
+const { SyncBailHook } = require("tapable");
 const { RawSource, CachedSource, CompatSource } = require("webpack-sources");
 const Compilation = require("../Compilation");
 const WebpackError = require("../WebpackError");
 const { compareSelect, compareStrings } = require("../util/comparators");
 const createHash = require("../util/createHash");
 
+/** @typedef {import("webpack-sources").Source} Source */
+/** @typedef {import("../Compilation").AssetInfo} AssetInfo */
 /** @typedef {import("../Compiler")} Compiler */
 
 const EMPTY_SET = new Set();
@@ -47,7 +50,50 @@ const toCachedSource = source => {
 	return newSource;
 };
 
+/**
+ * @typedef {Object} AssetInfoForRealContentHash
+ * @property {string} name
+ * @property {AssetInfo} info
+ * @property {Source} source
+ * @property {RawSource | undefined} newSource
+ * @property {RawSource | undefined} newSourceWithoutOwn
+ * @property {string} content
+ * @property {Set<string>} ownHashes
+ * @property {Promise} contentComputePromise
+ * @property {Promise} contentComputeWithoutOwnPromise
+ * @property {Set<string>} referencedHashes
+ * @property {Set<string>} hashes
+ */
+
+/**
+ * @typedef {Object} CompilationHooks
+ * @property {SyncBailHook<[Buffer[], string], string>} updateHash
+ */
+
+/** @type {WeakMap<Compilation, CompilationHooks>} */
+const compilationHooksMap = new WeakMap();
+
 class RealContentHashPlugin {
+	/**
+	 * @param {Compilation} compilation the compilation
+	 * @returns {CompilationHooks} the attached hooks
+	 */
+	static getCompilationHooks(compilation) {
+		if (!(compilation instanceof Compilation)) {
+			throw new TypeError(
+				"The 'compilation' argument must be an instance of Compilation"
+			);
+		}
+		let hooks = compilationHooksMap.get(compilation);
+		if (hooks === undefined) {
+			hooks = {
+				updateHash: new SyncBailHook(["content", "oldHash"])
+			};
+			compilationHooksMap.set(compilation, hooks);
+		}
+		return hooks;
+	}
+
 	constructor({ hashFunction, hashDigest }) {
 		this._hashFunction = hashFunction;
 		this._hashDigest = hashDigest;
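getCompilationHooks follows webpack's usual WeakMap pattern, so other plugins can attach to the updateHash bail hook without reaching into plugin internals. A minimal consumer sketch (plugin name hypothetical; the SRI test case further down does the same thing for real):

const { optimize } = require("webpack");

class CustomHashPlugin {
	apply(compiler) {
		compiler.hooks.compilation.tap("CustomHashPlugin", compilation => {
			const hooks =
				optimize.RealContentHashPlugin.getCompilationHooks(compilation);
			hooks.updateHash.tap("CustomHashPlugin", (contents, oldHash) => {
				// return a string to override how the real content hash is
				// computed for `contents`, or undefined for default hashing
				return undefined;
			});
		});
	}
}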
@@ -66,6 +112,7 @@ class RealContentHashPlugin {
 		const cacheGenerate = compilation.getCache(
 			"RealContentHashPlugin|generate"
 		);
+		const hooks = RealContentHashPlugin.getCompilationHooks(compilation);
 		compilation.hooks.processAssets.tapPromise(
 			{
 				name: "RealContentHashPlugin",
@@ -73,6 +120,7 @@ class RealContentHashPlugin {
 			},
 			async () => {
 				const assets = compilation.getAssets();
+				/** @type {AssetInfoForRealContentHash[]} */
 				const assetsWithInfo = [];
 				const hashToAssets = new Map();
 				for (const { source, info, name } of assets) {
@@ -87,9 +135,13 @@ class RealContentHashPlugin {
 						source: cachedSource,
 						/** @type {RawSource | undefined} */
 						newSource: undefined,
+						/** @type {RawSource | undefined} */
+						newSourceWithoutOwn: undefined,
 						content,
-						hasOwnHash: false,
-						contentComputePromise: false,
+						/** @type {Set<string>} */
+						ownHashes: undefined,
+						contentComputePromise: undefined,
+						contentComputeWithoutOwnPromise: undefined,
 						/** @type {Set<string>} */
 						referencedHashes: undefined,
 						hashes
@@ -114,6 +166,7 @@ class RealContentHashPlugin {
 					const { name, source, content, hashes } = asset;
 					if (Buffer.isBuffer(content)) {
 						asset.referencedHashes = EMPTY_SET;
+						asset.ownHashes = EMPTY_SET;
 						return;
 					}
 					const etag = cacheAnalyse.mergeEtags(
@@ -122,21 +175,21 @@ class RealContentHashPlugin {
 					);
 					[
 						asset.referencedHashes,
-						asset.hasOwnHash
+						asset.ownHashes
 					] = await cacheAnalyse.providePromise(name, etag, () => {
 						const referencedHashes = new Set();
-						let hasOwnHash = false;
+						let ownHashes = new Set();
 						const inContent = content.match(hashRegExp);
 						if (inContent) {
 							for (const hash of inContent) {
 								if (hashes.has(hash)) {
-									hasOwnHash = true;
+									ownHashes.add(hash);
 									continue;
 								}
 								referencedHashes.add(hash);
 							}
 						}
-						return [referencedHashes, hasOwnHash];
+						return [referencedHashes, ownHashes];
 					});
 				})
 			);
@@ -163,7 +216,12 @@ ${referencingAssets
 				return undefined;
 			}
 			const hashes = new Set();
-			for (const { referencedHashes } of assets) {
+			for (const { referencedHashes, ownHashes } of assets) {
+				if (!ownHashes.has(hash)) {
+					for (const hash of ownHashes) {
+						hashes.add(hash);
+					}
+				}
 				for (const hash of referencedHashes) {
 					hashes.add(hash);
 				}
@@ -199,32 +257,57 @@ ${referencingAssets
 					add(hash, new Set());
 				}
 				const hashToNewHash = new Map();
-				const computeNewContent = (asset, includeOwn) => {
-					if (asset.contentComputePromise) return asset.contentComputePromise;
-					return (asset.contentComputePromise = (async () => {
-						if (
-							asset.hasOwnHash ||
-							Array.from(asset.referencedHashes).some(
-								hash => hashToNewHash.get(hash) !== hash
-							)
-						) {
-							const identifier =
-								asset.name +
-								(includeOwn && asset.hasOwnHash ? "|with-own" : "");
-							const etag = cacheGenerate.mergeEtags(
-								cacheGenerate.getLazyHashedEtag(asset.source),
-								Array.from(asset.referencedHashes, hash =>
-									hashToNewHash.get(hash)
-								).join("|")
-							);
-							asset.newSource = await cacheGenerate.providePromise(
-								identifier,
-								etag,
-								() => {
-									const newContent = asset.content.replace(
-										hashRegExp,
-										hash => {
-											if (!includeOwn && asset.hashes.has(hash)) {
-												return "";
-											}
-											return hashToNewHash.get(hash);
+				const getEtag = asset =>
+					cacheGenerate.mergeEtags(
+						cacheGenerate.getLazyHashedEtag(asset.source),
+						Array.from(asset.referencedHashes, hash =>
+							hashToNewHash.get(hash)
+						).join("|")
+					);
+				const computeNewContent = asset => {
+					if (asset.contentComputePromise) return asset.contentComputePromise;
+					return (asset.contentComputePromise = (async () => {
+						if (
+							asset.ownHashes.size > 0 ||
+							Array.from(asset.referencedHashes).some(
+								hash => hashToNewHash.get(hash) !== hash
+							)
+						) {
+							const identifier = asset.name;
+							const etag = getEtag(asset);
+							asset.newSource = await cacheGenerate.providePromise(
+								identifier,
+								etag,
+								() => {
+									const newContent = asset.content.replace(hashRegExp, hash =>
+										hashToNewHash.get(hash)
+									);
+									return new RawSource(newContent);
+								}
+							);
+						}
+					})());
+				};
+				const computeNewContentWithoutOwn = asset => {
+					if (asset.contentComputeWithoutOwnPromise)
+						return asset.contentComputeWithoutOwnPromise;
+					return (asset.contentComputeWithoutOwnPromise = (async () => {
+						if (
+							asset.ownHashes.size > 0 ||
+							Array.from(asset.referencedHashes).some(
+								hash => hashToNewHash.get(hash) !== hash
+							)
+						) {
+							const identifier = asset.name + "|without-own";
+							const etag = getEtag(asset);
+							asset.newSourceWithoutOwn = await cacheGenerate.providePromise(
+								identifier,
+								etag,
+								() => {
+									const newContent = asset.content.replace(
+										hashRegExp,
+										hash => {
+											if (asset.ownHashes.has(hash)) {
+												return "";
+											}
+											return hashToNewHash.get(hash);

@@ -241,25 +324,37 @@ ${referencingAssets
 				const assets = hashToAssets.get(oldHash);
 				assets.sort(comparator);
 				const hash = createHash(this._hashFunction);
-				await Promise.all(assets.map(computeNewContent));
-				for (const asset of assets) {
-					hash.update(
-						asset.newSource
-							? asset.newSource.buffer()
-							: asset.source.buffer()
-					);
-				}
-				const digest = hash.digest(this._hashDigest);
-				const newHash = digest.slice(0, oldHash.length);
+				await Promise.all(
+					assets.map(asset =>
+						asset.ownHashes.has(oldHash)
+							? computeNewContentWithoutOwn(asset)
+							: computeNewContent(asset)
+					)
+				);
+				const assetsContent = assets.map(asset => {
+					if (asset.ownHashes.has(oldHash)) {
+						return asset.newSourceWithoutOwn
+							? asset.newSourceWithoutOwn.buffer()
+							: asset.source.buffer();
+					} else {
+						return asset.newSource
+							? asset.newSource.buffer()
+							: asset.source.buffer();
+					}
+				});
+				let newHash = hooks.updateHash.call(assetsContent, oldHash);
+				if (!newHash) {
+					for (const content of assetsContent) {
+						hash.update(content);
+					}
+					const digest = hash.digest(this._hashDigest);
+					newHash = /** @type {string} */ (digest.slice(0, oldHash.length));
+				}
 				hashToNewHash.set(oldHash, newHash);
 			}
 			await Promise.all(
 				assetsWithInfo.map(async asset => {
-					// recomputed content with it's own hash
-					if (asset.hasOwnHash) {
-						asset.contentComputePromise = undefined;
-					}
-					await computeNewContent(asset, true);
+					await computeNewContent(asset);
 					const newName = asset.name.replace(hashRegExp, hash =>
 						hashToNewHash.get(hash)
 					);
package.json:

@@ -25,7 +25,7 @@
     "neo-async": "^2.6.2",
     "pkg-dir": "^4.2.0",
     "schema-utils": "^3.0.0",
-    "tapable": "^2.0.0",
+    "tapable": "^2.1.1",
     "terser-webpack-plugin": "^5.0.3",
     "watchpack": "^2.0.0",
     "webpack-sources": "^2.1.1"
index.js (new test file):

@@ -0,0 +1,28 @@
+const crypto = require("crypto");
+const fs = require("fs");
+const path = require("path");
+
+it("should result in the correct HTML", () => {
+	const content = fs.readFileSync(
+		path.resolve(__dirname, "index.html"),
+		"utf-8"
+	);
+
+	// check minimized
+	expect(content).toMatch(/<\/script> <script/);
+
+	// check inlined js is minimized
+	expect(content).toMatch(/For license information please see inline-/);
+
+	// contains references to normal-[contenthash].js
+	expect(content).toMatch(/normal-.{20}\.js/);
+
+	const [filename] = /normal-.{20}\.js/.exec(content);
+	const normalJs = fs.readFileSync(path.resolve(__dirname, filename));
+	const hash = crypto.createHash("sha512");
+	hash.update(normalJs);
+	const digest = hash.digest("base64");
+
+	// SRI has been updated and matched content
+	expect(content).toContain(digest);
+});
inline.js (new test file):

@@ -0,0 +1 @@
+console.log("inline");
normal.js (new test file):

@@ -0,0 +1 @@
+console.log("normal");
test.config.js (new test file):

@@ -0,0 +1,5 @@
+module.exports = {
+	findBundle: function () {
+		return "./test.js";
+	}
+};
webpack.config.js (new test file):

@@ -0,0 +1,189 @@
+const {
+	sources: { RawSource, OriginalSource, ReplaceSource },
+	Compilation,
+	util: { createHash },
+	optimize: { RealContentHashPlugin }
+} = require("../../../../");
+
+class HtmlPlugin {
+	constructor(entrypoints) {
+		this.entrypoints = entrypoints;
+	}
+
+	apply(compiler) {
+		compiler.hooks.compilation.tap("html-plugin", compilation => {
+			compilation.hooks.processAssets.tap(
+				{
+					name: "html-plugin",
+					stage: Compilation.PROCESS_ASSETS_STAGE_ADDITIONAL
+				},
+				() => {
+					const publicPath = compilation.outputOptions.publicPath;
+					const files = [];
+					for (const name of this.entrypoints) {
+						for (const file of compilation.entrypoints.get(name).getFiles())
+							files.push(file);
+					}
+					const toScriptTag = (file, extra) => {
+						const asset = compilation.getAsset(file);
+						const hash = createHash("sha512");
+						hash.update(asset.source.source());
+						const integrity = `sha512-${hash.digest("base64")}`;
+						compilation.updateAsset(
+							file,
+							x => x,
+							assetInfo => ({
+								...assetInfo,
+								contenthash: Array.isArray(assetInfo.contenthash)
+									? [...new Set([...assetInfo.contenthash, integrity])]
+									: assetInfo.contenthash
+									? [assetInfo.contenthash, integrity]
+									: integrity
+							})
+						);
+						return `<script src="${
+							publicPath === "auto" ? "" : publicPath
+						}${file}" integrity="${integrity}"></script>`;
+					};
+					compilation.emitAsset(
+						"index.html",
+						new OriginalSource(
+							`<html>
+	<body>
+${files.map(file => `		${toScriptTag(file)}`).join("\n")}
+	</body>
+</html>`,
+							"index.html"
+						)
+					);
+				}
+			);
+		});
+	}
+}
+
+class HtmlInlinePlugin {
+	constructor(inline) {
+		this.inline = inline;
+	}
+
+	apply(compiler) {
+		compiler.hooks.compilation.tap("html-inline-plugin", compilation => {
+			compilation.hooks.processAssets.tap(
+				{
+					name: "html-inline-plugin",
+					stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_INLINE,
+					additionalAssets: true
+				},
+				assets => {
+					const publicPath = compilation.outputOptions.publicPath;
+					for (const name of Object.keys(assets)) {
+						if (/\.html$/.test(name)) {
+							const asset = compilation.getAsset(name);
+							const content = asset.source.source();
+							const matches = [];
+							const regExp = /<script\s+src\s*=\s*"([^"]+)"(?:\s+[^"=\s]+(?:\s*=\s*(?:"[^"]*"|[^\s]+))?)*\s*><\/script>/g;
+							let match = regExp.exec(content);
+							while (match) {
+								let url = match[1];
+								if (url.startsWith(publicPath))
+									url = url.slice(publicPath.length);
+								if (this.inline.test(url)) {
+									const asset = compilation.getAsset(url);
+									matches.push({
+										start: match.index,
+										length: match[0].length,
+										asset
+									});
+								}
+								match = regExp.exec(content);
+							}
+							if (matches.length > 0) {
+								const newSource = new ReplaceSource(asset.source, name);
+								for (const { start, length, asset } of matches) {
+									newSource.replace(
+										start,
+										start + length - 1,
+										`<script>${asset.source.source()}</script>`
+									);
+								}
+								compilation.updateAsset(name, newSource);
+							}
+						}
+					}
+				}
+			);
+		});
+	}
+}
+
+class SriHashSupportPlugin {
+	apply(compiler) {
+		compiler.hooks.compilation.tap("sri-hash-support-plugin", compilation => {
+			RealContentHashPlugin.getCompilationHooks(compilation).updateHash.tap(
+				"sri-hash-support-plugin",
+				(input, oldHash) => {
+					if (/^sha512-.{88}$/.test(oldHash) && input.length === 1) {
+						const hash = createHash("sha512");
+						hash.update(input[0]);
+						return `sha512-${hash.digest("base64")}`;
+					}
+				}
+			);
+		});
+	}
+}
+
+class HtmlMinimizePlugin {
+	apply(compiler) {
+		compiler.hooks.compilation.tap("html-minimize-plugin", compilation => {
+			compilation.hooks.processAssets.tap(
+				{
+					name: "html-minimize-plugin",
+					stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_SIZE,
+					additionalAssets: true
+				},
+				assets => {
+					for (const name of Object.keys(assets)) {
+						if (/\.html$/.test(name)) {
+							compilation.updateAsset(
+								name,
+								source => new RawSource(source.source().replace(/\s+/g, " ")),
+								assetInfo => ({
+									...assetInfo,
+									minimized: true
+								})
+							);
+						}
+					}
+				}
+			);
+		});
+	}
+}
+
+/** @type {import("../../../../").Configuration} */
+module.exports = {
+	mode: "production",
+	entry: {
+		test: { import: "./index.js", filename: "test.js" },
+		inline: "./inline.js",
+		normal: "./normal.js"
+	},
+	output: {
+		filename: "[name]-[contenthash].js"
+	},
+	optimization: {
+		minimize: true,
+		minimizer: ["...", new HtmlMinimizePlugin()]
+	},
+	node: {
+		__dirname: false,
+		__filename: false
+	},
+	plugins: [
+		new HtmlPlugin(["inline", "normal"]),
+		new HtmlInlinePlugin(/inline/),
+		new SriHashSupportPlugin()
+	]
+};
types.d.ts:

@@ -1229,10 +1229,17 @@ declare class Compilation {
 		"tap" | "tapAsync" | "tapPromise" | "name"
 	> &
 		FakeHookMarker;
-	optimizeAssets: AsyncSeriesHook<[Record<string, Source>]>;
+	optimizeAssets: AsyncSeriesHook<
+		[Record<string, Source>],
+		{ additionalAssets?: true | Function }
+	>;
 	afterOptimizeAssets: SyncHook<[Record<string, Source>]>;
-	processAssets: AsyncSeriesHook<[Record<string, Source>]>;
+	processAssets: AsyncSeriesHook<
+		[Record<string, Source>],
+		{ additionalAssets?: true | Function }
+	>;
 	afterProcessAssets: SyncHook<[Record<string, Source>]>;
+	processAdditionalAssets: AsyncSeriesHook<[Record<string, Source>]>;
 	needAdditionalSeal: SyncBailHook<[], boolean>;
 	afterSeal: AsyncSeriesHook<[]>;
 	renderManifest: SyncWaterfallHook<

@@ -1536,6 +1543,8 @@ declare class Compilation {
 
 	/**
 	 * Optimize the count of existing assets, e. g. by merging them.
+	 * Only assets of the same type should be merged.
+	 * For assets of different types see PROCESS_ASSETS_STAGE_OPTIMIZE_INLINE.
 	 */
 	static PROCESS_ASSETS_STAGE_OPTIMIZE_COUNT: number;
 

@@ -1549,18 +1558,24 @@ declare class Compilation {
 	 */
 	static PROCESS_ASSETS_STAGE_OPTIMIZE_SIZE: number;
 
-	/**
-	 * Summarize the list of existing assets.
-	 * When creating new assets from this they should be fully optimized.
-	 * e. g. creating an assets manifest of Service Workers.
-	 */
-	static PROCESS_ASSETS_STAGE_SUMMARIZE: number;
-
 	/**
 	 * Add development tooling to assets, e. g. by extracting a SourceMap.
 	 */
 	static PROCESS_ASSETS_STAGE_DEV_TOOLING: number;
 
+	/**
+	 * Optimize the count of existing assets, e. g. by inlining assets into other assets.
+	 * Only assets of different types should be inlined.
+	 * For assets of the same type see PROCESS_ASSETS_STAGE_OPTIMIZE_COUNT.
+	 */
+	static PROCESS_ASSETS_STAGE_OPTIMIZE_INLINE: number;
+
+	/**
+	 * Summarize the list of existing assets
+	 * e. g. creating an assets manifest of Service Workers.
+	 */
+	static PROCESS_ASSETS_STAGE_SUMMARIZE: number;
+
 	/**
 	 * Optimize the hashes of the assets, e. g. by generating real hashes of the asset content.
 	 */

@@ -1597,6 +1612,9 @@ declare interface CompilationHooksJavascriptModulesPlugin {
 	chunkHash: SyncHook<[Chunk, Hash, ChunkHashContext]>;
 	useSourceMap: SyncBailHook<[Chunk, RenderContextObject], boolean>;
 }
+declare interface CompilationHooksRealContentHashPlugin {
+	updateHash: SyncBailHook<[Buffer[], string], string>;
+}
 declare interface CompilationParams {
 	normalModuleFactory: NormalModuleFactory;
 	contextModuleFactory: ContextModuleFactory;

@@ -5665,7 +5683,7 @@ declare abstract class NormalModuleFactory extends ModuleFactory {
 	beforeResolve: AsyncSeriesBailHook<[ResolveData], any>;
 	afterResolve: AsyncSeriesBailHook<[ResolveData], any>;
 	createModule: AsyncSeriesBailHook<[any, ResolveData], any>;
-	module: SyncWaterfallHook<[Module, any, ResolveData]>;
+	module: SyncWaterfallHook<[Module, any, ResolveData], any>;
 	createParser: HookMap<SyncBailHook<any, any>>;
 	parser: HookMap<SyncHook<any>>;
 	createGenerator: HookMap<SyncBailHook<any, any>>;

@@ -6888,6 +6906,17 @@ declare class ReadFileCompileWasmPlugin {
 	 */
 	apply(compiler: Compiler): void;
 }
+declare class RealContentHashPlugin {
+	constructor(__0: { hashFunction: any; hashDigest: any });
+
+	/**
+	 * Apply the plugin
+	 */
+	apply(compiler: Compiler): void;
+	static getCompilationHooks(
+		compilation: Compilation
+	): CompilationHooksRealContentHashPlugin;
+}
 declare interface RealDependencyLocation {
 	start: SourcePosition;
 	end?: SourcePosition;

@@ -10220,6 +10249,7 @@ declare namespace exports {
 		LimitChunkCountPlugin,
 		MinChunkSizePlugin,
 		ModuleConcatenationPlugin,
+		RealContentHashPlugin,
 		RuntimeChunkPlugin,
 		SideEffectsFlagPlugin,
 		SplitChunksPlugin
yarn.lock:

@@ -6221,10 +6221,10 @@ tapable@^1.0.0:
   resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2"
   integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==
 
-tapable@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.0.0.tgz#a49c3d6a8a2bb606e7db372b82904c970d537a08"
-  integrity sha512-bjzn0C0RWoffnNdTzNi7rNDhs1Zlwk2tRXgk8EiHKAOX1Mag3d6T0Y5zNa7l9CJ+EoUne/0UHdwS8tMbkh9zDg==
+tapable@^2.0.0, tapable@^2.1.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.1.1.tgz#b01cc1902d42a7bb30514e320ce21c456f72fd3f"
+  integrity sha512-Wib1S8m2wdpLbmQz0RBEVosIyvb/ykfKXf3ZIDqvWoMg/zTNm6G/tDSuUM61J1kNCDXWJrLHGSFeMhAG+gAGpQ==
 
 temp-dir@^1.0.0:
   version "1.0.0"