Compare commits

...

10 Commits

Author SHA1 Message Date
hai-x 2b7d03e4b0
Merge d4d787a922 into c7ebdbda63 2025-10-07 14:06:42 -05:00
alexander-akait c7ebdbda63 chore(release): 5.102.1
2025-10-07 19:26:11 +03:00
Xiao b7530c2510
fix(css): correct CC_UPPER_U typo (E -> U) (#19989) 2025-10-07 19:10:42 +03:00
Alexander Akait f3ef1428b3
fix: defer import mangling (#19988) 2025-10-07 19:09:05 +03:00
Alexander Akait d32f1711ac
fix: hash options types (#19987) 2025-10-07 17:40:59 +03:00
Hai d4d787a922 refactor 2025-10-07 00:45:53 +08:00
Hai dac300f09a refactor 2025-10-07 00:39:29 +08:00
Hai 5197fd7f03 fix: review 2025-09-22 02:58:31 +08:00
Hai a51d2349ee fix: lint 2025-09-15 00:44:05 +08:00
Hai 81268133cd feat: port webpack-manifest-plugin 2025-09-15 00:30:46 +08:00
62 changed files with 1365 additions and 288 deletions

View File

@ -537,9 +537,21 @@ export type Filename = FilenameTemplate;
*/
export type GlobalObject = string;
/**
* Digest type used for the hash.
* Digest types used for the hash.
*/
export type HashDigest = string;
export type HashDigest =
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2";
/**
* Number of chars which are used for the hash.
*/
@ -2157,7 +2169,7 @@ export interface Output {
*/
globalObject?: GlobalObject;
/**
* Digest type used for the hash.
* Digest types used for the hash.
*/
hashDigest?: HashDigest;
/**
@ -3653,7 +3665,7 @@ export interface OutputNormalized {
*/
globalObject?: GlobalObject;
/**
* Digest type used for the hash.
* Digest types used for the hash.
*/
hashDigest?: HashDigest;
/**

View File

@ -0,0 +1,38 @@
/*
* This file was automatically generated.
* DO NOT MODIFY BY HAND.
* Run `yarn fix:special` to update
*/
/**
* A function that receives the manifest object and returns the manifest string.
*/
export type HandlerFunction = (manifest: ManifestObject) => string;
/**
* Maps asset identifiers to their manifest entries.
*/
export type ManifestObject = Record<string, ManifestItem>;
export interface ManifestPluginOptions {
/**
* Specifies the filename of the output file on disk. By default the plugin will emit `manifest.json` inside the 'output.path' directory.
*/
filename?: string;
/**
* A function that receives the manifest object and returns the manifest string.
*/
handler?: HandlerFunction;
}
/**
* Describes a manifest entry that links the emitted path to the producing asset.
*/
export interface ManifestItem {
/**
* The compilation asset that produced this manifest entry.
*/
asset?: import("../../lib/Compilation").Asset;
/**
* The public path recorded in the manifest for this asset.
*/
filePath: string;
}
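
For orientation, a minimal usage sketch against the option types declared above (the `assets.json` filename is made up for illustration; `webpack.ManifestPlugin` is the export added later in this diff). The handler shown mirrors the plugin's default behavior of mapping each manifest key to its `filePath`:

```js
"use strict";

const webpack = require("webpack"); // exposes ManifestPlugin per the export added in this diff

/** @type {import("webpack").Configuration["plugins"]} */
const plugins = [
  new webpack.ManifestPlugin({
    filename: "assets.json", // hypothetical name; defaults to "manifest.json"
    // handler receives the ManifestObject and must return the manifest string
    handler: (manifest) =>
      JSON.stringify(
        Object.fromEntries(
          Object.entries(manifest).map(([name, item]) => [name, item.filePath])
        ),
        null,
        2
      )
  })
];

module.exports = { plugins };
```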

View File

@ -7,7 +7,7 @@
/**
* Algorithm used for generating the hash (see node.js crypto package).
*/
export type HashFunction = string | typeof import("../../lib/util/Hash");
export type HashFunction = string | typeof import("../../../lib/util/Hash");
export interface HashedModuleIdsPluginOptions {
/**
@ -17,7 +17,19 @@ export interface HashedModuleIdsPluginOptions {
/**
* The encoding to use when generating the hash, defaults to 'base64'. All encodings from Node.JS' hash.digest are supported.
*/
hashDigest?: "hex" | "latin1" | "base64";
hashDigest?:
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2";
/**
* The prefix length of the hash digest to use, defaults to 4.
*/

View File

@ -0,0 +1,149 @@
This example demonstrates how to use webpack's internal ManifestPlugin.
# example.js
```js
import("./baz");
```
# foo.txt
```js
foo
```
# bar.txt
```js
bar
```
# baz.js
```js
import foo from "./foo.txt";
import bar from "./bar.txt";
export default foo + bar;
```
# webpack.config.js
```javascript
"use strict";
const webpack = require("../../");
/** @type {webpack.Configuration} */
module.exports = {
devtool: "source-map",
module: {
rules: [
{
test: /foo.txt/,
type: "asset/resource"
},
{
test: /bar.txt/,
use: require.resolve("file-loader")
}
]
},
plugins: [
new webpack.ManifestPlugin({
filename: "manifest.json"
}),
new webpack.ManifestPlugin({
filename: "manifest.yml",
handler(manifest) {
let _manifest = "";
for (const key in manifest) {
if (key === "manifest.json") continue;
_manifest += `- ${key}: '${manifest[key].filePath}'\n`;
}
return _manifest;
}
})
]
};
```
# dist/manifest.json
```json
{
"output.js.map": "dist/output.js.map",
"main.js": "dist/output.js",
"bar.txt": "dist/a0145fafc7fab801e574631452de554b.txt",
"foo.txt": "dist/3ee037f347c64cc372ad.txt",
"1.output.js.map": "dist/1.output.js.map",
"1.output.js": "dist/1.output.js"
}
```
# dist/manifest.yml
```yml
- output.js.map: 'dist/output.js.map'
- main.js: 'dist/output.js'
- bar.txt: 'dist/a0145fafc7fab801e574631452de554b.txt'
- foo.txt: 'dist/3ee037f347c64cc372ad.txt'
- 1.output.js.map: 'dist/1.output.js.map'
- 1.output.js: 'dist/1.output.js'
```
# Info
## Unoptimized
```
assets by path *.js 11.9 KiB
asset output.js 9.61 KiB [emitted] (name: main) 1 related asset
asset 1.output.js 2.3 KiB [emitted] 1 related asset
assets by path *.txt 8 bytes
asset 3ee037f347c64cc372ad.txt 4 bytes [emitted] [immutable] [from: foo.txt]
asset a0145fafc7fab801e574631452de554b.txt 4 bytes [emitted] [immutable] [from: bar.txt]
asset manifest.json 260 bytes [emitted]
asset manifest.yml 240 bytes [emitted]
chunk (runtime: main) output.js (main) 17 bytes (javascript) 5.48 KiB (runtime) [entry] [rendered]
> ./example.js main
runtime modules 5.48 KiB 8 modules
./example.js 17 bytes [built] [code generated]
[used exports unknown]
entry ./example.js main
chunk (runtime: main) 1.output.js 207 bytes (javascript) 4 bytes (asset) [rendered]
> ./baz ./example.js 1:0-15
dependent modules 122 bytes (javascript) 4 bytes (asset) [dependent] 2 modules
./baz.js 85 bytes [built] [code generated]
[exports: default]
[used exports unknown]
import() ./baz ./example.js 1:0-15
webpack X.X.X compiled successfully
```
## Production mode
```
assets by path *.js 2.17 KiB
asset output.js 1.94 KiB [emitted] [minimized] (name: main) 1 related asset
asset 293.output.js 237 bytes [emitted] [minimized] 1 related asset
assets by path *.txt 8 bytes
asset 3ee037f347c64cc372ad.txt 4 bytes [emitted] [immutable] [from: foo.txt]
asset a0145fafc7fab801e574631452de554b.txt 4 bytes [emitted] [immutable] [from: bar.txt]
asset manifest.json 268 bytes [emitted]
asset manifest.yml 248 bytes [emitted]
chunk (runtime: main) 293.output.js 4 bytes (asset) 249 bytes (javascript) [rendered]
> ./baz ./example.js 1:0-15
./baz.js + 2 modules 207 bytes [built] [code generated]
[exports: default]
import() ./baz ./example.js 1:0-15
./foo.txt 4 bytes (asset) 42 bytes (javascript) [built] [code generated]
[no exports]
chunk (runtime: main) output.js (main) 17 bytes (javascript) 5.48 KiB (runtime) [entry] [rendered]
> ./example.js main
runtime modules 5.48 KiB 8 modules
./example.js 17 bytes [built] [code generated]
[no exports used]
entry ./example.js main
webpack X.X.X compiled successfully
```

View File

@ -0,0 +1 @@
bar

View File

@ -0,0 +1,4 @@
import foo from "./foo.txt";
import bar from "./bar.txt";
export default foo + bar;

View File

@ -0,0 +1 @@
require("../build-common");

View File

@ -0,0 +1 @@
import("./baz");

View File

@ -0,0 +1 @@
foo

View File

@ -0,0 +1,57 @@
This example demonstrates how to use webpack's internal ManifestPlugin.
# example.js
```js
_{{example.js}}_
```
# foo.txt
```js
_{{foo.txt}}_
```
# bar.txt
```js
_{{bar.txt}}_
```
# baz.js
```js
_{{baz.js}}_
```
# webpack.config.js
```javascript
_{{webpack.config.js}}_
```
# dist/manifest.json
```json
_{{dist/manifest.json}}_
```
# dist/manifest.yml
```yml
_{{dist/manifest.yml}}_
```
# Info
## Unoptimized
```
_{{stdout}}_
```
## Production mode
```
_{{production:stdout}}_
```

View File

@ -0,0 +1,36 @@
"use strict";
const webpack = require("../../");
/** @type {webpack.Configuration} */
module.exports = {
devtool: "source-map",
module: {
rules: [
{
test: /foo.txt/,
type: "asset/resource"
},
{
test: /bar.txt/,
use: require.resolve("file-loader")
}
]
},
plugins: [
new webpack.ManifestPlugin({
filename: "manifest.json"
}),
new webpack.ManifestPlugin({
filename: "manifest.yml",
handler(manifest) {
let _manifest = "";
for (const key in manifest) {
if (key === "manifest.json") continue;
_manifest += `- ${key}: '${manifest[key].filePath}'\n`;
}
return _manifest;
}
})
]
};

View File

@ -1670,7 +1670,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
for (const type of sourceTypes) hash.update(type);
}
this.moduleGraph.getExportsInfo(module).updateHash(hash, runtime);
return BigInt(`0x${/** @type {string} */ (hash.digest("hex"))}`);
return BigInt(`0x${hash.digest("hex")}`);
});
return graphHash;
}
@ -1808,7 +1808,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
}
}
hash.update(graphHash);
return /** @type {string} */ (hash.digest("hex"));
return hash.digest("hex");
});
}

View File

@ -141,7 +141,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
if (info.runtimeRequirements) {
for (const rr of info.runtimeRequirements) hash.update(rr);
}
return (info.hash = /** @type {string} */ (hash.digest("hex")));
return (info.hash = hash.digest("hex"));
}
/**

View File

@ -4378,7 +4378,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
runtime,
runtimeTemplate
});
moduleHashDigest = /** @type {string} */ (moduleHash.digest(hashDigest));
moduleHashDigest = moduleHash.digest(hashDigest);
} catch (err) {
errors.push(new ModuleHashingError(module, /** @type {Error} */ (err)));
moduleHashDigest = "XXXXXX";
@ -4601,9 +4601,7 @@ This prevents using hashes of each other and should be avoided.`);
moduleGraph: this.moduleGraph,
runtimeTemplate: this.runtimeTemplate
});
const chunkHashDigest = /** @type {string} */ (
chunkHash.digest(hashDigest)
);
const chunkHashDigest = chunkHash.digest(hashDigest);
hash.update(chunkHashDigest);
chunk.hash = chunkHashDigest;
chunk.renderedHash = chunk.hash.slice(0, hashDigestLength);
@ -4637,7 +4635,7 @@ This prevents using hashes of each other and should be avoided.`);
this.logger.timeAggregateEnd("hashing: hash chunks");
this.logger.time("hashing: hash digest");
this.hooks.fullHash.call(hash);
this.fullHash = /** @type {string} */ (hash.digest(hashDigest));
this.fullHash = hash.digest(hashDigest);
this.hash = this.fullHash.slice(0, hashDigestLength);
this.logger.timeEnd("hashing: hash digest");
@ -4652,9 +4650,7 @@ This prevents using hashes of each other and should be avoided.`);
runtime: chunk.runtime,
runtimeTemplate
});
const moduleHashDigest = /** @type {string} */ (
moduleHash.digest(hashDigest)
);
const moduleHashDigest = moduleHash.digest(hashDigest);
const oldHash = chunkGraph.getModuleHash(module, chunk.runtime);
chunkGraph.setModuleHashes(
module,
@ -4671,9 +4667,7 @@ This prevents using hashes of each other and should be avoided.`);
const chunkHash = createHash(hashFunction);
chunkHash.update(chunk.hash);
chunkHash.update(this.hash);
const chunkHashDigest =
/** @type {string} */
(chunkHash.digest(hashDigest));
const chunkHashDigest = chunkHash.digest(hashDigest);
chunk.hash = chunkHashDigest;
chunk.renderedHash = chunk.hash.slice(0, hashDigestLength);
this.hooks.contentHash.call(chunk);

View File

@ -830,7 +830,7 @@ class DefinePlugin {
compilation.valueCacheVersions.set(
VALUE_DEP_MAIN,
/** @type {string} */ (mainHash.digest("hex").slice(0, 8))
mainHash.digest("hex").slice(0, 8)
);
}
);

View File

@ -48,7 +48,7 @@ class DependencyTemplates {
updateHash(part) {
const hash = createHash(this._hashFunction);
hash.update(`${this._hash}${part}`);
this._hash = /** @type {string} */ (hash.digest("hex"));
this._hash = hash.digest("hex");
}
getHash() {

View File

@ -3350,7 +3350,7 @@ class FileSystemInfo {
hash.update(/** @type {string | Buffer} */ (content));
const digest = /** @type {string} */ (hash.digest("hex"));
const digest = hash.digest("hex");
this._fileHashes.set(path, digest);
@ -3618,7 +3618,7 @@ class FileSystemInfo {
}
}
const digest = /** @type {string} */ (hash.digest("hex"));
const digest = hash.digest("hex");
/** @type {ContextFileSystemInfoEntry} */
const result = {
safeTime,
@ -3681,7 +3681,7 @@ class FileSystemInfo {
null,
(entry.resolved = {
safeTime,
timestampHash: /** @type {string} */ (hash.digest("hex"))
timestampHash: hash.digest("hex")
})
);
}
@ -3743,7 +3743,7 @@ class FileSystemInfo {
/** @type {ContextHash} */
const result = {
hash: /** @type {string} */ (hash.digest("hex"))
hash: hash.digest("hex")
};
if (symlinks) result.symlinks = symlinks;
return result;
@ -3790,10 +3790,7 @@ class FileSystemInfo {
for (const h of hashes) {
hash.update(h);
}
callback(
null,
(entry.resolved = /** @type {string} */ (hash.digest("hex")))
);
callback(null, (entry.resolved = hash.digest("hex")));
}
);
}
@ -3910,8 +3907,8 @@ class FileSystemInfo {
/** @type {ContextTimestampAndHash} */
const result = {
safeTime,
timestampHash: /** @type {string} */ (tsHash.digest("hex")),
hash: /** @type {string} */ (hash.digest("hex"))
timestampHash: tsHash.digest("hex"),
hash: hash.digest("hex")
};
if (symlinks) result.symlinks = symlinks;
return result;
@ -3979,8 +3976,8 @@ class FileSystemInfo {
null,
(entry.resolved = {
safeTime,
timestampHash: /** @type {string} */ (tsHash.digest("hex")),
hash: /** @type {string} */ (hash.digest("hex"))
timestampHash: tsHash.digest("hex"),
hash: hash.digest("hex")
})
);
}

lib/ManifestPlugin.js Normal file (176 lines)
View File

@ -0,0 +1,176 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Haijie Xie @hai-x
*/
"use strict";
const path = require("path");
const { RawSource } = require("webpack-sources");
const Compilation = require("./Compilation");
const HotUpdateChunk = require("./HotUpdateChunk");
const createSchemaValidation = require("./util/create-schema-validation");
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("..").StatsCompilation} StatsCompilation */
/** @typedef {import("./Chunk")} Chunk */
/** @typedef {import("./Compilation").Asset} Asset */
/** @typedef {import("./Module")} Module */
/** @typedef {import("./NormalModule")} NormalModule */
/** @typedef {import("./config/defaults").WebpackOptionsNormalizedWithDefaults} WebpackOptions */
/** @typedef {import("../declarations/plugins/ManifestPlugin").ManifestPluginOptions} ManifestPluginOptions */
/** @typedef {import("../declarations/plugins/ManifestPlugin").ManifestObject} ManifestObject */
/** @typedef {import("../declarations/plugins/ManifestPlugin").ManifestItem} ManifestItem */
const PLUGIN_NAME = "ManifestPlugin";
const validate = createSchemaValidation(
require("../schemas/plugins/ManifestPlugin.check"),
() => require("../schemas/plugins/ManifestPlugin.json"),
{
name: "ManifestPlugin",
baseDataPath: "options"
}
);
/**
* @param {string} filename filename
* @returns {string} extname
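* @example
* extname("main.js"); // -> "js"
* extname("output.js.map"); // -> "js.map"
* extname("file.txt?foo"); // -> "txt"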
*/
const extname = (filename) => {
const replaced = filename.replace(/\?.*/, "");
const split = replaced.split(".");
const last = split.pop();
if (!last) return "";
return last && /^(gz|br|map)$/i.test(last) ? `${split.pop()}.${last}` : last;
};
class ManifestPlugin {
/**
* @param {ManifestPluginOptions} options options
*/
constructor(options) {
validate(options);
/** @type {Required<ManifestPluginOptions>} */
this.options = {
filename: "manifest.json",
handler: (manifest) => this._handleManifest(manifest),
...options
};
}
/**
* @param {ManifestObject} manifest manifest object
* @returns {string} manifest content
*/
_handleManifest(manifest) {
return JSON.stringify(
Object.keys(manifest).reduce((acc, cur) => {
acc[cur] = manifest[cur].filePath;
return acc;
}, /** @type {Record<string, string>} */ ({})),
null,
2
);
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, (compilation) => {
compilation.hooks.processAssets.tap(
{
name: PLUGIN_NAME,
stage: Compilation.PROCESS_ASSETS_STAGE_SUMMARIZE
},
() => {
const assets = compilation.getAssets();
const hashDigestLength = compilation.outputOptions.hashDigestLength;
const publicPath = compilation.getPath(
compilation.outputOptions.publicPath
);
/** @type {Set<string>} */
const added = new Set();
/** @type {ManifestObject} */
const manifest = {};
/**
* @param {string} name name
* @returns {string} hash removed name
*/
const removeHash = (name) => {
// Handles hashes that match configured `hashDigestLength`
// i.e. index.XXXX.html -> index.html (html-webpack-plugin)
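// e.g. with the default hashDigestLength of 20:
// "file.3ee037f347c64cc372ad.txt" -> "file.txt"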
if (hashDigestLength <= 0) return name;
const reg = new RegExp(
`(\\.[a-f0-9]{${hashDigestLength},32})(?=\\.)`,
"gi"
);
return name.replace(reg, "");
};
/**
* @param {string} file file
* @param {((file: string) => string)=} namer namer
* @returns {void}
*/
const handleFile = (file, namer) => {
if (added.has(file)) return;
added.add(file);
let name = namer ? namer(file) : file;
const asset = compilation.getAsset(file);
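// Prefer the original source filename recorded on the asset
// (e.g. assets emitted via asset modules or file-loader) over the emitted name.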
if (asset && asset.info.sourceFilename) {
name = path.join(
path.dirname(file),
path.basename(asset.info.sourceFilename)
);
}
manifest[removeHash(name)] = {
filePath: publicPath
? publicPath +
(publicPath.endsWith("/") ? `${file}` : `/${file}`)
: file,
asset
};
};
for (const chunk of compilation.chunks) {
if (chunk instanceof HotUpdateChunk) continue;
const chunkName = chunk.name;
for (const auxiliaryFile of chunk.auxiliaryFiles) {
handleFile(auxiliaryFile, (file) => path.basename(file));
}
for (const file of chunk.files) {
handleFile(file, (file) => {
if (chunkName) return `${chunkName}.${extname(file)}`;
return file;
});
}
}
for (const asset of assets) {
if (asset.info.hotModuleReplacement) {
continue;
}
handleFile(asset.name);
}
compilation.emitAsset(
this.options.filename,
new RawSource(this.options.handler(manifest))
);
}
);
});
}
}
module.exports = ManifestPlugin;

View File

@ -84,7 +84,7 @@ const getHash =
() => {
const hash = createHash(hashFunction);
hash.update(strFn());
const digest = /** @type {string} */ (hash.digest("hex"));
const digest = hash.digest("hex");
return digest.slice(0, 4);
};

View File

@ -1211,7 +1211,7 @@ class NormalModule extends Module {
hash.update("meta");
hash.update(JSON.stringify(this.buildMeta));
/** @type {BuildInfo} */
(this.buildInfo).hash = /** @type {string} */ (hash.digest("hex"));
(this.buildInfo).hash = hash.digest("hex");
}
/**

View File

@ -949,7 +949,16 @@ class RuntimeTemplate {
// when the defaultInterop is used (when a ESM imports a CJS module),
if (exportName.length > 0 && exportName[0] === "default") {
if (isDeferred && exportsType !== "namespace") {
const access = `${importVar}.a${propertyAccess(exportName, 1)}`;
const exportsInfo = moduleGraph.getExportsInfo(module);
const name = exportName.slice(1);
const used = exportsInfo.getUsedName(name, runtime);
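// getUsedName resolves the export through mangling/tree-shaking;
// it returns the used (possibly mangled) name, or a falsy value when unused.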
if (!used) {
const comment = Template.toNormalComment(
`unused export ${propertyAccess(exportName)}`
);
return `${comment} undefined`;
}
const access = `${importVar}.a${propertyAccess(used)}`;
if (isCall || asiSafe === undefined) {
return access;
}

View File

@ -482,14 +482,12 @@ class SourceMapDevToolPlugin {
const sourceMapString = JSON.stringify(sourceMap);
if (sourceMapFilename) {
const filename = file;
const sourceMapContentHash =
/** @type {string} */
(
usesContentHash &&
createHash(compilation.outputOptions.hashFunction)
.update(sourceMapString)
.digest("hex")
);
const sourceMapContentHash = usesContentHash
? createHash(compilation.outputOptions.hashFunction)
.update(sourceMapString)
.digest("hex")
: undefined;
const pathParams = {
chunk,
filename: options.fileContext

View File

@ -244,11 +244,10 @@ class AssetGenerator extends Generator {
hash.update(module.error.toString());
}
const fullContentHash = /** @type {string} */ (
hash.digest(runtimeTemplate.outputOptions.hashDigest)
const fullContentHash = hash.digest(
runtimeTemplate.outputOptions.hashDigest
);
/** @type {string} */
const contentHash = nonNumericOnlyHash(
fullContentHash,
runtimeTemplate.outputOptions.hashDigestLength

View File

@ -34,7 +34,7 @@ class LazyHashedEtag {
if (this._hash === undefined) {
const hash = createHash(this._hashFunction);
this._obj.updateHash(hash);
this._hash = /** @type {string} */ (hash.digest("base64"));
this._hash = hash.digest("base64");
}
return this._hash;
}

View File

@ -470,12 +470,8 @@ class CssModulesPlugin {
hash.update(chunkGraph.getModuleHash(module, chunk.runtime));
}
}
const digest = /** @type {string} */ (hash.digest(hashDigest));
chunk.contentHash.css = nonNumericOnlyHash(
digest,
/** @type {number} */
(hashDigestLength)
);
const digest = hash.digest(hashDigest);
chunk.contentHash.css = nonNumericOnlyHash(digest, hashDigestLength);
});
compilation.hooks.renderManifest.tap(PLUGIN_NAME, (result, options) => {
const { chunkGraph } = compilation;

View File

@ -66,7 +66,7 @@ const CC_LOWER_Z = "z".charCodeAt(0);
const CC_UPPER_A = "A".charCodeAt(0);
const CC_UPPER_F = "F".charCodeAt(0);
const CC_UPPER_E = "E".charCodeAt(0);
const CC_UPPER_U = "E".charCodeAt(0);
const CC_UPPER_U = "U".charCodeAt(0);
const CC_UPPER_Z = "Z".charCodeAt(0);
const CC_0 = "0".charCodeAt(0);
const CC_9 = "9".charCodeAt(0);

View File

@ -64,9 +64,7 @@ const getLocalIdent = (local, module, chunkGraph, runtimeTemplate) => {
hash.update(local);
}
const localIdentHash =
/** @type {string} */
(hash.digest(hashDigest)).slice(0, hashDigestLength);
const localIdentHash = hash.digest(hashDigest).slice(0, hashDigestLength);
return runtimeTemplate.compilation
.getPath(localIdentName, {

View File

@ -378,9 +378,7 @@ class WorkerPlugin {
)}|${i}`;
const hash = createHash(compilation.outputOptions.hashFunction);
hash.update(name);
const digest =
/** @type {string} */
(hash.digest(compilation.outputOptions.hashDigest));
const digest = hash.digest(compilation.outputOptions.hashDigest);
entryOptions.runtime = digest.slice(
0,
compilation.outputOptions.hashDigestLength

View File

@ -16,12 +16,12 @@ const {
getUsedModuleIdsAndModules
} = require("./IdHelpers");
/** @typedef {import("../../declarations/plugins/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions} HashedModuleIdsPluginOptions */
/** @typedef {import("../../declarations/plugins/ids/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions} HashedModuleIdsPluginOptions */
/** @typedef {import("../Compiler")} Compiler */
const validate = createSchemaValidation(
require("../../schemas/plugins/HashedModuleIdsPlugin.check"),
() => require("../../schemas/plugins/HashedModuleIdsPlugin.json"),
require("../../schemas/plugins/ids/HashedModuleIdsPlugin.check"),
() => require("../../schemas/plugins/ids/HashedModuleIdsPlugin.json"),
{
name: "Hashed Module Ids Plugin",
baseDataPath: "options"
@ -37,7 +37,7 @@ class HashedModuleIdsPlugin {
constructor(options = {}) {
validate(options);
/** @type {HashedModuleIdsPluginOptions} */
/** @type {Required<Omit<HashedModuleIdsPluginOptions, "context">> & { context?: string | undefined }} */
this.options = {
context: undefined,
hashFunction: DEFAULTS.HASH_FUNCTION,
@ -73,9 +73,7 @@ class HashedModuleIdsPlugin {
)
);
hash.update(ident || "");
const hashId = /** @type {string} */ (
hash.digest(options.hashDigest)
);
const hashId = hash.digest(options.hashDigest);
let len = options.hashDigestLength;
while (usedIds.has(hashId.slice(0, len))) {
/** @type {number} */ (len)++;

View File

@ -25,7 +25,7 @@ const numberHash = require("../util/numberHash");
const getHash = (str, len, hashFunction) => {
const hash = createHash(hashFunction);
hash.update(str);
const digest = /** @type {string} */ (hash.digest("hex"));
const digest = hash.digest("hex");
return digest.slice(0, len);
};

View File

@ -355,6 +355,9 @@ module.exports = mergeExports(fn, {
get Stats() {
return require("./Stats");
},
get ManifestPlugin() {
return require("./ManifestPlugin");
},
get Template() {
return require("./Template");
},

View File

@ -503,11 +503,10 @@ class JavascriptModulesPlugin {
}
xor.updateHash(hash);
}
const digest = /** @type {string} */ (hash.digest(hashDigest));
const digest = hash.digest(hashDigest);
chunk.contentHash.javascript = nonNumericOnlyHash(
digest,
/** @type {number} */
(hashDigestLength)
hashDigestLength
);
});
compilation.hooks.additionalTreeRuntimeRequirements.tap(

View File

@ -12,6 +12,8 @@ const WebpackError = require("../WebpackError");
const { compareSelect, compareStrings } = require("../util/comparators");
const createHash = require("../util/createHash");
/** @typedef {import("../../declarations/WebpackOptions").HashFunction} HashFunction */
/** @typedef {import("../../declarations/WebpackOptions").HashDigest} HashDigest */
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../Cache").Etag} Etag */
/** @typedef {import("../Compilation").AssetInfo} AssetInfo */
@ -109,8 +111,8 @@ const compilationHooksMap = new WeakMap();
/**
* @typedef {object} RealContentHashPluginOptions
* @property {string | Hash} hashFunction the hash function to use
* @property {string=} hashDigest the hash digest to use
* @property {HashFunction} hashFunction the hash function to use
* @property {HashDigest} hashDigest the hash digest to use
*/
const PLUGIN_NAME = "RealContentHashPlugin";
@ -432,7 +434,7 @@ ${referencingAssets
hash.update(content);
}
const digest = hash.digest(this._hashDigest);
newHash = /** @type {string} */ (digest.slice(0, oldHash.length));
newHash = digest.slice(0, oldHash.length);
}
hashToNewHash.set(oldHash, newHash);
}

View File

@ -55,7 +55,7 @@ const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;
const hashForName = (buffers, hashFunction) => {
const hash = createHash(hashFunction);
for (const buf of buffers) hash.update(buf);
return /** @type {string} */ (hash.digest("hex"));
return hash.digest("hex");
};
const COMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;

View File

@ -117,7 +117,7 @@ const setMapSize = (map, size) => {
const toHash = (buffer, hashFunction) => {
const hash = createHash(hashFunction);
hash.update(buffer);
return /** @type {string} */ (hash.digest("latin1"));
return hash.digest("latin1");
};
const ESCAPE = null;

View File

@ -722,7 +722,7 @@ const SIMPLE_EXTRACTORS = {
}
},
hash: (object, compilation) => {
object.hash = /** @type {string} */ (compilation.hash);
object.hash = compilation.hash;
},
version: (object) => {
object.version = require("../../package.json").version;

View File

@ -5,14 +5,31 @@
"use strict";
/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
class Hash {
/* istanbul ignore next */
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @abstract
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @abstract
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @abstract
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
const AbstractMethodError = require("../AbstractMethodError");
@ -24,8 +41,21 @@ class Hash {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @abstract
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @abstract
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @abstract
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
const AbstractMethodError = require("../AbstractMethodError");
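
To make the new overloads concrete, a small sketch (assuming webpack's `createHash` utility, whose implementation appears later in this diff; `"sha256"` is just an example algorithm name): calling `digest()` with no encoding yields a `Buffer`, while passing an encoding yields a `string`.

```js
const createHash = require("webpack/lib/util/createHash");

const buf = createHash("sha256").update("some data").digest(); // Buffer
const hex = createHash("sha256").update("some data").digest("hex"); // string
```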

View File

@ -7,9 +7,10 @@
const Hash = require("./Hash");
/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
/** @typedef {import("../../declarations/WebpackOptions").HashFunction} HashFunction */
const BULK_SIZE = 2000;
const BULK_SIZE = 3;
// We are using an object instead of a Map as this will stay static during the runtime
// so access to it can be optimized by v8
@ -38,9 +39,22 @@ class BulkUpdateDecorator extends Hash {
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
if (
@ -55,7 +69,11 @@ class BulkUpdateDecorator extends Hash {
this.hash.update(this.buffer);
this.buffer = "";
}
this.hash.update(data, inputEncoding);
if (typeof data === "string" && inputEncoding) {
this.hash.update(data, inputEncoding);
} else {
this.hash.update(data);
}
} else {
this.buffer += data;
if (this.buffer.length > BULK_SIZE) {
@ -71,8 +89,19 @@ class BulkUpdateDecorator extends Hash {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
let digestCache;
@ -91,9 +120,19 @@ class BulkUpdateDecorator extends Hash {
if (buffer.length > 0) {
this.hash.update(buffer);
}
if (!encoding) {
const result = this.hash.digest();
if (digestCache !== undefined) {
digestCache.set(buffer, result);
}
return result;
}
const digestResult = this.hash.digest(encoding);
// Compatibility with the old hash library
const result =
typeof digestResult === "string" ? digestResult : digestResult.toString();
typeof digestResult === "string"
? digestResult
: /** @type {NodeJS.TypedArray} */ (digestResult).toString();
if (digestCache !== undefined) {
digestCache.set(buffer, result);
}
@ -110,9 +149,22 @@ class DebugHash extends Hash {
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
if (typeof data !== "string") data = data.toString("utf8");
@ -132,8 +184,19 @@ class DebugHash extends Hash {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
return Buffer.from(`@webpack-debug-digest@${this.string}`).toString("hex");
@ -186,14 +249,21 @@ module.exports = (algorithm) => {
case "native-md4":
if (crypto === undefined) crypto = require("crypto");
return new BulkUpdateDecorator(
() => /** @type {typeof import("crypto")} */ (crypto).createHash("md4"),
() =>
/** @type {Hash} */ (
/** @type {typeof import("crypto")} */
(crypto).createHash("md4")
),
"md4"
);
default:
if (crypto === undefined) crypto = require("crypto");
return new BulkUpdateDecorator(
() =>
/** @type {typeof import("crypto")} */ (crypto).createHash(algorithm),
/** @type {Hash} */ (
/** @type {typeof import("crypto")} */
(crypto).createHash(algorithm)
),
algorithm
);
}

View File

@ -8,6 +8,8 @@
const Hash = require("../Hash");
const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
class BatchedHash extends Hash {
/**
* @param {Hash} hash hash
@ -21,9 +23,22 @@ class BatchedHash extends Hash {
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
if (this.string !== undefined) {
@ -35,7 +50,11 @@ class BatchedHash extends Hash {
this.string += data;
return this;
}
this.hash.update(this.string, this.encoding);
if (this.encoding) {
this.hash.update(this.string, this.encoding);
} else {
this.hash.update(this.string);
}
this.string = undefined;
}
if (typeof data === "string") {
@ -46,8 +65,10 @@ class BatchedHash extends Hash {
) {
this.string = data;
this.encoding = inputEncoding;
} else {
} else if (inputEncoding) {
this.hash.update(data, inputEncoding);
} else {
this.hash.update(data);
}
} else {
this.hash.update(data);
@ -57,12 +78,30 @@ class BatchedHash extends Hash {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
if (this.string !== undefined) {
this.hash.update(this.string, this.encoding);
if (this.encoding) {
this.hash.update(this.string, this.encoding);
} else {
this.hash.update(this.string);
}
}
if (!encoding) {
return this.hash.digest();
}
return this.hash.digest(encoding);
}

View File

@ -5,13 +5,15 @@
"use strict";
const Hash = require("../Hash");
// 65536 is the size of a wasm memory page
// 64 is the maximum chunk size for every possible wasm hash implementation
// 4 is the maximum number of bytes per char for string encoding (max is utf-8)
// ~3 makes sure that it's always a block of 4 chars, so avoid partially encoded bytes for base64
const MAX_SHORT_STRING = Math.floor((65536 - 64) / 4) & ~3;
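// = 16368 ((65536 - 64) / 4 is already a multiple of 4)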
class WasmHash {
class WasmHash extends Hash {
/**
* @param {WebAssembly.Instance} instance wasm instance
* @param {WebAssembly.Instance[]} instancesPool pool of instances
@ -19,6 +21,8 @@ class WasmHash {
* @param {number} digestSize size of digest returned by wasm
*/
constructor(instance, instancesPool, chunkSize, digestSize) {
super();
const exports = /** @type {EXPECTED_ANY} */ (instance.exports);
exports.init();
this.exports = exports;
@ -35,17 +39,39 @@ class WasmHash {
}
/**
* @param {Buffer | string} data data
* @param {BufferEncoding=} encoding encoding
* @returns {this} itself
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
update(data, encoding) {
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
if (typeof data === "string") {
while (data.length > MAX_SHORT_STRING) {
this._updateWithShortString(data.slice(0, MAX_SHORT_STRING), encoding);
this._updateWithShortString(
data.slice(0, MAX_SHORT_STRING),
/** @type {NodeJS.BufferEncoding} */
(inputEncoding)
);
data = data.slice(MAX_SHORT_STRING);
}
this._updateWithShortString(data, encoding);
this._updateWithShortString(
data,
/** @type {NodeJS.BufferEncoding} */
(inputEncoding)
);
return this;
}
this._updateWithBuffer(data);
@ -136,17 +162,31 @@ class WasmHash {
}
/**
* @param {BufferEncoding} type type
* @returns {Buffer | string} digest
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @returns {Buffer} digest
*/
digest(type) {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {string=} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
const { exports, buffered, mem, digestSize } = this;
exports.final(buffered);
this.instancesPool.push(this);
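// The wasm side leaves the digest as hex characters at the start of its memory;
// read them out via latin1, then convert for other requested encodings.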
const hex = mem.toString("latin1", 0, digestSize);
if (type === "hex") return hex;
if (type === "binary" || !type) return Buffer.from(hex, "hex");
return Buffer.from(hex, "hex").toString(type);
if (encoding === "hex") return hex;
if (encoding === "binary" || !encoding) return Buffer.from(hex, "hex");
return Buffer.from(hex, "hex").toString(
/** @type {NodeJS.BufferEncoding} */ (encoding)
);
}
}

View File

@ -1,6 +1,6 @@
{
"name": "webpack",
"version": "5.102.0",
"version": "5.102.1",
"description": "Packs ECMAScript/CommonJs/AMD modules for the browser. Allows you to split your codebase into multiple bundles, which can be loaded on demand. Supports loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",
"homepage": "https://github.com/webpack/webpack",
"bugs": "https://github.com/webpack/webpack/issues",

File diff suppressed because one or more lines are too long

View File

@ -1568,8 +1568,21 @@
"minLength": 1
},
"HashDigest": {
"description": "Digest type used for the hash.",
"type": "string"
"description": "Digest types used for the hash.",
"enum": [
"base64",
"base64url",
"hex",
"binary",
"utf8",
"utf-8",
"utf16le",
"utf-16le",
"latin1",
"ascii",
"ucs2",
"ucs-2"
]
},
"HashDigestLength": {
"description": "Number of chars which are used for the hash.",

View File

@ -1,6 +0,0 @@
/*
* This file was automatically generated.
* DO NOT MODIFY BY HAND.
* Run `yarn fix:special` to update
*/
const t=/^(?:[A-Za-z]:[\\/]|\\\\|\/)/;function e(r,{instancePath:s="",parentData:n,parentDataProperty:a,rootData:i=r}={}){let o=null,l=0;if(0===l){if(!r||"object"!=typeof r||Array.isArray(r))return e.errors=[{params:{type:"object"}}],!1;{const s=l;for(const t in r)if("context"!==t&&"hashDigest"!==t&&"hashDigestLength"!==t&&"hashFunction"!==t)return e.errors=[{params:{additionalProperty:t}}],!1;if(s===l){if(void 0!==r.context){let s=r.context;const n=l;if(l===n){if("string"!=typeof s)return e.errors=[{params:{type:"string"}}],!1;if(s.includes("!")||!0!==t.test(s))return e.errors=[{params:{}}],!1}var u=n===l}else u=!0;if(u){if(void 0!==r.hashDigest){let t=r.hashDigest;const s=l;if("hex"!==t&&"latin1"!==t&&"base64"!==t)return e.errors=[{params:{}}],!1;u=s===l}else u=!0;if(u){if(void 0!==r.hashDigestLength){let t=r.hashDigestLength;const s=l;if(l===s){if("number"!=typeof t)return e.errors=[{params:{type:"number"}}],!1;if(t<1||isNaN(t))return e.errors=[{params:{comparison:">=",limit:1}}],!1}u=s===l}else u=!0;if(u)if(void 0!==r.hashFunction){let t=r.hashFunction;const s=l,n=l;let a=!1,i=null;const p=l,h=l;let c=!1;const m=l;if(l===m)if("string"==typeof t){if(t.length<1){const t={params:{}};null===o?o=[t]:o.push(t),l++}}else{const t={params:{type:"string"}};null===o?o=[t]:o.push(t),l++}var f=m===l;if(c=c||f,!c){const e=l;if(!(t instanceof Function)){const t={params:{}};null===o?o=[t]:o.push(t),l++}f=e===l,c=c||f}if(c)l=h,null!==o&&(h?o.length=h:o=null);else{const t={params:{}};null===o?o=[t]:o.push(t),l++}if(p===l&&(a=!0,i=0),!a){const t={params:{passingSchemas:i}};return null===o?o=[t]:o.push(t),l++,e.errors=o,!1}l=n,null!==o&&(n?o.length=n:o=null),u=s===l}else u=!0}}}}}return e.errors=o,0===l}module.exports=e,module.exports.default=e;

View File

@ -3,5 +3,5 @@
* DO NOT MODIFY BY HAND.
* Run `yarn fix:special` to update
*/
declare const check: (options: import("../../declarations/plugins/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions) => boolean;
declare const check: (options: import("../../declarations/plugins/ManifestPlugin").ManifestPluginOptions) => boolean;
export = check;

View File

@ -0,0 +1,6 @@
/*
* This file was automatically generated.
* DO NOT MODIFY BY HAND.
* Run `yarn fix:special` to update
*/
const r=/^(?:[A-Za-z]:[\\/]|\\\\|\/)/;function e(t,{instancePath:n="",parentData:a,parentDataProperty:s,rootData:o=t}={}){let i=null,l=0;if(0===l){if(!t||"object"!=typeof t||Array.isArray(t))return e.errors=[{params:{type:"object"}}],!1;{const n=l;for(const r in t)if("filename"!==r&&"handler"!==r)return e.errors=[{params:{additionalProperty:r}}],!1;if(n===l){if(void 0!==t.filename){let n=t.filename;const a=l;if(l===a){if("string"!=typeof n)return e.errors=[{params:{type:"string"}}],!1;if(n.includes("!")||!1!==r.test(n))return e.errors=[{params:{}}],!1;if(n.length<1)return e.errors=[{params:{}}],!1}var f=a===l}else f=!0;if(f)if(void 0!==t.handler){const r=l,n=l;let a=!1,s=null;const o=l;if(!(t.handler instanceof Function)){const r={params:{}};null===i?i=[r]:i.push(r),l++}if(o===l&&(a=!0,s=0),!a){const r={params:{passingSchemas:s}};return null===i?i=[r]:i.push(r),l++,e.errors=i,!1}l=n,null!==i&&(n?i.length=n:i=null),f=r===l}else f=!0}}}return e.errors=i,0===l}module.exports=e,module.exports.default=e;

View File

@ -0,0 +1,51 @@
{
"definitions": {
"HandlerFunction": {
"description": "A function that receives the manifest object and returns the manifest string.",
"instanceof": "Function",
"tsType": "(manifest: ManifestObject) => string"
},
"ManifestItem": {
"description": "Describes a manifest entry that links the emitted path to the producing asset.",
"type": "object",
"additionalProperties": false,
"properties": {
"asset": {
"description": "The compilation asset that produced this manifest entry.",
"tsType": "import('../../lib/Compilation').Asset"
},
"filePath": {
"description": "The public path recorded in the manifest for this asset.",
"type": "string"
}
},
"required": ["filePath"]
},
"ManifestObject": {
"description": "Maps asset identifiers to their manifest entries.",
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/ManifestItem"
},
"tsType": "Record<string, ManifestItem>"
}
},
"title": "ManifestPluginOptions",
"type": "object",
"additionalProperties": false,
"properties": {
"filename": {
"description": "Specifies the filename of the output file on disk. By default the plugin will emit `manifest.json` inside the 'output.path' directory.",
"type": "string",
"absolutePath": false,
"minLength": 1
},
"handler": {
"oneOf": [
{
"$ref": "#/definitions/HandlerFunction"
}
]
}
}
}

View File

@ -0,0 +1,7 @@
/*
* This file was automatically generated.
* DO NOT MODIFY BY HAND.
* Run `yarn fix:special` to update
*/
declare const check: (options: import("../../../declarations/plugins/ids/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions) => boolean;
export = check;

View File

@ -0,0 +1,6 @@
/*
* This file was automatically generated.
* DO NOT MODIFY BY HAND.
* Run `yarn fix:special` to update
*/
const t=/^(?:[A-Za-z]:[\\/]|\\\\|\/)/;function e(s,{instancePath:r="",parentData:n,parentDataProperty:a,rootData:i=s}={}){let o=null,l=0;if(0===l){if(!s||"object"!=typeof s||Array.isArray(s))return e.errors=[{params:{type:"object"}}],!1;{const r=l;for(const t in s)if("context"!==t&&"hashDigest"!==t&&"hashDigestLength"!==t&&"hashFunction"!==t)return e.errors=[{params:{additionalProperty:t}}],!1;if(r===l){if(void 0!==s.context){let r=s.context;const n=l;if(l===n){if("string"!=typeof r)return e.errors=[{params:{type:"string"}}],!1;if(r.includes("!")||!0!==t.test(r))return e.errors=[{params:{}}],!1}var u=n===l}else u=!0;if(u){if(void 0!==s.hashDigest){let t=s.hashDigest;const r=l;if("base64"!==t&&"base64url"!==t&&"hex"!==t&&"binary"!==t&&"utf8"!==t&&"utf-8"!==t&&"utf16le"!==t&&"utf-16le"!==t&&"latin1"!==t&&"ascii"!==t&&"ucs2"!==t&&"ucs-2"!==t)return e.errors=[{params:{}}],!1;u=r===l}else u=!0;if(u){if(void 0!==s.hashDigestLength){let t=s.hashDigestLength;const r=l;if(l===r){if("number"!=typeof t)return e.errors=[{params:{type:"number"}}],!1;if(t<1||isNaN(t))return e.errors=[{params:{comparison:">=",limit:1}}],!1}u=r===l}else u=!0;if(u)if(void 0!==s.hashFunction){let t=s.hashFunction;const r=l,n=l;let a=!1,i=null;const c=l,p=l;let h=!1;const m=l;if(l===m)if("string"==typeof t){if(t.length<1){const t={params:{}};null===o?o=[t]:o.push(t),l++}}else{const t={params:{type:"string"}};null===o?o=[t]:o.push(t),l++}var f=m===l;if(h=h||f,!h){const e=l;if(!(t instanceof Function)){const t={params:{}};null===o?o=[t]:o.push(t),l++}f=e===l,h=h||f}if(h)l=p,null!==o&&(p?o.length=p:o=null);else{const t={params:{}};null===o?o=[t]:o.push(t),l++}if(c===l&&(a=!0,i=0),!a){const t={params:{passingSchemas:i}};return null===o?o=[t]:o.push(t),l++,e.errors=o,!1}l=n,null!==o&&(n?o.length=n:o=null),u=r===l}else u=!0}}}}}return e.errors=o,0===l}module.exports=e,module.exports.default=e;

View File

@ -9,7 +9,7 @@
},
{
"instanceof": "Function",
"tsType": "typeof import('../../lib/util/Hash')"
"tsType": "typeof import('../../../lib/util/Hash')"
}
]
}
@ -25,7 +25,20 @@
},
"hashDigest": {
"description": "The encoding to use when generating the hash, defaults to 'base64'. All encodings from Node.JS' hash.digest are supported.",
"enum": ["hex", "latin1", "base64"]
"enum": [
"base64",
"base64url",
"hex",
"binary",
"utf8",
"utf-8",
"utf16le",
"utf-16le",
"latin1",
"ascii",
"ucs2",
"ucs-2"
]
},
"hashDigestLength": {
"description": "The prefix length of the hash digest to use, defaults to 4.",

View File

@ -6795,13 +6795,27 @@ Object {
"output-hash-digest": Object {
"configs": Array [
Object {
"description": "Digest type used for the hash.",
"description": "Digest types used for the hash.",
"multiple": false,
"path": "output.hashDigest",
"type": "string",
"type": "enum",
"values": Array [
"base64",
"base64url",
"hex",
"binary",
"utf8",
"utf-8",
"utf16le",
"utf-16le",
"latin1",
"ascii",
"ucs2",
"ucs-2",
],
},
],
"description": "Digest type used for the hash.",
"description": "Digest types used for the hash.",
"multiple": false,
"simpleType": "string",
},

View File

@ -0,0 +1,4 @@
{
"foo": "bar",
"nested": { "foo": "bar" }
}

View File

@ -0,0 +1,18 @@
import defer * as mod1 from "./file.ext" with { type: "bytes" };
import defer * as mod2 from "./file.ext" with { type: "json" };
import * as mod3 from "./file.ext" with { type: "bytes" };
import * as mod4 from "./file.ext" with { type: "json" };
it("should work with defer and import attributes", () => {
const decoder = new TextDecoder('utf-8');
const mod1Decoded = JSON.parse(decoder.decode(mod1.default));
expect(mod1Decoded.foo).toBe("bar");
expect(mod1Decoded.nested.foo).toBe("bar");
expect(mod2.default.foo).toBe("bar");
expect(mod2.default.nested.foo).toBe("bar");
const mod3Decoded = JSON.parse(decoder.decode(mod3.default));
expect(mod3Decoded.foo).toBe("bar");
expect(mod3Decoded.nested.foo).toBe("bar");
expect(mod4.default.foo).toBe("bar");
expect(mod4.default.nested.foo).toBe("bar");
});

View File

@ -0,0 +1,5 @@
"use strict";
const supportsTextDecoder = require("../../../helpers/supportsTextDecoder");
module.exports = () => supportsTextDecoder();

View File

@ -0,0 +1,9 @@
"use strict";
/** @type {import("../../../../").Configuration} */
module.exports = {
target: [`async-node${process.versions.node.split(".").map(Number)[0]}`],
experiments: {
deferImport: true
}
};

View File

@ -0,0 +1 @@
file

View File

@ -0,0 +1,30 @@
import fs from "fs";
import path from "path";
import url from "../../asset-modules/_images/file.png";
import(/* webpackChunkName: 'file' */ "./file.txt?foo");
it("should emit manifest with expected entries and paths with function publicPath", () => {
expect(url).toEqual("/dist/file-loader.png");
const manifest = JSON.parse(
fs.readFileSync(path.resolve(__dirname, "bar.json"), "utf-8")
);
const keys = Object.keys(manifest).sort();
expect(keys).toEqual(
[
"file.js",
"file.txt?foo",
"main.js",
"third.party.js",
"file.png"
].sort()
);
expect(manifest["main.js"]).toMatch(/\/dist\/bundle1\.js/);
expect(manifest["file.js"]).toMatch(/\/dist\/file\.[a-f0-9]+\.js/);
expect(manifest["file.txt?foo"]).toMatch(/\/dist\/file\.[a-f0-9]+\.txt\?foo/);
expect(manifest["third.party.js"]).toBe("/dist/third.party.js");
expect(manifest["file.png"]).toBe("/dist/file-loader.png");
});

View File

@ -0,0 +1,30 @@
import fs from "fs";
import path from "path";
import url from "../../asset-modules/_images/file.png";
import(/* webpackChunkName: 'file' */ "./file.txt?foo");
it("should emit manifest with expected entries and paths with string publicPath", () => {
expect(url).toEqual("/app/file-loader.png");
const manifest = JSON.parse(
fs.readFileSync(path.resolve(__dirname, "foo.json"), "utf-8")
);
const keys = Object.keys(manifest).sort();
expect(keys).toEqual(
[
"file.js",
"file.txt?foo",
"main.js",
"third.party.js",
"file.png"
].sort()
);
expect(manifest["main.js"]).toMatch(/\/app\/bundle0\.js/);
expect(manifest["file.js"]).toMatch(/\/app\/file\.[a-f0-9]+\.js/);
expect(manifest["file.txt?foo"]).toMatch(/\/app\/file\.[a-f0-9]+\.txt\?foo/);
expect(manifest["third.party.js"]).toBe("/app/third.party.js");
expect(manifest["file.png"]).toBe("/app/file-loader.png");
});

View File

@ -0,0 +1,8 @@
"use strict";
module.exports = [
// each time returns different OriginalSource in webpack.config.js:33
// this prevents hit in inmemory cache
/^Pack got invalid because of write to: RealContentHashPlugin|analyse|third.party.js$/,
/^Pack got invalid because of write to: RealContentHashPlugin|analyse|third.party.js$/
];

View File

@ -0,0 +1,96 @@
"use strict";
const { RawSource } = require("webpack-sources");
const webpack = require("../../../../");
/** @typedef {import("../../../../lib/Compiler")} Compiler */
class CopyPlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
const hookOptions = {
name: "MockCopyPlugin",
stage: webpack.Compilation.PROCESS_ASSETS_STAGE_ADDITIONS
};
compiler.hooks.thisCompilation.tap(hookOptions, (compilation) => {
compilation.hooks.processAssets.tap(hookOptions, () => {
const output = "// some compilation result\n";
compilation.emitAsset("third.party.js", new RawSource(output));
});
});
}
}
/** @type {import("../../../../").Configuration[]} */
module.exports = [
{
node: {
__dirname: false,
__filename: false
},
output: {
publicPath: "/app/",
chunkFilename: "[name].[contenthash].js",
assetModuleFilename: "[name].[contenthash][ext][query]"
},
plugins: [
new CopyPlugin(),
new webpack.ManifestPlugin({
filename: "foo.json"
})
],
module: {
rules: [
{
test: /\.txt$/,
type: "asset/resource"
},
{
test: /\.png$/,
loader: "file-loader",
options: {
name: "file-loader.[ext]"
}
}
]
}
},
{
entry: "./index-2.js",
node: {
__dirname: false,
__filename: false
},
output: {
publicPath: (_data) => "/dist/",
chunkFilename: "[name].[contenthash].js",
assetModuleFilename: "[name].[contenthash][ext][query]"
},
plugins: [
new CopyPlugin(),
new webpack.ManifestPlugin({
filename: "bar.json"
})
],
module: {
rules: [
{
test: /\.txt$/,
type: "asset/resource"
},
{
test: /\.png$/,
loader: "file-loader",
options: {
name: "file-loader.[ext]"
}
}
]
}
}
];

View File
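For orientation, the first configuration above (string publicPath "/app/", manifest filename foo.json) should emit a manifest shaped roughly like the sketch below; the <contenthash> segments are placeholders for the hex hashes the tests match with /[a-f0-9]+/:

{
  "main.js": "/app/bundle0.js",
  "file.js": "/app/file.<contenthash>.js",
  "file.txt?foo": "/app/file.<contenthash>.txt?foo",
  "third.party.js": "/app/third.party.js",
  "file.png": "/app/file-loader.png"
}

The second configuration emits the same shape into bar.json, with the function-provided "/dist/" prefix and bundle1.js as the main bundle.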

@ -1,7 +1,7 @@
"use strict";
module.exports = [
// webpack.config.js:78 returns a different OriginalSource on each run,
// webpack.config.js:108 returns a different OriginalSource on each run,
// which prevents a hit in the in-memory cache
/^Pack got invalid because of write to: RealContentHashPlugin|analyse|index\.html$/
];

types.d.ts (vendored, 416 lines changed)
View File

@ -989,18 +989,18 @@ declare interface Bootstrap {
allowInlineStartup: boolean;
}
type BufferEncoding =
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex";
| "ascii"
| "ucs2"
| "ucs-2";
type BufferEncodingOption = "buffer" | { encoding: "buffer" };
declare interface BufferEntry {
map?: null | RawSourceMap;
@ -4518,18 +4518,18 @@ declare class EnableWasmLoadingPlugin {
type EncodingOption =
| undefined
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| ObjectEncodingOptions;
type Entry =
| string
@ -6242,13 +6242,36 @@ declare class Hash {
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
*/
update(data: string | Buffer, inputEncoding?: string): Hash;
update(data: string | Buffer): Hash;
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
*/
update(data: string, inputEncoding: HashDigest): Hash;
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
*/
digest(encoding?: string): string | Buffer;
digest(): Buffer;
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
*/
digest(encoding: HashDigest): string;
}
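// Illustrative usage (not part of types.d.ts): with the split overloads,
// digest() with no argument is typed as Buffer, while digest("hex") narrows
// the return type to string, so callers no longer need casts:
//   const hash = require("webpack").util.createHash("xxhash64");
//   hash.update("some content");
//   const asBuffer = hash.digest();      // Buffer
//   const asString = hash.digest("hex"); // string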
type HashDigest =
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2";
type HashFunction = string | typeof Hash;
declare interface HashLike {
/**
@ -6266,7 +6289,9 @@ declare interface HashableObject {
}
declare class HashedModuleIdsPlugin {
constructor(options?: HashedModuleIdsPluginOptions);
options: HashedModuleIdsPluginOptions;
options: Required<Omit<HashedModuleIdsPluginOptions, "context">> & {
context?: string;
};
/**
* Apply the plugin
@ -6282,7 +6307,19 @@ declare interface HashedModuleIdsPluginOptions {
/**
* The encoding to use when generating the hash, defaults to 'base64'. All encodings from Node.js' hash.digest are supported.
*/
hashDigest?: "base64" | "latin1" | "hex";
hashDigest?:
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2";
/**
* The prefix length of the hash digest to use, defaults to 4.
@ -6624,18 +6661,18 @@ declare interface IntermediateFileSystemExtras {
createWriteStream: (
pathLike: PathLikeFs,
result?:
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| WriteStreamOptions
) => NodeJS.WritableStream;
open: Open;
@ -9890,6 +9927,44 @@ declare interface MakeDirectoryOptions {
recursive?: boolean;
mode?: string | number;
}
/**
* Describes a manifest entry that links the emitted path to the producing asset.
*/
declare interface ManifestItem {
/**
* The compilation asset that produced this manifest entry.
*/
asset?: Asset;
/**
* The public path recorded in the manifest for this asset.
*/
filePath: string;
}
declare interface ManifestObject {
[index: string]: ManifestItem;
}
declare class ManifestPlugin {
constructor(options: ManifestPluginOptions);
options: Required<ManifestPluginOptions>;
/**
* Apply the plugin
*/
apply(compiler: Compiler): void;
}
declare interface ManifestPluginOptions {
/**
* Specifies the filename of the output file on disk. By default the plugin will emit `manifest.json` inside the `output.path` directory.
*/
filename?: string;
/**
* A function that receives the manifest object and returns the manifest string.
*/
handler?: (manifest: ManifestObject) => string;
}
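// Illustrative usage (not part of types.d.ts): a config sketch wiring up a
// custom handler; the handler receives the ManifestObject (entry name ->
// { asset?, filePath }) and must return the serialized manifest string. The
// filename and formatting below are hypothetical, not defaults:
//   new webpack.ManifestPlugin({
//     filename: "assets-manifest.json",
//     handler: (manifest) =>
//       JSON.stringify(
//         Object.fromEntries(
//           Object.entries(manifest).map(([name, item]) => [name, item.filePath])
//         ),
//         null,
//         2
//       )
//   })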
declare interface MapOptions {
/**
* need columns?
@ -11704,7 +11779,7 @@ declare interface NormalModuleLoaderContext<OptionsType> {
mode: "none" | "development" | "production";
webpack?: boolean;
hashFunction: HashFunction;
hashDigest: string;
hashDigest: HashDigest;
hashDigestLength: number;
hashSalt?: string;
_module?: NormalModule;
@ -11790,18 +11865,18 @@ declare interface ObjectDeserializerContext {
declare interface ObjectEncodingOptions {
encoding?:
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex";
| "ascii"
| "ucs2"
| "ucs-2";
}
declare interface ObjectSerializer {
serialize: (value: any, context: ObjectSerializerContext) => void;
@ -12705,9 +12780,21 @@ declare interface Output {
globalObject?: string;
/**
* Digest type used for the hash.
* Digest types used for the hash.
*/
hashDigest?: string;
hashDigest?:
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2";
/**
* Number of chars which are used for the hash.
@ -12874,18 +12961,18 @@ declare interface OutputFileSystem {
createReadStream?: (
path: PathLikeFs,
options?:
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| ReadStreamOptions
) => NodeJS.ReadableStream;
join?: (path1: string, path2: string) => string;
@ -13020,9 +13107,21 @@ declare interface OutputNormalized {
globalObject?: string;
/**
* Digest type used for the hash.
* Digest types used for the hash.
*/
hashDigest?: string;
hashDigest?:
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2";
/**
* Number of chars which are used for the hash.
@ -13198,7 +13297,21 @@ type OutputNormalizedWithDefaults = OutputNormalized & {
path: string;
pathinfo: NonNullable<undefined | boolean | "verbose">;
hashFunction: NonNullable<undefined | string | typeof Hash>;
hashDigest: string;
hashDigest: NonNullable<
| undefined
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
>;
hashDigestLength: number;
chunkLoadTimeout: number;
chunkLoading: NonNullable<undefined | string | false>;
@ -13890,19 +14003,19 @@ declare interface ReadFileFs {
(
path: PathOrFileDescriptorFs,
options:
| ({ encoding: BufferEncoding; flag?: string } & Abortable)
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex",
| ({ encoding: BufferEncoding; flag?: string } & Abortable),
callback: (err: null | NodeJS.ErrnoException, result?: string) => void
): void;
(
@ -13910,18 +14023,18 @@ declare interface ReadFileFs {
options:
| undefined
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| (ObjectEncodingOptions & { flag?: string } & Abortable),
callback: (
err: null | NodeJS.ErrnoException,
@ -13941,36 +14054,36 @@ declare interface ReadFileSync {
(
path: PathOrFileDescriptorFs,
options:
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| { encoding: BufferEncoding; flag?: string }
): string;
(
path: PathOrFileDescriptorFs,
options?:
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| (ObjectEncodingOptions & { flag?: string })
): string | Buffer;
}
@ -13986,18 +14099,18 @@ declare interface ReadFileTypes {
(
path: PathOrFileDescriptorTypes,
options:
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| ({ encoding: BufferEncoding; flag?: string } & Abortable),
callback: (err: null | NodeJS.ErrnoException, result?: string) => void
): void;
@ -14006,18 +14119,18 @@ declare interface ReadFileTypes {
options:
| undefined
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| (ObjectEncodingOptions & { flag?: string } & Abortable),
callback: (
err: null | NodeJS.ErrnoException,
@ -14039,33 +14152,33 @@ declare interface ReaddirFs {
options:
| undefined
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| {
encoding:
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex";
| "ascii"
| "ucs2"
| "ucs-2";
withFileTypes?: false;
recursive?: boolean;
},
@ -14083,18 +14196,18 @@ declare interface ReaddirFs {
options:
| undefined
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| (ObjectEncodingOptions & {
withFileTypes?: false;
recursive?: boolean;
@ -14133,33 +14246,33 @@ declare interface ReaddirSync {
path: PathLikeFs,
options?:
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| {
encoding:
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex";
| "ascii"
| "ucs2"
| "ucs-2";
withFileTypes?: false;
recursive?: boolean;
}
@ -14174,18 +14287,18 @@ declare interface ReaddirSync {
path: PathLikeFs,
options?:
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| (ObjectEncodingOptions & { withFileTypes?: false; recursive?: boolean })
): string[] | Buffer[];
(
@ -14206,33 +14319,33 @@ declare interface ReaddirTypes {
options:
| undefined
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| {
encoding:
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex";
| "ascii"
| "ucs2"
| "ucs-2";
withFileTypes?: false;
recursive?: boolean;
},
@ -14250,18 +14363,18 @@ declare interface ReaddirTypes {
options:
| undefined
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| (ObjectEncodingOptions & {
withFileTypes?: false;
recursive?: boolean;
@ -14363,12 +14476,12 @@ declare interface RealContentHashPluginOptions {
/**
* the hash function to use
*/
hashFunction: string | typeof Hash;
hashFunction: HashFunction;
/**
* the hash digest to use
*/
hashDigest?: string;
hashDigest: HashDigest;
}
declare interface RealDependencyLocation {
start: SourcePosition;
@ -17758,18 +17871,18 @@ declare interface StreamChunksOptions {
declare interface StreamOptions {
flags?: string;
encoding?:
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex";
| "ascii"
| "ucs2"
| "ucs-2";
fd?: any;
mode?: number;
autoClose?: boolean;
@ -18558,18 +18671,18 @@ declare interface WriteFile {
}
type WriteFileOptions =
| null
| "ascii"
| "base64"
| "base64url"
| "hex"
| "binary"
| "utf8"
| "utf-8"
| "utf16le"
| "utf-16le"
| "latin1"
| "ascii"
| "ucs2"
| "ucs-2"
| "base64"
| "base64url"
| "latin1"
| "binary"
| "hex"
| (ObjectEncodingOptions &
Abortable & { mode?: string | number; flag?: string; flush?: boolean });
declare interface WriteOnlySet<T> {
@ -19269,6 +19382,7 @@ declare namespace exports {
EntryPlugin as SingleEntryPlugin,
SourceMapDevToolPlugin,
Stats,
ManifestPlugin,
Template,
WatchIgnorePlugin,
WebpackError,