mirror of https://github.com/webpack/webpack.git
Compare commits
4 Commits
d23a6f238a ... bc09f872b4
Author | SHA1 | Date
---|---|---
 | bc09f872b4 |
 | d32f1711ac |
 | 436fc7d9da |
 | fad1bc1f32 |
@@ -97,7 +97,7 @@ jobs:
       - run: yarn link webpack --frozen-lockfile
       - name: Run benchmarks
-        uses: CodSpeedHQ/action@3959e9e296ef25296e93e32afcc97196f966e57f # v4.1.0
+        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
        with:
          run: yarn benchmark --ci
          mode: "instrumentation"
@@ -537,9 +537,21 @@ export type Filename = FilenameTemplate;
  */
 export type GlobalObject = string;
 /**
- * Digest type used for the hash.
+ * Digest types used for the hash.
  */
-export type HashDigest = string;
+export type HashDigest =
+	| "base64"
+	| "base64url"
+	| "hex"
+	| "binary"
+	| "utf8"
+	| "utf-8"
+	| "utf16le"
+	| "utf-16le"
+	| "latin1"
+	| "ascii"
+	| "ucs2"
+	| "ucs-2";
 /**
  * Number of chars which are used for the hash.
  */
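The values in the new union are exactly Node.js digest/`Buffer` encodings. A minimal, hypothetical `webpack.config.js` using the narrowed option (names and values here are illustrative, not taken from the diff):

```js
// Hypothetical config sketch: `hashDigest` must now be one of the listed
// encodings; an arbitrary string no longer validates.
module.exports = {
	output: {
		hashFunction: "xxhash64",
		hashDigest: "hex", // or "base64", "base64url", "latin1", ...
		hashDigestLength: 16
	}
};
```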
@@ -2157,7 +2169,7 @@ export interface Output {
 	 */
 	globalObject?: GlobalObject;
 	/**
-	 * Digest type used for the hash.
+	 * Digest types used for the hash.
 	 */
 	hashDigest?: HashDigest;
 	/**
@@ -3653,7 +3665,7 @@ export interface OutputNormalized {
 	 */
 	globalObject?: GlobalObject;
 	/**
-	 * Digest type used for the hash.
+	 * Digest types used for the hash.
 	 */
 	hashDigest?: HashDigest;
 	/**
@@ -7,7 +7,7 @@
 /**
  * Algorithm used for generation the hash (see node.js crypto package).
  */
-export type HashFunction = string | typeof import("../../lib/util/Hash");
+export type HashFunction = string | typeof import("../../../lib/util/Hash");

 export interface HashedModuleIdsPluginOptions {
 	/**
@@ -17,7 +17,19 @@ export interface HashedModuleIdsPluginOptions {
 	/**
 	 * The encoding to use when generating the hash, defaults to 'base64'. All encodings from Node.JS' hash.digest are supported.
 	 */
-	hashDigest?: "hex" | "latin1" | "base64";
+	hashDigest?:
+		| "base64"
+		| "base64url"
+		| "hex"
+		| "binary"
+		| "utf8"
+		| "utf-8"
+		| "utf16le"
+		| "utf-16le"
+		| "latin1"
+		| "ascii"
+		| "ucs2"
+		| "ucs-2";
 	/**
 	 * The prefix length of the hash digest to use, defaults to 4.
 	 */
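A usage sketch for the widened plugin option, assuming the plugin is reached via `webpack.ids` as in webpack 5 (the specific option values are illustrative):

```js
const webpack = require("webpack");

module.exports = {
	plugins: [
		new webpack.ids.HashedModuleIdsPlugin({
			hashFunction: "md4",
			hashDigest: "base64url", // newly allowed; previously only "hex" | "latin1" | "base64"
			hashDigestLength: 4
		})
	]
};
```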
@@ -1670,7 +1670,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
 			for (const type of sourceTypes) hash.update(type);
 		}
 		this.moduleGraph.getExportsInfo(module).updateHash(hash, runtime);
-		return BigInt(`0x${/** @type {string} */ (hash.digest("hex"))}`);
+		return BigInt(`0x${hash.digest("hex")}`);
 	});
 	return graphHash;
 }
@@ -1808,7 +1808,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
 			}
 		}
 		hash.update(graphHash);
-		return /** @type {string} */ (hash.digest("hex"));
+		return hash.digest("hex");
 	});
 }

@@ -141,7 +141,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
 	if (info.runtimeRequirements) {
 		for (const rr of info.runtimeRequirements) hash.update(rr);
 	}
-	return (info.hash = /** @type {string} */ (hash.digest("hex")));
+	return (info.hash = hash.digest("hex"));
 }

 /**
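The next several hunks all make the same mechanical change: `/** @type {string} */` casts around `hash.digest(...)` are dropped, because the `@overload` JSDoc added to the abstract `Hash` class (evidently `lib/util/Hash.js`, per the hunks further below) makes `digest(encoding)` return `string` directly. A minimal before/after sketch, assuming webpack's internal `createHash` helper:

```js
const createHash = require("webpack/lib/util/createHash"); // internal helper

const hash = createHash("md4");
hash.update("some content");

// Before: digest() was typed `string | Buffer`, so callers had to cast:
//   const hex = /** @type {string} */ (hash.digest("hex"));
// After: the string overload is selected by the explicit encoding:
const hex = hash.digest("hex");
console.log(hex.slice(0, 8)); // string methods type-check without a cast
```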
@@ -4378,7 +4378,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
 						runtime,
 						runtimeTemplate
 					});
-					moduleHashDigest = /** @type {string} */ (moduleHash.digest(hashDigest));
+					moduleHashDigest = moduleHash.digest(hashDigest);
 				} catch (err) {
 					errors.push(new ModuleHashingError(module, /** @type {Error} */ (err)));
 					moduleHashDigest = "XXXXXX";
@@ -4601,9 +4601,7 @@ This prevents using hashes of each other and should be avoided.`);
 				moduleGraph: this.moduleGraph,
 				runtimeTemplate: this.runtimeTemplate
 			});
-			const chunkHashDigest = /** @type {string} */ (
-				chunkHash.digest(hashDigest)
-			);
+			const chunkHashDigest = chunkHash.digest(hashDigest);
 			hash.update(chunkHashDigest);
 			chunk.hash = chunkHashDigest;
 			chunk.renderedHash = chunk.hash.slice(0, hashDigestLength);
@@ -4637,7 +4635,7 @@ This prevents using hashes of each other and should be avoided.`);
 		this.logger.timeAggregateEnd("hashing: hash chunks");
 		this.logger.time("hashing: hash digest");
 		this.hooks.fullHash.call(hash);
-		this.fullHash = /** @type {string} */ (hash.digest(hashDigest));
+		this.fullHash = hash.digest(hashDigest);
 		this.hash = this.fullHash.slice(0, hashDigestLength);
 		this.logger.timeEnd("hashing: hash digest");

@@ -4652,9 +4650,7 @@ This prevents using hashes of each other and should be avoided.`);
 					runtime: chunk.runtime,
 					runtimeTemplate
 				});
-				const moduleHashDigest = /** @type {string} */ (
-					moduleHash.digest(hashDigest)
-				);
+				const moduleHashDigest = moduleHash.digest(hashDigest);
 				const oldHash = chunkGraph.getModuleHash(module, chunk.runtime);
 				chunkGraph.setModuleHashes(
 					module,
@@ -4671,9 +4667,7 @@ This prevents using hashes of each other and should be avoided.`);
 			const chunkHash = createHash(hashFunction);
 			chunkHash.update(chunk.hash);
 			chunkHash.update(this.hash);
-			const chunkHashDigest =
-				/** @type {string} */
-				(chunkHash.digest(hashDigest));
+			const chunkHashDigest = chunkHash.digest(hashDigest);
 			chunk.hash = chunkHashDigest;
 			chunk.renderedHash = chunk.hash.slice(0, hashDigestLength);
 			this.hooks.contentHash.call(chunk);
@@ -830,7 +830,7 @@ class DefinePlugin {

 				compilation.valueCacheVersions.set(
 					VALUE_DEP_MAIN,
-					/** @type {string} */ (mainHash.digest("hex").slice(0, 8))
+					mainHash.digest("hex").slice(0, 8)
 				);
 			}
 		);
@@ -48,7 +48,7 @@ class DependencyTemplates {
 	updateHash(part) {
 		const hash = createHash(this._hashFunction);
 		hash.update(`${this._hash}${part}`);
-		this._hash = /** @type {string} */ (hash.digest("hex"));
+		this._hash = hash.digest("hex");
 	}

 	getHash() {
@@ -3350,7 +3350,7 @@ class FileSystemInfo {

 		hash.update(/** @type {string | Buffer} */ (content));

-		const digest = /** @type {string} */ (hash.digest("hex"));
+		const digest = hash.digest("hex");

 		this._fileHashes.set(path, digest);

@@ -3618,7 +3618,7 @@ class FileSystemInfo {
 			}
 		}

-		const digest = /** @type {string} */ (hash.digest("hex"));
+		const digest = hash.digest("hex");
 		/** @type {ContextFileSystemInfoEntry} */
 		const result = {
 			safeTime,
@@ -3681,7 +3681,7 @@ class FileSystemInfo {
 					null,
 					(entry.resolved = {
 						safeTime,
-						timestampHash: /** @type {string} */ (hash.digest("hex"))
+						timestampHash: hash.digest("hex")
 					})
 				);
 			}
@@ -3743,7 +3743,7 @@ class FileSystemInfo {

 		/** @type {ContextHash} */
 		const result = {
-			hash: /** @type {string} */ (hash.digest("hex"))
+			hash: hash.digest("hex")
 		};
 		if (symlinks) result.symlinks = symlinks;
 		return result;
@@ -3790,10 +3790,7 @@ class FileSystemInfo {
 				for (const h of hashes) {
 					hash.update(h);
 				}
-				callback(
-					null,
-					(entry.resolved = /** @type {string} */ (hash.digest("hex")))
-				);
+				callback(null, (entry.resolved = hash.digest("hex")));
 			}
 		);
 	}
@@ -3910,8 +3907,8 @@ class FileSystemInfo {
 				/** @type {ContextTimestampAndHash} */
 				const result = {
 					safeTime,
-					timestampHash: /** @type {string} */ (tsHash.digest("hex")),
-					hash: /** @type {string} */ (hash.digest("hex"))
+					timestampHash: tsHash.digest("hex"),
+					hash: hash.digest("hex")
 				};
 				if (symlinks) result.symlinks = symlinks;
 				return result;
@@ -3979,8 +3976,8 @@ class FileSystemInfo {
 					null,
 					(entry.resolved = {
 						safeTime,
-						timestampHash: /** @type {string} */ (tsHash.digest("hex")),
-						hash: /** @type {string} */ (hash.digest("hex"))
+						timestampHash: tsHash.digest("hex"),
+						hash: hash.digest("hex")
 					})
 				);
 			}
@@ -84,7 +84,7 @@ const getHash =
 	() => {
 		const hash = createHash(hashFunction);
 		hash.update(strFn());
-		const digest = /** @type {string} */ (hash.digest("hex"));
+		const digest = hash.digest("hex");
 		return digest.slice(0, 4);
 	};

@@ -1211,7 +1211,7 @@ class NormalModule extends Module {
 		hash.update("meta");
 		hash.update(JSON.stringify(this.buildMeta));
 		/** @type {BuildInfo} */
-		(this.buildInfo).hash = /** @type {string} */ (hash.digest("hex"));
+		(this.buildInfo).hash = hash.digest("hex");
 	}

 	/**
@@ -482,14 +482,12 @@ class SourceMapDevToolPlugin {
 					const sourceMapString = JSON.stringify(sourceMap);
 					if (sourceMapFilename) {
 						const filename = file;
-						const sourceMapContentHash =
-							/** @type {string} */
-							(
-								usesContentHash &&
-									createHash(compilation.outputOptions.hashFunction)
-										.update(sourceMapString)
-										.digest("hex")
-							);
+						const sourceMapContentHash = usesContentHash
+							? createHash(compilation.outputOptions.hashFunction)
+									.update(sourceMapString)
+									.digest("hex")
+							: undefined;
+
 						const pathParams = {
 							chunk,
 							filename: options.fileContext
@@ -244,11 +244,10 @@ class AssetGenerator extends Generator {
 			hash.update(module.error.toString());
 		}

-		const fullContentHash = /** @type {string} */ (
-			hash.digest(runtimeTemplate.outputOptions.hashDigest)
-		);
+		const fullContentHash = hash.digest(
+			runtimeTemplate.outputOptions.hashDigest
+		);

-		/** @type {string} */
 		const contentHash = nonNumericOnlyHash(
 			fullContentHash,
 			runtimeTemplate.outputOptions.hashDigestLength
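For context, `nonNumericOnlyHash` guards against truncated digests that consist only of digits, which some tooling would then treat as numbers. A rough sketch of the idea — simplified, not the exact implementation (the real helper derives the replacement letter from the digits):

```js
// Rough sketch: if the truncated hash is all digits, force a leading letter.
const nonNumericOnlyHashSketch = (hash, hashLength) => {
	if (hashLength < 1) return "";
	const slice = hash.slice(0, hashLength);
	if (/[^\d]/.test(slice)) return slice; // already contains a non-digit
	return `a${slice.slice(1)}`; // simplified replacement of the first char
};

console.log(nonNumericOnlyHashSketch("123456abcdef", 4)); // "1234" -> "a234"
```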
@@ -34,7 +34,7 @@ class LazyHashedEtag {
 		if (this._hash === undefined) {
 			const hash = createHash(this._hashFunction);
 			this._obj.updateHash(hash);
-			this._hash = /** @type {string} */ (hash.digest("base64"));
+			this._hash = hash.digest("base64");
 		}
 		return this._hash;
 	}
@@ -470,12 +470,8 @@ class CssModulesPlugin {
 					hash.update(chunkGraph.getModuleHash(module, chunk.runtime));
 				}
 			}
-			const digest = /** @type {string} */ (hash.digest(hashDigest));
-			chunk.contentHash.css = nonNumericOnlyHash(
-				digest,
-				/** @type {number} */
-				(hashDigestLength)
-			);
+			const digest = hash.digest(hashDigest);
+			chunk.contentHash.css = nonNumericOnlyHash(digest, hashDigestLength);
 		});
 		compilation.hooks.renderManifest.tap(PLUGIN_NAME, (result, options) => {
 			const { chunkGraph } = compilation;
@@ -64,9 +64,7 @@ const getLocalIdent = (local, module, chunkGraph, runtimeTemplate) => {
 		hash.update(local);
 	}

-	const localIdentHash =
-		/** @type {string} */
-		(hash.digest(hashDigest)).slice(0, hashDigestLength);
+	const localIdentHash = hash.digest(hashDigest).slice(0, hashDigestLength);

 	return runtimeTemplate.compilation
 		.getPath(localIdentName, {
@@ -378,9 +378,7 @@ class WorkerPlugin {
 					)}|${i}`;
 					const hash = createHash(compilation.outputOptions.hashFunction);
 					hash.update(name);
-					const digest =
-						/** @type {string} */
-						(hash.digest(compilation.outputOptions.hashDigest));
+					const digest = hash.digest(compilation.outputOptions.hashDigest);
 					entryOptions.runtime = digest.slice(
 						0,
 						compilation.outputOptions.hashDigestLength
@@ -376,28 +376,40 @@ class LazyCompilationPlugin {
 	apply(compiler) {
 		/** @type {BackendApi} */
 		let backend;
-		compiler.hooks.beforeCompile.tapAsync(PLUGIN_NAME, (params, callback) => {
-			if (backend !== undefined) return callback();
-			const promise = this.backend(compiler, (err, result) => {
-				if (err) return callback(err);
-				backend = /** @type {BackendApi} */ (result);
-				callback();
-			});
-			if (promise && promise.then) {
-				promise.then((b) => {
-					backend = b;
-					callback();
-				}, callback);
-			}
-		});
+		compiler.hooks.beforeCompile.tapAsync(
+			PLUGIN_NAME,
+			(params, callback) => {
+				if (backend !== undefined) return callback();
+				const promise = this.backend(compiler, (err, result) => {
+					if (err) return callback(err);
+					backend = /** @type {BackendApi} */ (result);
+					callback();
+				});
+				if (promise && promise.then) {
+					promise.then((b) => {
+						backend = b;
+						callback();
+					}, callback);
+				}
+			}
+		);
 		compiler.hooks.thisCompilation.tap(
 			PLUGIN_NAME,
+			/**
+			 * @param {import("../Compilation")} compilation
+			 * @param {{ normalModuleFactory: import("../NormalModuleFactory") }} param1
+			 */
 			(compilation, { normalModuleFactory }) => {
 				normalModuleFactory.hooks.module.tap(
 					PLUGIN_NAME,
+					/**
+					 * @param {Module} module
+					 * @param {*} createData
+					 * @param {*} resolveData
+					 */
 					(module, createData, resolveData) => {
 						if (
 							resolveData.dependencies.every((dep) =>
 								HMR_DEPENDENCY_TYPES.has(dep.type)
 							)
 						) {

@@ -457,7 +469,7 @@ class LazyCompilationPlugin {
 				);
 			}
 		);
 		compiler.hooks.shutdown.tapAsync(PLUGIN_NAME, (callback) => {
 			backend.dispose(callback);
 		});
 	}
@@ -16,12 +16,12 @@ const {
 	getUsedModuleIdsAndModules
 } = require("./IdHelpers");

-/** @typedef {import("../../declarations/plugins/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions} HashedModuleIdsPluginOptions */
+/** @typedef {import("../../declarations/plugins/ids/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions} HashedModuleIdsPluginOptions */
 /** @typedef {import("../Compiler")} Compiler */

 const validate = createSchemaValidation(
-	require("../../schemas/plugins/HashedModuleIdsPlugin.check"),
-	() => require("../../schemas/plugins/HashedModuleIdsPlugin.json"),
+	require("../../schemas/plugins/ids/HashedModuleIdsPlugin.check"),
+	() => require("../../schemas/plugins/ids/HashedModuleIdsPlugin.json"),
 	{
 		name: "Hashed Module Ids Plugin",
 		baseDataPath: "options"
@@ -37,7 +37,7 @@ class HashedModuleIdsPlugin {
 	constructor(options = {}) {
 		validate(options);

-		/** @type {HashedModuleIdsPluginOptions} */
+		/** @type {Required<Omit<HashedModuleIdsPluginOptions, "context">> & { context?: string | undefined }} */
 		this.options = {
 			context: undefined,
 			hashFunction: DEFAULTS.HASH_FUNCTION,
@@ -73,9 +73,7 @@ class HashedModuleIdsPlugin {
 					)
 				);
 				hash.update(ident || "");
-				const hashId = /** @type {string} */ (
-					hash.digest(options.hashDigest)
-				);
+				const hashId = hash.digest(options.hashDigest);
 				let len = options.hashDigestLength;
 				while (usedIds.has(hashId.slice(0, len))) {
 					/** @type {number} */ (len)++;
@@ -25,7 +25,7 @@ const numberHash = require("../util/numberHash");
 const getHash = (str, len, hashFunction) => {
 	const hash = createHash(hashFunction);
 	hash.update(str);
-	const digest = /** @type {string} */ (hash.digest("hex"));
+	const digest = hash.digest("hex");
 	return digest.slice(0, len);
 };

@@ -503,11 +503,10 @@ class JavascriptModulesPlugin {
 					}
 					xor.updateHash(hash);
 				}
-				const digest = /** @type {string} */ (hash.digest(hashDigest));
+				const digest = hash.digest(hashDigest);
 				chunk.contentHash.javascript = nonNumericOnlyHash(
 					digest,
-					/** @type {number} */
-					(hashDigestLength)
+					hashDigestLength
 				);
 			});
 			compilation.hooks.additionalTreeRuntimeRequirements.tap(
@@ -12,6 +12,8 @@ const WebpackError = require("../WebpackError");
 const { compareSelect, compareStrings } = require("../util/comparators");
 const createHash = require("../util/createHash");

+/** @typedef {import("../../declarations/WebpackOptions").HashFunction} HashFunction */
+/** @typedef {import("../../declarations/WebpackOptions").HashDigest} HashDigest */
 /** @typedef {import("webpack-sources").Source} Source */
 /** @typedef {import("../Cache").Etag} Etag */
 /** @typedef {import("../Compilation").AssetInfo} AssetInfo */

@@ -109,8 +111,8 @@ const compilationHooksMap = new WeakMap();

 /**
  * @typedef {object} RealContentHashPluginOptions
- * @property {string | Hash} hashFunction the hash function to use
- * @property {string=} hashDigest the hash digest to use
+ * @property {HashFunction} hashFunction the hash function to use
+ * @property {HashDigest} hashDigest the hash digest to use
 */

 const PLUGIN_NAME = "RealContentHashPlugin";
@@ -432,7 +434,7 @@ ${referencingAssets
 						hash.update(content);
 					}
 					const digest = hash.digest(this._hashDigest);
-					newHash = /** @type {string} */ (digest.slice(0, oldHash.length));
+					newHash = digest.slice(0, oldHash.length);
 				}
 				hashToNewHash.set(oldHash, newHash);
 			}
@@ -55,7 +55,7 @@ const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;
 const hashForName = (buffers, hashFunction) => {
 	const hash = createHash(hashFunction);
 	for (const buf of buffers) hash.update(buf);
-	return /** @type {string} */ (hash.digest("hex"));
+	return hash.digest("hex");
 };

 const COMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
@@ -117,7 +117,7 @@ const setMapSize = (map, size) => {
 const toHash = (buffer, hashFunction) => {
 	const hash = createHash(hashFunction);
 	hash.update(buffer);
-	return /** @type {string} */ (hash.digest("latin1"));
+	return hash.digest("latin1");
 };

 const ESCAPE = null;
@@ -722,7 +722,7 @@ const SIMPLE_EXTRACTORS = {
 		}
 	},
 	hash: (object, compilation) => {
-		object.hash = /** @type {string} */ (compilation.hash);
+		object.hash = compilation.hash;
 	},
 	version: (object) => {
 		object.version = require("../../package.json").version;
@@ -5,14 +5,31 @@

 "use strict";

+/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
+
 class Hash {
 	/* istanbul ignore next */
 	/**
 	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
 	 * @abstract
-	 * @param {string|Buffer} data data
-	 * @param {string=} inputEncoding data encoding
-	 * @returns {this} updated hash
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @abstract
+	 * @overload
+	 * @param {string} data data
+	 * @param {Encoding} inputEncoding data encoding
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @abstract
+	 * @param {string | Buffer} data data
+	 * @param {Encoding=} inputEncoding data encoding
+	 * @returns {Hash} updated hash
 	 */
 	update(data, inputEncoding) {
 		const AbstractMethodError = require("../AbstractMethodError");
@@ -24,8 +41,21 @@ class Hash {
 	/**
 	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
 	 * @abstract
-	 * @param {string=} encoding encoding of the return value
-	 * @returns {string|Buffer} digest
+	 * @overload
+	 * @returns {Buffer} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @abstract
+	 * @overload
+	 * @param {Encoding} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @abstract
+	 * @param {Encoding=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
 	 */
 	digest(encoding) {
 		const AbstractMethodError = require("../AbstractMethodError");
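Under `// @ts-check`, the three-block `@overload` pattern above gives callers precise return types: no encoding yields `Buffer`, an explicit encoding yields `string`. A small sketch against webpack's internal helper (the algorithm name is illustrative):

```js
// @ts-check
const createHash = require("webpack/lib/util/createHash"); // internal helper

const a = createHash("sha256");
a.update("payload");
const hex = a.digest("hex"); // encoding given -> string overload

const b = createHash("sha256");
b.update("payload");
const raw = b.digest(); // no encoding -> Buffer overload
```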
@@ -7,9 +7,10 @@

 const Hash = require("./Hash");

+/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
 /** @typedef {import("../../declarations/WebpackOptions").HashFunction} HashFunction */

 const BULK_SIZE = 2000;

 // We are using an object instead of a Map as this will stay static during the runtime
 // so access to it can be optimized by v8
@@ -38,9 +39,22 @@ class BulkUpdateDecorator extends Hash {

 	/**
 	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
-	 * @param {string|Buffer} data data
-	 * @param {string=} inputEncoding data encoding
-	 * @returns {this} updated hash
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string} data data
+	 * @param {Encoding} inputEncoding data encoding
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @param {string | Buffer} data data
+	 * @param {Encoding=} inputEncoding data encoding
+	 * @returns {Hash} updated hash
 	 */
 	update(data, inputEncoding) {
 		if (
@@ -55,7 +69,11 @@ class BulkUpdateDecorator extends Hash {
 				this.hash.update(this.buffer);
 				this.buffer = "";
 			}
-			this.hash.update(data, inputEncoding);
+			if (typeof data === "string" && inputEncoding) {
+				this.hash.update(data, inputEncoding);
+			} else {
+				this.hash.update(data);
+			}
 		} else {
 			this.buffer += data;
 			if (this.buffer.length > BULK_SIZE) {
@@ -71,8 +89,19 @@ class BulkUpdateDecorator extends Hash {

 	/**
 	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
-	 * @param {string=} encoding encoding of the return value
-	 * @returns {string|Buffer} digest
+	 * @overload
+	 * @returns {Buffer} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @param {Encoding} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @param {Encoding=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
 	 */
 	digest(encoding) {
 		let digestCache;
@@ -91,9 +120,19 @@ class BulkUpdateDecorator extends Hash {
 		if (buffer.length > 0) {
 			this.hash.update(buffer);
 		}
+		if (!encoding) {
+			const result = this.hash.digest();
+			if (digestCache !== undefined) {
+				digestCache.set(buffer, result);
+			}
+			return result;
+		}
 		const digestResult = this.hash.digest(encoding);
 		// Compatibility with the old hash library
 		const result =
-			typeof digestResult === "string" ? digestResult : digestResult.toString();
+			typeof digestResult === "string"
+				? digestResult
+				: /** @type {NodeJS.TypedArray} */ (digestResult).toString();
 		if (digestCache !== undefined) {
 			digestCache.set(buffer, result);
 		}
@@ -110,9 +149,22 @@ class DebugHash extends Hash {

 	/**
 	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
-	 * @param {string|Buffer} data data
-	 * @param {string=} inputEncoding data encoding
-	 * @returns {this} updated hash
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string} data data
+	 * @param {Encoding} inputEncoding data encoding
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @param {string | Buffer} data data
+	 * @param {Encoding=} inputEncoding data encoding
+	 * @returns {Hash} updated hash
 	 */
 	update(data, inputEncoding) {
 		if (typeof data !== "string") data = data.toString("utf8");
@@ -132,8 +184,19 @@ class DebugHash extends Hash {

 	/**
 	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
-	 * @param {string=} encoding encoding of the return value
-	 * @returns {string|Buffer} digest
+	 * @overload
+	 * @returns {Buffer} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @param {Encoding} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @param {Encoding=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
 	 */
 	digest(encoding) {
 		return Buffer.from(`@webpack-debug-digest@${this.string}`).toString("hex");
@@ -186,14 +249,21 @@ module.exports = (algorithm) => {
 		case "native-md4":
 			if (crypto === undefined) crypto = require("crypto");
 			return new BulkUpdateDecorator(
-				() => /** @type {typeof import("crypto")} */ (crypto).createHash("md4"),
+				() =>
+					/** @type {Hash} */ (
+						/** @type {typeof import("crypto")} */
+						(crypto).createHash("md4")
+					),
 				"md4"
 			);
 		default:
 			if (crypto === undefined) crypto = require("crypto");
 			return new BulkUpdateDecorator(
 				() =>
-					/** @type {typeof import("crypto")} */ (crypto).createHash(algorithm),
+					/** @type {Hash} */ (
+						/** @type {typeof import("crypto")} */
+						(crypto).createHash(algorithm)
+					),
 				algorithm
 			);
 	}
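Usage sketch for the factory above: a plain algorithm name is forwarded to Node's `crypto.createHash`, while special names (the switch handles at least `"native-md4"` plus a default here) pick dedicated implementations; either way the result is wrapped so small updates are batched:

```js
const createHash = require("webpack/lib/util/createHash"); // internal helper

const hash = createHash("sha256"); // BulkUpdateDecorator around crypto.createHash("sha256")
hash.update("first chunk");
hash.update("second chunk"); // short strings are buffered and flushed in bulk
console.log(hash.digest("hex")); // string, via the encoding overload
```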
@@ -8,6 +8,8 @@
 const Hash = require("../Hash");
 const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;

+/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
+
 class BatchedHash extends Hash {
 	/**
 	 * @param {Hash} hash hash
@@ -21,9 +23,22 @@ class BatchedHash extends Hash {

 	/**
 	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
-	 * @param {string|Buffer} data data
-	 * @param {string=} inputEncoding data encoding
-	 * @returns {this} updated hash
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string} data data
+	 * @param {Encoding} inputEncoding data encoding
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @param {string | Buffer} data data
+	 * @param {Encoding=} inputEncoding data encoding
+	 * @returns {Hash} updated hash
 	 */
 	update(data, inputEncoding) {
 		if (this.string !== undefined) {
@@ -35,7 +50,11 @@ class BatchedHash extends Hash {
 				this.string += data;
 				return this;
 			}
-			this.hash.update(this.string, this.encoding);
+			if (this.encoding) {
+				this.hash.update(this.string, this.encoding);
+			} else {
+				this.hash.update(this.string);
+			}
 			this.string = undefined;
 		}
 		if (typeof data === "string") {
@@ -46,8 +65,10 @@ class BatchedHash extends Hash {
 			) {
 				this.string = data;
 				this.encoding = inputEncoding;
-			} else {
+			} else if (inputEncoding) {
 				this.hash.update(data, inputEncoding);
+			} else {
+				this.hash.update(data);
 			}
 		} else {
 			this.hash.update(data);
@@ -57,12 +78,30 @@ class BatchedHash extends Hash {

 	/**
 	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
-	 * @param {string=} encoding encoding of the return value
-	 * @returns {string|Buffer} digest
+	 * @overload
+	 * @returns {Buffer} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @param {Encoding} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @param {Encoding=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
 	 */
 	digest(encoding) {
 		if (this.string !== undefined) {
-			this.hash.update(this.string, this.encoding);
+			if (this.encoding) {
+				this.hash.update(this.string, this.encoding);
+			} else {
+				this.hash.update(this.string);
+			}
 		}
+		if (!encoding) {
+			return this.hash.digest();
+		}
 		return this.hash.digest(encoding);
 	}
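For context, `BatchedHash` defers a short string (and its encoding) so the data crosses into the underlying implementation once instead of per call. A rough usage sketch of the internal API — whether a given algorithm actually yields a `BatchedHash` depends on wasm support at runtime:

```js
const createHash = require("webpack/lib/util/createHash"); // internal helper

const h = createHash("xxhash64"); // may be a BatchedHash around a pooled wasm instance
h.update("a");
h.update("b"); // concatenated onto the pending string, no inner update yet
h.update("c");
console.log(h.digest("hex")); // pending "abc" is flushed, then the digest is read
```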
@@ -5,13 +5,15 @@

 "use strict";

+const Hash = require("../Hash");
+
 // 65536 is the size of a wasm memory page
 // 64 is the maximum chunk size for every possible wasm hash implementation
 // 4 is the maximum number of bytes per char for string encoding (max is utf-8)
 // ~3 makes sure that it's always a block of 4 chars, so avoid partially encoded bytes for base64
 const MAX_SHORT_STRING = Math.floor((65536 - 64) / 4) & ~3;

-class WasmHash {
+class WasmHash extends Hash {
 	/**
 	 * @param {WebAssembly.Instance} instance wasm instance
 	 * @param {WebAssembly.Instance[]} instancesPool pool of instances
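A worked check of the constant in the comment above:

```js
const page = 65536; // one wasm memory page
const maxChunk = 64; // max chunk size across the wasm hash implementations
const maxBytesPerChar = 4; // worst case bytes per char (utf-8)
const raw = Math.floor((page - maxChunk) / maxBytesPerChar); // 16368
console.log(raw & ~3); // 16368 — already a multiple of 4, so & ~3 changes nothing here
```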
@@ -19,6 +21,8 @@ class WasmHash {
 	 * @param {number} digestSize size of digest returned by wasm
 	 */
 	constructor(instance, instancesPool, chunkSize, digestSize) {
+		super();
+
 		const exports = /** @type {EXPECTED_ANY} */ (instance.exports);
 		exports.init();
 		this.exports = exports;
@@ -35,17 +39,39 @@ class WasmHash {
 	}

 	/**
-	 * @param {Buffer | string} data data
-	 * @param {BufferEncoding=} encoding encoding
-	 * @returns {this} itself
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
 	 */
-	update(data, encoding) {
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string} data data
+	 * @param {string=} inputEncoding data encoding
+	 * @returns {this} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @param {string | Buffer} data data
+	 * @param {string=} inputEncoding data encoding
+	 * @returns {this} updated hash
+	 */
+	update(data, inputEncoding) {
 		if (typeof data === "string") {
 			while (data.length > MAX_SHORT_STRING) {
-				this._updateWithShortString(data.slice(0, MAX_SHORT_STRING), encoding);
+				this._updateWithShortString(
+					data.slice(0, MAX_SHORT_STRING),
+					/** @type {NodeJS.BufferEncoding} */
+					(inputEncoding)
+				);
 				data = data.slice(MAX_SHORT_STRING);
 			}
-			this._updateWithShortString(data, encoding);
+			this._updateWithShortString(
+				data,
+				/** @type {NodeJS.BufferEncoding} */
+				(inputEncoding)
+			);
 			return this;
 		}
 		this._updateWithBuffer(data);
@@ -136,17 +162,31 @@ class WasmHash {
 	}

 	/**
-	 * @param {BufferEncoding} type type
-	 * @returns {Buffer | string} digest
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @returns {Buffer} digest
 	 */
-	digest(type) {
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @param {string=} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @param {string=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
+	 */
+	digest(encoding) {
 		const { exports, buffered, mem, digestSize } = this;
 		exports.final(buffered);
 		this.instancesPool.push(this);
 		const hex = mem.toString("latin1", 0, digestSize);
-		if (type === "hex") return hex;
-		if (type === "binary" || !type) return Buffer.from(hex, "hex");
-		return Buffer.from(hex, "hex").toString(type);
+		if (encoding === "hex") return hex;
+		if (encoding === "binary" || !encoding) return Buffer.from(hex, "hex");
+		return Buffer.from(hex, "hex").toString(
+			/** @type {NodeJS.BufferEncoding} */ (encoding)
+		);
 	}
 }

File diff suppressed because one or more lines are too long
@@ -1568,8 +1568,21 @@
 		"minLength": 1
 	},
 	"HashDigest": {
-		"description": "Digest type used for the hash.",
-		"type": "string"
+		"description": "Digest types used for the hash.",
+		"enum": [
+			"base64",
+			"base64url",
+			"hex",
+			"binary",
+			"utf8",
+			"utf-8",
+			"utf16le",
+			"utf-16le",
+			"latin1",
+			"ascii",
+			"ucs2",
+			"ucs-2"
+		]
 	},
 	"HashDigestLength": {
 		"description": "Number of chars which are used for the hash.",
@@ -1,7 +0,0 @@
-/*
- * This file was automatically generated.
- * DO NOT MODIFY BY HAND.
- * Run `yarn fix:special` to update
- */
-declare const check: (options: import("../../declarations/plugins/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions) => boolean;
-export = check;
@@ -1,6 +0,0 @@
-/*
- * This file was automatically generated.
- * DO NOT MODIFY BY HAND.
- * Run `yarn fix:special` to update
- */
-const t=/^(?:[A-Za-z]:[\\/]|\\\\|\/)/;function e(r,{instancePath:s="",parentData:n,parentDataProperty:a,rootData:i=r}={}){let o=null,l=0;if(0===l){if(!r||"object"!=typeof r||Array.isArray(r))return e.errors=[{params:{type:"object"}}],!1;{const s=l;for(const t in r)if("context"!==t&&"hashDigest"!==t&&"hashDigestLength"!==t&&"hashFunction"!==t)return e.errors=[{params:{additionalProperty:t}}],!1;if(s===l){if(void 0!==r.context){let s=r.context;const n=l;if(l===n){if("string"!=typeof s)return e.errors=[{params:{type:"string"}}],!1;if(s.includes("!")||!0!==t.test(s))return e.errors=[{params:{}}],!1}var u=n===l}else u=!0;if(u){if(void 0!==r.hashDigest){let t=r.hashDigest;const s=l;if("hex"!==t&&"latin1"!==t&&"base64"!==t)return e.errors=[{params:{}}],!1;u=s===l}else u=!0;if(u){if(void 0!==r.hashDigestLength){let t=r.hashDigestLength;const s=l;if(l===s){if("number"!=typeof t)return e.errors=[{params:{type:"number"}}],!1;if(t<1||isNaN(t))return e.errors=[{params:{comparison:">=",limit:1}}],!1}u=s===l}else u=!0;if(u)if(void 0!==r.hashFunction){let t=r.hashFunction;const s=l,n=l;let a=!1,i=null;const p=l,h=l;let c=!1;const m=l;if(l===m)if("string"==typeof t){if(t.length<1){const t={params:{}};null===o?o=[t]:o.push(t),l++}}else{const t={params:{type:"string"}};null===o?o=[t]:o.push(t),l++}var f=m===l;if(c=c||f,!c){const e=l;if(!(t instanceof Function)){const t={params:{}};null===o?o=[t]:o.push(t),l++}f=e===l,c=c||f}if(c)l=h,null!==o&&(h?o.length=h:o=null);else{const t={params:{}};null===o?o=[t]:o.push(t),l++}if(p===l&&(a=!0,i=0),!a){const t={params:{passingSchemas:i}};return null===o?o=[t]:o.push(t),l++,e.errors=o,!1}l=n,null!==o&&(n?o.length=n:o=null),u=s===l}else u=!0}}}}}return e.errors=o,0===l}module.exports=e,module.exports.default=e;
@@ -0,0 +1,7 @@
+/*
+ * This file was automatically generated.
+ * DO NOT MODIFY BY HAND.
+ * Run `yarn fix:special` to update
+ */
+declare const check: (options: import("../../../declarations/plugins/ids/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions) => boolean;
+export = check;
@@ -0,0 +1,6 @@
+/*
+ * This file was automatically generated.
+ * DO NOT MODIFY BY HAND.
+ * Run `yarn fix:special` to update
+ */
+const t=/^(?:[A-Za-z]:[\\/]|\\\\|\/)/;function e(s,{instancePath:r="",parentData:n,parentDataProperty:a,rootData:i=s}={}){let o=null,l=0;if(0===l){if(!s||"object"!=typeof s||Array.isArray(s))return e.errors=[{params:{type:"object"}}],!1;{const r=l;for(const t in s)if("context"!==t&&"hashDigest"!==t&&"hashDigestLength"!==t&&"hashFunction"!==t)return e.errors=[{params:{additionalProperty:t}}],!1;if(r===l){if(void 0!==s.context){let r=s.context;const n=l;if(l===n){if("string"!=typeof r)return e.errors=[{params:{type:"string"}}],!1;if(r.includes("!")||!0!==t.test(r))return e.errors=[{params:{}}],!1}var u=n===l}else u=!0;if(u){if(void 0!==s.hashDigest){let t=s.hashDigest;const r=l;if("base64"!==t&&"base64url"!==t&&"hex"!==t&&"binary"!==t&&"utf8"!==t&&"utf-8"!==t&&"utf16le"!==t&&"utf-16le"!==t&&"latin1"!==t&&"ascii"!==t&&"ucs2"!==t&&"ucs-2"!==t)return e.errors=[{params:{}}],!1;u=r===l}else u=!0;if(u){if(void 0!==s.hashDigestLength){let t=s.hashDigestLength;const r=l;if(l===r){if("number"!=typeof t)return e.errors=[{params:{type:"number"}}],!1;if(t<1||isNaN(t))return e.errors=[{params:{comparison:">=",limit:1}}],!1}u=r===l}else u=!0;if(u)if(void 0!==s.hashFunction){let t=s.hashFunction;const r=l,n=l;let a=!1,i=null;const c=l,p=l;let h=!1;const m=l;if(l===m)if("string"==typeof t){if(t.length<1){const t={params:{}};null===o?o=[t]:o.push(t),l++}}else{const t={params:{type:"string"}};null===o?o=[t]:o.push(t),l++}var f=m===l;if(h=h||f,!h){const e=l;if(!(t instanceof Function)){const t={params:{}};null===o?o=[t]:o.push(t),l++}f=e===l,h=h||f}if(h)l=p,null!==o&&(p?o.length=p:o=null);else{const t={params:{}};null===o?o=[t]:o.push(t),l++}if(c===l&&(a=!0,i=0),!a){const t={params:{passingSchemas:i}};return null===o?o=[t]:o.push(t),l++,e.errors=o,!1}l=n,null!==o&&(n?o.length=n:o=null),u=r===l}else u=!0}}}}}return e.errors=o,0===l}module.exports=e,module.exports.default=e;
@@ -9,7 +9,7 @@
 			},
 			{
 				"instanceof": "Function",
-				"tsType": "typeof import('../../lib/util/Hash')"
+				"tsType": "typeof import('../../../lib/util/Hash')"
 			}
 		]
 	}
@@ -25,7 +25,20 @@
 		},
 		"hashDigest": {
 			"description": "The encoding to use when generating the hash, defaults to 'base64'. All encodings from Node.JS' hash.digest are supported.",
-			"enum": ["hex", "latin1", "base64"]
+			"enum": [
+				"base64",
+				"base64url",
+				"hex",
+				"binary",
+				"utf8",
+				"utf-8",
+				"utf16le",
+				"utf-16le",
+				"latin1",
+				"ascii",
+				"ucs2",
+				"ucs-2"
+			]
 		},
 		"hashDigestLength": {
 			"description": "The prefix length of the hash digest to use, defaults to 4.",
@@ -6795,13 +6795,27 @@ Object {
   "output-hash-digest": Object {
     "configs": Array [
       Object {
-        "description": "Digest type used for the hash.",
+        "description": "Digest types used for the hash.",
         "multiple": false,
         "path": "output.hashDigest",
-        "type": "string",
+        "type": "enum",
+        "values": Array [
+          "base64",
+          "base64url",
+          "hex",
+          "binary",
+          "utf8",
+          "utf-8",
+          "utf16le",
+          "utf-16le",
+          "latin1",
+          "ascii",
+          "ucs2",
+          "ucs-2",
+        ],
       },
     ],
-    "description": "Digest type used for the hash.",
+    "description": "Digest types used for the hash.",
     "multiple": false,
     "simpleType": "string",
   },
@@ -989,18 +989,18 @@ declare interface Bootstrap {
 	allowInlineStartup: boolean;
 }
 type BufferEncoding =
-	| "ascii"
+	| "base64"
+	| "base64url"
+	| "hex"
+	| "binary"
 	| "utf8"
 	| "utf-8"
 	| "utf16le"
 	| "utf-16le"
-	| "ucs2"
-	| "ucs-2"
-	| "base64"
-	| "base64url"
 	| "latin1"
-	| "binary"
-	| "hex";
+	| "ascii"
+	| "ucs2"
+	| "ucs-2";
 type BufferEncodingOption = "buffer" | { encoding: "buffer" };
 declare interface BufferEntry {
 	map?: null | RawSourceMap;
@@ -4518,18 +4518,18 @@ declare class EnableWasmLoadingPlugin {
 type EncodingOption =
 	| undefined
 	| null
-	| "ascii"
+	| "base64"
+	| "base64url"
+	| "hex"
+	| "binary"
 	| "utf8"
 	| "utf-8"
 	| "utf16le"
 	| "utf-16le"
+	| "latin1"
+	| "ascii"
 	| "ucs2"
 	| "ucs-2"
-	| "base64"
-	| "base64url"
-	| "latin1"
-	| "binary"
-	| "hex"
 	| ObjectEncodingOptions;
 type Entry =
 	| string
@@ -6242,13 +6242,36 @@ declare class Hash {
 	/**
 	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
 	 */
-	update(data: string | Buffer, inputEncoding?: string): Hash;
+	update(data: string | Buffer): Hash;
+
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 */
+	update(data: string, inputEncoding: HashDigest): Hash;

 	/**
 	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
 	 */
-	digest(encoding?: string): string | Buffer;
+	digest(): Buffer;
+
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 */
+	digest(encoding: HashDigest): string;
 }
+type HashDigest =
+	| "base64"
+	| "base64url"
+	| "hex"
+	| "binary"
+	| "utf8"
+	| "utf-8"
+	| "utf16le"
+	| "utf-16le"
+	| "latin1"
+	| "ascii"
+	| "ucs2"
+	| "ucs-2";
 type HashFunction = string | typeof Hash;
 declare interface HashLike {
 	/**
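From the consumer side, `types.d.ts` now resolves the same overloads. A sketch using webpack's public re-export of the helper (`webpack.util.createHash`):

```js
const { util } = require("webpack");

const hash = util.createHash("sha256");
hash.update("payload"); // update(data: string | Buffer): Hash
const hex = hash.digest("hex"); // digest(encoding: HashDigest): string

const raw = util.createHash("sha256").update("payload").digest(); // digest(): Buffer
```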
@@ -6266,7 +6289,9 @@ declare interface HashableObject {
 }
 declare class HashedModuleIdsPlugin {
 	constructor(options?: HashedModuleIdsPluginOptions);
-	options: HashedModuleIdsPluginOptions;
+	options: Required<Omit<HashedModuleIdsPluginOptions, "context">> & {
+		context?: string;
+	};

 	/**
 	 * Apply the plugin
@@ -6282,7 +6307,19 @@ declare interface HashedModuleIdsPluginOptions {
 	/**
 	 * The encoding to use when generating the hash, defaults to 'base64'. All encodings from Node.JS' hash.digest are supported.
 	 */
-	hashDigest?: "base64" | "latin1" | "hex";
+	hashDigest?:
+		| "base64"
+		| "base64url"
+		| "hex"
+		| "binary"
+		| "utf8"
+		| "utf-8"
+		| "utf16le"
+		| "utf-16le"
+		| "latin1"
+		| "ascii"
+		| "ucs2"
+		| "ucs-2";

 	/**
 	 * The prefix length of the hash digest to use, defaults to 4.
@@ -6624,18 +6661,18 @@ declare interface IntermediateFileSystemExtras {
 	createWriteStream: (
 		pathLike: PathLikeFs,
 		result?:
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| WriteStreamOptions
 	) => NodeJS.WritableStream;
 	open: Open;
@@ -11704,7 +11741,7 @@ declare interface NormalModuleLoaderContext<OptionsType> {
 	mode: "none" | "development" | "production";
 	webpack?: boolean;
 	hashFunction: HashFunction;
-	hashDigest: string;
+	hashDigest: HashDigest;
 	hashDigestLength: number;
 	hashSalt?: string;
 	_module?: NormalModule;
@@ -11790,18 +11827,18 @@ declare interface ObjectDeserializerContext {
 declare interface ObjectEncodingOptions {
 	encoding?:
 		| null
-		| "ascii"
+		| "base64"
+		| "base64url"
+		| "hex"
+		| "binary"
 		| "utf8"
 		| "utf-8"
 		| "utf16le"
 		| "utf-16le"
-		| "ucs2"
-		| "ucs-2"
-		| "base64"
-		| "base64url"
 		| "latin1"
-		| "binary"
-		| "hex";
+		| "ascii"
+		| "ucs2"
+		| "ucs-2";
 }
 declare interface ObjectSerializer {
 	serialize: (value: any, context: ObjectSerializerContext) => void;
@@ -12705,9 +12742,21 @@ declare interface Output {
 	globalObject?: string;

 	/**
-	 * Digest type used for the hash.
+	 * Digest types used for the hash.
 	 */
-	hashDigest?: string;
+	hashDigest?:
+		| "base64"
+		| "base64url"
+		| "hex"
+		| "binary"
+		| "utf8"
+		| "utf-8"
+		| "utf16le"
+		| "utf-16le"
+		| "latin1"
+		| "ascii"
+		| "ucs2"
+		| "ucs-2";

 	/**
 	 * Number of chars which are used for the hash.
@@ -12874,18 +12923,18 @@ declare interface OutputFileSystem {
 	createReadStream?: (
 		path: PathLikeFs,
 		options?:
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| ReadStreamOptions
 	) => NodeJS.ReadableStream;
 	join?: (path1: string, path2: string) => string;
@@ -13020,9 +13069,21 @@ declare interface OutputNormalized {
 	globalObject?: string;

 	/**
-	 * Digest type used for the hash.
+	 * Digest types used for the hash.
 	 */
-	hashDigest?: string;
+	hashDigest?:
+		| "base64"
+		| "base64url"
+		| "hex"
+		| "binary"
+		| "utf8"
+		| "utf-8"
+		| "utf16le"
+		| "utf-16le"
+		| "latin1"
+		| "ascii"
+		| "ucs2"
+		| "ucs-2";

 	/**
 	 * Number of chars which are used for the hash.
@@ -13198,7 +13259,21 @@ type OutputNormalizedWithDefaults = OutputNormalized & {
 	path: string;
 	pathinfo: NonNullable<undefined | boolean | "verbose">;
 	hashFunction: NonNullable<undefined | string | typeof Hash>;
-	hashDigest: string;
+	hashDigest: NonNullable<
+		| undefined
+		| "base64"
+		| "base64url"
+		| "hex"
+		| "binary"
+		| "utf8"
+		| "utf-8"
+		| "utf16le"
+		| "utf-16le"
+		| "latin1"
+		| "ascii"
+		| "ucs2"
+		| "ucs-2"
+	>;
 	hashDigestLength: number;
 	chunkLoadTimeout: number;
 	chunkLoading: NonNullable<undefined | string | false>;
@@ -13890,19 +13965,19 @@ declare interface ReadFileFs {
 	(
 		path: PathOrFileDescriptorFs,
 		options:
-			| ({ encoding: BufferEncoding; flag?: string } & Abortable)
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex",
+			| ({ encoding: BufferEncoding; flag?: string } & Abortable),
 		callback: (err: null | NodeJS.ErrnoException, result?: string) => void
 	): void;
 	(
@@ -13910,18 +13985,18 @@ declare interface ReadFileFs {
 		options:
 			| undefined
 			| null
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| (ObjectEncodingOptions & { flag?: string } & Abortable),
 		callback: (
 			err: null | NodeJS.ErrnoException,
@@ -13941,36 +14016,36 @@ declare interface ReadFileSync {
 	(
 		path: PathOrFileDescriptorFs,
 		options:
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| { encoding: BufferEncoding; flag?: string }
 	): string;
 	(
 		path: PathOrFileDescriptorFs,
 		options?:
 			| null
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| (ObjectEncodingOptions & { flag?: string })
 	): string | Buffer;
 }
@@ -13986,18 +14061,18 @@ declare interface ReadFileTypes {
 	(
 		path: PathOrFileDescriptorTypes,
 		options:
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| ({ encoding: BufferEncoding; flag?: string } & Abortable),
 		callback: (err: null | NodeJS.ErrnoException, result?: string) => void
 	): void;
@@ -14006,18 +14081,18 @@ declare interface ReadFileTypes {
 		options:
 			| undefined
 			| null
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| (ObjectEncodingOptions & { flag?: string } & Abortable),
 		callback: (
 			err: null | NodeJS.ErrnoException,
@@ -14039,33 +14114,33 @@ declare interface ReaddirFs {
 		options:
 			| undefined
 			| null
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| {
 					encoding:
 						| null
-						| "ascii"
+						| "base64"
+						| "base64url"
+						| "hex"
+						| "binary"
 						| "utf8"
 						| "utf-8"
 						| "utf16le"
 						| "utf-16le"
-						| "ucs2"
-						| "ucs-2"
-						| "base64"
-						| "base64url"
 						| "latin1"
-						| "binary"
-						| "hex";
+						| "ascii"
+						| "ucs2"
+						| "ucs-2";
 					withFileTypes?: false;
 					recursive?: boolean;
 			  },
@@ -14083,18 +14158,18 @@ declare interface ReaddirFs {
 		options:
 			| undefined
 			| null
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| (ObjectEncodingOptions & {
 					withFileTypes?: false;
 					recursive?: boolean;
@@ -14133,33 +14208,33 @@ declare interface ReaddirSync {
 		path: PathLikeFs,
 		options?:
 			| null
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| {
 					encoding:
 						| null
-						| "ascii"
+						| "base64"
+						| "base64url"
+						| "hex"
+						| "binary"
 						| "utf8"
 						| "utf-8"
 						| "utf16le"
 						| "utf-16le"
-						| "ucs2"
-						| "ucs-2"
-						| "base64"
-						| "base64url"
 						| "latin1"
-						| "binary"
-						| "hex";
+						| "ascii"
+						| "ucs2"
+						| "ucs-2";
 					withFileTypes?: false;
 					recursive?: boolean;
 			  }
@@ -14174,18 +14249,18 @@ declare interface ReaddirSync {
 		path: PathLikeFs,
 		options?:
 			| null
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| (ObjectEncodingOptions & { withFileTypes?: false; recursive?: boolean })
 	): string[] | Buffer[];
 	(
@@ -14206,33 +14281,33 @@ declare interface ReaddirTypes {
 		options:
 			| undefined
 			| null
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| {
 					encoding:
 						| null
-						| "ascii"
+						| "base64"
+						| "base64url"
+						| "hex"
+						| "binary"
 						| "utf8"
 						| "utf-8"
 						| "utf16le"
 						| "utf-16le"
-						| "ucs2"
-						| "ucs-2"
-						| "base64"
-						| "base64url"
 						| "latin1"
-						| "binary"
-						| "hex";
+						| "ascii"
+						| "ucs2"
+						| "ucs-2";
 					withFileTypes?: false;
 					recursive?: boolean;
 			  },
@@ -14250,18 +14325,18 @@ declare interface ReaddirTypes {
 		options:
 			| undefined
 			| null
-			| "ascii"
+			| "base64"
+			| "base64url"
+			| "hex"
+			| "binary"
 			| "utf8"
 			| "utf-8"
 			| "utf16le"
 			| "utf-16le"
+			| "latin1"
+			| "ascii"
 			| "ucs2"
 			| "ucs-2"
-			| "base64"
-			| "base64url"
-			| "latin1"
-			| "binary"
-			| "hex"
 			| (ObjectEncodingOptions & {
 					withFileTypes?: false;
 					recursive?: boolean;
@@ -14363,12 +14438,12 @@ declare interface RealContentHashPluginOptions {
 	/**
 	 * the hash function to use
 	 */
-	hashFunction: string | typeof Hash;
+	hashFunction: HashFunction;

 	/**
 	 * the hash digest to use
 	 */
-	hashDigest?: string;
+	hashDigest: HashDigest;
 }
 declare interface RealDependencyLocation {
 	start: SourcePosition;
@@ -17758,18 +17833,18 @@ declare interface StreamChunksOptions {
 declare interface StreamOptions {
 	flags?: string;
 	encoding?:
-		| "ascii"
+		| "base64"
+		| "base64url"
+		| "hex"
+		| "binary"
 		| "utf8"
 		| "utf-8"
 		| "utf16le"
 		| "utf-16le"
-		| "ucs2"
-		| "ucs-2"
-		| "base64"
-		| "base64url"
 		| "latin1"
-		| "binary"
-		| "hex";
+		| "ascii"
+		| "ucs2"
+		| "ucs-2";
 	fd?: any;
 	mode?: number;
 	autoClose?: boolean;
@@ -18558,18 +18633,18 @@ declare interface WriteFile {
 }
 type WriteFileOptions =
 	| null
-	| "ascii"
+	| "base64"
+	| "base64url"
+	| "hex"
+	| "binary"
 	| "utf8"
 	| "utf-8"
 	| "utf16le"
 	| "utf-16le"
+	| "latin1"
+	| "ascii"
 	| "ucs2"
 	| "ucs-2"
-	| "base64"
-	| "base64url"
-	| "latin1"
-	| "binary"
-	| "hex"
 	| (ObjectEncodingOptions &
 			Abortable & { mode?: string | number; flag?: string; flush?: boolean });
 declare interface WriteOnlySet<T> {