mirror of https://github.com/webpack/webpack.git

Compare commits: a78e15db68 ... 6b9a2b23cc (12 commits)

SHA1
6b9a2b23cc
c7ebdbda63
b7530c2510
f3ef1428b3
d32f1711ac
436fc7d9da
c7dd066327
85bacbdc6e
7634cd28bf
9f98d803c0
8804459884
bc91301142
@@ -97,7 +97,7 @@ jobs:
       - run: yarn link webpack --frozen-lockfile
       - name: Run benchmarks
-        uses: CodSpeedHQ/action@653fdc30e6c40ffd9739e40c8a0576f4f4523ca1 # v4.0.1
+        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
         with:
           run: yarn benchmark --ci
           mode: "instrumentation"
@@ -284,6 +284,7 @@
     "url's",
     "valign",
     "valtype",
+    "walltime",
     "wasi",
     "wasm",
     "watchings",
@@ -304,7 +305,6 @@
     "commithash",
     "formaters",
     "akait",
     "Akait",
     "evenstensberg",
     "Stensberg",
     "ovflowd",
@@ -537,9 +537,21 @@ export type Filename = FilenameTemplate;
  */
 export type GlobalObject = string;
 /**
- * Digest type used for the hash.
+ * Digest types used for the hash.
  */
-export type HashDigest = string;
+export type HashDigest =
+	| "base64"
+	| "base64url"
+	| "hex"
+	| "binary"
+	| "utf8"
+	| "utf-8"
+	| "utf16le"
+	| "utf-16le"
+	| "latin1"
+	| "ascii"
+	| "ucs2"
+	| "ucs-2";
 /**
  * Number of chars which are used for the hash.
  */
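With this change `HashDigest` is narrowed from an arbitrary `string` to exactly the digest encodings Node.js' `hash.digest()` accepts. A minimal config sketch (the chosen values are illustrative, not taken from this compare):

```js
// webpack.config.js — hypothetical example; any encoding from the union
// above is valid, "hex" and 20 are just illustrative choices.
module.exports = {
	output: {
		hashDigest: "hex", // must now be one of the listed encodings
		hashDigestLength: 20
	}
};
```

A typo such as `hashDigest: "he"` previously passed the schema (`"type": "string"`) and only failed later inside Node's crypto; it now fails up front at configuration validation.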
@@ -2157,7 +2169,7 @@ export interface Output {
	 */
	globalObject?: GlobalObject;
	/**
-	 * Digest type used for the hash.
+	 * Digest types used for the hash.
	 */
	hashDigest?: HashDigest;
	/**
@@ -3349,7 +3361,6 @@ export interface JavascriptParserOptions {
	 * Set the inner regular expression for partial dynamic dependencies.
	 */
	wrappedContextRegExp?: RegExp;
-	[k: string]: any;
 }
 /**
  * Generator options for json modules.
@@ -3654,7 +3665,7 @@ export interface OutputNormalized {
	 */
	globalObject?: GlobalObject;
	/**
-	 * Digest type used for the hash.
+	 * Digest types used for the hash.
	 */
	hashDigest?: HashDigest;
	/**
@@ -7,7 +7,7 @@
 /**
  * Algorithm used for generation the hash (see node.js crypto package).
  */
-export type HashFunction = string | typeof import("../../lib/util/Hash");
+export type HashFunction = string | typeof import("../../../lib/util/Hash");

 export interface HashedModuleIdsPluginOptions {
	/**
@@ -17,7 +17,19 @@ export interface HashedModuleIdsPluginOptions {
	/**
	 * The encoding to use when generating the hash, defaults to 'base64'. All encodings from Node.JS' hash.digest are supported.
	 */
-	hashDigest?: "hex" | "latin1" | "base64";
+	hashDigest?:
+		| "base64"
+		| "base64url"
+		| "hex"
+		| "binary"
+		| "utf8"
+		| "utf-8"
+		| "utf16le"
+		| "utf-16le"
+		| "latin1"
+		| "ascii"
+		| "ucs2"
+		| "ucs-2";
	/**
	 * The prefix length of the hash digest to use, defaults to 4.
	 */
@@ -300,7 +300,7 @@ module.exports = __webpack_require__.tb("PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmc
/******/
/******/ 	/* webpack/runtime/jsonp chunk loading */
/******/ 	(() => {
-/******/ 		__webpack_require__.b = (document && document.baseURI) || self.location.href;
+/******/ 		__webpack_require__.b = (typeof document !== 'undefined' && document.baseURI) || self.location.href;
/******/
/******/ 		// object to store loaded and loading chunks
/******/ 		// undefined = chunk not loaded, null = chunk preloaded/prefetched
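The new guard matters in worker contexts, where `document` is not declared at all: the old form threw a `ReferenceError` instead of falling back to `self.location.href`. A minimal sketch of the difference (standalone illustration, not part of the diff):

```js
// In a Web Worker there is no `document` binding at all.
// Reading an undeclared identifier throws, even inside `(x && y)`:
//   (document && document.baseURI)   // ReferenceError in a worker
// `typeof` is safe on undeclared identifiers, so this falls through:
const base =
	(typeof document !== "undefined" && document.baseURI) ||
	self.location.href;
```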
@@ -476,10 +476,10 @@ files.forEach(item => {
 asset output.js 19.2 KiB [emitted] (name: main)
 asset images/89a353e9c515885abd8e.png 14.6 KiB [emitted] [immutable] [from: images/file.png] (auxiliary name: main)
 asset images/afc10c70ed4ce2b33593.svg 656 bytes [emitted] [immutable] [from: images/url.svg] (auxiliary name: main)
-chunk (runtime: main) output.js (main) 12.4 KiB (javascript) 15.2 KiB (asset) 1.46 KiB (runtime) [entry] [rendered]
+chunk (runtime: main) output.js (main) 12.4 KiB (javascript) 15.2 KiB (asset) 1.48 KiB (runtime) [entry] [rendered]
 > ./example.js main
 dependent modules 9.59 KiB (javascript) 15.2 KiB (asset) [dependent] 6 modules
-runtime modules 1.46 KiB 5 modules
+runtime modules 1.48 KiB 5 modules
 ./example.js 2.85 KiB [built] [code generated]
 [no exports]
 [used exports unknown]
@@ -286,7 +286,7 @@ export const add = (content, from) => {
/******/
/******/ 	/* webpack/runtime/jsonp chunk loading */
/******/ 	(() => {
-/******/ 		__webpack_require__.b = (document && document.baseURI) || self.location.href;
+/******/ 		__webpack_require__.b = (typeof document !== 'undefined' && document.baseURI) || self.location.href;
/******/
/******/ 		// object to store loaded and loading chunks
/******/ 		// undefined = chunk not loaded, null = chunk preloaded/prefetched
@@ -722,7 +722,7 @@ function fibonacci(n) {
 ## Unoptimized

 ```
-asset main.js 11.8 KiB [emitted] (name: main)
+asset main.js 11.9 KiB [emitted] (name: main)
 asset workers/fibonacci.js 4.99 KiB [emitted] (name: fibonacci)
 asset chat.js 839 bytes [emitted] (name: chat)
 asset 129.js 741 bytes [emitted]
@@ -746,9 +746,9 @@ chunk (runtime: 9a81d90cfd0dfd13d748) workers/fibonacci.js (fibonacci) 176 bytes
 ./fib-worker.js 176 bytes [built] [code generated]
 [no exports used]
 new Worker() ./fib-worker.js ./example.js 80:18-84:2
-chunk (runtime: main) main.js (main) 2.25 KiB (javascript) 5.4 KiB (runtime) [entry] [rendered]
+chunk (runtime: main) main.js (main) 2.25 KiB (javascript) 5.42 KiB (runtime) [entry] [rendered]
 > ./example.js main
-runtime modules 5.4 KiB 7 modules
+runtime modules 5.42 KiB 7 modules
 ./example.js 2.25 KiB [built] [code generated]
 [no exports used]
 entry ./example.js main
@@ -758,7 +758,7 @@ webpack X.X.X compiled successfully
 ## Production mode

 ```
-asset main.js 3.27 KiB [emitted] [minimized] (name: main)
+asset main.js 3.29 KiB [emitted] [minimized] (name: main)
 asset workers/fibonacci.js 776 bytes [emitted] [minimized] (name: fibonacci)
 asset chat.js 270 bytes [emitted] [minimized] (name: chat)
 asset 129.js 159 bytes [emitted] [minimized]
|
@ -782,9 +782,9 @@ chunk (runtime: 9a81d90cfd0dfd13d748) workers/fibonacci.js (fibonacci) 176 bytes
|
|||
./fib-worker.js 176 bytes [built] [code generated]
|
||||
[no exports used]
|
||||
new Worker() ./fib-worker.js ./example.js 80:18-84:2
|
||||
chunk (runtime: main) main.js (main) 2.25 KiB (javascript) 5.4 KiB (runtime) [entry] [rendered]
|
||||
chunk (runtime: main) main.js (main) 2.25 KiB (javascript) 5.42 KiB (runtime) [entry] [rendered]
|
||||
> ./example.js main
|
||||
runtime modules 5.4 KiB 7 modules
|
||||
runtime modules 5.42 KiB 7 modules
|
||||
./example.js 2.25 KiB [built] [code generated]
|
||||
[no exports used]
|
||||
entry ./example.js main
|
||||
|
|
|
@@ -1670,7 +1670,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
			for (const type of sourceTypes) hash.update(type);
		}
		this.moduleGraph.getExportsInfo(module).updateHash(hash, runtime);
-		return BigInt(`0x${/** @type {string} */ (hash.digest("hex"))}`);
+		return BigInt(`0x${hash.digest("hex")}`);
	});
	return graphHash;
 }
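This cast removal repeats throughout the compare: once `Hash#digest` is declared with `@overload` JSDoc signatures (a string encoding returns `string`, no encoding returns `Buffer`), call sites no longer need the `/** @type {string} */` assertion. A reduced, self-contained sketch of the pattern, assuming TypeScript is checking the JSDoc (the `digest` helper here is invented for illustration):

```js
"use strict";

const { createHash } = require("crypto");

/**
 * Reduced model of the overloaded digest() used across this compare.
 * @overload
 * @returns {Buffer} digest
 */
/**
 * @overload
 * @param {"hex" | "base64" | "latin1"} encoding encoding of the return value
 * @returns {string} digest
 */
/**
 * @param {("hex" | "base64" | "latin1")=} encoding encoding of the return value
 * @returns {string | Buffer} digest
 */
function digest(encoding) {
	const hash = createHash("sha256").update("some data");
	return encoding === undefined ? hash.digest() : hash.digest(encoding);
}

const hex = digest("hex"); // inferred as string — no /** @type {string} */ cast
const raw = digest(); // inferred as Buffer
```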
@@ -1808,7 +1808,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
			}
		}
		hash.update(graphHash);
-		return /** @type {string} */ (hash.digest("hex"));
+		return hash.digest("hex");
	});
 }

@@ -141,7 +141,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
	if (info.runtimeRequirements) {
		for (const rr of info.runtimeRequirements) hash.update(rr);
	}
-	return (info.hash = /** @type {string} */ (hash.digest("hex")));
+	return (info.hash = hash.digest("hex"));
 }

 /**
@@ -4378,7 +4378,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
					runtime,
					runtimeTemplate
				});
-				moduleHashDigest = /** @type {string} */ (moduleHash.digest(hashDigest));
+				moduleHashDigest = moduleHash.digest(hashDigest);
			} catch (err) {
				errors.push(new ModuleHashingError(module, /** @type {Error} */ (err)));
				moduleHashDigest = "XXXXXX";
@@ -4601,9 +4601,7 @@ This prevents using hashes of each other and should be avoided.`);
				moduleGraph: this.moduleGraph,
				runtimeTemplate: this.runtimeTemplate
			});
-			const chunkHashDigest = /** @type {string} */ (
-				chunkHash.digest(hashDigest)
-			);
+			const chunkHashDigest = chunkHash.digest(hashDigest);
			hash.update(chunkHashDigest);
			chunk.hash = chunkHashDigest;
			chunk.renderedHash = chunk.hash.slice(0, hashDigestLength);
@@ -4637,7 +4635,7 @@ This prevents using hashes of each other and should be avoided.`);
		this.logger.timeAggregateEnd("hashing: hash chunks");
		this.logger.time("hashing: hash digest");
		this.hooks.fullHash.call(hash);
-		this.fullHash = /** @type {string} */ (hash.digest(hashDigest));
+		this.fullHash = hash.digest(hashDigest);
		this.hash = this.fullHash.slice(0, hashDigestLength);
		this.logger.timeEnd("hashing: hash digest");

@@ -4652,9 +4650,7 @@ This prevents using hashes of each other and should be avoided.`);
				runtime: chunk.runtime,
				runtimeTemplate
			});
-			const moduleHashDigest = /** @type {string} */ (
-				moduleHash.digest(hashDigest)
-			);
+			const moduleHashDigest = moduleHash.digest(hashDigest);
			const oldHash = chunkGraph.getModuleHash(module, chunk.runtime);
			chunkGraph.setModuleHashes(
				module,
@@ -4671,9 +4667,7 @@ This prevents using hashes of each other and should be avoided.`);
			const chunkHash = createHash(hashFunction);
			chunkHash.update(chunk.hash);
			chunkHash.update(this.hash);
-			const chunkHashDigest =
-				/** @type {string} */
-				(chunkHash.digest(hashDigest));
+			const chunkHashDigest = chunkHash.digest(hashDigest);
			chunk.hash = chunkHashDigest;
			chunk.renderedHash = chunk.hash.slice(0, hashDigestLength);
			this.hooks.contentHash.call(chunk);
@@ -254,10 +254,12 @@ class ContextModule extends Module {
		} else if (this.options.namespaceObject) {
			identifier += "|namespace object";
		}
+		if (this.options.attributes) {
+			identifier += `|importAttributes: ${JSON.stringify(this.options.attributes)}`;
+		}
		if (this.layer) {
			identifier += `|layer: ${this.layer}`;
		}

		return identifier;
	}

@@ -830,7 +830,7 @@ class DefinePlugin {

				compilation.valueCacheVersions.set(
					VALUE_DEP_MAIN,
-					/** @type {string} */ (mainHash.digest("hex").slice(0, 8))
+					mainHash.digest("hex").slice(0, 8)
				);
			}
		);
@@ -48,7 +48,7 @@ class DependencyTemplates {
	updateHash(part) {
		const hash = createHash(this._hashFunction);
		hash.update(`${this._hash}${part}`);
-		this._hash = /** @type {string} */ (hash.digest("hex"));
+		this._hash = hash.digest("hex");
	}

	getHash() {
@@ -3350,7 +3350,7 @@ class FileSystemInfo {

		hash.update(/** @type {string | Buffer} */ (content));

-		const digest = /** @type {string} */ (hash.digest("hex"));
+		const digest = hash.digest("hex");

		this._fileHashes.set(path, digest);

@@ -3618,7 +3618,7 @@ class FileSystemInfo {
			}
		}

-		const digest = /** @type {string} */ (hash.digest("hex"));
+		const digest = hash.digest("hex");
		/** @type {ContextFileSystemInfoEntry} */
		const result = {
			safeTime,
@@ -3681,7 +3681,7 @@ class FileSystemInfo {
				null,
				(entry.resolved = {
					safeTime,
-					timestampHash: /** @type {string} */ (hash.digest("hex"))
+					timestampHash: hash.digest("hex")
				})
			);
		}
@@ -3743,7 +3743,7 @@ class FileSystemInfo {

		/** @type {ContextHash} */
		const result = {
-			hash: /** @type {string} */ (hash.digest("hex"))
+			hash: hash.digest("hex")
		};
		if (symlinks) result.symlinks = symlinks;
		return result;
@@ -3790,10 +3790,7 @@ class FileSystemInfo {
				for (const h of hashes) {
					hash.update(h);
				}
-				callback(
-					null,
-					(entry.resolved = /** @type {string} */ (hash.digest("hex")))
-				);
+				callback(null, (entry.resolved = hash.digest("hex")));
			}
		);
	}
@@ -3910,8 +3907,8 @@ class FileSystemInfo {
		/** @type {ContextTimestampAndHash} */
		const result = {
			safeTime,
-			timestampHash: /** @type {string} */ (tsHash.digest("hex")),
-			hash: /** @type {string} */ (hash.digest("hex"))
+			timestampHash: tsHash.digest("hex"),
+			hash: hash.digest("hex")
		};
		if (symlinks) result.symlinks = symlinks;
		return result;
@@ -3979,8 +3976,8 @@ class FileSystemInfo {
				null,
				(entry.resolved = {
					safeTime,
-					timestampHash: /** @type {string} */ (tsHash.digest("hex")),
-					hash: /** @type {string} */ (hash.digest("hex"))
+					timestampHash: tsHash.digest("hex"),
+					hash: hash.digest("hex")
				})
			);
		}
@@ -84,7 +84,7 @@ const getHash =
	() => {
		const hash = createHash(hashFunction);
		hash.update(strFn());
-		const digest = /** @type {string} */ (hash.digest("hex"));
+		const digest = hash.digest("hex");
		return digest.slice(0, 4);
	};

@@ -1211,7 +1211,7 @@ class NormalModule extends Module {
		hash.update("meta");
		hash.update(JSON.stringify(this.buildMeta));
		/** @type {BuildInfo} */
-		(this.buildInfo).hash = /** @type {string} */ (hash.digest("hex"));
+		(this.buildInfo).hash = hash.digest("hex");
	}

	/**
@@ -949,7 +949,16 @@ class RuntimeTemplate {
		// when the defaultInterop is used (when a ESM imports a CJS module),
		if (exportName.length > 0 && exportName[0] === "default") {
			if (isDeferred && exportsType !== "namespace") {
-				const access = `${importVar}.a${propertyAccess(exportName, 1)}`;
+				const exportsInfo = moduleGraph.getExportsInfo(module);
+				const name = exportName.slice(1);
+				const used = exportsInfo.getUsedName(name, runtime);
+				if (!used) {
+					const comment = Template.toNormalComment(
+						`unused export ${propertyAccess(exportName)}`
+					);
+					return `${comment} undefined`;
+				}
+				const access = `${importVar}.a${propertyAccess(used)}`;
				if (isCall || asiSafe === undefined) {
					return access;
				}
@@ -482,14 +482,12 @@ class SourceMapDevToolPlugin {
					const sourceMapString = JSON.stringify(sourceMap);
					if (sourceMapFilename) {
						const filename = file;
-						const sourceMapContentHash =
-							/** @type {string} */
-							(
-								usesContentHash &&
-									createHash(compilation.outputOptions.hashFunction)
-										.update(sourceMapString)
-										.digest("hex")
-							);
+						const sourceMapContentHash = usesContentHash
+							? createHash(compilation.outputOptions.hashFunction)
+									.update(sourceMapString)
+									.digest("hex")
+							: undefined;

						const pathParams = {
							chunk,
							filename: options.fileContext
@@ -53,6 +53,7 @@ class AssetSourceGenerator extends Generator {

		const encodedSource = originalSource.buffer().toString("base64");

		runtimeRequirements.add(RuntimeGlobals.requireScope);
+		runtimeRequirements.add(RuntimeGlobals.toBinary);

		let sourceContent;
@@ -244,11 +244,10 @@ class AssetGenerator extends Generator {
			hash.update(module.error.toString());
		}

-		const fullContentHash = /** @type {string} */ (
-			hash.digest(runtimeTemplate.outputOptions.hashDigest)
-		);
+		const fullContentHash = hash.digest(
+			runtimeTemplate.outputOptions.hashDigest
+		);

		/** @type {string} */
		const contentHash = nonNumericOnlyHash(
			fullContentHash,
			runtimeTemplate.outputOptions.hashDigestLength
@@ -34,7 +34,7 @@ class LazyHashedEtag {
		if (this._hash === undefined) {
			const hash = createHash(this._hashFunction);
			this._obj.updateHash(hash);
-			this._hash = /** @type {string} */ (hash.digest("base64"));
+			this._hash = hash.digest("base64");
		}
		return this._hash;
	}
@@ -470,12 +470,8 @@ class CssModulesPlugin {
					hash.update(chunkGraph.getModuleHash(module, chunk.runtime));
				}
			}
-			const digest = /** @type {string} */ (hash.digest(hashDigest));
-			chunk.contentHash.css = nonNumericOnlyHash(
-				digest,
-				/** @type {number} */
-				(hashDigestLength)
-			);
+			const digest = hash.digest(hashDigest);
+			chunk.contentHash.css = nonNumericOnlyHash(digest, hashDigestLength);
		});
		compilation.hooks.renderManifest.tap(PLUGIN_NAME, (result, options) => {
			const { chunkGraph } = compilation;
@@ -66,7 +66,7 @@ const CC_LOWER_Z = "z".charCodeAt(0);
 const CC_UPPER_A = "A".charCodeAt(0);
 const CC_UPPER_F = "F".charCodeAt(0);
 const CC_UPPER_E = "E".charCodeAt(0);
-const CC_UPPER_U = "E".charCodeAt(0);
+const CC_UPPER_U = "U".charCodeAt(0);
 const CC_UPPER_Z = "Z".charCodeAt(0);
 const CC_0 = "0".charCodeAt(0);
 const CC_9 = "9".charCodeAt(0);
@@ -68,8 +68,8 @@ class ContextElementDependency extends ModuleDependency {
	 */
	getResourceIdentifier() {
		let str = super.getResourceIdentifier();
-		if (this.attributes !== undefined) {
-			str += JSON.stringify(this.attributes);
+		if (this.attributes) {
+			str += `|importAttributes${JSON.stringify(this.attributes)}`;
		}
		return str;
	}
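A quick sketch of what the new tagged suffix yields — the base identifier below is invented, only the suffix format comes from the diff:

```js
// Invented base identifier, for illustration only.
const attributes = { type: "json" };
let str = "context element ./locale/en.json";
if (attributes) {
	str += `|importAttributes${JSON.stringify(attributes)}`;
}
console.log(str);
// context element ./locale/en.json|importAttributes{"type":"json"}
```

The `|importAttributes` delimiter makes the attribute segment explicit in the identifier, so requests that differ only in import attributes are never merged as equal.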
@@ -64,9 +64,7 @@ const getLocalIdent = (local, module, chunkGraph, runtimeTemplate) => {
		hash.update(local);
	}

-	const localIdentHash =
-		/** @type {string} */
-		(hash.digest(hashDigest)).slice(0, hashDigestLength);
+	const localIdentHash = hash.digest(hashDigest).slice(0, hashDigestLength);

	return runtimeTemplate.compilation
		.getPath(localIdentName, {
@@ -79,8 +79,11 @@ class HarmonyImportDependency extends ModuleDependency {
	 */
	getResourceIdentifier() {
		let str = super.getResourceIdentifier();
-		if (this.attributes !== undefined) {
-			str += JSON.stringify(this.attributes);
+		if (this.defer) {
+			str += "|defer";
+		}
+		if (this.attributes) {
+			str += `|importAttributes${JSON.stringify(this.attributes)}`;
		}
		return str;
	}
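Appending `|defer` keeps a deferred import and a regular import of the same module from being merged into one dependency, since the two need different generated code. A stand-in sketch of the dispatch (not webpack's actual class):

```js
// Stand-in for the identifier logic above; `base` is invented.
const identifierFor = (base, defer, attributes) => {
	let str = base;
	if (defer) str += "|defer";
	if (attributes) str += `|importAttributes${JSON.stringify(attributes)}`;
	return str;
};

identifierFor("module./m.js", false); // "module./m.js"
identifierFor("module./m.js", true); // "module./m.js|defer" — never merged with the above
```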
@@ -35,6 +35,19 @@ class ImportContextDependency extends ContextDependency {
		return "esm";
	}

+	/**
+	 * @returns {string | null} an identifier to merge equal requests
+	 */
+	getResourceIdentifier() {
+		let str = super.getResourceIdentifier();
+
+		if (this.options.attributes) {
+			str += `|importAttributes${JSON.stringify(this.options.attributes)}`;
+		}
+
+		return str;
+	}
+
	/**
	 * @param {ObjectSerializerContext} context context
	 */
@@ -50,8 +50,8 @@ class ImportDependency extends ModuleDependency {
	 */
	getResourceIdentifier() {
		let str = super.getResourceIdentifier();
-		if (this.attributes !== undefined) {
-			str += JSON.stringify(this.attributes);
+		if (this.attributes) {
+			str += `|importAttributes${JSON.stringify(this.attributes)}`;
		}
		return str;
	}
@@ -378,9 +378,7 @@ class WorkerPlugin {
					)}|${i}`;
					const hash = createHash(compilation.outputOptions.hashFunction);
					hash.update(name);
-					const digest =
-						/** @type {string} */
-						(hash.digest(compilation.outputOptions.hashDigest));
+					const digest = hash.digest(compilation.outputOptions.hashDigest);
					entryOptions.runtime = digest.slice(
						0,
						compilation.outputOptions.hashDigestLength
@@ -16,12 +16,12 @@ const {
	getUsedModuleIdsAndModules
 } = require("./IdHelpers");

-/** @typedef {import("../../declarations/plugins/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions} HashedModuleIdsPluginOptions */
+/** @typedef {import("../../declarations/plugins/ids/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions} HashedModuleIdsPluginOptions */
 /** @typedef {import("../Compiler")} Compiler */

 const validate = createSchemaValidation(
-	require("../../schemas/plugins/HashedModuleIdsPlugin.check"),
-	() => require("../../schemas/plugins/HashedModuleIdsPlugin.json"),
+	require("../../schemas/plugins/ids/HashedModuleIdsPlugin.check"),
+	() => require("../../schemas/plugins/ids/HashedModuleIdsPlugin.json"),
	{
		name: "Hashed Module Ids Plugin",
		baseDataPath: "options"
@@ -37,7 +37,7 @@ class HashedModuleIdsPlugin {
	constructor(options = {}) {
		validate(options);

-		/** @type {HashedModuleIdsPluginOptions} */
+		/** @type {Required<Omit<HashedModuleIdsPluginOptions, "context">> & { context?: string | undefined }} */
		this.options = {
			context: undefined,
			hashFunction: DEFAULTS.HASH_FUNCTION,
@@ -73,9 +73,7 @@ class HashedModuleIdsPlugin {
					)
				);
				hash.update(ident || "");
-				const hashId = /** @type {string} */ (
-					hash.digest(options.hashDigest)
-				);
+				const hashId = hash.digest(options.hashDigest);
				let len = options.hashDigestLength;
				while (usedIds.has(hashId.slice(0, len))) {
					/** @type {number} */ (len)++;
@@ -25,7 +25,7 @@ const numberHash = require("../util/numberHash");
 const getHash = (str, len, hashFunction) => {
	const hash = createHash(hashFunction);
	hash.update(str);
-	const digest = /** @type {string} */ (hash.digest("hex"));
+	const digest = hash.digest("hex");
	return digest.slice(0, len);
 };

@@ -503,11 +503,10 @@ class JavascriptModulesPlugin {
					}
					xor.updateHash(hash);
				}
-				const digest = /** @type {string} */ (hash.digest(hashDigest));
+				const digest = hash.digest(hashDigest);
				chunk.contentHash.javascript = nonNumericOnlyHash(
					digest,
-					/** @type {number} */
-					(hashDigestLength)
+					hashDigestLength
				);
			});
			compilation.hooks.additionalTreeRuntimeRequirements.tap(
@@ -134,11 +134,21 @@ class SystemLibraryPlugin extends AbstractLibraryPlugin {
				);
				if (used) {
					if (otherUnused || used !== exportInfo.name) {
-						instructions.push(
-							`${external}${propertyAccess([
-								used
-							])} = module${propertyAccess([exportInfo.name])};`
-						);
+						if (exportInfo.name === "default") {
+							// Ideally we should use `module && module.__esModule ? module['default'] : module`
+							// But we need to keep compatibility with SystemJS format libraries (they are using `default`) and bundled SystemJS libraries from commonjs format
+							instructions.push(
+								`${external}${propertyAccess([
+									used
+								])} = module["default"] || module;`
+							);
+						} else {
+							instructions.push(
+								`${external}${propertyAccess([
+									used
+								])} = module${propertyAccess([exportInfo.name])};`
+							);
+						}
						handledNames.push(exportInfo.name);
					}
				} else {
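The `module["default"] || module` fallback mirrors how SystemJS consumers see CommonJS modules: a library built as SystemJS exposes a real `default` binding, while a plain CommonJS module object has none, so the access falls back to the module object itself. A reduced sketch (both module shapes are invented):

```js
// Invented module shapes, for illustration.
const fromSystemJsBuild = { default: { hello: () => "hi" } };
const fromPlainCommonJs = { hello: () => "hi" };

const pick = (module) => module["default"] || module;

pick(fromSystemJsBuild); // the `default` binding
pick(fromPlainCommonJs); // the module object itself
```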
@@ -12,6 +12,8 @@ const WebpackError = require("../WebpackError");
 const { compareSelect, compareStrings } = require("../util/comparators");
 const createHash = require("../util/createHash");

+/** @typedef {import("../../declarations/WebpackOptions").HashFunction} HashFunction */
+/** @typedef {import("../../declarations/WebpackOptions").HashDigest} HashDigest */
 /** @typedef {import("webpack-sources").Source} Source */
 /** @typedef {import("../Cache").Etag} Etag */
 /** @typedef {import("../Compilation").AssetInfo} AssetInfo */
@@ -109,8 +111,8 @@ const compilationHooksMap = new WeakMap();

 /**
  * @typedef {object} RealContentHashPluginOptions
- * @property {string | Hash} hashFunction the hash function to use
- * @property {string=} hashDigest the hash digest to use
+ * @property {HashFunction} hashFunction the hash function to use
+ * @property {HashDigest} hashDigest the hash digest to use
  */

 const PLUGIN_NAME = "RealContentHashPlugin";
@@ -432,7 +434,7 @@ ${referencingAssets
					hash.update(content);
				}
				const digest = hash.digest(this._hashDigest);
-				newHash = /** @type {string} */ (digest.slice(0, oldHash.length));
+				newHash = digest.slice(0, oldHash.length);
			}
			hashToNewHash.set(oldHash, newHash);
		}
@@ -55,7 +55,7 @@ const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;
 const hashForName = (buffers, hashFunction) => {
	const hash = createHash(hashFunction);
	for (const buf of buffers) hash.update(buf);
-	return /** @type {string} */ (hash.digest("hex"));
+	return hash.digest("hex");
 };

 const COMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
@@ -117,7 +117,7 @@ const setMapSize = (map, size) => {
 const toHash = (buffer, hashFunction) => {
	const hash = createHash(hashFunction);
	hash.update(buffer);
-	return /** @type {string} */ (hash.digest("latin1"));
+	return hash.digest("latin1");
 };

 const ESCAPE = null;
@@ -722,7 +722,7 @@ const SIMPLE_EXTRACTORS = {
		}
	},
	hash: (object, compilation) => {
-		object.hash = /** @type {string} */ (compilation.hash);
+		object.hash = compilation.hash;
	},
	version: (object) => {
		object.version = require("../../package.json").version;
@@ -5,14 +5,31 @@

 "use strict";

+/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
+
 class Hash {
	/* istanbul ignore next */
	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
	 * @abstract
-	 * @param {string|Buffer} data data
-	 * @param {string=} inputEncoding data encoding
-	 * @returns {this} updated hash
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @abstract
+	 * @overload
+	 * @param {string} data data
+	 * @param {Encoding} inputEncoding data encoding
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @abstract
+	 * @param {string | Buffer} data data
+	 * @param {Encoding=} inputEncoding data encoding
+	 * @returns {this} updated hash
+	 */
	update(data, inputEncoding) {
		const AbstractMethodError = require("../AbstractMethodError");
@@ -24,8 +41,21 @@ class Hash {
	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
	 * @abstract
-	 * @param {string=} encoding encoding of the return value
-	 * @returns {string|Buffer} digest
+	 * @overload
+	 * @returns {Buffer} digest
	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @abstract
+	 * @overload
+	 * @param {Encoding} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @abstract
+	 * @param {Encoding=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
+	 */
	digest(encoding) {
		const AbstractMethodError = require("../AbstractMethodError");
@@ -7,9 +7,10 @@

 const Hash = require("./Hash");

+/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
 /** @typedef {import("../../declarations/WebpackOptions").HashFunction} HashFunction */

-const BULK_SIZE = 2000;
+const BULK_SIZE = 3;

 // We are using an object instead of a Map as this will stay static during the runtime
 // so access to it can be optimized by v8
@@ -38,9 +39,22 @@ class BulkUpdateDecorator extends Hash {

	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
-	 * @param {string|Buffer} data data
-	 * @param {string=} inputEncoding data encoding
-	 * @returns {this} updated hash
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string} data data
+	 * @param {Encoding} inputEncoding data encoding
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @param {string | Buffer} data data
+	 * @param {Encoding=} inputEncoding data encoding
+	 * @returns {this} updated hash
+	 */
	update(data, inputEncoding) {
		if (
@@ -55,7 +69,11 @@ class BulkUpdateDecorator extends Hash {
			this.hash.update(this.buffer);
			this.buffer = "";
		}
-		this.hash.update(data, inputEncoding);
+		if (typeof data === "string" && inputEncoding) {
+			this.hash.update(data, inputEncoding);
+		} else {
+			this.hash.update(data);
+		}
	} else {
		this.buffer += data;
		if (this.buffer.length > BULK_SIZE) {
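The branched call reflects the narrowed `update` overloads: an encoding only accompanies string data, and passing `undefined` as the encoding no longer matches any signature, so the two-argument form is used only when both a string and an encoding are present. A reduced sketch of the dispatch, assuming a Node `crypto` hash underneath (the `update` helper is invented):

```js
const { createHash } = require("crypto");

// Minimal model of the decorator's dispatch; `inner` stands in for this.hash.
const update = (inner, data, inputEncoding) => {
	if (typeof data === "string" && inputEncoding) {
		inner.update(data, inputEncoding); // an encoding only makes sense for strings
	} else {
		inner.update(data); // Buffers (and un-encoded strings) take the 1-arg form
	}
	return inner;
};

const h = update(createHash("sha256"), "68656c6c6f", "hex");
console.log(h.digest("hex"));
```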
@@ -71,8 +89,19 @@ class BulkUpdateDecorator extends Hash {

	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
-	 * @param {string=} encoding encoding of the return value
-	 * @returns {string|Buffer} digest
+	 * @overload
+	 * @returns {Buffer} digest
	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @param {Encoding} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @param {Encoding=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
+	 */
	digest(encoding) {
		let digestCache;
@@ -91,9 +120,19 @@ class BulkUpdateDecorator extends Hash {
		if (buffer.length > 0) {
			this.hash.update(buffer);
		}
+		if (!encoding) {
+			const result = this.hash.digest();
+			if (digestCache !== undefined) {
+				digestCache.set(buffer, result);
+			}
+			return result;
+		}
		const digestResult = this.hash.digest(encoding);
		// Compatibility with the old hash library
		const result =
-			typeof digestResult === "string" ? digestResult : digestResult.toString();
+			typeof digestResult === "string"
+				? digestResult
+				: /** @type {NodeJS.TypedArray} */ (digestResult).toString();
		if (digestCache !== undefined) {
			digestCache.set(buffer, result);
		}
@@ -110,9 +149,22 @@ class DebugHash extends Hash {

	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
-	 * @param {string|Buffer} data data
-	 * @param {string=} inputEncoding data encoding
-	 * @returns {this} updated hash
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string} data data
+	 * @param {Encoding} inputEncoding data encoding
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @param {string | Buffer} data data
+	 * @param {Encoding=} inputEncoding data encoding
+	 * @returns {this} updated hash
+	 */
	update(data, inputEncoding) {
		if (typeof data !== "string") data = data.toString("utf8");
@@ -132,8 +184,19 @@ class DebugHash extends Hash {

	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
-	 * @param {string=} encoding encoding of the return value
-	 * @returns {string|Buffer} digest
+	 * @overload
+	 * @returns {Buffer} digest
	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @param {Encoding} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @param {Encoding=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
+	 */
	digest(encoding) {
		return Buffer.from(`@webpack-debug-digest@${this.string}`).toString("hex");
@@ -186,14 +249,21 @@ module.exports = (algorithm) => {
		case "native-md4":
			if (crypto === undefined) crypto = require("crypto");
			return new BulkUpdateDecorator(
-				() => /** @type {typeof import("crypto")} */ (crypto).createHash("md4"),
+				() =>
+					/** @type {Hash} */ (
+						/** @type {typeof import("crypto")} */
+						(crypto).createHash("md4")
+					),
				"md4"
			);
		default:
			if (crypto === undefined) crypto = require("crypto");
			return new BulkUpdateDecorator(
				() =>
-					/** @type {typeof import("crypto")} */ (crypto).createHash(algorithm),
+					/** @type {Hash} */ (
+						/** @type {typeof import("crypto")} */
+						(crypto).createHash(algorithm)
+					),
				algorithm
			);
	}
@@ -8,6 +8,8 @@
 const Hash = require("../Hash");
 const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;

+/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
+
 class BatchedHash extends Hash {
	/**
	 * @param {Hash} hash hash
@@ -21,9 +23,22 @@ class BatchedHash extends Hash {

	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
-	 * @param {string|Buffer} data data
-	 * @param {string=} inputEncoding data encoding
-	 * @returns {this} updated hash
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string} data data
+	 * @param {Encoding} inputEncoding data encoding
+	 * @returns {Hash} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @param {string | Buffer} data data
+	 * @param {Encoding=} inputEncoding data encoding
+	 * @returns {this} updated hash
+	 */
	update(data, inputEncoding) {
		if (this.string !== undefined) {
@@ -35,7 +50,11 @@ class BatchedHash extends Hash {
				this.string += data;
				return this;
			}
-			this.hash.update(this.string, this.encoding);
+			if (this.encoding) {
+				this.hash.update(this.string, this.encoding);
+			} else {
+				this.hash.update(this.string);
+			}
			this.string = undefined;
		}
		if (typeof data === "string") {
@@ -46,8 +65,10 @@ class BatchedHash extends Hash {
			) {
				this.string = data;
				this.encoding = inputEncoding;
-			} else {
+			} else if (inputEncoding) {
				this.hash.update(data, inputEncoding);
+			} else {
+				this.hash.update(data);
			}
		} else {
			this.hash.update(data);
@@ -57,12 +78,30 @@ class BatchedHash extends Hash {

	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
-	 * @param {string=} encoding encoding of the return value
-	 * @returns {string|Buffer} digest
+	 * @overload
+	 * @returns {Buffer} digest
	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @param {Encoding} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @param {Encoding=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
+	 */
	digest(encoding) {
		if (this.string !== undefined) {
-			this.hash.update(this.string, this.encoding);
+			if (this.encoding) {
+				this.hash.update(this.string, this.encoding);
+			} else {
+				this.hash.update(this.string);
+			}
		}
+		if (!encoding) {
+			return this.hash.digest();
+		}
		return this.hash.digest(encoding);
	}
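`BatchedHash` defers a single short string instead of hashing it immediately, so `digest()` must first flush any pending string into the underlying hash before producing a result. A reduced sketch of the batching flow, using Node's `crypto` directly (the class below is a stand-in with an invented threshold, not webpack's):

```js
const { createHash } = require("crypto");

// Stand-in for BatchedHash: buffer short strings, flush on digest().
class TinyBatchedHash {
	constructor(hash) {
		this.hash = hash;
		this.string = undefined;
	}
	update(data) {
		if (typeof data === "string" && data.length < 1024) {
			if (this.string !== undefined) {
				this.string += data; // merge consecutive short strings
			} else {
				this.string = data; // defer the first short string
			}
			return this;
		}
		if (this.string !== undefined) {
			this.hash.update(this.string); // flush the pending string
			this.string = undefined;
		}
		this.hash.update(data);
		return this;
	}
	digest(encoding) {
		if (this.string !== undefined) {
			this.hash.update(this.string); // flush before digesting
		}
		return encoding ? this.hash.digest(encoding) : this.hash.digest();
	}
}

const h = new TinyBatchedHash(createHash("sha256"));
console.log(h.update("hello").update("world").digest("hex"));
// same digest as hashing "helloworld" in one call
```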
@@ -5,13 +5,15 @@

 "use strict";

+const Hash = require("../Hash");
+
 // 65536 is the size of a wasm memory page
 // 64 is the maximum chunk size for every possible wasm hash implementation
 // 4 is the maximum number of bytes per char for string encoding (max is utf-8)
 // ~3 makes sure that it's always a block of 4 chars, so avoid partially encoded bytes for base64
 const MAX_SHORT_STRING = Math.floor((65536 - 64) / 4) & ~3;

-class WasmHash {
+class WasmHash extends Hash {
	/**
	 * @param {WebAssembly.Instance} instance wasm instance
	 * @param {WebAssembly.Instance[]} instancesPool pool of instances
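The comment block above encodes a small calculation worth checking: `(65536 - 64) / 4 = 16368`, which is already a multiple of 4, so `& ~3` leaves it unchanged; the cap guarantees a worst-case 4-bytes-per-char string still fits in one wasm memory page with 64 bytes of headroom. A worked check:

```js
// Worked check of the MAX_SHORT_STRING computation above.
const pageSize = 65536; // one wasm memory page
const maxChunk = 64; // headroom for the largest hash block size
const maxBytesPerChar = 4; // utf-8 worst case

const raw = Math.floor((pageSize - maxChunk) / maxBytesPerChar); // 16368
const aligned = raw & ~3; // clear the two low bits → multiple of 4

console.log(raw, aligned); // 16368 16368 — already 4-aligned
console.log(aligned * maxBytesPerChar + maxChunk <= pageSize); // true
```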
@@ -19,6 +21,8 @@ class WasmHash {
	 * @param {number} digestSize size of digest returned by wasm
	 */
	constructor(instance, instancesPool, chunkSize, digestSize) {
+		super();
+
		const exports = /** @type {EXPECTED_ANY} */ (instance.exports);
		exports.init();
		this.exports = exports;
@@ -35,17 +39,39 @@ class WasmHash {
	}

	/**
-	 * @param {Buffer | string} data data
-	 * @param {BufferEncoding=} encoding encoding
-	 * @returns {this} itself
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string | Buffer} data data
+	 * @returns {Hash} updated hash
	 */
-	update(data, encoding) {
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @overload
+	 * @param {string} data data
+	 * @param {string=} inputEncoding data encoding
+	 * @returns {this} updated hash
+	 */
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @param {string | Buffer} data data
+	 * @param {string=} inputEncoding data encoding
+	 * @returns {this} updated hash
+	 */
+	update(data, inputEncoding) {
		if (typeof data === "string") {
			while (data.length > MAX_SHORT_STRING) {
-				this._updateWithShortString(data.slice(0, MAX_SHORT_STRING), encoding);
+				this._updateWithShortString(
+					data.slice(0, MAX_SHORT_STRING),
+					/** @type {NodeJS.BufferEncoding} */
+					(inputEncoding)
+				);
				data = data.slice(MAX_SHORT_STRING);
			}
-			this._updateWithShortString(data, encoding);
+			this._updateWithShortString(
+				data,
+				/** @type {NodeJS.BufferEncoding} */
+				(inputEncoding)
+			);
			return this;
		}
		this._updateWithBuffer(data);
@@ -136,17 +162,31 @@ class WasmHash {
	}

	/**
-	 * @param {BufferEncoding} type type
-	 * @returns {Buffer | string} digest
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @returns {Buffer} digest
	 */
-	digest(type) {
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @overload
+	 * @param {string=} encoding encoding of the return value
+	 * @returns {string} digest
+	 */
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @param {string=} encoding encoding of the return value
+	 * @returns {string | Buffer} digest
+	 */
+	digest(encoding) {
		const { exports, buffered, mem, digestSize } = this;
		exports.final(buffered);
		this.instancesPool.push(this);
		const hex = mem.toString("latin1", 0, digestSize);
-		if (type === "hex") return hex;
-		if (type === "binary" || !type) return Buffer.from(hex, "hex");
-		return Buffer.from(hex, "hex").toString(type);
+		if (encoding === "hex") return hex;
+		if (encoding === "binary" || !encoding) return Buffer.from(hex, "hex");
+		return Buffer.from(hex, "hex").toString(
+			/** @type {NodeJS.BufferEncoding} */ (encoding)
+		);
	}
 }

@@ -1,6 +1,6 @@
 {
	"name": "webpack",
-	"version": "5.102.0",
+	"version": "5.102.1",
	"description": "Packs ECMAScript/CommonJs/AMD modules for the browser. Allows you to split your codebase into multiple bundles, which can be loaded on demand. Supports loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",
	"homepage": "https://github.com/webpack/webpack",
	"bugs": "https://github.com/webpack/webpack/issues",
@@ -110,7 +110,7 @@
	"devDependencies": {
		"@babel/core": "^7.27.1",
		"@babel/preset-react": "^7.27.1",
-		"@codspeed/core": "^4.0.1",
+		"@codspeed/core": "^5.0.1",
		"@eslint/js": "^9.36.0",
		"@eslint/markdown": "^7.3.0",
		"@stylistic/eslint-plugin": "^5.4.0",
File diff suppressed because one or more lines are too long
@@ -1568,8 +1568,21 @@
			"minLength": 1
		},
		"HashDigest": {
-			"description": "Digest type used for the hash.",
-			"type": "string"
+			"description": "Digest types used for the hash.",
+			"enum": [
+				"base64",
+				"base64url",
+				"hex",
+				"binary",
+				"utf8",
+				"utf-8",
+				"utf16le",
+				"utf-16le",
+				"latin1",
+				"ascii",
+				"ucs2",
+				"ucs-2"
+			]
		},
		"HashDigestLength": {
			"description": "Number of chars which are used for the hash.",
@@ -1786,7 +1799,7 @@
		"JavascriptParserOptions": {
			"description": "Parser options for javascript modules.",
			"type": "object",
-			"additionalProperties": true,
+			"additionalProperties": false,
			"properties": {
				"amd": {
					"$ref": "#/definitions/Amd"
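Together with dropping the `[k: string]: any;` index signature from `JavascriptParserOptions` earlier in this compare, flipping `additionalProperties` to `false` means unknown parser options are rejected by schema validation instead of silently accepted. A hedged sketch of what starts failing (the misspelled key is invented):

```js
// webpack.config.js — illustrative only; "dynamicImports" is a made-up
// misspelling that additionalProperties: false would now reject.
module.exports = {
	module: {
		parser: {
			javascript: {
				dynamicImports: true // ← unknown key: validation error after this change
			}
		}
	}
};
```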
@@ -1,7 +0,0 @@
-/*
- * This file was automatically generated.
- * DO NOT MODIFY BY HAND.
- * Run `yarn fix:special` to update
- */
-declare const check: (options: import("../../declarations/plugins/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions) => boolean;
-export = check;
@@ -1,6 +0,0 @@
-/*
- * This file was automatically generated.
- * DO NOT MODIFY BY HAND.
- * Run `yarn fix:special` to update
- */
-const t=/^(?:[A-Za-z]:[\\/]|\\\\|\/)/;function e(r,{instancePath:s="",parentData:n,parentDataProperty:a,rootData:i=r}={}){let o=null,l=0;if(0===l){if(!r||"object"!=typeof r||Array.isArray(r))return e.errors=[{params:{type:"object"}}],!1;{const s=l;for(const t in r)if("context"!==t&&"hashDigest"!==t&&"hashDigestLength"!==t&&"hashFunction"!==t)return e.errors=[{params:{additionalProperty:t}}],!1;if(s===l){if(void 0!==r.context){let s=r.context;const n=l;if(l===n){if("string"!=typeof s)return e.errors=[{params:{type:"string"}}],!1;if(s.includes("!")||!0!==t.test(s))return e.errors=[{params:{}}],!1}var u=n===l}else u=!0;if(u){if(void 0!==r.hashDigest){let t=r.hashDigest;const s=l;if("hex"!==t&&"latin1"!==t&&"base64"!==t)return e.errors=[{params:{}}],!1;u=s===l}else u=!0;if(u){if(void 0!==r.hashDigestLength){let t=r.hashDigestLength;const s=l;if(l===s){if("number"!=typeof t)return e.errors=[{params:{type:"number"}}],!1;if(t<1||isNaN(t))return e.errors=[{params:{comparison:">=",limit:1}}],!1}u=s===l}else u=!0;if(u)if(void 0!==r.hashFunction){let t=r.hashFunction;const s=l,n=l;let a=!1,i=null;const p=l,h=l;let c=!1;const m=l;if(l===m)if("string"==typeof t){if(t.length<1){const t={params:{}};null===o?o=[t]:o.push(t),l++}}else{const t={params:{type:"string"}};null===o?o=[t]:o.push(t),l++}var f=m===l;if(c=c||f,!c){const e=l;if(!(t instanceof Function)){const t={params:{}};null===o?o=[t]:o.push(t),l++}f=e===l,c=c||f}if(c)l=h,null!==o&&(h?o.length=h:o=null);else{const t={params:{}};null===o?o=[t]:o.push(t),l++}if(p===l&&(a=!0,i=0),!a){const t={params:{passingSchemas:i}};return null===o?o=[t]:o.push(t),l++,e.errors=o,!1}l=n,null!==o&&(n?o.length=n:o=null),u=s===l}else u=!0}}}}}return e.errors=o,0===l}module.exports=e,module.exports.default=e;
@@ -0,0 +1,7 @@
+/*
+ * This file was automatically generated.
+ * DO NOT MODIFY BY HAND.
+ * Run `yarn fix:special` to update
+ */
+declare const check: (options: import("../../../declarations/plugins/ids/HashedModuleIdsPlugin").HashedModuleIdsPluginOptions) => boolean;
+export = check;
@@ -0,0 +1,6 @@
+/*
+ * This file was automatically generated.
+ * DO NOT MODIFY BY HAND.
+ * Run `yarn fix:special` to update
+ */
+const t=/^(?:[A-Za-z]:[\\/]|\\\\|\/)/;function e(s,{instancePath:r="",parentData:n,parentDataProperty:a,rootData:i=s}={}){let o=null,l=0;if(0===l){if(!s||"object"!=typeof s||Array.isArray(s))return e.errors=[{params:{type:"object"}}],!1;{const r=l;for(const t in s)if("context"!==t&&"hashDigest"!==t&&"hashDigestLength"!==t&&"hashFunction"!==t)return e.errors=[{params:{additionalProperty:t}}],!1;if(r===l){if(void 0!==s.context){let r=s.context;const n=l;if(l===n){if("string"!=typeof r)return e.errors=[{params:{type:"string"}}],!1;if(r.includes("!")||!0!==t.test(r))return e.errors=[{params:{}}],!1}var u=n===l}else u=!0;if(u){if(void 0!==s.hashDigest){let t=s.hashDigest;const r=l;if("base64"!==t&&"base64url"!==t&&"hex"!==t&&"binary"!==t&&"utf8"!==t&&"utf-8"!==t&&"utf16le"!==t&&"utf-16le"!==t&&"latin1"!==t&&"ascii"!==t&&"ucs2"!==t&&"ucs-2"!==t)return e.errors=[{params:{}}],!1;u=r===l}else u=!0;if(u){if(void 0!==s.hashDigestLength){let t=s.hashDigestLength;const r=l;if(l===r){if("number"!=typeof t)return e.errors=[{params:{type:"number"}}],!1;if(t<1||isNaN(t))return e.errors=[{params:{comparison:">=",limit:1}}],!1}u=r===l}else u=!0;if(u)if(void 0!==s.hashFunction){let t=s.hashFunction;const r=l,n=l;let a=!1,i=null;const c=l,p=l;let h=!1;const m=l;if(l===m)if("string"==typeof t){if(t.length<1){const t={params:{}};null===o?o=[t]:o.push(t),l++}}else{const t={params:{type:"string"}};null===o?o=[t]:o.push(t),l++}var f=m===l;if(h=h||f,!h){const e=l;if(!(t instanceof Function)){const t={params:{}};null===o?o=[t]:o.push(t),l++}f=e===l,h=h||f}if(h)l=p,null!==o&&(p?o.length=p:o=null);else{const t={params:{}};null===o?o=[t]:o.push(t),l++}if(c===l&&(a=!0,i=0),!a){const t={params:{passingSchemas:i}};return null===o?o=[t]:o.push(t),l++,e.errors=o,!1}l=n,null!==o&&(n?o.length=n:o=null),u=r===l}else u=!0}}}}}return e.errors=o,0===l}module.exports=e,module.exports.default=e;
@@ -9,7 +9,7 @@
		},
		{
			"instanceof": "Function",
-			"tsType": "typeof import('../../lib/util/Hash')"
+			"tsType": "typeof import('../../../lib/util/Hash')"
		}
	]
 }
@@ -25,7 +25,20 @@
		},
		"hashDigest": {
			"description": "The encoding to use when generating the hash, defaults to 'base64'. All encodings from Node.JS' hash.digest are supported.",
-			"enum": ["hex", "latin1", "base64"]
+			"enum": [
+				"base64",
+				"base64url",
+				"hex",
+				"binary",
+				"utf8",
+				"utf-8",
+				"utf16le",
+				"utf-16le",
+				"latin1",
+				"ascii",
+				"ucs2",
+				"ucs-2"
+			]
		},
		"hashDigestLength": {
			"description": "The prefix length of the hash digest to use, defaults to 4.",
@@ -3,6 +3,15 @@ import fs from "fs/promises";
 import { Session } from "inspector";
 import path from "path";
 import { fileURLToPath, pathToFileURL } from "url";
+import {
+	InstrumentHooks,
+	getCodspeedRunnerMode,
+	getGitDir,
+	getV8Flags,
+	mongoMeasurement,
+	setupCore,
+	teardownCore
+} from "@codspeed/core";
 import { simpleGit } from "simple-git";
 import { Bench, hrtimeNow } from "tinybench";

@@ -12,32 +21,6 @@ const git = simpleGit(rootPath);

 const REV_LIST_REGEXP = /^([a-f0-9]+)\s*([a-f0-9]+)\s*([a-f0-9]+)?\s*$/;

-const getV8Flags = () => {
-	const nodeVersionMajor = Number.parseInt(
-		process.version.slice(1).split(".")[0],
-		10
-	);
-	const flags = [
-		"--hash-seed=1",
-		"--random-seed=1",
-		"--no-opt",
-		"--predictable",
-		"--predictable-gc-schedule",
-		"--interpreted-frames-native-stack",
-		"--allow-natives-syntax",
-		"--expose-gc",
-		"--no-concurrent-sweeping",
-		"--max-old-space-size=4096"
-	];
-	if (nodeVersionMajor < 18) {
-		flags.push("--no-randomize-hashes");
-	}
-	if (nodeVersionMajor < 20) {
-		flags.push("--no-scavenge-task");
-	}
-	return flags;
-};
-
 const checkV8Flags = () => {
	const requiredFlags = getV8Flags();
	const actualFlags = process.execArgv;
@@ -248,6 +231,8 @@ for (const baselineInfo of baselineRevisions) {
	}
 }

+const baseOutputPath = path.join(__dirname, "js", "benchmark");
+
 function buildConfiguration(
	test,
	baseline,
@ -385,105 +370,239 @@ const scenarios = [
|
|||
}
|
||||
];
|
||||
|
||||
const baseOutputPath = path.join(__dirname, "js", "benchmark");
|
||||
function getStackTrace(belowFn) {
|
||||
const oldLimit = Error.stackTraceLimit;
|
||||
Error.stackTraceLimit = Infinity;
|
||||
const dummyObject = {};
|
||||
const v8Handler = Error.prepareStackTrace;
|
||||
Error.prepareStackTrace = (dummyObject, v8StackTrace) => v8StackTrace;
|
||||
Error.captureStackTrace(dummyObject, belowFn || getStackTrace);
|
||||
const v8StackTrace = dummyObject.stack;
|
||||
Error.prepareStackTrace = v8Handler;
|
||||
Error.stackTraceLimit = oldLimit;
|
||||
return v8StackTrace;
|
||||
}
|
||||
|
||||
function getCallingFile() {
|
||||
const stack = getStackTrace();
|
||||
let callingFile = stack[2].getFileName(); // [here, withCodSpeed, actual caller]
|
||||
const gitDir = getGitDir(callingFile);
|
||||
if (gitDir === undefined) {
|
||||
throw new Error("Could not find a git repository");
|
||||
}
|
||||
if (callingFile.startsWith("file://")) {
|
||||
callingFile = fileURLToPath(callingFile);
|
||||
}
|
||||
return path.relative(gitDir, callingFile);
|
||||
}
|
||||
|
||||
const taskUriMap = new WeakMap();
|
||||
|
||||
function getOrCreateUriMap(bench) {
|
||||
let uriMap = taskUriMap.get(bench);
|
||||
if (!uriMap) {
|
||||
uriMap = new Map();
|
||||
taskUriMap.set(bench, uriMap);
|
||||
}
|
||||
return uriMap;
|
||||
}
|
||||
|
||||
function getTaskUri(bench, taskName, rootCallingFile) {
|
||||
const uriMap = taskUriMap.get(bench);
|
||||
return uriMap?.get(taskName) || `${rootCallingFile}::${taskName}`;
|
||||
}
|
||||
|
||||
const withCodSpeed = async (/** @type {import("tinybench").Bench} */ bench) => {
	const { InstrumentHooks, getGitDir, mongoMeasurement, setupCore, teardownCore } =
		await import("@codspeed/core");
	const codspeedRunnerMode = getCodspeedRunnerMode();

	if (codspeedRunnerMode === "disabled") {
		// No CodSpeed runner detected: keep the plain tinybench behavior,
		// but warn so CI logs make the fallback visible.
		const rawRun = bench.run;
		bench.run = async () => {
			console.warn(
				`[CodSpeed] ${bench.tasks.length} benches detected but no instrumentation found, falling back to tinybench`
			);
			return rawRun.bind(bench)();
		};
		return bench;
	}

	const getStackTrace = (belowFn) => {
		const oldLimit = Error.stackTraceLimit;
		Error.stackTraceLimit = Infinity;
		const dummyObject = {};
		const v8Handler = Error.prepareStackTrace;
		Error.prepareStackTrace = (dummyObject, v8StackTrace) => v8StackTrace;
		Error.captureStackTrace(dummyObject, belowFn || getStackTrace);
		const v8StackTrace = dummyObject.stack;
		Error.prepareStackTrace = v8Handler;
		Error.stackTraceLimit = oldLimit;
		return v8StackTrace;
	};

	const getCallingFile = () => {
		const stack = getStackTrace();
		let callingFile = stack[2].getFileName(); // [here, withCodSpeed, actual caller]
		const gitDir = getGitDir(callingFile);
		if (gitDir === undefined) {
			throw new Error("Could not find a git repository");
		}
		if (callingFile.startsWith("file://")) {
			callingFile = fileURLToPath(callingFile);
		}
		return path.relative(gitDir, callingFile);
	};

	const rawAdd = bench.add;
	const uriMap = getOrCreateUriMap(bench);
	bench.add = (name, fn, opts) => {
		// Record a stable URI (file path + optional bench name + task name)
		// so CodSpeed can identify the benchmark across runs.
		const callingFile = getCallingFile();
		let uri = callingFile;
		if (bench.name !== undefined) {
			uri += `::${bench.name}`;
		}
		uri += `::${name}`;
		uriMap.set(name, uri);
		return rawAdd.bind(bench)(name, fn, opts);
	};

	const rootCallingFile = getCallingFile();

	if (codspeedRunnerMode === "instrumented") {
		const setupBenchRun = () => {
			setupCore();
			console.log(
				"[CodSpeed] running with @codspeed/tinybench (instrumented mode)"
			);
		};
		const finalizeBenchRun = () => {
			teardownCore();
			console.log(`[CodSpeed] Done running ${bench.tasks.length} benches.`);
			return bench.tasks;
		};

		const wrapFunctionWithFrame = (fn, isAsync) => {
			// The named frame lets the CodSpeed profiler find the measured root.
			if (isAsync) {
				return async function __codspeed_root_frame__() {
					await fn();
				};
			}

			return function __codspeed_root_frame__() {
				fn();
			};
		};

		const logTaskCompletion = (uri, status) => {
			console.log(`[CodSpeed] ${status} ${uri}`);
		};

		const taskCompletionMessage = () =>
			InstrumentHooks.isInstrumented() ? "Measured" : "Checked";

		const iterationAsync = async (task) => {
			try {
				await task.fnOpts.beforeEach?.call(task, "run");
				const start = bench.opts.now();
				await task.fn();
				const end = bench.opts.now() - start || 0;
				await task.fnOpts.afterEach?.call(task, "run");
				return [start, end];
			} catch (err) {
				if (bench.opts.throws) {
					throw err;
				}
			}
		};

		const wrapWithInstrumentHooksAsync = async (fn, uri) => {
			InstrumentHooks.startBenchmark();
			const result = await fn();
			InstrumentHooks.stopBenchmark();
			InstrumentHooks.setExecutedBenchmark(process.pid, uri);
			return result;
		};

		const runTaskAsync = async (task, uri) => {
			const { fnOpts, fn } = task;

			// Custom setup
			await bench.opts.setup?.(task, "run");

			await fnOpts?.beforeAll?.call(task, "run");

			// Custom warmup
			// We don't run `optimizeFunction` because our function is never optimized, instead we just warmup webpack
			const samples = [];

			while (samples.length < bench.opts.iterations - 1) {
				samples.push(await iterationAsync(task));
			}

			await fnOpts?.beforeEach?.call(task, "run");
			await mongoMeasurement.start(uri);
			global.gc?.();
			await wrapWithInstrumentHooksAsync(wrapFunctionWithFrame(fn, true), uri);
			await mongoMeasurement.stop(uri);
			await fnOpts?.afterEach?.call(task, "run");
			await fnOpts?.afterAll?.call(task, "run");

			// Custom teardown
			await bench.opts.teardown?.(task, "run");
			task.processRunResult({ latencySamples: samples });

			logTaskCompletion(uri, taskCompletionMessage());
		};

		const iteration = (task) => {
			try {
				task.fnOpts.beforeEach?.call(task, "run");
				const start = bench.opts.now();
				task.fn();
				const end = bench.opts.now() - start || 0;
				task.fnOpts.afterEach?.call(task, "run");
				return [start, end];
			} catch (err) {
				if (bench.opts.throws) {
					throw err;
				}
			}
		};

		const wrapWithInstrumentHooks = (fn, uri) => {
			InstrumentHooks.startBenchmark();
			const result = fn();
			InstrumentHooks.stopBenchmark();
			InstrumentHooks.setExecutedBenchmark(process.pid, uri);
			return result;
		};

		const runTaskSync = (task, uri) => {
			const { fnOpts, fn } = task;

			// Custom setup
			bench.opts.setup?.(task, "run");

			fnOpts?.beforeAll?.call(task, "run");

			// Custom warmup
			const samples = [];

			while (samples.length < bench.opts.iterations - 1) {
				samples.push(iteration(task));
			}

			fnOpts?.beforeEach?.call(task, "run");

			wrapWithInstrumentHooks(wrapFunctionWithFrame(fn, false), uri);

			fnOpts?.afterEach?.call(task, "run");
			fnOpts?.afterAll?.call(task, "run");

			// Custom teardown
			bench.opts.teardown?.(task, "run");

			logTaskCompletion(uri, taskCompletionMessage());
		};

		// Concise bodies so the tasks returned by finalizeBenchRun propagate.
		const finalizeAsyncRun = () => finalizeBenchRun();
		const finalizeSyncRun = () => finalizeBenchRun();

		bench.run = async () => {
			setupBenchRun();

			for (const task of bench.tasks) {
				const uri = getTaskUri(task.bench, task.name, rootCallingFile);
				await runTaskAsync(task, uri);
			}

			return finalizeAsyncRun();
		};

		bench.runSync = () => {
			setupBenchRun();

			for (const task of bench.tasks) {
				const uri = getTaskUri(task.bench, task.name, rootCallingFile);
				runTaskSync(task, uri);
			}

			return finalizeSyncRun();
		};
	} else if (codspeedRunnerMode === "walltime") {
		// We don't need it: the walltime runner uses plain tinybench sampling
	}

	return bench;
};

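For orientation, a hedged usage sketch of the wrapper above; `runWebpackBuild` is a hypothetical helper standing in for one webpack compilation and is not part of this patch.

// Hypothetical usage sketch (ESM): wrap a tinybench suite so CodSpeed
// instrumentation, when available, replaces plain wall-clock sampling.
import { Bench } from "tinybench";

const bench = new Bench({ warmupIterations: 2, iterations: 8 });
bench.add("build fixture", async () => {
	await runWebpackBuild(); // hypothetical helper performing one compilation
});

await (await withCodSpeed(bench)).run();
console.table(bench.table());
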
@ -495,7 +614,6 @@ const bench = await withCodSpeed(
		warmupIterations: 2,
		iterations: 8,
		setup(task, mode) {
			global.gc();
			console.log(`Setup (${mode} mode): ${task.name}`);
		},
		teardown(task, mode) {

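Note that `global.gc()` in the `setup` hook above only exists when Node.js is started with `--expose-gc`; a guarded variant, sketched here under that assumption, degrades gracefully in local runs.

// global.gc is only defined under `node --expose-gc`; guard the call so the
// setup hook also works in an uninstrumented local run (defensive sketch).
if (typeof global.gc === "function") {
	global.gc(); // collect up front so GC pauses don't land inside a sample
}
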
@ -6795,13 +6795,27 @@ Object {
  "output-hash-digest": Object {
    "configs": Array [
      Object {
        "description": "Digest types used for the hash.",
        "multiple": false,
        "path": "output.hashDigest",
        "type": "enum",
        "values": Array [
          "base64",
          "base64url",
          "hex",
          "binary",
          "utf8",
          "utf-8",
          "utf16le",
          "utf-16le",
          "latin1",
          "ascii",
          "ucs2",
          "ucs-2",
        ],
      },
    ],
    "description": "Digest types used for the hash.",
    "multiple": false,
    "simpleType": "string",
  },

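The CLI snapshot above mirrors the schema change: `output.hashDigest` is now an enum rather than a free-form string. A minimal config sketch (values chosen for illustration):

/** @type {import("webpack").Configuration} */
module.exports = {
	output: {
		hashFunction: "xxhash64",
		hashDigest: "base64url", // previously typed as a plain string
		hashDigestLength: 8
	}
};
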
@ -1,12 +1,17 @@
import * as style from "./style.css";
import file from "./file.text" with { type: "bytes" };

it("should work", async () => {
	const decoder = new TextDecoder('utf-8');
	const text = decoder.decode(file);

	expect(text).toBe("a Ā 𐀀 文 🦄 Text");

	const dyn = (await import("./file.text?other", { with: { type: "bytes" } })).default;
	const dynText = decoder.decode(dyn);

	expect(dynText).toBe("a Ā 𐀀 文 🦄 Text");

	if (typeof getComputedStyle === "function") {
		const style = getComputedStyle(document.body);
		expect(style.getPropertyValue("--my-url")).toBe(" url(data:application/octet-stream;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCA2MDAgNjAwIj48dGl0bGU+aWNvbi1zcXVhcmUtc21hbGw8L3RpdGxlPjxwYXRoIGZpbGw9IiNGRkYiIGQ9Ik0zMDAgLjFMNTY1IDE1MHYyOTkuOUwzMDAgNTk5LjggMzUgNDQ5LjlWMTUweiIvPjxwYXRoIGZpbGw9IiM4RUQ2RkIiIGQ9Ik01MTcuNyA0MzkuNUwzMDguOCA1NTcuOHYtOTJMNDM5IDM5NC4xbDc4LjcgNDUuNHptMTQuMy0xMi45VjE3OS40bC03Ni40IDQ0LjF2MTU5bDc2LjQgNDQuMXpNODEuNSA0MzkuNWwyMDguOSAxMTguMnYtOTJsLTEzMC4yLTcxLjYtNzguNyA0NS40em0tMTQuMy0xMi45VjE3OS40bDc2LjQgNDQuMXYxNTlsLTc2LjQgNDQuMXptOC45LTI2My4yTDI5MC40IDQyLjJ2ODlsLTEzNy4zIDc1LjUtMS4xLjYtNzUuOS00My45em00NDYuOSAwTDMwOC44IDQyLjJ2ODlMNDQ2IDIwNi44bDEuMS42IDc1LjktNDR6Ii8+PHBhdGggZmlsbD0iIzFDNzhDMCIgZD0iTTI5MC40IDQ0NC44TDE2MiAzNzQuMVYyMzQuMmwxMjguNCA3NC4xdjEzNi41em0xOC40IDBsMTI4LjQtNzAuNnYtMTQwbC0xMjguNCA3NC4xdjEzNi41ek0yOTkuNiAzMDN6bS0xMjktODVsMTI5LTcwLjlMNDI4LjUgMjE4bC0xMjguOSA3NC40LTEyOS03NC40eiIvPjwvc3ZnPg==)");

@ -14,5 +14,15 @@ module.exports = {
		scope.window.document.head.appendChild(link);

		run++;
	},
	findBundle(i) {
		if (i === 2) {
			return ["bundle2.mjs"];
		}

		return [
			`file_text_other.bundle${i}.${i === 2 ? "mjs" : "js"}`,
			`bundle${i}.${i === 2 ? "mjs" : "js"}`
		];
	}
};

@ -0,0 +1,3 @@
{
	"foo": "bar"
}

@ -0,0 +1,12 @@
it("should override request", async () => {
	const decoder = new TextDecoder('utf-8');
	const mod = "file.ext";
	const loadedMod = (await import(`./files/${mod}`, { with: { type: "bytes" } })).default;
	const text = decoder.decode(loadedMod);

	expect(JSON.parse(text)).toEqual({ foo: "bar" });

	const otherLoadedMod = (await import(`./files/${mod}`, { with: { type: "json" } })).default;

	expect(otherLoadedMod.foo).toBe("bar");
});

@ -0,0 +1,5 @@
"use strict";

const supportsTextDecoder = require("../../../helpers/supportsTextDecoder");

module.exports = () => supportsTextDecoder();

@ -0,0 +1,4 @@
"use strict";

/** @type {import("../../../../").Configuration} */
module.exports = {};

@ -0,0 +1,4 @@
{
	"foo": "bar",
	"nested": { "foo": "bar" }
}

@ -0,0 +1,18 @@
import defer * as mod1 from "./file.ext" with { type: "bytes" };
import defer * as mod2 from "./file.ext" with { type: "json" };
import * as mod3 from "./file.ext" with { type: "bytes" };
import * as mod4 from "./file.ext" with { type: "json" };

it("should work with defer and import attributes", () => {
	const decoder = new TextDecoder('utf-8');
	const mod1Decoded = JSON.parse(decoder.decode(mod1.default));
	expect(mod1Decoded.foo).toBe("bar");
	expect(mod1Decoded.nested.foo).toBe("bar");
	expect(mod2.default.foo).toBe("bar");
	expect(mod2.default.nested.foo).toBe("bar");
	const mod3Decoded = JSON.parse(decoder.decode(mod3.default));
	expect(mod3Decoded.foo).toBe("bar");
	expect(mod3Decoded.nested.foo).toBe("bar");
	expect(mod4.default.foo).toBe("bar");
	expect(mod4.default.nested.foo).toBe("bar");
});

@ -0,0 +1,5 @@
"use strict";

const supportsTextDecoder = require("../../../helpers/supportsTextDecoder");

module.exports = () => supportsTextDecoder();

@ -0,0 +1,9 @@
"use strict";

/** @type {import("../../../../").Configuration} */
module.exports = {
	target: [`async-node${process.versions.node.split(".").map(Number)[0]}`],
	experiments: {
		deferImport: true
	}
};

@ -0,0 +1,12 @@
const a = 10;
const b = 20;

class MyClass {
	getValue() {
		return "my-class";
	}
}

module.exports = MyClass;
module.exports.a = a;
module.exports.b = b;

@ -0,0 +1,11 @@
const a = 10;
const b = 20;

class MyClass {
	getValue() {
		return "my-class";
	}
}

export default MyClass;
export { a, b };

@ -914,5 +914,25 @@ module.exports = (env, { testPath }) => [
		experiments: {
			outputModule: true
		}
	},
	{
		entry: "./esm.js",
		output: {
			uniqueName: "system-esm",
			filename: "system-esm.js",
			library: {
				type: "system"
			}
		}
	},
	{
		entry: "./commonjs.js",
		output: {
			uniqueName: "system-commonjs",
			filename: "system-commonjs.js",
			library: {
				type: "system"
			}
		}
	}
];

@ -0,0 +1,7 @@
import MyClass, { a, b } from "library-commonjs";

it("should get exports from systemjs library (" + NAME + ")", function () {
	expect(new MyClass().getValue()).toBe("my-class");
	expect(a).toBe(10);
	expect(b).toBe(20);
});

@ -0,0 +1,17 @@
"use strict";

const System = require("../../../helpers/fakeSystem");

module.exports = {
	beforeExecute: () => {
		System.init();
	},
	moduleScope(scope) {
		scope.System = System;
		scope.System.setRequire(scope.require);
	},
	afterExecute() {
		delete global.webpackChunk;
		System.execute("(anonym)");
	}
};

@ -0,0 +1,27 @@
"use strict";

const path = require("path");
const webpack = require("../../../../");

/** @type {(env: Env, options: TestOptions) => import("../../../../").Configuration[]} */
module.exports = (env, { testPath }) => [
	{
		entry: "./system-external-commonjs.js",
		output: {
			library: {
				type: "system"
			}
		},
		externals: {
			"library-commonjs": path.resolve(
				testPath,
				"../0-create-library/system-commonjs.js"
			)
		},
		plugins: [
			new webpack.DefinePlugin({
				NAME: JSON.stringify("systemjs with external from commonjs format")
			})
		]
	}
];

@ -0,0 +1,7 @@
import MyClass, { a, b } from "library-esm";

it("should get exports from systemjs library (" + NAME + ")", function () {
	expect(new MyClass().getValue()).toBe("my-class");
	expect(a).toBe(10);
	expect(b).toBe(20);
});

@ -0,0 +1,17 @@
"use strict";

const System = require("../../../helpers/fakeSystem");

module.exports = {
	beforeExecute: () => {
		System.init();
	},
	moduleScope(scope) {
		scope.System = System;
		scope.System.setRequire(scope.require);
	},
	afterExecute() {
		delete global.webpackChunk;
		System.execute("(anonym)");
	}
};

@ -0,0 +1,24 @@
"use strict";

const path = require("path");
const webpack = require("../../../../");

/** @type {(env: Env, options: TestOptions) => import("../../../../").Configuration[]} */
module.exports = (env, { testPath }) => [
	{
		entry: "./system-external-esm.js",
		output: {
			library: {
				type: "system"
			}
		},
		externals: {
			"library-esm": path.resolve(testPath, "../0-create-library/system-esm.js")
		},
		plugins: [
			new webpack.DefinePlugin({
				NAME: JSON.stringify("systemjs with external from ES module format")
			})
		]
	}
];

@ -989,18 +989,18 @@ declare interface Bootstrap {
	allowInlineStartup: boolean;
}
type BufferEncoding =
	| "base64"
	| "base64url"
	| "hex"
	| "binary"
	| "utf8"
	| "utf-8"
	| "utf16le"
	| "utf-16le"
	| "latin1"
	| "ascii"
	| "ucs2"
	| "ucs-2";
type BufferEncodingOption = "buffer" | { encoding: "buffer" };
declare interface BufferEntry {
	map?: null | RawSourceMap;

@ -4518,18 +4518,18 @@ declare class EnableWasmLoadingPlugin {
type EncodingOption =
	| undefined
	| null
	| "base64"
	| "base64url"
	| "hex"
	| "binary"
	| "utf8"
	| "utf-8"
	| "utf16le"
	| "utf-16le"
	| "latin1"
	| "ascii"
	| "ucs2"
	| "ucs-2"
	| ObjectEncodingOptions;
type Entry =
	| string

@ -6242,13 +6242,36 @@ declare class Hash {
	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
	 */
	update(data: string | Buffer): Hash;

	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
	 */
	update(data: string, inputEncoding: HashDigest): Hash;

	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
	 */
	digest(): Buffer;

	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
	 */
	digest(encoding: HashDigest): string;
}
type HashDigest =
	| "base64"
	| "base64url"
	| "hex"
	| "binary"
	| "utf8"
	| "utf-8"
	| "utf16le"
	| "utf-16le"
	| "latin1"
	| "ascii"
	| "ucs2"
	| "ucs-2";
type HashFunction = string | typeof Hash;
declare interface HashLike {
	/**

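A small sketch of the split overloads above, assuming webpack's internal `lib/util/createHash` helper (an implementation detail, not public API):

// With the HashDigest union, digest("hex") is typed as string, while a
// bare digest() call is typed as Buffer.
const createHash = require("webpack/lib/util/createHash");

const hash = createHash("xxhash64");
hash.update("some module source");
const hex = hash.digest("hex"); // string
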
@ -6266,7 +6289,9 @@ declare interface HashableObject {
}
declare class HashedModuleIdsPlugin {
	constructor(options?: HashedModuleIdsPluginOptions);
	options: Required<Omit<HashedModuleIdsPluginOptions, "context">> & {
		context?: string;
	};

	/**
	 * Apply the plugin

@ -6282,7 +6307,19 @@ declare interface HashedModuleIdsPluginOptions {
	/**
	 * The encoding to use when generating the hash, defaults to 'base64'. All encodings from Node.JS' hash.digest are supported.
	 */
	hashDigest?:
		| "base64"
		| "base64url"
		| "hex"
		| "binary"
		| "utf8"
		| "utf-8"
		| "utf16le"
		| "utf-16le"
		| "latin1"
		| "ascii"
		| "ucs2"
		| "ucs-2";

	/**
	 * The prefix length of the hash digest to use, defaults to 4.

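A minimal plugin sketch using the widened union; "base64url" would have been rejected by the old "hex" | "latin1" | "base64" type:

/** A config sketch; plugin options chosen for illustration only. */
const webpack = require("webpack");

/** @type {import("webpack").Configuration} */
module.exports = {
	plugins: [
		new webpack.ids.HashedModuleIdsPlugin({
			hashFunction: "sha256",
			hashDigest: "base64url", // now accepted by the type
			hashDigestLength: 8
		})
	]
};
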
@ -6624,18 +6661,18 @@ declare interface IntermediateFileSystemExtras {
	createWriteStream: (
		pathLike: PathLikeFs,
		result?:
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| WriteStreamOptions
	) => NodeJS.WritableStream;
	open: Open;

@ -8195,8 +8232,6 @@ declare class JavascriptParser extends ParserClass {
 * Parser options for javascript modules.
 */
declare interface JavascriptParserOptions {
	/**
	 * Set the value of `require.amd` and `define.amd`. Or disable AMD support.
	 */

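With the index signature removed, unknown keys under `module.parser.javascript` become type errors in TS-checked configs; a sketch (option chosen for illustration):

/** @type {import("webpack").Configuration} */
module.exports = {
	module: {
		parser: {
			javascript: {
				requireJs: false
				// anyTypoedOption: true // would no longer type-check
			}
		}
	}
};
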
@ -11706,7 +11741,7 @@ declare interface NormalModuleLoaderContext<OptionsType> {
	mode: "none" | "development" | "production";
	webpack?: boolean;
	hashFunction: HashFunction;
	hashDigest: HashDigest;
	hashDigestLength: number;
	hashSalt?: string;
	_module?: NormalModule;

@ -11792,18 +11827,18 @@ declare interface ObjectDeserializerContext {
declare interface ObjectEncodingOptions {
	encoding?:
		| null
		| "base64"
		| "base64url"
		| "hex"
		| "binary"
		| "utf8"
		| "utf-8"
		| "utf16le"
		| "utf-16le"
		| "latin1"
		| "ascii"
		| "ucs2"
		| "ucs-2";
}
declare interface ObjectSerializer {
	serialize: (value: any, context: ObjectSerializerContext) => void;

@ -12707,9 +12742,21 @@ declare interface Output {
	globalObject?: string;

	/**
	 * Digest types used for the hash.
	 */
	hashDigest?:
		| "base64"
		| "base64url"
		| "hex"
		| "binary"
		| "utf8"
		| "utf-8"
		| "utf16le"
		| "utf-16le"
		| "latin1"
		| "ascii"
		| "ucs2"
		| "ucs-2";

	/**
	 * Number of chars which are used for the hash.

@ -12876,18 +12923,18 @@ declare interface OutputFileSystem {
	createReadStream?: (
		path: PathLikeFs,
		options?:
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| ReadStreamOptions
	) => NodeJS.ReadableStream;
	join?: (path1: string, path2: string) => string;

@ -13022,9 +13069,21 @@ declare interface OutputNormalized {
	globalObject?: string;

	/**
	 * Digest types used for the hash.
	 */
	hashDigest?:
		| "base64"
		| "base64url"
		| "hex"
		| "binary"
		| "utf8"
		| "utf-8"
		| "utf16le"
		| "utf-16le"
		| "latin1"
		| "ascii"
		| "ucs2"
		| "ucs-2";

	/**
	 * Number of chars which are used for the hash.

@ -13200,7 +13259,21 @@ type OutputNormalizedWithDefaults = OutputNormalized & {
	path: string;
	pathinfo: NonNullable<undefined | boolean | "verbose">;
	hashFunction: NonNullable<undefined | string | typeof Hash>;
	hashDigest: NonNullable<
		| undefined
		| "base64"
		| "base64url"
		| "hex"
		| "binary"
		| "utf8"
		| "utf-8"
		| "utf16le"
		| "utf-16le"
		| "latin1"
		| "ascii"
		| "ucs2"
		| "ucs-2"
	>;
	hashDigestLength: number;
	chunkLoadTimeout: number;
	chunkLoading: NonNullable<undefined | string | false>;

@ -13892,19 +13965,19 @@ declare interface ReadFileFs {
	(
		path: PathOrFileDescriptorFs,
		options:
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| ({ encoding: BufferEncoding; flag?: string } & Abortable),
		callback: (err: null | NodeJS.ErrnoException, result?: string) => void
	): void;
	(

@ -13912,18 +13985,18 @@ declare interface ReadFileFs {
		options:
			| undefined
			| null
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| (ObjectEncodingOptions & { flag?: string } & Abortable),
		callback: (
			err: null | NodeJS.ErrnoException,

@ -13943,36 +14016,36 @@ declare interface ReadFileSync {
	(
		path: PathOrFileDescriptorFs,
		options:
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| { encoding: BufferEncoding; flag?: string }
	): string;
	(
		path: PathOrFileDescriptorFs,
		options?:
			| null
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| (ObjectEncodingOptions & { flag?: string })
	): string | Buffer;
}

@ -13988,18 +14061,18 @@ declare interface ReadFileTypes {
	(
		path: PathOrFileDescriptorTypes,
		options:
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| ({ encoding: BufferEncoding; flag?: string } & Abortable),
		callback: (err: null | NodeJS.ErrnoException, result?: string) => void
	): void;

@ -14008,18 +14081,18 @@ declare interface ReadFileTypes {
		options:
			| undefined
			| null
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| (ObjectEncodingOptions & { flag?: string } & Abortable),
		callback: (
			err: null | NodeJS.ErrnoException,

@ -14041,33 +14114,33 @@ declare interface ReaddirFs {
		options:
			| undefined
			| null
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| {
					encoding:
						| null
						| "base64"
						| "base64url"
						| "hex"
						| "binary"
						| "utf8"
						| "utf-8"
						| "utf16le"
						| "utf-16le"
						| "latin1"
						| "ascii"
						| "ucs2"
						| "ucs-2";
					withFileTypes?: false;
					recursive?: boolean;
			  },

@ -14085,18 +14158,18 @@ declare interface ReaddirFs {
		options:
			| undefined
			| null
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| (ObjectEncodingOptions & {
					withFileTypes?: false;
					recursive?: boolean;

@ -14135,33 +14208,33 @@ declare interface ReaddirSync {
		path: PathLikeFs,
		options?:
			| null
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| {
					encoding:
						| null
						| "base64"
						| "base64url"
						| "hex"
						| "binary"
						| "utf8"
						| "utf-8"
						| "utf16le"
						| "utf-16le"
						| "latin1"
						| "ascii"
						| "ucs2"
						| "ucs-2";
					withFileTypes?: false;
					recursive?: boolean;
			  }

@ -14176,18 +14249,18 @@ declare interface ReaddirSync {
		path: PathLikeFs,
		options?:
			| null
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| (ObjectEncodingOptions & { withFileTypes?: false; recursive?: boolean })
	): string[] | Buffer[];
	(

@ -14208,33 +14281,33 @@ declare interface ReaddirTypes {
		options:
			| undefined
			| null
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| {
					encoding:
						| null
						| "base64"
						| "base64url"
						| "hex"
						| "binary"
						| "utf8"
						| "utf-8"
						| "utf16le"
						| "utf-16le"
						| "latin1"
						| "ascii"
						| "ucs2"
						| "ucs-2";
					withFileTypes?: false;
					recursive?: boolean;
			  },

@ -14252,18 +14325,18 @@ declare interface ReaddirTypes {
		options:
			| undefined
			| null
			| "base64"
			| "base64url"
			| "hex"
			| "binary"
			| "utf8"
			| "utf-8"
			| "utf16le"
			| "utf-16le"
			| "latin1"
			| "ascii"
			| "ucs2"
			| "ucs-2"
			| (ObjectEncodingOptions & {
					withFileTypes?: false;
					recursive?: boolean;

@ -14365,12 +14438,12 @@ declare interface RealContentHashPluginOptions {
	/**
	 * the hash function to use
	 */
	hashFunction: HashFunction;

	/**
	 * the hash digest to use
	 */
	hashDigest: HashDigest;
}
declare interface RealDependencyLocation {
	start: SourcePosition;

@ -17760,18 +17833,18 @@ declare interface StreamChunksOptions {
declare interface StreamOptions {
	flags?: string;
	encoding?:
		| "base64"
		| "base64url"
		| "hex"
		| "binary"
		| "utf8"
		| "utf-8"
		| "utf16le"
		| "utf-16le"
		| "latin1"
		| "ascii"
		| "ucs2"
		| "ucs-2";
	fd?: any;
	mode?: number;
	autoClose?: boolean;

@ -18560,18 +18633,18 @@ declare interface WriteFile {
}
type WriteFileOptions =
	| null
	| "base64"
	| "base64url"
	| "hex"
	| "binary"
	| "utf8"
	| "utf-8"
	| "utf16le"
	| "utf-16le"
	| "latin1"
	| "ascii"
	| "ucs2"
	| "ucs-2"
	| (ObjectEncodingOptions &
			Abortable & { mode?: string | number; flag?: string; flush?: boolean });
declare interface WriteOnlySet<T> {

yarn.lock
@ -332,14 +332,14 @@
  resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
  integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==

"@codspeed/core@^5.0.1":
  version "5.0.1"
  resolved "https://registry.yarnpkg.com/@codspeed/core/-/core-5.0.1.tgz#6145c898a86a6d56a169611c3e9657a8b97c7642"
  integrity sha512-4g5ZyFAin8QywK4+0FK1uXG3GLRPu0oc3xbP+7OUhhFxbwpzFuaJtKmnTofMqLy9/pHH6Bl/7H0/DTVH3cpFkA==
  dependencies:
    axios "^1.4.0"
    find-up "^6.3.0"
    form-data "^4.0.4"
    node-gyp-build "^4.6.0"

"@cspell/cspell-bundled-dicts@9.1.3":

@ -3878,7 +3878,7 @@ fork-ts-checker-webpack-plugin@^9.0.2:
  semver "^7.3.5"
  tapable "^2.2.1"

form-data@^4.0.4:
  version "4.0.4"
  resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.4.tgz#784cdcce0669a9d68e94d11ac4eea98088edd2c4"
  integrity sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==