mirror of https://github.com/webpack/webpack.git
feat: added `base64url`, `base62`, `base58`, `base52`, `base49`, `base36`, `base32` and `base26` digests
This commit is contained in:
parent
5b2c4ba051
commit
a6c2f5006a
|
|
@ -60,6 +60,8 @@ const getLocalIdent = (local, module, chunkGraph, runtimeTemplate) => {
|
|||
hash.update(hashSalt);
|
||||
}
|
||||
|
||||
// console.log("relativeResourcePath", relativeResourcePath);
|
||||
|
||||
hash.update(relativeResourcePath);
|
||||
|
||||
if (!/\[local\]/.test(localIdentName)) {
|
||||
|
|
|
|||
|
|
@ -5,212 +5,21 @@
|
|||
|
||||
"use strict";
|
||||
|
||||
const Hash = require("./Hash");
|
||||
|
||||
/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
|
||||
/** @typedef {import("./Hash")} Hash */
|
||||
/** @typedef {import("../../declarations/WebpackOptions").HashFunction} HashFunction */
|
||||
|
||||
const BULK_SIZE = 3;
|
||||
|
||||
// We are using an object instead of a Map as this will stay static during the runtime
|
||||
// so access to it can be optimized by v8
|
||||
/** @type {{[key: string]: Map<string, string>}} */
|
||||
const digestCaches = {};
|
||||
|
||||
/** @typedef {() => Hash} HashFactory */
|
||||
|
||||
class BulkUpdateDecorator extends Hash {
|
||||
/**
|
||||
* @param {Hash | HashFactory} hashOrFactory function to create a hash
|
||||
* @param {string=} hashKey key for caching
|
||||
*/
|
||||
constructor(hashOrFactory, hashKey) {
|
||||
super();
|
||||
this.hashKey = hashKey;
|
||||
if (typeof hashOrFactory === "function") {
|
||||
this.hashFactory = hashOrFactory;
|
||||
this.hash = undefined;
|
||||
} else {
|
||||
this.hashFactory = undefined;
|
||||
this.hash = hashOrFactory;
|
||||
}
|
||||
this.buffer = "";
|
||||
}
|
||||
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @overload
|
||||
* @param {string | Buffer} data data
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @overload
|
||||
* @param {string} data data
|
||||
* @param {Encoding} inputEncoding data encoding
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @param {string | Buffer} data data
|
||||
* @param {Encoding=} inputEncoding data encoding
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
update(data, inputEncoding) {
|
||||
if (
|
||||
inputEncoding !== undefined ||
|
||||
typeof data !== "string" ||
|
||||
data.length > BULK_SIZE
|
||||
) {
|
||||
if (this.hash === undefined) {
|
||||
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
|
||||
}
|
||||
if (this.buffer.length > 0) {
|
||||
this.hash.update(this.buffer);
|
||||
this.buffer = "";
|
||||
}
|
||||
if (typeof data === "string" && inputEncoding) {
|
||||
this.hash.update(data, inputEncoding);
|
||||
} else {
|
||||
this.hash.update(data);
|
||||
}
|
||||
} else {
|
||||
this.buffer += data;
|
||||
if (this.buffer.length > BULK_SIZE) {
|
||||
if (this.hash === undefined) {
|
||||
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
|
||||
}
|
||||
this.hash.update(this.buffer);
|
||||
this.buffer = "";
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @overload
|
||||
* @returns {Buffer} digest
|
||||
*/
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @overload
|
||||
* @param {Encoding} encoding encoding of the return value
|
||||
* @returns {string} digest
|
||||
*/
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @param {Encoding=} encoding encoding of the return value
|
||||
* @returns {string | Buffer} digest
|
||||
*/
|
||||
digest(encoding) {
|
||||
let digestCache;
|
||||
const buffer = this.buffer;
|
||||
if (this.hash === undefined) {
|
||||
// short data for hash, we can use caching
|
||||
const cacheKey = `${this.hashKey}-${encoding}`;
|
||||
digestCache = digestCaches[cacheKey];
|
||||
if (digestCache === undefined) {
|
||||
digestCache = digestCaches[cacheKey] = new Map();
|
||||
}
|
||||
const cacheEntry = digestCache.get(buffer);
|
||||
if (cacheEntry !== undefined) return cacheEntry;
|
||||
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
|
||||
}
|
||||
if (buffer.length > 0) {
|
||||
this.hash.update(buffer);
|
||||
}
|
||||
if (!encoding) {
|
||||
const result = this.hash.digest();
|
||||
if (digestCache !== undefined) {
|
||||
digestCache.set(buffer, result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
const digestResult = this.hash.digest(encoding);
|
||||
// Compatibility with the old hash library
|
||||
const result =
|
||||
typeof digestResult === "string"
|
||||
? digestResult
|
||||
: /** @type {NodeJS.TypedArray} */ (digestResult).toString();
|
||||
if (digestCache !== undefined) {
|
||||
digestCache.set(buffer, result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/* istanbul ignore next */
|
||||
class DebugHash extends Hash {
|
||||
constructor() {
|
||||
super();
|
||||
this.string = "";
|
||||
}
|
||||
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @overload
|
||||
* @param {string | Buffer} data data
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @overload
|
||||
* @param {string} data data
|
||||
* @param {Encoding} inputEncoding data encoding
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @param {string | Buffer} data data
|
||||
* @param {Encoding=} inputEncoding data encoding
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
update(data, inputEncoding) {
|
||||
if (typeof data !== "string") data = data.toString("utf8");
|
||||
const prefix = Buffer.from("@webpack-debug-digest@").toString("hex");
|
||||
if (data.startsWith(prefix)) {
|
||||
data = Buffer.from(data.slice(prefix.length), "hex").toString();
|
||||
}
|
||||
this.string += `[${data}](${
|
||||
/** @type {string} */
|
||||
(
|
||||
// eslint-disable-next-line unicorn/error-message
|
||||
new Error().stack
|
||||
).split("\n", 3)[2]
|
||||
})\n`;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @overload
|
||||
* @returns {Buffer} digest
|
||||
*/
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @overload
|
||||
* @param {Encoding} encoding encoding of the return value
|
||||
* @returns {string} digest
|
||||
*/
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @param {Encoding=} encoding encoding of the return value
|
||||
* @returns {string | Buffer} digest
|
||||
*/
|
||||
digest(encoding) {
|
||||
return Buffer.from(`@webpack-debug-digest@${this.string}`).toString("hex");
|
||||
}
|
||||
}
|
||||
|
||||
/** @type {typeof import("crypto") | undefined} */
|
||||
let crypto;
|
||||
/** @type {typeof import("./hash/xxhash64") | undefined} */
|
||||
let createXXHash64;
|
||||
/** @type {typeof import("./hash/md4") | undefined} */
|
||||
let createMd4;
|
||||
/** @type {typeof import("./hash/DebugHash") | undefined} */
|
||||
let DebugHash;
|
||||
/** @type {typeof import("./hash/BatchedHash") | undefined} */
|
||||
let BatchedHash;
|
||||
/** @type {typeof import("./hash/BulkUpdateHash") | undefined} */
|
||||
let BulkUpdateHash;
|
||||
|
||||
/**
|
||||
* Creates a hash by name or function
|
||||
|
|
@ -219,12 +28,18 @@ let BatchedHash;
|
|||
*/
|
||||
module.exports = (algorithm) => {
|
||||
if (typeof algorithm === "function") {
|
||||
if (BulkUpdateHash === undefined) {
|
||||
BulkUpdateHash = require("./hash/BulkUpdateHash");
|
||||
}
|
||||
// eslint-disable-next-line new-cap
|
||||
return new BulkUpdateDecorator(() => new algorithm());
|
||||
return new BulkUpdateHash(() => new algorithm());
|
||||
}
|
||||
switch (algorithm) {
|
||||
// TODO add non-cryptographic algorithm here
|
||||
case "debug":
|
||||
if (DebugHash === undefined) {
|
||||
DebugHash = require("./hash/DebugHash");
|
||||
}
|
||||
return new DebugHash();
|
||||
case "xxhash64":
|
||||
if (createXXHash64 === undefined) {
|
||||
|
|
@ -248,7 +63,10 @@ module.exports = (algorithm) => {
|
|||
)(createMd4());
|
||||
case "native-md4":
|
||||
if (crypto === undefined) crypto = require("crypto");
|
||||
return new BulkUpdateDecorator(
|
||||
if (BulkUpdateHash === undefined) {
|
||||
BulkUpdateHash = require("./hash/BulkUpdateHash");
|
||||
}
|
||||
return new BulkUpdateHash(
|
||||
() =>
|
||||
/** @type {Hash} */ (
|
||||
/** @type {typeof import("crypto")} */
|
||||
|
|
@ -258,7 +76,10 @@ module.exports = (algorithm) => {
|
|||
);
|
||||
default:
|
||||
if (crypto === undefined) crypto = require("crypto");
|
||||
return new BulkUpdateDecorator(
|
||||
if (BulkUpdateHash === undefined) {
|
||||
BulkUpdateHash = require("./hash/BulkUpdateHash");
|
||||
}
|
||||
return new BulkUpdateHash(
|
||||
() =>
|
||||
/** @type {Hash} */ (
|
||||
/** @type {typeof import("crypto")} */
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@
|
|||
"use strict";
|
||||
|
||||
const Hash = require("../Hash");
|
||||
const { digest, update } = require("./hash-digest");
|
||||
const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;
|
||||
|
||||
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
|
||||
|
|
@ -51,9 +52,9 @@ class BatchedHash extends Hash {
|
|||
return this;
|
||||
}
|
||||
if (this.encoding) {
|
||||
this.hash.update(this.string, this.encoding);
|
||||
update(this.hash, this.string, this.encoding);
|
||||
} else {
|
||||
this.hash.update(this.string);
|
||||
update(this.hash, this.string);
|
||||
}
|
||||
this.string = undefined;
|
||||
}
|
||||
|
|
@ -66,12 +67,12 @@ class BatchedHash extends Hash {
|
|||
this.string = data;
|
||||
this.encoding = inputEncoding;
|
||||
} else if (inputEncoding) {
|
||||
this.hash.update(data, inputEncoding);
|
||||
update(this.hash, data, inputEncoding);
|
||||
} else {
|
||||
this.hash.update(data);
|
||||
update(this.hash, data);
|
||||
}
|
||||
} else {
|
||||
this.hash.update(data);
|
||||
update(this.hash, data);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
|
@ -95,15 +96,15 @@ class BatchedHash extends Hash {
|
|||
digest(encoding) {
|
||||
if (this.string !== undefined) {
|
||||
if (this.encoding) {
|
||||
this.hash.update(this.string, this.encoding);
|
||||
update(this.hash, this.string, this.encoding);
|
||||
} else {
|
||||
this.hash.update(this.string);
|
||||
update(this.hash, this.string);
|
||||
}
|
||||
}
|
||||
if (!encoding) {
|
||||
return this.hash.digest();
|
||||
return digest(this.hash);
|
||||
}
|
||||
return this.hash.digest(encoding);
|
||||
return digest(this.hash, encoding);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,138 @@
|
|||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Alexander Akait @alexander-akait
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const Hash = require("../Hash");
|
||||
const { digest, update } = require("./hash-digest");
|
||||
|
||||
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
|
||||
/** @typedef {() => Hash} HashFactory */
|
||||
|
||||
const BULK_SIZE = 3;
|
||||
|
||||
// We are using an object instead of a Map as this will stay static during the runtime
|
||||
// so access to it can be optimized by v8
|
||||
/** @type {{[key: string]: Map<string, string>}} */
|
||||
const digestCaches = {};
|
||||
|
||||
class BulkUpdateHash extends Hash {
|
||||
/**
|
||||
* @param {Hash | HashFactory} hashOrFactory function to create a hash
|
||||
* @param {string=} hashKey key for caching
|
||||
*/
|
||||
constructor(hashOrFactory, hashKey) {
|
||||
super();
|
||||
this.hashKey = hashKey;
|
||||
if (typeof hashOrFactory === "function") {
|
||||
this.hashFactory = hashOrFactory;
|
||||
this.hash = undefined;
|
||||
} else {
|
||||
this.hashFactory = undefined;
|
||||
this.hash = hashOrFactory;
|
||||
}
|
||||
this.buffer = "";
|
||||
}
|
||||
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @overload
|
||||
* @param {string | Buffer} data data
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @overload
|
||||
* @param {string} data data
|
||||
* @param {Encoding} inputEncoding data encoding
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @param {string | Buffer} data data
|
||||
* @param {Encoding=} inputEncoding data encoding
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
update(data, inputEncoding) {
|
||||
if (
|
||||
inputEncoding !== undefined ||
|
||||
typeof data !== "string" ||
|
||||
data.length > BULK_SIZE
|
||||
) {
|
||||
if (this.hash === undefined) {
|
||||
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
|
||||
}
|
||||
if (this.buffer.length > 0) {
|
||||
update(this.hash, this.buffer);
|
||||
this.buffer = "";
|
||||
}
|
||||
if (typeof data === "string" && inputEncoding) {
|
||||
update(this.hash, data, inputEncoding);
|
||||
} else {
|
||||
update(this.hash, data);
|
||||
}
|
||||
} else {
|
||||
this.buffer += data;
|
||||
if (this.buffer.length > BULK_SIZE) {
|
||||
if (this.hash === undefined) {
|
||||
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
|
||||
}
|
||||
update(this.hash, this.buffer);
|
||||
this.buffer = "";
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @overload
|
||||
* @returns {Buffer} digest
|
||||
*/
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @overload
|
||||
* @param {Encoding} encoding encoding of the return value
|
||||
* @returns {string} digest
|
||||
*/
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @param {Encoding=} encoding encoding of the return value
|
||||
* @returns {string | Buffer} digest
|
||||
*/
|
||||
digest(encoding) {
|
||||
let digestCache;
|
||||
const buffer = this.buffer;
|
||||
if (this.hash === undefined) {
|
||||
// short data for hash, we can use caching
|
||||
const cacheKey = `${this.hashKey}-${encoding}`;
|
||||
digestCache = digestCaches[cacheKey];
|
||||
if (digestCache === undefined) {
|
||||
digestCache = digestCaches[cacheKey] = new Map();
|
||||
}
|
||||
const cacheEntry = digestCache.get(buffer);
|
||||
if (cacheEntry !== undefined) return cacheEntry;
|
||||
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
|
||||
}
|
||||
|
||||
if (buffer.length > 0) {
|
||||
update(this.hash, buffer);
|
||||
}
|
||||
if (!encoding) {
|
||||
const result = digest(this.hash, undefined, Boolean(this.hashKey));
|
||||
if (digestCache !== undefined) {
|
||||
digestCache.set(buffer, result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
const result = digest(this.hash, encoding, Boolean(this.hashKey));
|
||||
if (digestCache !== undefined) {
|
||||
digestCache.set(buffer, result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = BulkUpdateHash;
|
||||
|
|
@ -0,0 +1,75 @@
|
|||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Alexander Akait @alexander-akait
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const Hash = require("../Hash");
|
||||
|
||||
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
|
||||
|
||||
/* istanbul ignore next */
|
||||
class DebugHash extends Hash {
|
||||
constructor() {
|
||||
super();
|
||||
this.string = "";
|
||||
}
|
||||
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @overload
|
||||
* @param {string | Buffer} data data
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @overload
|
||||
* @param {string} data data
|
||||
* @param {Encoding} inputEncoding data encoding
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
/**
|
||||
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
|
||||
* @param {string | Buffer} data data
|
||||
* @param {Encoding=} inputEncoding data encoding
|
||||
* @returns {Hash} updated hash
|
||||
*/
|
||||
update(data, inputEncoding) {
|
||||
if (typeof data !== "string") data = data.toString("utf8");
|
||||
const prefix = Buffer.from("@webpack-debug-digest@").toString("hex");
|
||||
if (data.startsWith(prefix)) {
|
||||
data = Buffer.from(data.slice(prefix.length), "hex").toString();
|
||||
}
|
||||
this.string += `[${data}](${
|
||||
/** @type {string} */
|
||||
(
|
||||
// eslint-disable-next-line unicorn/error-message
|
||||
new Error().stack
|
||||
).split("\n", 3)[2]
|
||||
})\n`;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @overload
|
||||
* @returns {Buffer} digest
|
||||
*/
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @overload
|
||||
* @param {Encoding} encoding encoding of the return value
|
||||
* @returns {string} digest
|
||||
*/
|
||||
/**
|
||||
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
|
||||
* @param {Encoding=} encoding encoding of the return value
|
||||
* @returns {string | Buffer} digest
|
||||
*/
|
||||
digest(encoding) {
|
||||
return Buffer.from(`@webpack-debug-digest@${this.string}`).toString("hex");
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = DebugHash;
|
||||
|
|
@ -0,0 +1,216 @@
|
|||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Alexander Akait @alexander-akait
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
/** @typedef {import("../Hash")} Hash */
|
||||
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
|
||||
|
||||
/** @typedef {"26" | "32" | "36" | "49" | "52" | "58" | "62"} Base */
|
||||
|
||||
/* cSpell:disable */
|
||||
|
||||
/** @type {Record<Base, string>} */
|
||||
const ENCODE_TABLE = Object.freeze({
|
||||
26: "abcdefghijklmnopqrstuvwxyz",
|
||||
32: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
|
||||
36: "0123456789abcdefghijklmnopqrstuvwxyz",
|
||||
49: "abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ",
|
||||
52: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ",
|
||||
58: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz",
|
||||
62: "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
|
||||
});
|
||||
|
||||
/* cSpell:enable */
|
||||
|
||||
const ZERO = BigInt("0");
|
||||
const EIGHT = BigInt("8");
|
||||
const FF = BigInt("0xff");
|
||||
|
||||
/**
|
||||
* It encodes octet arrays by doing long divisions on all significant digits in the array, creating a representation of that number in the new base.
|
||||
* Then for every leading zero in the input (not significant as a number) it will encode as a single leader character.
|
||||
* This is the first in the alphabet and will decode as 8 bits. The other characters depend upon the base.
|
||||
* For example, a base58 alphabet packs roughly 5.858 bits per character.
|
||||
* This means the encoded string 000f (using a base16, 0-f alphabet) will actually decode to 4 bytes unlike a canonical hex encoding which uniformly packs 4 bits into each character.
|
||||
* While unusual, this does mean that no padding is required, and it works for bases like 43.
|
||||
* @param {Buffer} buffer buffer
|
||||
* @param {Base} base base
|
||||
* @returns {string} encoded buffer
|
||||
*/
|
||||
const encode = (buffer, base) => {
|
||||
if (buffer.length === 0) return "";
|
||||
const bigIntBase = BigInt(ENCODE_TABLE[base].length);
|
||||
// Convert buffer to BigInt efficiently using bitwise operations
|
||||
let value = ZERO;
|
||||
for (let i = 0; i < buffer.length; i++) {
|
||||
value = (value << EIGHT) | BigInt(buffer[i]);
|
||||
}
|
||||
// Convert to baseX string efficiently using array
|
||||
const digits = [];
|
||||
if (value === ZERO) return ENCODE_TABLE[base][0];
|
||||
while (value > ZERO) {
|
||||
const remainder = Number(value % bigIntBase);
|
||||
digits.push(ENCODE_TABLE[base][remainder]);
|
||||
value /= bigIntBase;
|
||||
}
|
||||
return digits.reverse().join("");
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {string} data string
|
||||
* @param {Base} base base
|
||||
* @returns {Buffer} buffer
|
||||
*/
|
||||
const decode = (data, base) => {
|
||||
if (data.length === 0) return Buffer.from("");
|
||||
const bigIntBase = BigInt(ENCODE_TABLE[base].length);
|
||||
// Convert the baseX string to a BigInt value
|
||||
let value = ZERO;
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
const digit = ENCODE_TABLE[base].indexOf(data[i]);
|
||||
if (digit === -1) {
|
||||
throw new Error(`Invalid character at position ${i}: ${data[i]}`);
|
||||
}
|
||||
value = value * bigIntBase + BigInt(digit);
|
||||
}
|
||||
// If value is 0, return a single-byte buffer with value 0
|
||||
if (value === ZERO) {
|
||||
return Buffer.alloc(1);
|
||||
}
|
||||
// Determine buffer size efficiently by counting bytes
|
||||
let temp = value;
|
||||
let byteLength = 0;
|
||||
while (temp > ZERO) {
|
||||
temp >>= EIGHT;
|
||||
byteLength++;
|
||||
}
|
||||
// Create buffer and fill it from right to left
|
||||
const buffer = Buffer.alloc(byteLength);
|
||||
for (let i = byteLength - 1; i >= 0; i--) {
|
||||
buffer[i] = Number(value & FF);
|
||||
value >>= EIGHT;
|
||||
}
|
||||
return buffer;
|
||||
};
|
||||
|
||||
// Compatibility with the old hash libraries, they can return different structures, so let's stringify them firstly
|
||||
|
||||
/**
|
||||
* @param {string | { toString: (radix: number) => string }} value value
|
||||
* @param {string} encoding encoding
|
||||
* @returns {string} string
|
||||
*/
|
||||
const toString = (value, encoding) =>
|
||||
typeof value === "string"
|
||||
? value
|
||||
: Buffer.from(value.toString(16), "hex").toString(
|
||||
/** @type {NodeJS.BufferEncoding} */
|
||||
(encoding)
|
||||
);
|
||||
|
||||
/**
|
||||
* @param {Buffer | { toString: (radix: number) => string }} value value
|
||||
* @returns {Buffer} buffer
|
||||
*/
|
||||
const toBuffer = (value) =>
|
||||
Buffer.isBuffer(value) ? value : Buffer.from(value.toString(16), "hex");
|
||||
|
||||
let isBase64URLSupported = false;
|
||||
|
||||
try {
|
||||
isBase64URLSupported = Boolean(Buffer.from("", "base64url"));
|
||||
} catch (_err) {
|
||||
// Nothing
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Hash} hash hash
|
||||
* @param {string | Buffer} data data
|
||||
* @param {Encoding=} encoding encoding of the return value
|
||||
* @returns {void}
|
||||
*/
|
||||
const update = (hash, data, encoding) => {
|
||||
if (encoding === "base64url" && !isBase64URLSupported) {
|
||||
const base64String = /** @type {string} */ (data)
|
||||
.replace(/-/g, "+")
|
||||
.replace(/_/g, "/");
|
||||
const buf = Buffer.from(base64String, "base64");
|
||||
hash.update(buf);
|
||||
return;
|
||||
} else if (
|
||||
typeof data === "string" &&
|
||||
encoding &&
|
||||
typeof ENCODE_TABLE[/** @type {Base} */ (encoding.slice(4))] !== "undefined"
|
||||
) {
|
||||
const buf = decode(data, /** @type {Base} */ (encoding.slice(4)));
|
||||
hash.update(buf);
|
||||
return;
|
||||
}
|
||||
|
||||
if (encoding) {
|
||||
hash.update(/** @type {string} */ (data), encoding);
|
||||
} else {
|
||||
hash.update(data);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @overload
|
||||
* @param {Hash} hash hash
|
||||
* @returns {Buffer} digest
|
||||
*/
|
||||
/**
|
||||
* @overload
|
||||
* @param {Hash} hash hash
|
||||
* @param {undefined} encoding encoding of the return value
|
||||
* @param {boolean=} isSafe true when we await right types from digest(), otherwise false
|
||||
* @returns {Buffer} digest
|
||||
*/
|
||||
/**
|
||||
* @overload
|
||||
* @param {Hash} hash hash
|
||||
* @param {Encoding} encoding encoding of the return value
|
||||
* @param {boolean=} isSafe true when we await right types from digest(), otherwise false
|
||||
* @returns {string} digest
|
||||
*/
|
||||
/**
|
||||
* @param {Hash} hash hash
|
||||
* @param {Encoding=} encoding encoding of the return value
|
||||
* @param {boolean=} isSafe true when we await right types from digest(), otherwise false
|
||||
* @returns {string | Buffer} digest
|
||||
*/
|
||||
const digest = (hash, encoding, isSafe) => {
|
||||
if (typeof encoding === "undefined") {
|
||||
return isSafe ? hash.digest() : toBuffer(hash.digest());
|
||||
}
|
||||
|
||||
if (encoding === "base64url" && !isBase64URLSupported) {
|
||||
const digest = isSafe
|
||||
? hash.digest("base64")
|
||||
: toString(hash.digest("base64"), "base64");
|
||||
|
||||
return digest.replace(/\+/g, "-").replace(/\//g, "_").replace(/[=]+$/, "");
|
||||
} else if (
|
||||
typeof ENCODE_TABLE[/** @type {Base} */ (encoding.slice(4))] !== "undefined"
|
||||
) {
|
||||
const buf = isSafe ? hash.digest() : toBuffer(hash.digest());
|
||||
|
||||
return encode(
|
||||
buf,
|
||||
/** @type {Base} */
|
||||
(encoding.slice(4))
|
||||
);
|
||||
}
|
||||
|
||||
return isSafe
|
||||
? hash.digest(encoding)
|
||||
: toString(hash.digest(encoding), encoding);
|
||||
};
|
||||
|
||||
module.exports.decode = decode;
|
||||
module.exports.digest = digest;
|
||||
module.exports.encode = encode;
|
||||
module.exports.update = update;
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
/** @type {import("../../../../").Configuration[]} */
|
||||
module.exports = [
|
||||
{
|
||||
output: {
|
||||
hashFunction: require("xxhashjs").h32
|
||||
}
|
||||
}
|
||||
];
|
||||
|
|
@ -0,0 +1,123 @@
|
|||
"use strict";
|
||||
|
||||
/** @type {import("../../../../").Configuration[]} */
|
||||
module.exports = [
|
||||
// Default hash function and all hash digests
|
||||
{
|
||||
output: {
|
||||
hashDigest: "base64url"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashDigest: "base26"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashDigest: "base32"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashDigest: "base36"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashDigest: "base49"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashDigest: "base52"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashDigest: "base58"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashDigest: "base62"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashDigest: "hex"
|
||||
}
|
||||
},
|
||||
// xxhash64
|
||||
{
|
||||
output: {
|
||||
hashFunction: "xxhash64"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: "xxhash64",
|
||||
hashDigest: "base64url"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: "xxhash64",
|
||||
hashDigest: "base32"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: "xxhash64",
|
||||
hashDigest: "hex"
|
||||
}
|
||||
},
|
||||
// md4
|
||||
{
|
||||
output: {
|
||||
hashFunction: "md4"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: "md4",
|
||||
hashDigest: "base64url"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: "md4",
|
||||
hashDigest: "base32"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: "md4",
|
||||
hashDigest: "hex"
|
||||
}
|
||||
},
|
||||
// sha512
|
||||
{
|
||||
output: {
|
||||
hashFunction: "sha512"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: "sha512",
|
||||
hashDigest: "base64url"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: "sha512",
|
||||
hashDigest: "base32"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: "sha512",
|
||||
hashDigest: "hex"
|
||||
}
|
||||
}
|
||||
];
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1 @@
|
|||
module.exports = module.id;
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
it("should have unique ids", function () {
|
||||
var ids = [];
|
||||
for(var i = 1; i <= 15; i++) {
|
||||
var id = require("./files/file" + i + ".js");
|
||||
expect(ids.indexOf(id)).toBe(-1);
|
||||
ids.push(id);
|
||||
}
|
||||
});
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
"use strict";
|
||||
|
||||
/** @type {import("../../../../").Configuration[]} */
|
||||
module.exports = [
|
||||
{
|
||||
output: {
|
||||
hashFunction: require("xxhashjs").h32,
|
||||
hashDigest: "hex"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: require("xxhashjs").h32,
|
||||
hashDigest: "base64url"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: require("xxhashjs").h32,
|
||||
hashDigest: "base32"
|
||||
}
|
||||
},
|
||||
{
|
||||
output: {
|
||||
hashFunction: require("xxhashjs").h32,
|
||||
hashDigest: "hex"
|
||||
}
|
||||
}
|
||||
];
|
||||
Loading…
Reference in New Issue