fix: types

alexander-akait 2025-03-07 19:02:26 +03:00
parent 8e9ff66838
commit 9abab772ea
7 changed files with 85 additions and 40 deletions

View File

@@ -370,6 +370,8 @@ const { isSourceEqual } = require("./util/source");
/** @typedef {Set<Module>} NotCodeGeneratedModules */
/** @typedef {Record<string, TODO>} Records */
/** @type {AssetInfo} */
const EMPTY_ASSET_INFO = Object.freeze({});
@@ -1039,7 +1041,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @type {Map<string, Module>}
*/
this._modules = new Map();
/** @type {Record<string, TODO> | null} */
/** @type {Records | null} */
this.records = null;
/** @type {string[]} */
this.additionalChunkAssets = [];
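
The Compilation hunks above introduce a named Records typedef and move the annotation on this.records from the inline Record<string, TODO> | null to Records | null, so other files can import the shape by name. Below is a minimal sketch of that pattern in checked JavaScript; it is not part of the commit, and ExampleCompilation is a made-up name.

// Illustrative sketch only, not from the commit; ExampleCompilation is hypothetical.
// @ts-check

/** @typedef {Record<string, any>} Records */

class ExampleCompilation {
	constructor() {
		/** @type {Records | null} */
		this.records = null; // populated later, e.g. when records are read from disk
	}
}

module.exports = ExampleCompilation;

// Another file can reuse the named shape instead of repeating it inline:
//   /** @typedef {import("./ExampleCompilation").Records} Records */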

View File

@@ -50,6 +50,7 @@ const {
/** @typedef {import("./Chunk").ChunkId} ChunkId */
/** @typedef {import("./ChunkGraph").ModuleId} ModuleId */
/** @typedef {import("./Compilation").AssetInfo} AssetInfo */
/** @typedef {import("./Compilation").Records} Records */
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */
/** @typedef {import("./Module")} Module */
@@ -390,7 +391,7 @@ class HotModuleReplacementPlugin {
const nonCodeGeneratedModules = new TupleSet();
compilation.hooks.fullHash.tap(PLUGIN_NAME, hash => {
const chunkGraph = compilation.chunkGraph;
const records = compilation.records;
const records = /** @type {Records} */ (compilation.records);
for (const chunk of compilation.chunks) {
/**
* @param {Module} module module
@@ -484,7 +485,7 @@
},
() => {
const chunkGraph = compilation.chunkGraph;
const records = compilation.records;
const records = /** @type {Records} */ (compilation.records);
if (records.hash === compilation.hash) return;
if (
!records.chunkModuleHashes ||
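
Since compilation.records is declared as Records | null, HotModuleReplacementPlugin now casts it before reading records.hash and records.chunkModuleHashes. The cast uses the JSDoc assertion form /** @type {T} */ (expression), which applies only to the parenthesized expression and asserts a runtime invariant (records exist by the time these hooks run) that the checker cannot prove by itself. A rough sketch with hypothetical names:

// Sketch only; loadRecords and readRecordsHash are hypothetical.
// @ts-check

/** @typedef {Record<string, any>} Records */

/** @returns {Records | null} */
function loadRecords() {
	return { hash: "abc123" };
}

function readRecordsHash() {
	// Without the cast, accessing .hash errors because the value may be null.
	const records = /** @type {Records} */ (loadRecords());
	return records.hash;
}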

View File

@@ -72,7 +72,7 @@ class CssGenerator extends Generator {
const initFragments = [];
/** @type {CssData} */
const cssData = {
esModule: this.esModule,
esModule: /** @type {boolean} */ (this.esModule),
exports: new Map()
};
@@ -261,7 +261,7 @@ class CssGenerator extends Generator {
* @param {UpdateHashContext} updateHashContext context for updating hash
*/
updateHash(hash, { module }) {
hash.update(this.esModule.toString());
hash.update(/** @type {boolean} */ (this.esModule).toString());
}
}

View File

@@ -170,7 +170,8 @@ const importAssertions = Parser =>
}
if (isAssertLegacy) {
nodes[LEGACY_ASSERT_ATTRIBUTES] = true;
/** @type {EXPECTED_ANY} */
(nodes)[LEGACY_ASSERT_ATTRIBUTES] = true;
}
return nodes;
@@ -250,7 +251,7 @@ const getImportAttributes = node => {
result[key] = /** @type {string} */ (attribute.value.value);
}
if (node.attributes[LEGACY_ASSERT_ATTRIBUTES]) {
if (/** @type {EXPECTED_ANY} */ (node.attributes)[LEGACY_ASSERT_ATTRIBUTES]) {
result._isLegacyAssert = true;
}
@@ -1684,12 +1685,14 @@ class JavascriptParser extends Parser {
continue;
}
/** @type {string} */
const value = argExpr.isString()
? argExpr.string
? /** @type {string} */ (argExpr.string)
: String(argExpr.number);
const newString = value + (stringSuffix ? stringSuffix.string : "");
/** @type {string} */
const newString =
value +
(stringSuffix ? /** @type {string} */ (stringSuffix.string) : "");
const newRange = /** @type {Range} */ ([
/** @type {Range} */ (argExpr.range)[0],
/** @type {Range} */ ((stringSuffix || argExpr).range)[1]
@@ -4476,8 +4479,7 @@ class JavascriptParser extends Parser {
terminated: undefined,
definitions: new StackedMap()
};
/** @type {ParserState} */
this.state = state;
this.state = /** @type {ParserState} */ (state);
this.comments = comments;
this.semicolons = semicolons;
this.statementPath = [];
@@ -4494,8 +4496,7 @@
}
this.hooks.finish.call(ast, comments);
this.scope = oldScope;
/** @type {ParserState} */
this.state = oldState;
this.state = /** @type {ParserState} */ (oldState);
this.comments = oldComments;
this.semicolons = oldSemicolons;
this.statementPath = oldStatementPath;
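
Two moves recur in the JavascriptParser hunks: casting at the assignment site (this.state = /** @type {ParserState} */ (state);) instead of re-annotating the property, and casting an object to a loose type before attaching a property its declared type does not list, as in /** @type {EXPECTED_ANY} */ (nodes)[LEGACY_ASSERT_ATTRIBUTES] = true;. The sketch below shows the second move; it is not from the commit, markLegacy and LEGACY_FLAG are invented names, and plain any stands in for webpack's EXPECTED_ANY alias.

// Sketch only; markLegacy and LEGACY_FLAG are hypothetical.
// @ts-check

const LEGACY_FLAG = "legacyAssertSyntax";

/**
 * @param {{ key: string, value: string }[]} nodes parsed attribute nodes
 * @returns {{ key: string, value: string }[]} the same array, tagged
 */
function markLegacy(nodes) {
	// The array type has no property named LEGACY_FLAG, so cast the target
	// to any first, then attach the bookkeeping flag to it.
	/** @type {any} */ (nodes)[LEGACY_FLAG] = true;
	return nodes;
}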

View File

@@ -64,7 +64,11 @@ class JsonParser extends Parser {
buildMeta.defaultObject =
typeof data === "object" ? "redirect-warn" : false;
state.module.addDependency(
new JsonExportsDependency(jsonData, this.options.exportsDepth)
new JsonExportsDependency(
jsonData,
/** @type {number} */
(this.options.exportsDepth)
)
);
return state;
}

View File

@@ -32,6 +32,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
/** @typedef {import("../Module")} Module */
/** @typedef {import("../ModuleGraph")} ModuleGraph */
/** @typedef {import("../TemplatedPathPlugin").TemplatePath} TemplatePath */
/** @typedef {import("../util/createHash").Algorithm} Algorithm */
/** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */
/** @typedef {import("../util/deterministicGrouping").Options<Module>} DeterministicGroupingOptionsForModule */
@@ -118,7 +119,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
* @callback GetCacheGroups
* @param {Module} module
* @param {CacheGroupsContext} context
* @returns {CacheGroupSource[]}
* @returns {CacheGroupSource[] | null}
*/
/**
@@ -143,7 +144,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
* @property {number} maxAsyncRequests
* @property {number} maxInitialRequests
* @property {boolean} hidePathInfo
* @property {TemplatePath} filename
* @property {TemplatePath=} filename
* @property {string} automaticNameDelimiter
* @property {GetCacheGroups} getCacheGroups
* @property {GetName} getName
@@ -181,7 +182,7 @@ const hashFilename = (name, outputOptions) => {
const digest =
/** @type {string} */
(
createHash(outputOptions.hashFunction)
createHash(/** @type {Algorithm} */ (outputOptions.hashFunction))
.update(name)
.digest(outputOptions.hashDigest)
);
@@ -241,7 +242,9 @@ const compareModuleIterables = compareIterables(compareModulesByIdentifier);
*/
const compareEntries = (a, b) => {
// 1. by priority
const diffPriority = a.cacheGroup.priority - b.cacheGroup.priority;
const diffPriority =
/** @type {number} */ (a.cacheGroup.priority) -
/** @type {number} */ (b.cacheGroup.priority);
if (diffPriority) return diffPriority;
// 2. by number of chunks
const diffCount = a.chunks.size - b.chunks.size;
@@ -404,7 +407,7 @@ const totalSize = sizes => {
};
/**
* @param {false|string|Function|undefined} name the chunk name
* @param {false | string | Function | undefined} name the chunk name
* @returns {GetName | undefined} a function to get the name of the chunk
*/
const normalizeName = name => {
@@ -439,7 +442,7 @@ const normalizeChunksFilter = chunks => {
};
/**
* @param {GetCacheGroups | Record<string, false|string|RegExp|OptimizationSplitChunksGetCacheGroups|OptimizationSplitChunksCacheGroup>} cacheGroups the cache group options
* @param {undefined | GetCacheGroups | Record<string, false | string | RegExp | OptimizationSplitChunksGetCacheGroups | OptimizationSplitChunksCacheGroup>} cacheGroups the cache group options
* @param {string[]} defaultSizeTypes the default size types
* @returns {GetCacheGroups} a function to get the cache groups
*/
@@ -529,11 +532,11 @@ const checkTest = (test, module, context) => {
if (typeof test === "boolean") return test;
if (typeof test === "string") {
const name = module.nameForCondition();
return name && name.startsWith(test);
return name ? name.startsWith(test) : false;
}
if (test instanceof RegExp) {
const name = module.nameForCondition();
return name && test.test(name);
return name ? test.test(name) : false;
}
return false;
};
@@ -571,11 +574,11 @@ const checkModuleLayer = (test, module) => {
}
if (typeof test === "string") {
const layer = module.layer;
return test === "" ? !layer : layer && layer.startsWith(test);
return test === "" ? !layer : layer ? layer.startsWith(test) : false;
}
if (test instanceof RegExp) {
const layer = module.layer;
return test.test(layer);
return layer ? test.test(layer) : false;
}
return false;
};
@@ -676,9 +679,11 @@ module.exports = class SplitChunksPlugin {
options.cacheGroups,
defaultSizeTypes
),
getName: options.name ? normalizeName(options.name) : defaultGetName,
automaticNameDelimiter: options.automaticNameDelimiter,
usedExports: options.usedExports,
getName: options.name
? /** @type {GetName} */ (normalizeName(options.name))
: defaultGetName,
automaticNameDelimiter: options.automaticNameDelimiter || "-",
usedExports: options.usedExports || false,
fallbackCacheGroup: {
chunksFilter: normalizeChunksFilter(
fallbackCacheGroup.chunks || options.chunks || "all"
@@ -733,8 +738,9 @@ module.exports = class SplitChunksPlugin {
cacheGroupSource.enforceSizeThreshold,
cacheGroupSource.enforce ? undefined : this.options.enforceSizeThreshold
);
/** @type {CacheGroup} */
const cacheGroup = {
key: cacheGroupSource.key,
key: /** @type {string} */ (cacheGroupSource.key),
priority: cacheGroupSource.priority || 0,
chunksFilter: cacheGroupSource.chunksFilter || this.options.chunksFilter,
minSize,
@@ -853,10 +859,11 @@ module.exports = class SplitChunksPlugin {
result = iterator.next();
if (result.done) return first;
let key =
chunkIndexMap.get(first) | chunkIndexMap.get(result.value);
/** @type {bigint} */ (chunkIndexMap.get(first)) |
/** @type {bigint} */ (chunkIndexMap.get(result.value));
while (!(result = iterator.next()).done) {
const raw = chunkIndexMap.get(result.value);
key = key ^ raw;
key = key ^ /** @type {bigint} */ (raw);
}
return key;
};
@@ -866,7 +873,7 @@ module.exports = class SplitChunksPlugin {
*/
const keyToString = key => {
if (typeof key === "bigint") return key.toString(16);
return chunkIndexMap.get(key).toString(16);
return /** @type {bigint} */ (chunkIndexMap.get(key)).toString(16);
};
const getChunkSetsInGraph = memoize(() => {
@@ -965,6 +972,12 @@ module.exports = class SplitChunksPlugin {
);
// Create a list of possible combinations
/**
* @param {Map<bigint, Set<Chunk>>} chunkSets chunk sets
* @param {Set<Chunk>} singleChunkSets single chunks sets
* @param {Map<number, Set<Chunk>[]>} chunkSetsByCount chunk sets by count
* @returns {(key: bigint | Chunk) => (Set<Chunk> | Chunk)[]}
*/
const createGetCombinations = (
chunkSets,
singleChunkSets,
@@ -981,7 +994,9 @@ module.exports = class SplitChunksPlugin {
combinationsCache.set(key, result);
return result;
}
const chunksSet = chunkSets.get(key);
const chunksSet =
/** @type {Set<Chunk>} */
(chunkSets.get(key));
/** @type {(Set<Chunk> | Chunk)[]} */
const array = [chunksSet];
for (const [count, setArray] of chunkSetsByCount) {
@@ -1012,6 +1027,11 @@ module.exports = class SplitChunksPlugin {
getChunkSetsByCount()
);
});
/**
* @param {bigint | Chunk} key key
* @returns {(Set<Chunk> | Chunk)[]}
*/
const getCombinations = key => getCombinationsFactory()(key);
const getExportsCombinationsFactory = memoize(() => {
@@ -1023,6 +1043,10 @@ module.exports = class SplitChunksPlugin {
getExportsChunkSetsByCount()
);
});
/**
* @param {bigint | Chunk} key key
* @returns {(Set<Chunk> | Chunk)[]}
*/
const getExportsCombinations = key =>
getExportsCombinationsFactory()(key);
@@ -1096,11 +1120,19 @@ module.exports = class SplitChunksPlugin {
module
) => {
// Break if minimum number of chunks is not reached
if (selectedChunks.length < cacheGroup.minChunks) return;
if (
selectedChunks.length <
/** @type {number} */ (cacheGroup.minChunks)
)
return;
// Determine name for split chunk
const name =
/** @type {string} */
(cacheGroup.getName(module, selectedChunks, cacheGroup.key));
(
/** @type {GetName} */
(cacheGroup.getName)(module, selectedChunks, cacheGroup.key)
);
// Check if the name is ok
const existingChunk = compilation.namedChunks.get(name);
if (existingChunk) {
@@ -1255,12 +1287,14 @@ module.exports = class SplitChunksPlugin {
// Break if minimum number of chunks is not reached
const count =
chunkCombination instanceof Chunk ? 1 : chunkCombination.size;
if (count < cacheGroup.minChunks) continue;
if (count < /** @type {number} */ (cacheGroup.minChunks))
continue;
// Select chunks by configuration
const { chunks: selectedChunks, key: selectedChunksKey } =
getSelectedChunks(
chunkCombination,
/** @type {ChunkFilterFunction} */ (cacheGroup.chunksFilter)
/** @type {ChunkFilterFunction} */
(cacheGroup.chunksFilter)
);
addModuleToChunksInfoMap(
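
Besides the casts, several predicates in SplitChunksPlugin are tightened from short-circuit expressions such as name && name.startsWith(test) to explicit ternaries, so they always return a boolean rather than a union like boolean | "" | null. A small sketch of that narrowing; matchesPrefix is a hypothetical helper, not code from the commit.

// Sketch only; matchesPrefix is hypothetical.
// @ts-check

/**
 * @param {string | null} name candidate module name
 * @param {string} prefix required prefix
 * @returns {boolean} true when the name exists and starts with the prefix
 */
function matchesPrefix(name, prefix) {
	// name && name.startsWith(prefix) would widen the result to
	// boolean | "" | null; the ternary keeps the declared boolean honest.
	return name ? name.startsWith(prefix) : false;
}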

types.d.ts (vendored), 9 changed lines
View File

@@ -1993,7 +1993,7 @@ declare class Compilation {
namedChunkGroups: Map<string, ChunkGroup>;
namedChunks: Map<string, Chunk>;
modules: Set<Module>;
records: null | Record<string, any>;
records: null | Records;
additionalChunkAssets: string[];
assets: CompilationAssets;
assetsInfo: Map<string, AssetInfo>;
@@ -12214,6 +12214,9 @@ declare interface RealPathTypes {
callback: (arg0: null | NodeJS.ErrnoException, arg1?: string) => void
): void;
}
declare interface Records {
[index: string]: any;
}
type RecursiveArrayOrRecord<T> =
| { [index: string]: RecursiveArrayOrRecord<T> }
| RecursiveArrayOrRecord<T>[]
@@ -14400,12 +14403,12 @@ declare interface SplitChunksOptions {
maxAsyncRequests: number;
maxInitialRequests: number;
hidePathInfo: boolean;
filename: TemplatePath;
filename?: string | ((arg0: PathData, arg1?: AssetInfo) => string);
automaticNameDelimiter: string;
getCacheGroups: (
module: Module,
context: CacheGroupsContext
) => CacheGroupSource[];
) => null | CacheGroupSource[];
getName: (
module?: Module,
chunks?: Chunk[],
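
The regenerated types.d.ts mirrors the source changes: Compilation.records is now typed null | Records, SplitChunksOptions.filename becomes optional, and getCacheGroups may return null. Below is a rough consumer-side sketch in checked JavaScript showing the null | Records narrowing from a plugin's point of view; RecordsLoggerPlugin is a made-up name and the snippet is not part of the commit.

// Sketch only; RecordsLoggerPlugin is hypothetical.
// @ts-check

class RecordsLoggerPlugin {
	/**
	 * @param {import("webpack").Compiler} compiler the compiler instance
	 */
	apply(compiler) {
		compiler.hooks.thisCompilation.tap("RecordsLoggerPlugin", compilation => {
			// records is typed as null | Records, so narrow it before use.
			const records = compilation.records;
			if (records) {
				console.log(Object.keys(records));
			}
		});
	}
}

module.exports = RecordsLoggerPlugin;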