mirror of https://github.com/webpack/webpack.git
style: improve style of code
parent 7dc4d18848
commit 423e89b2c7
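The commit applies two ESLint autofixes across the repository: prefer-const (use const for bindings that are never reassigned) and object-shorthand (collapse redundant key: key properties). A minimal before/after sketch of both patterns, assembled for illustration rather than copied from any single file below:

// before
let cached = cache.get(old);
return { dependency: dependency, block: block };

// after
const cached = cache.get(old);
return { dependency, block };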
@@ -75,7 +75,7 @@ module.exports = [
 ecmaVersion: 2018,
 globals: {
 ...globals.node,
-...globals.es2015,
+...globals.es2018,
 WebAssembly: true
 }
 },
@@ -102,6 +102,14 @@ module.exports = [
 ],
 "no-inner-declarations": "error",
 "no-loop-func": "off",
+"prefer-const": [
+"error",
+{
+destructuring: "all",
+ignoreReadBeforeAssign: true
+}
+],
+"object-shorthand": "error",
 "n/no-missing-require": ["error", { allowModules: ["webpack"] }],
 "n/no-unsupported-features/node-builtins": [
 "error",
@@ -167,6 +175,10 @@ module.exports = [
 ...globals.browser,
 ...globals.es5
 }
 },
+rules: {
+"prefer-const": "off",
+"object-shorthand": "off"
+}
 },
 {
@@ -174,7 +186,7 @@ module.exports = [
 languageOptions: {
 ecmaVersion: 2020,
 globals: {
-...globals.es2015
+...globals.es2020
 }
 }
 },
@@ -206,7 +218,8 @@ module.exports = [
 {
 allowExperimental: true
 }
-]
+],
+"object-shorthand": "off"
 }
 },
 {
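For reference, the two prefer-const options enabled above narrow when the rule reports; a small illustration based on the documented ESLint behaviour (these snippets are examples, not code from this commit):

// destructuring: "all" - not reported, because `b` is reassigned,
// so not every binding in the pattern could become const
let { a, b } = obj;
b += 1;

// ignoreReadBeforeAssign: true - not reported, because `timer` is read
// inside the callback before its first assignment
let timer;
const cancel = () => clearTimeout(timer);
timer = setTimeout(cancel, 100);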
@@ -111,7 +111,7 @@ class BannerPlugin {
 const comment = compilation.getPath(banner, data);
 
 compilation.updateAsset(file, old => {
-let cached = cache.get(old);
+const cached = cache.get(old);
 if (!cached || cached.comment !== comment) {
 const source = options.footer
 ? new ConcatSource(old, "\n", comment)
@@ -159,7 +159,7 @@ const getModulesSize = modules => {
 * @returns {Record<string, number>} the sizes of the modules
 */
 const getModulesSizes = modules => {
-let sizes = Object.create(null);
+const sizes = Object.create(null);
 for (const module of modules) {
 for (const type of module.getSourceTypes()) {
 sizes[type] = (sizes[type] || 0) + module.size(type);
@@ -916,7 +916,7 @@ class ChunkGraph {
 const cgcB = this._getChunkGraphChunk(chunkB);
 const allModules = new Set(cgcA.modules);
 for (const m of cgcB.modules) allModules.add(m);
-let modulesSize = getModulesSize(allModules);
+const modulesSize = getModulesSize(allModules);
 const chunkOverhead =
 typeof options.chunkOverhead === "number" ? options.chunkOverhead : 10000;
 const entryChunkMultiplicator =
@@ -580,7 +580,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
 if (options.stage) {
 throw new Error(errorMessage("it's using the 'stage' option"));
 }
-return { ...options, stage: stage };
+return { ...options, stage };
 };
 return createFakeHook(
 {
@@ -2066,7 +2066,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
 : originModule
 ? originModule.context
 : this.compiler.context,
-dependencies: dependencies
+dependencies
 },
 (err, result) => {
 if (result) {
@@ -4242,7 +4242,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
 }
 // If there are still remaining references we have cycles and want to create a warning
 if (remaining > 0) {
-let circularRuntimeChunkInfo = [];
+const circularRuntimeChunkInfo = [];
 for (const info of runtimeChunksMap.values()) {
 if (info.remaining !== 0) {
 circularRuntimeChunkInfo.push(info);
@@ -950,9 +950,9 @@ module.exports = webpackAsyncContext;`;
 /** @type {ContextElementDependency} */
 (block.dependencies[0]);
 return {
-dependency: dependency,
+dependency,
 module: /** @type {Module} */ (moduleGraph.getModule(dependency)),
-block: block,
+block,
 userRequest: dependency.userRequest,
 chunks: undefined
 };
@@ -93,8 +93,8 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
 const contextDependencies = new LazySet();
 this.hooks.beforeResolve.callAsync(
 {
-context: context,
-dependencies: dependencies,
+context,
+dependencies,
 layer: data.contextInfo.issuerLayer,
 resolveOptions,
 fileDependencies,
@@ -146,7 +146,7 @@ const stringifyObj = (
 objKeys
 ) => {
 let code;
-let arr = Array.isArray(obj);
+const arr = Array.isArray(obj);
 if (arr) {
 code = `[${
 /** @type {any[]} */ (obj)
@@ -101,7 +101,7 @@ class DllReferencePlugin {
 let content =
 "content" in this.options ? this.options.content : undefined;
 if ("manifest" in this.options) {
-let manifestParameter = this.options.manifest;
+const manifestParameter = this.options.manifest;
 let manifest;
 if (typeof manifestParameter === "string") {
 const data = this._compilationData.get(params);
@@ -130,7 +130,7 @@ class DllReferencePlugin {
 normalModuleFactory
 );
 new DelegatedModuleFactoryPlugin({
-source: source,
+source,
 type: this.options.type,
 scope: this.options.scope,
 context: this.options.context || compiler.options.context,
@@ -144,7 +144,7 @@ class DllReferencePlugin {
 "DllReferencePlugin",
 (compilation, params) => {
 if ("manifest" in this.options) {
-let manifest = this.options.manifest;
+const manifest = this.options.manifest;
 if (typeof manifest === "string") {
 const data = this._compilationData.get(params);
 // If there was an error parsing the manifest file, add the
@@ -662,13 +662,13 @@ class ExportsInfo {
 getUsed(name, runtime) {
 if (Array.isArray(name)) {
 if (name.length === 0) return this.otherExportsInfo.getUsed(runtime);
-let info = this.getReadOnlyExportInfo(name[0]);
+const info = this.getReadOnlyExportInfo(name[0]);
 if (info.exportsInfo && name.length > 1) {
 return info.exportsInfo.getUsed(name.slice(1), runtime);
 }
 return info.getUsed(runtime);
 }
-let info = this.getReadOnlyExportInfo(name);
+const info = this.getReadOnlyExportInfo(name);
 return info.getUsed(runtime);
 }
 
@@ -684,7 +684,7 @@ class ExportsInfo {
 if (!this.isUsed(runtime)) return false;
 return name;
 }
-let info = this.getReadOnlyExportInfo(name[0]);
+const info = this.getReadOnlyExportInfo(name[0]);
 const x = info.getUsedName(name[0], runtime);
 if (x === false) return false;
 const arr = x === name[0] && name.length === 1 ? name : [x];
@@ -702,7 +702,7 @@ class ExportsInfo {
 return arr.concat(name.slice(1));
 }
 } else {
-let info = this.getReadOnlyExportInfo(name);
+const info = this.getReadOnlyExportInfo(name);
 const usedName = info.getUsedName(name, runtime);
 return usedName;
 }
@@ -1260,7 +1260,7 @@ class ExportInfo {
 */
 _findTarget(moduleGraph, validTargetModuleFilter, alreadyVisited) {
 if (!this._target || this._target.size === 0) return undefined;
-let rawTarget = this._getMaxTarget().values().next().value;
+const rawTarget = this._getMaxTarget().values().next().value;
 if (!rawTarget) return undefined;
 /** @type {{ module: Module, export: string[] | undefined }} */
 let target = {
@@ -353,7 +353,7 @@ const getSourceForModuleExternal = (
 runtime,
 runtimeTemplate
 );
-let expression = moduleRemapping || baseAccess;
+const expression = moduleRemapping || baseAccess;
 return {
 expression,
 init: moduleRemapping
@@ -1670,7 +1670,7 @@ class FileSystemInfo {
 const module = require.cache[path];
 if (module && Array.isArray(module.children)) {
 children: for (const child of module.children) {
-let childPath = child.filename;
+const childPath = child.filename;
 if (childPath) {
 push({
 type: RBDT_FILE,
@@ -1682,7 +1682,7 @@ class FileSystemInfo {
 const context = dirname(this.fs, path);
 for (const modulePath of module.paths) {
 if (childPath.startsWith(modulePath)) {
-let subPath = childPath.slice(modulePath.length + 1);
+const subPath = childPath.slice(modulePath.length + 1);
 const packageMatch = /^(@[^\\/]+[\\/])[^\\/]+/.exec(
 subPath
 );
@@ -1755,7 +1755,7 @@ class FileSystemInfo {
 );
 } else if (imp.d > -1) {
 // import()
-let expr = source.substring(imp.s, imp.e).trim();
+const expr = source.substring(imp.s, imp.e).trim();
 dependency = parseString(expr);
 } else {
 // e.g. import.meta
@@ -142,7 +142,7 @@ class HotModuleReplacementPlugin {
 (arg.items).filter(param => param.isString());
 }
 /** @type {string[]} */
-let requests = [];
+const requests = [];
 if (params.length > 0) {
 params.forEach((param, idx) => {
 const request = /** @type {string} */ (param.string);
@@ -212,9 +212,9 @@ ModuleFilenameHelpers.createFilename = (
 if (typeof opts.moduleFilenameTemplate === "function") {
 return opts.moduleFilenameTemplate(
 lazyObject({
-identifier: identifier,
-shortIdentifier: shortIdentifier,
-resource: resource,
+identifier,
+shortIdentifier,
+resource,
 resourcePath: memoize(resourcePath),
 absoluteResourcePath: memoize(absoluteResourcePath),
 loaders: memoize(loaders),
@@ -17,7 +17,7 @@ class ModuleRestoreError extends WebpackError {
 constructor(module, err) {
 let message = "Module restore failed: ";
 /** @type {string | undefined} */
-let details = undefined;
+const details = undefined;
 if (err !== null && typeof err === "object") {
 if (typeof err.stack === "string" && err.stack) {
 const stack = err.stack;
@@ -17,7 +17,7 @@ class ModuleStoreError extends WebpackError {
 constructor(module, err) {
 let message = "Module storing failed: ";
 /** @type {string | undefined} */
-let details = undefined;
+const details = undefined;
 if (err !== null && typeof err === "object") {
 if (typeof err.stack === "string" && err.stack) {
 const stack = err.stack;
@@ -340,8 +340,8 @@ module.exports = class MultiCompiler {
 * @returns {Compiler[]} compilers
 */
 const getReadyCompilers = () => {
-let readyCompilers = [];
-let list = remainingCompilers;
+const readyCompilers = [];
+const list = remainingCompilers;
 remainingCompilers = [];
 for (const c of list) {
 const dependencies = this.dependencies.get(c);
@@ -743,7 +743,7 @@ class NormalModule extends Module {
 _module: this,
 _compilation: compilation,
 _compiler: compilation.compiler,
-fs: fs
+fs
 };
 
 Object.assign(loaderContext, options.loader);
@@ -1241,8 +1241,8 @@ class NormalModule extends Module {
 source,
 current: this,
 module: this,
-compilation: compilation,
-options: options
+compilation,
+options
 });
 } catch (e) {
 handleParseError(/** @type {Error} */ (e));
@@ -652,7 +652,7 @@ class NormalModuleFactory extends ModuleFactory {
 }
 for (const loader of /** @type {LoaderItem[]} */ (preLoaders))
 allLoaders.push(loader);
-let type = /** @type {string} */ (settings.type);
+const type = /** @type {string} */ (settings.type);
 const resolveOptions = settings.resolve;
 const layer = settings.layer;
 if (layer !== undefined && !layers) {
@@ -317,8 +317,8 @@ class SourceMapDevToolPlugin {
 ModuleFilenameHelpers.createFilename(
 module,
 {
-moduleFilenameTemplate: moduleFilenameTemplate,
-namespace: namespace
+moduleFilenameTemplate,
+namespace
 },
 {
 requestShortener,
@@ -382,7 +382,7 @@ class SourceMapDevToolPlugin {
 module,
 {
 moduleFilenameTemplate: fallbackModuleFilenameTemplate,
-namespace: namespace
+namespace
 },
 {
 requestShortener,
@@ -457,7 +457,7 @@ class SourceMapDevToolPlugin {
 
 /** @type {string | false | (function(PathData, AssetInfo=): string)} */
 let currentSourceMappingURLComment = sourceMappingURLComment;
-let cssExtensionDetected =
+const cssExtensionDetected =
 CSS_EXTENSION_DETECT_REGEXP.test(file);
 resetRegexpState(CSS_EXTENSION_DETECT_REGEXP);
 if (
@@ -473,7 +473,7 @@ class SourceMapDevToolPlugin {
 }
 const sourceMapString = JSON.stringify(sourceMap);
 if (sourceMapFilename) {
-let filename = file;
+const filename = file;
 const sourceMapContentHash =
 usesContentHash &&
 /** @type {string} */ (
@@ -155,7 +155,7 @@ const replacePathVariables = (path, data, assetInfo) => {
 // [ext] - .js
 if (typeof data.filename === "string") {
 // check that filename is data uri
-let match = data.filename.match(/^data:([^;,]+)/);
+const match = data.filename.match(/^data:([^;,]+)/);
 if (match) {
 const ext = mime.extension(match[1]);
 const emptyReplacer = replacer("", true);
@@ -259,7 +259,7 @@ class WebpackOptionsApply extends OptionsApply {
 append: hidden ? false : undefined,
 module: moduleMaps ? true : cheap ? false : true,
 columns: cheap ? false : true,
-noSources: noSources,
+noSources,
 namespace: options.output.devtoolNamespace
 }).apply(compiler);
 } else if (options.devtool.includes("eval")) {
@@ -330,12 +330,12 @@ const visitModules = (
 let statConnectedChunkGroups = 0;
 let statProcessedChunkGroupsForMerging = 0;
 let statMergedAvailableModuleSets = 0;
-let statForkedAvailableModules = 0;
-let statForkedAvailableModulesCount = 0;
-let statForkedAvailableModulesCountPlus = 0;
-let statForkedMergedModulesCount = 0;
-let statForkedMergedModulesCountPlus = 0;
-let statForkedResultModulesCount = 0;
+const statForkedAvailableModules = 0;
+const statForkedAvailableModulesCount = 0;
+const statForkedAvailableModulesCountPlus = 0;
+const statForkedMergedModulesCount = 0;
+const statForkedMergedModulesCountPlus = 0;
+const statForkedResultModulesCount = 0;
 let statChunkGroupInfoUpdated = 0;
 let statChildChunkGroupsReconnected = 0;
 
@@ -38,7 +38,7 @@ class IdleFileCachePlugin {
 * @returns {void}
 */
 apply(compiler) {
-let strategy = this.strategy;
+const strategy = this.strategy;
 const idleTimeout = this.idleTimeout;
 const idleTimeoutForInitialStore = Math.min(
 idleTimeout,
@@ -202,18 +202,11 @@ class IdleFileCachePlugin {
 }s.`
 );
 }
-idleTimer = setTimeout(
-() => {
+idleTimer = setTimeout(() => {
 idleTimer = undefined;
 isIdle = true;
 resolvedPromise.then(processIdleTasks);
-},
-Math.min(
-isInitialStore ? idleTimeoutForInitialStore : Infinity,
-isLargeChange ? idleTimeoutAfterLargeChanges : Infinity,
-idleTimeout
-)
-);
+}, Math.min(isInitialStore ? idleTimeoutForInitialStore : Infinity, isLargeChange ? idleTimeoutAfterLargeChanges : Infinity, idleTimeout));
 idleTimer.unref();
 }
 );
lib/cli.js (10 changed lines)
@@ -298,7 +298,7 @@ const getArguments = (schema = webpackSchema) => {
 return 0;
 }
 if (Array.isArray(schemaPart.items)) {
-let i = 0;
+const i = 0;
 for (const item of schemaPart.items) {
 addedArguments += traverse(
 item,
@@ -390,7 +390,7 @@ const cliAddedItems = new WeakMap();
 const getObjectAndProperty = (config, schemaPath, index = 0) => {
 if (!schemaPath) return { value: config };
 const parts = schemaPath.split(".");
-let property = parts.pop();
+const property = parts.pop();
 let current = config;
 let i = 0;
 for (const part of parts) {
@@ -444,7 +444,7 @@ const getObjectAndProperty = (config, schemaPath, index = 0) => {
 current = value;
 i++;
 }
-let value = current[property];
+const value = current[property];
 if (property.endsWith("[]")) {
 const name = property.slice(0, -2);
 const value = current[name];
@@ -627,13 +627,13 @@ const processArguments = (args, config, values) => {
 currentProblems.push({
 ...problem,
 argument: key,
-value: value,
+value,
 index: i
 });
 }
 problems.push(...currentProblems);
 };
-let value = values[key];
+const value = values[key];
 if (Array.isArray(value)) {
 for (let i = 0; i < value.length; i++) {
 processValue(value[i], i);
@@ -167,7 +167,7 @@ const applyWebpackOptionsDefaults = (options, compilerIndex) => {
 
 const { mode, name, target } = options;
 
-let targetProperties =
+const targetProperties =
 target === false
 ? /** @type {false} */ (false)
 : typeof target === "string"
@@ -912,8 +912,9 @@ const applyOutputDefaults = (
 } catch (e) {
 if (/** @type {Error & { code: string }} */ (e).code !== "ENOENT") {
 /** @type {Error & { code: string }} */
-(e).message +=
-`\nwhile determining default 'output.uniqueName' from 'name' in ${pkgPath}`;
+(
+e
+).message += `\nwhile determining default 'output.uniqueName' from 'name' in ${pkgPath}`;
 throw e;
 }
 return "";
@@ -93,7 +93,7 @@ class CssExportsGenerator extends Generator {
 chunkGraph: generateContext.chunkGraph,
 module,
 runtime: generateContext.runtime,
-runtimeRequirements: runtimeRequirements,
+runtimeRequirements,
 concatenationScope: generateContext.concatenationScope,
 codeGenerationResults: generateContext.codeGenerationResults,
 initFragments,
@@ -135,7 +135,7 @@ class CssExportsGenerator extends Generator {
 const usedIdentifiers = new Set();
 for (const [name, v] of cssExportsData.exports) {
 let identifier = Template.toIdentifier(name);
-let i = 0;
+const i = 0;
 while (usedIdentifiers.has(identifier)) {
 identifier = Template.toIdentifier(name + i);
 }
@@ -161,7 +161,7 @@ class CssExportsGenerator extends Generator {
 );
 }
 const exports = [];
-for (let [name, v] of cssExportsData.exports) {
+for (const [name, v] of cssExportsData.exports) {
 exports.push(`\t${JSON.stringify(name)}: ${JSON.stringify(v)}`);
 }
 return new RawSource(
@@ -146,7 +146,7 @@ const LZWEncode = str => {
 let encoded = "";
 let phrase = str[0];
 let code = 256;
-let maxCode = "\uffff".charCodeAt(0);
+const maxCode = "\uffff".charCodeAt(0);
 for (let i = 1; i < str.length; i++) {
 const c = str[i];
 if (map.has(phrase + c)) {
@@ -623,7 +623,7 @@ class CssModulesPlugin {
 const cacheEntry = this._moduleCache.get(moduleSourceContent);
 
 /** @type {Inheritance} */
-let inheritance = [[module.cssLayer, module.supports, module.media]];
+const inheritance = [[module.cssLayer, module.supports, module.media]];
 if (module.inheritance) {
 inheritance.push(...module.inheritance);
 }
@@ -206,7 +206,7 @@ class CssParser extends Parser {
 /** @type {[number, number] | undefined} */
 let lastIdentifier = undefined;
 /** @type [string, number, number][] */
-let balanced = [];
+const balanced = [];
 /** @type {undefined | { start: number, url?: string, urlStart?: number, urlEnd?: number, layer?: string, layerStart?: number, layerEnd?: number, supports?: string, supportsStart?: number, supportsEnd?: number, inSupports?:boolean, media?: string }} */
 let importData = undefined;
 /** @type {boolean} */
@@ -320,7 +320,7 @@ class CssParser extends Parser {
 if (input.charCodeAt(pos) === CC_RIGHT_CURLY) break;
 pos = walkCssTokens.eatWhitespaceAndComments(input, pos);
 if (pos === input.length) return pos;
-let start = pos;
+const start = pos;
 let name;
 [pos, name] = eatText(input, pos, eatExportName);
 if (pos === input.length) return pos;
@@ -428,7 +428,10 @@ class CssParser extends Parser {
 return isNextRulePrelude;
 },
 url: (input, start, end, contentStart, contentEnd) => {
-let value = normalizeUrl(input.slice(contentStart, contentEnd), false);
+const value = normalizeUrl(
+input.slice(contentStart, contentEnd),
+false
+);
 
 switch (scope) {
 case CSS_MODE_IN_AT_IMPORT: {
@@ -530,7 +533,7 @@ class CssParser extends Parser {
 (last[0].replace(/\\/g, "").toLowerCase() === "url" ||
 IMAGE_SET_FUNCTION.test(last[0].replace(/\\/g, "")))
 ) {
-let value = normalizeUrl(input.slice(start + 1, end - 1), true);
+const value = normalizeUrl(input.slice(start + 1, end - 1), true);
 
 // Ignore `url()`, `url('')` and `url("")`, they are valid by spec
 if (value.length === 0) {
@@ -884,7 +887,7 @@ class CssParser extends Parser {
 }
 
 if (name === "var") {
-let pos = walkCssTokens.eatWhitespaceAndComments(input, end);
+const pos = walkCssTokens.eatWhitespaceAndComments(input, end);
 if (pos === input.length) return pos;
 const [newPos, name] = eatText(input, pos, eatNameInVar);
 if (!name.startsWith("--")) return end;
@@ -431,7 +431,7 @@ const consumePotentialPseudo = (input, pos, callbacks) => {
 if (!callbacks.isSelector(input, pos) || !_startsIdentifier(input, pos))
 return pos;
 pos = _consumeIdentifier(input, pos, callbacks);
-let cc = input.charCodeAt(pos);
+const cc = input.charCodeAt(pos);
 if (cc === CC_LEFT_PARENTHESIS) {
 pos++;
 if (callbacks.pseudoFunction !== undefined) {
@@ -723,7 +723,7 @@ module.exports.isIdentStartCodePoint = isIdentStartCodePoint;
 */
 module.exports.eatComments = (input, pos) => {
 for (;;) {
-let originalPos = pos;
+const originalPos = pos;
 pos = consumeComments(input, pos, {});
 if (originalPos === pos) {
 break;
@@ -753,7 +753,7 @@ module.exports.eatWhitespace = (input, pos) => {
 */
 module.exports.eatWhitespaceAndComments = (input, pos) => {
 for (;;) {
-let originalPos = pos;
+const originalPos = pos;
 pos = consumeComments(input, pos, {});
 while (_isWhiteSpace(input.charCodeAt(pos))) {
 pos++;
@@ -317,7 +317,7 @@ class AMDDefineDependencyParserPlugin {
 }
 }
 }
-let fnRenames = new Map();
+const fnRenames = new Map();
 if (array) {
 /** @type {Record<number, string>} */
 const identifiers = {};
@@ -69,8 +69,8 @@ exports.create = (
 ) => {
 if (param.isTemplateString()) {
 const quasis = /** @type {BasicEvaluatedExpression[]} */ (param.quasis);
-let prefixRaw = /** @type {string} */ (quasis[0].string);
-let postfixRaw =
+const prefixRaw = /** @type {string} */ (quasis[0].string);
+const postfixRaw =
 /** @type {string} */
 (quasis.length > 1 ? quasis[quasis.length - 1].string : "");
 
@@ -180,10 +180,10 @@ exports.create = (
 ((param.prefix && param.prefix.isString()) ||
 (param.postfix && param.postfix.isString()))
 ) {
-let prefixRaw =
+const prefixRaw =
 /** @type {string} */
 (param.prefix && param.prefix.isString() ? param.prefix.string : "");
-let postfixRaw =
+const postfixRaw =
 /** @type {string} */
 (param.postfix && param.postfix.isString() ? param.postfix.string : "");
 const prefixRange =
@@ -345,7 +345,7 @@ HarmonyImportSpecifierDependency.Template = class HarmonyImportSpecifierDependen
 
 if (dep.referencedPropertiesInDestructuring) {
 const prefixedIds = ids[0] === "default" ? ids.slice(1) : ids;
-for (let {
+for (const {
 id,
 shorthand,
 range
@@ -124,7 +124,7 @@ ImportDependency.Template = class ImportDependencyTemplate extends (
 );
 const content = runtimeTemplate.moduleNamespacePromise({
 chunkGraph,
-block: block,
+block,
 module: /** @type {Module} */ (moduleGraph.getModule(dep)),
 request: dep.request,
 strict: /** @type {BuildMeta} */ (module.buildMeta).strictHarmonyModule,
@@ -64,7 +64,7 @@ class ProvidedDependency extends ModuleDependency {
 * @returns {(string[] | ReferencedExport)[]} referenced exports
 */
 getReferencedExports(moduleGraph, runtime) {
-let ids = this.ids;
+const ids = this.ids;
 if (ids.length === 0) return Dependency.EXPORTS_OBJECT_REFERENCED;
 return [ids];
 }
@@ -258,7 +258,7 @@ class WorkerPlugin {
 }
 
 /** @type {EntryOptions} */
-let entryOptions = {};
+const entryOptions = {};
 
 if (importOptions) {
 if (importOptions.webpackIgnore !== undefined) {
@@ -316,9 +316,9 @@ class WorkerPlugin {
 }
 
 if (entryOptions.runtime === undefined) {
-let i = workerIndexMap.get(parser.state) || 0;
+const i = workerIndexMap.get(parser.state) || 0;
 workerIndexMap.set(parser.state, i + 1);
-let name = `${cachedContextify(
+const name = `${cachedContextify(
 parser.state.module.identifier()
 )}|${i}`;
 const hash = createHash(compilation.outputOptions.hashFunction);
@@ -103,7 +103,7 @@ class ModuleChunkFormatPlugin {
 compilation.outputOptions
 ),
 {
-chunk: chunk,
+chunk,
 contentHashType: "javascript"
 }
 )
@@ -292,7 +292,7 @@ class BasicEvaluatedExpression {
 if (this.isBigInt()) return `${this.bigint}`;
 if (this.isRegExp()) return `${this.regExp}`;
 if (this.isArray()) {
-let array = [];
+const array = [];
 for (const item of /** @type {BasicEvaluatedExpression[]} */ (
 this.items
 )) {
@@ -718,7 +718,7 @@ class JavascriptModulesPlugin {
 inlinedModules = new Set(chunkGraph.getChunkEntryModulesIterable(chunk));
 }
 
-let source = new ConcatSource();
+const source = new ConcatSource();
 let prefix;
 if (iife) {
 if (runtimeTemplate.supportsArrowFunction()) {
@@ -849,7 +849,7 @@ class JavascriptModulesPlugin {
 const exports = runtimeRequirements.has(RuntimeGlobals.exports);
 const webpackExports =
 exports && m.exportsArgument === RuntimeGlobals.exports;
-let iife = innerStrict
+const iife = innerStrict
 ? "it need to be in strict mode."
 : inlinedModules.size > 1
 ? // TODO check globals and top-level declarations of other entries and chunk modules
@@ -1047,7 +1047,7 @@ class JavascriptModulesPlugin {
 allowInlineStartup: true
 };
 
-let { header: buf, startup, beforeStartup, afterStartup } = result;
+const { header: buf, startup, beforeStartup, afterStartup } = result;
 
 if (result.allowInlineStartup && moduleFactories) {
 startup.push(
@@ -1366,8 +1366,8 @@ class JavascriptParser extends Parser {
 if (expr.arguments.length !== 2) return;
 if (expr.arguments[0].type === "SpreadElement") return;
 if (expr.arguments[1].type === "SpreadElement") return;
-let arg1 = this.evaluateExpression(expr.arguments[0]);
-let arg2 = this.evaluateExpression(expr.arguments[1]);
+const arg1 = this.evaluateExpression(expr.arguments[0]);
+const arg2 = this.evaluateExpression(expr.arguments[1]);
 if (!arg1.isString() && !arg1.isRegExp()) return;
 const arg1Value = /** @type {string | RegExp} */ (
 arg1.regExp || arg1.string
@@ -1387,8 +1387,8 @@ class JavascriptParser extends Parser {
 .tap("JavascriptParser", (expr, param) => {
 if (!param.isString()) return;
 let arg1;
-let result,
-str = /** @type {string} */ (param.string);
+let result;
+const str = /** @type {string} */ (param.string);
 switch (expr.arguments.length) {
 case 1:
 if (expr.arguments[0].type === "SpreadElement") return;
@@ -3415,7 +3415,7 @@ class JavascriptParser extends Parser {
 * @param {ImportExpression} expression import expression
 */
 walkImportExpression(expression) {
-let result = this.hooks.importCall.call(expression);
+const result = this.hooks.importCall.call(expression);
 if (result === true) return;
 
 this.walkExpression(expression.source);
@@ -4395,7 +4395,7 @@ class JavascriptParser extends Parser {
 const comments = /** @type {Comment[]} */ (this.comments);
 let idx = binarySearchBounds.ge(comments, rangeStart, compare);
 /** @type {Comment[]} */
-let commentsInRange = [];
+const commentsInRange = [];
 while (
 comments[idx] &&
 /** @type {Range} */ (comments[idx].range)[1] <= rangeEnd
@@ -4579,9 +4579,9 @@ class JavascriptParser extends Parser {
 if (comments.length === 0) {
 return EMPTY_COMMENT_OPTIONS;
 }
-let options = {};
+const options = {};
 /** @type {unknown[]} */
-let errors = [];
+const errors = [];
 for (const comment of comments) {
 const { value } = comment;
 if (value && webpackCommentRegExp.test(value)) {
@@ -79,7 +79,7 @@ exports.evaluateToBoolean = value => {
 */
 exports.evaluateToIdentifier = (identifier, rootInfo, getMembers, truthy) => {
 return function identifierExpression(expr) {
-let evaluatedExpression = new BasicEvaluatedExpression()
+const evaluatedExpression = new BasicEvaluatedExpression()
 .setIdentifier(identifier, rootInfo, getMembers)
 .setSideEffects(false)
 .setRange(/** @type {Range} */ (expr.range));
@@ -68,7 +68,7 @@ const createObjectForExportsInfo = (data, exportsInfo, runtime) => {
 /** @type {Record<string, RawJsonData>} */ (reducedData)[name] = value;
 }
 if (isArray) {
-let arrayLengthWhenUsed =
+const arrayLengthWhenUsed =
 exportsInfo.getReadOnlyExportInfo("length").getUsed(runtime) !==
 UsageState.Unused
 ? data.length
@@ -173,7 +173,7 @@ class JsonGenerator extends Generator {
 }
 const exportsInfo = moduleGraph.getExportsInfo(module);
 /** @type {RawJsonData} */
-let finalJson =
+const finalJson =
 typeof data === "object" &&
 data &&
 exportsInfo.otherExportsInfo.getUsed(runtime) === UsageState.Unused
@@ -67,10 +67,9 @@ class AmdLibraryPlugin extends AbstractLibraryPlugin {
 );
 }
 }
-return {
-name: /** @type {string} */ (name),
-amdContainer: /** @type {string} */ (amdContainer)
-};
+const _name = /** @type {string} */ (name);
+const _amdContainer = /** @type {string} */ (amdContainer);
+return { name: _name, amdContainer: _amdContainer };
 }
 
 /**
@@ -143,8 +143,9 @@ class AssignLibraryPlugin extends AbstractLibraryPlugin {
 );
 }
 }
+const _name = /** @type {string | string[]} */ (name);
 return {
-name: /** @type {string | string[]} */ (name),
+name: _name,
 export: library.export
 };
 }
@@ -54,8 +54,9 @@ class JsonpLibraryPlugin extends AbstractLibraryPlugin {
 `Jsonp library name must be a simple string. ${AbstractLibraryPlugin.COMMON_LIBRARY_NAME_MESSAGE}`
 );
 }
+const _name = /** @type {string} */ (name);
 return {
-name: /** @type {string} */ (name)
+name: _name
 };
 }
 
@@ -73,8 +73,9 @@ class ModernModuleLibraryPlugin extends AbstractLibraryPlugin {
 `Library name must be unset. ${AbstractLibraryPlugin.COMMON_LIBRARY_NAME_MESSAGE}`
 );
 }
+const _name = /** @type {string} */ (name);
 return {
-name: /** @type {string} */ (name)
+name: _name
 };
 }
 
@@ -58,8 +58,9 @@ class ModuleLibraryPlugin extends AbstractLibraryPlugin {
 `Library name must be unset. ${AbstractLibraryPlugin.COMMON_LIBRARY_NAME_MESSAGE}`
 );
 }
+const _name = /** @type {string} */ (name);
 return {
-name: /** @type {string} */ (name)
+name: _name
 };
 }
 
@@ -58,8 +58,9 @@ class SystemLibraryPlugin extends AbstractLibraryPlugin {
 `System.js library name must be a simple string or unset. ${AbstractLibraryPlugin.COMMON_LIBRARY_NAME_MESSAGE}`
 );
 }
+const _name = /** @type {string} */ (name);
 return {
-name: /** @type {string} */ (name)
+name: _name
 };
 }
 
@@ -10,7 +10,7 @@ const { Logger } = require("./Logger");
 const createConsoleLogger = require("./createConsoleLogger");
 
 /** @type {createConsoleLogger.LoggerOptions} */
-let currentDefaultLoggerOptions = {
+const currentDefaultLoggerOptions = {
 level: "info",
 debug: false,
 console
@@ -51,7 +51,7 @@ class AggressiveMergingPlugin {
 chunks => {
 const chunkGraph = compilation.chunkGraph;
 /** @type {{a: Chunk, b: Chunk, improvement: number}[]} */
-let combinations = [];
+const combinations = [];
 for (const a of chunks) {
 if (a.canBeInitial()) continue;
 for (const b of chunks) {
@@ -903,7 +903,9 @@ class ConcatenatedModule extends Module {
 * @returns {Iterable<{ connection: ModuleGraphConnection, runtimeCondition: RuntimeSpec | true }>} imported modules in order
 */
 const getConcatenatedImports = module => {
-let connections = Array.from(moduleGraph.getOutgoingConnections(module));
+const connections = Array.from(
+moduleGraph.getOutgoingConnections(module)
+);
 if (module === rootModule) {
 for (const c of moduleGraph.getOutgoingConnections(this))
 connections.push(c);
@@ -1077,7 +1079,7 @@ class ConcatenatedModule extends Module {
 /** @type {string} */ (rootModule.context),
 associatedObjectForCache
 );
-let identifiers = [];
+const identifiers = [];
 for (const module of modules) {
 identifiers.push(cachedMakePathsRelative(module.identifier()));
 }
@@ -1489,8 +1491,9 @@ class ConcatenatedModule extends Module {
 } */ ${finalName}`;
 } catch (e) {
 /** @type {Error} */
-(e).message +=
-`\nwhile generating the root export '${name}' (used name: '${used}')`;
+(
+e
+).message += `\nwhile generating the root export '${name}' (used name: '${used}')`;
 throw e;
 }
 });
@@ -1795,8 +1798,9 @@ ${defineGetters}`
 info.moduleScope = moduleScope;
 } catch (err) {
 /** @type {Error} */
-(err).message +=
-`\nwhile analyzing module ${m.identifier()} for concatenation`;
+(
+err
+).message += `\nwhile analyzing module ${m.identifier()} for concatenation`;
 throw err;
 }
 }
@@ -399,7 +399,7 @@ class ModuleConcatenationPlugin {
 
 // Create a new ConcatenatedModule
 ConcatenatedModule.getCompilationHooks(compilation);
-let newModule = ConcatenatedModule.create(
+const newModule = ConcatenatedModule.create(
 rootModule,
 modules,
 concatConfiguration.runtime,
@@ -215,7 +215,7 @@ class RealContentHashPlugin {
 [asset.referencedHashes, asset.ownHashes] =
 await cacheAnalyse.providePromise(name, etag, () => {
 const referencedHashes = new Set();
-let ownHashes = new Set();
+const ownHashes = new Set();
 const inContent = content.match(hashRegExp);
 if (inContent) {
 for (const hash of inContent) {
@@ -41,7 +41,7 @@ function* getModulesFromMask(mask, ordinalModules) {
 // Consider the last 32 bits, since that's what Math.clz32 can handle
 let last32 = Number(BigInt.asUintN(32, mask));
 while (last32 > 0) {
-let last = Math.clz32(last32);
+const last = Math.clz32(last32);
 // The number of trailing zeros is the number trimmed off the input mask + 31 - the number of leading zeros
 // The 32 is baked into the initial value of offset
 const moduleIndex = offset - last;
@@ -148,7 +148,7 @@ class RemoveParentModulesPlugin {
 availableModulesMask = parentMask;
 changed = true;
 } else {
-let newMask = availableModulesMask & parentMask;
+const newMask = availableModulesMask & parentMask;
 if (newMask !== availableModulesMask) {
 changed = true;
 availableModulesMask = newMask;
@@ -500,7 +500,7 @@ const normalizeCacheGroups = (cacheGroups, defaultSizeTypes) => {
 */
 const fn = (module, context) => {
 /** @type {CacheGroupSource[]} */
-let results = [];
+const results = [];
 for (const fn of handlers) {
 fn(module, context, results);
 }
@@ -1210,7 +1210,7 @@ module.exports = class SplitChunksPlugin {
 // Walk through all modules
 for (const module of compilation.modules) {
 // Get cache group
-let cacheGroups = this.options.getCacheGroups(module, context);
+const cacheGroups = this.options.getCacheGroups(module, context);
 if (!Array.isArray(cacheGroups) || cacheGroups.length === 0) {
 continue;
 }
@@ -1482,7 +1482,7 @@ module.exports = class SplitChunksPlugin {
 usedChunks.size === 1
 ) {
 const [chunk] = usedChunks;
-let chunkSizes = Object.create(null);
+const chunkSizes = Object.create(null);
 for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
 if (!item.modules.has(module)) {
 for (const type of module.getSourceTypes()) {
@@ -127,8 +127,8 @@ module.exports = class SizeLimitsPlugin {
 
 if (size > /** @type {number} */ (entrypointSizeLimit)) {
 entrypointsOverLimit.push({
-name: name,
-size: size,
+name,
+size,
 files: entry.getFiles().filter(fileFilter)
 });
 isOverSizeLimitSet.add(entry);
@@ -1101,6 +1101,7 @@ class BinaryMiddleware extends SerializerMiddleware {
 }
 
 // avoid leaking memory in context
+// eslint-disable-next-line prefer-const
 let _result = result;
 result = undefined;
 return _result;
@@ -29,7 +29,7 @@ class MapObjectSerializer {
 */
 deserialize(context) {
 /** @type {number} */
-let size = context.read();
+const size = context.read();
 /** @type {Map<K, V>} */
 const map = new Map();
 /** @type {K[]} */
@@ -26,7 +26,7 @@ class SetObjectSerializer {
 */
 deserialize(context) {
 /** @type {number} */
-let size = context.read();
+const size = context.read();
 /** @type {Set<T>} */
 const set = new Set();
 for (let i = 0; i < size; i++) {
@@ -57,7 +57,7 @@ class ConsumeSharedPlugin {
 (item, key) => {
 if (Array.isArray(item)) throw new Error("Unexpected array in options");
 /** @type {ConsumeOptions} */
-let result =
+const result =
 item === key || !isRequiredVersion(item)
 ? // item is a request/key
 {
@@ -250,7 +250,7 @@ function canBeDecoded(str) {
 * @returns {string} dep version
 */
 function getGitUrlVersion(gitUrl) {
-let oriGitUrl = gitUrl;
+const oriGitUrl = gitUrl;
 // github extreme shorthand
 if (RE_URL_GITHUB_EXTREME_SHORT.test(gitUrl)) {
 gitUrl = "github:" + gitUrl;
@@ -62,7 +62,7 @@ class TupleSet {
 }
 
 const beforeLast = args[args.length - 2];
-let set = map.get(beforeLast);
+const set = map.get(beforeLast);
 if (set === undefined) {
 return false;
 }
@@ -86,7 +86,7 @@ class TupleSet {
 }
 
 const beforeLast = args[args.length - 2];
-let set = map.get(beforeLast);
+const set = map.get(beforeLast);
 if (set === undefined) {
 return;
 }
@@ -480,8 +480,8 @@ exports.compareChunksNatural = chunkGraph => {
 * @returns {-1|0|1} sorting comparator value
 */
 exports.compareLocations = (a, b) => {
-let isObjectA = typeof a === "object" && a !== null;
-let isObjectB = typeof b === "object" && b !== null;
+const isObjectA = typeof a === "object" && a !== null;
+const isObjectB = typeof b === "object" && b !== null;
 if (!isObjectA || !isObjectB) {
 if (isObjectA) return 1;
 if (isObjectB) return -1;
@@ -397,14 +397,14 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
 // going minSize from left and right
 // at least one node need to be included otherwise we get stuck
 let left = 1;
-let leftSize = Object.create(null);
+const leftSize = Object.create(null);
 addSizeTo(leftSize, group.nodes[0].size);
 while (left < group.nodes.length && isTooSmall(leftSize, minSize)) {
 addSizeTo(leftSize, group.nodes[left].size);
 left++;
 }
 let right = group.nodes.length - 2;
-let rightSize = Object.create(null);
+const rightSize = Object.create(null);
 addSizeTo(rightSize, group.nodes[group.nodes.length - 1].size);
 while (right >= 0 && isTooSmall(rightSize, minSize)) {
 addSizeTo(rightSize, group.nodes[right].size);
@@ -452,7 +452,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
 let best = -1;
 let bestSimilarity = Infinity;
 let pos = left;
-let rightSize = sumSize(group.nodes.slice(pos));
+const rightSize = sumSize(group.nodes.slice(pos));
 
 // pos v v right
 // [ O O O ] O O O [ O O O ]
@@ -138,7 +138,7 @@ const smartGrouping = (items, groupConfigs) => {
 }
 }
 const targetGroupCount = (options && options.targetGroupCount) || 4;
-let sizeValue = force
+const sizeValue = force
 ? items.size
 : Math.min(
 items.size,
@@ -67,7 +67,7 @@ function installYarnAsync() {
 function exec(command, args, description) {
 console.log(`Setup: ${description}`);
 return new Promise((resolve, reject) => {
-let cp = require("child_process").spawn(command, args, {
+const cp = require("child_process").spawn(command, args, {
 cwd: root,
 stdio: "inherit",
 shell: true
@@ -88,7 +88,7 @@ function exec(command, args, description) {
 function execGetOutput(command, args, description) {
 console.log(`Setup: ${description}`);
 return new Promise((resolve, reject) => {
-let cp = require("child_process").spawn(command, args, {
+const cp = require("child_process").spawn(command, args, {
 cwd: root,
 stdio: [process.stdin, "pipe", process.stderr],
 shell: true
@@ -108,7 +108,7 @@ describe("BinaryMiddleware", () => {
 for (const prepend of items) {
 for (const append of items) {
 if (c > 1 && append !== undefined) continue;
-let data = [prepend, ...caseData, append].filter(
+const data = [prepend, ...caseData, append].filter(
 x => x !== undefined
 );
 if (data.length * c > 200000) continue;
@@ -88,7 +88,6 @@ describe("ChangesAndRemovals", () => {
 
 it("should not track modified/removed files during initial watchRun", done => {
 const compiler = createSingleCompiler();
-let watcher;
 const watchRunFinished = new Promise(resolve => {
 compiler.hooks.watchRun.tap("ChangesAndRemovalsTest", compiler => {
 expect(getChanges(compiler)).toEqual({
@@ -98,7 +97,7 @@ describe("ChangesAndRemovals", () => {
 resolve();
 });
 });
-watcher = compiler.watch({ aggregateTimeout: 200 }, err => {
+const watcher = compiler.watch({ aggregateTimeout: 200 }, err => {
 if (err) done(err);
 });
 
@@ -637,7 +637,7 @@ const describeCases = config => {
 ", "
 )}) {${content}\n})`;
 
-let oldCurrentScript = document.currentScript;
+const oldCurrentScript = document.currentScript;
 document.currentScript = new CurrentScript(subPath);
 const fn = runInNewContext
 ? vm.runInNewContext(code, globalContext, p)
@@ -657,9 +657,9 @@ const describeCases = config => {
 ) {
 return testConfig.modules[module];
 } else {
-return require(
-module.startsWith("node:") ? module.slice(5) : module
-);
+return require(module.startsWith("node:")
+? module.slice(5)
+: module);
 }
 };
 
@@ -15,7 +15,7 @@ describe("ContextModuleFactory", () => {
 setTimeout(() => callback(null, ["/file"]));
 };
 memfs.stat = (file, callback) => {
-let err = new Error("fake ENOENT error");
+const err = new Error("fake ENOENT error");
 err.code = "ENOENT";
 setTimeout(() => callback(err, null));
 };
@@ -39,7 +39,7 @@ describe("ContextModuleFactory", () => {
 setTimeout(() => callback(null, ["/file"]));
 };
 memfs.stat = (file, callback) => {
-let err = new Error("fake EACCES error");
+const err = new Error("fake EACCES error");
 err.code = "EACCES";
 setTimeout(() => callback(err, null));
 };
@@ -194,7 +194,7 @@ describe("NormalModule", () => {
 });
 
 describe("#originalSource", () => {
-let expectedSource = "some source";
+const expectedSource = "some source";
 beforeEach(() => {
 normalModule._source = new RawSource(expectedSource);
 });
@@ -70,7 +70,7 @@ const describeCases = config => {
 return true;
 })
 .forEach(testName => {
-let infraStructureLog = [];
+const infraStructureLog = [];
 
 describe(testName, () => {
 const testDirectory = path.join(
@@ -33,7 +33,7 @@ module.exports = class FakeDocument {
 
 _onElementRemoved(element) {
 const type = element._type;
-let list = this._elementsByTagName.get(type);
+const list = this._elementsByTagName.get(type);
 const idx = list.indexOf(element);
 list.splice(idx, 1);
 }
@@ -1,5 +1,5 @@
 module.exports = () => {
-let warnings = [];
+const warnings = [];
 let oldWarn;
 
 beforeEach(done => {
@@ -1,7 +1,7 @@
 describe("warmup", () => {
 it("should warmup webpack", done => {
 let webpack = require("../../");
-let END = new Error("end warmup");
+const END = new Error("end warmup");
 webpack(
 {
 entry: "data:text/javascript,import 'data:text/javascript,'",