style: set `arrowParens` to `true` (#19706)

Alexander Akait 2025-07-16 19:13:14 +03:00 committed by GitHub
parent d8690f3d4b
commit 703d9acfe7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
497 changed files with 2679 additions and 2595 deletions
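The substantive change is in the Prettier config (first hunk below), which flips arrowParens from "avoid" to "always"; the remaining files appear to be the mechanical result of re-running Prettier with the new setting, for example via npx prettier --write . (assuming Prettier is installed as a dev dependency; webpack's actual format script may differ).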

View File

@ -5,7 +5,7 @@ module.exports = {
useTabs: true,
tabWidth: 2,
trailingComma: "none",
arrowParens: "avoid",
arrowParens: "always",
overrides: [
{
files: "*.json",

View File

@ -16,11 +16,11 @@ const runCommand = (command, args) => {
shell: true
});
executedCommand.on("error", error => {
executedCommand.on("error", (error) => {
reject(error);
});
executedCommand.on("exit", code => {
executedCommand.on("exit", (code) => {
if (code === 0) {
resolve();
} else {
@ -34,7 +34,7 @@ const runCommand = (command, args) => {
* @param {string} packageName name of the package
* @returns {boolean} is the package installed?
*/
const isInstalled = packageName => {
const isInstalled = (packageName) => {
if (process.versions.pnp) {
return true;
}
@ -76,7 +76,7 @@ const isInstalled = packageName => {
* @param {CliOption} cli options
* @returns {void}
*/
const runCli = cli => {
const runCli = (cli) => {
const path = require("path");
const pkgPath = require.resolve(`${cli.package}/package.json`);
@ -85,7 +85,7 @@ const runCli = cli => {
if (pkg.type === "module" || /\.mjs/i.test(pkg.bin[cli.binName])) {
import(path.resolve(path.dirname(pkgPath), pkg.bin[cli.binName])).catch(
err => {
(err) => {
console.error(err);
process.exitCode = 1;
}
@ -152,7 +152,7 @@ if (!cli.installed) {
// executed. Setting the exit code here to ensure the script exits correctly in those cases. The callback
// function is responsible for clearing the exit code if the user wishes to install webpack-cli.
process.exitCode = 1;
questionInterface.question(question, answer => {
questionInterface.question(question, (answer) => {
questionInterface.close();
const normalizedAnswer = answer.toLowerCase().startsWith("y");
@ -183,7 +183,7 @@ if (!cli.installed) {
.then(() => {
runCli(cli);
})
.catch(err => {
.catch((err) => {
console.error(err);
process.exitCode = 1;
});

View File

@ -137,9 +137,6 @@ export default defineConfig([
"id-length": "off",
"unicorn/no-array-for-each": "off",
"unicorn/prefer-includes": "off",
"jsdoc/require-jsdoc": "off",
// Revisit it in future
@ -205,7 +202,6 @@ export default defineConfig([
{
files: [
"test/configCases/{dll-plugin-entry,dll-plugin-side-effects,dll-plugin}/**/webpack.config.js",
"examples/**/*.js",
"test/NodeTemplatePlugin.test.js",
"test/PersistentCaching.test.js"
],

View File

@ -169,7 +169,7 @@ class APIPlugin {
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.chunkName)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
compilation.addRuntimeModule(
chunk,
new ChunkNameRuntimeModule(/** @type {string} */ (chunk.name))
@ -213,10 +213,10 @@ class APIPlugin {
/**
* @param {JavascriptParser} parser the parser
*/
const handler = parser => {
const handler = (parser) => {
for (const key of Object.keys(REPLACEMENTS)) {
const info = REPLACEMENTS[key];
parser.hooks.expression.for(key).tap(PLUGIN_NAME, expression => {
parser.hooks.expression.for(key).tap(PLUGIN_NAME, (expression) => {
const dep = toConstantDependency(parser, info.expr, info.req);
if (key === "__non_webpack_require__" && this.options.module) {
@ -227,7 +227,7 @@ class APIPlugin {
return dep(expression);
});
if (info.assign === false) {
parser.hooks.assign.for(key).tap(PLUGIN_NAME, expr => {
parser.hooks.assign.for(key).tap(PLUGIN_NAME, (expr) => {
const err = new WebpackError(`${key} must not be assigned`);
err.loc = /** @type {DependencyLocation} */ (expr.loc);
throw err;
@ -242,7 +242,7 @@ class APIPlugin {
parser.hooks.expression
.for("__webpack_layer__")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
const dep = new ConstDependency(
JSON.stringify(parser.state.module.layer),
/** @type {Range} */ (expr.range)
@ -253,7 +253,7 @@ class APIPlugin {
});
parser.hooks.evaluateIdentifier
.for("__webpack_layer__")
.tap(PLUGIN_NAME, expr =>
.tap(PLUGIN_NAME, (expr) =>
(parser.state.module.layer === null
? new BasicEvaluatedExpression().setNull()
: new BasicEvaluatedExpression().setString(
@ -263,7 +263,7 @@ class APIPlugin {
);
parser.hooks.evaluateTypeof
.for("__webpack_layer__")
.tap(PLUGIN_NAME, expr =>
.tap(PLUGIN_NAME, (expr) =>
new BasicEvaluatedExpression()
.setString(
parser.state.module.layer === null ? "object" : "string"
@ -273,7 +273,7 @@ class APIPlugin {
parser.hooks.expression
.for("__webpack_module__.id")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
/** @type {BuildInfo} */
(parser.state.module.buildInfo).moduleConcatenationBailout =
"__webpack_module__.id";
@ -289,7 +289,7 @@ class APIPlugin {
parser.hooks.expression
.for("__webpack_module__")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
/** @type {BuildInfo} */
(parser.state.module.buildInfo).moduleConcatenationBailout =
"__webpack_module__";

View File

@ -31,7 +31,7 @@ class AutomaticPrefetchPlugin {
);
/** @type {{context: string | null, request: string}[] | null} */
let lastModules = null;
compiler.hooks.afterCompile.tap(PLUGIN_NAME, compilation => {
compiler.hooks.afterCompile.tap(PLUGIN_NAME, (compilation) => {
lastModules = [];
for (const m of compilation.modules) {
@ -54,7 +54,7 @@ class AutomaticPrefetchPlugin {
callback
);
},
err => {
(err) => {
lastModules = null;
callback(err);
}

View File

@ -32,7 +32,7 @@ const validate = createSchemaValidation(
* @param {string} str string to wrap
* @returns {string} wrapped string
*/
const wrapComment = str => {
const wrapComment = (str) => {
if (!str.includes("\n")) {
return Template.toComment(str);
}
@ -67,7 +67,7 @@ class BannerPlugin {
/** @type {BannerFunction} */
this.banner = this.options.raw
? getBanner
: /** @type {BannerFunction} */ data => wrapComment(getBanner(data));
: /** @type {BannerFunction} */ (data) => wrapComment(getBanner(data));
} else {
const banner = this.options.raw
? bannerOption
@ -93,7 +93,7 @@ class BannerPlugin {
const stage =
this.options.stage || Compilation.PROCESS_ASSETS_STAGE_ADDITIONS;
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
compilation.hooks.processAssets.tap({ name: PLUGIN_NAME, stage }, () => {
for (const chunk of compilation.chunks) {
if (options.entryOnly && !chunk.canBeInitial()) {
@ -114,7 +114,7 @@ class BannerPlugin {
data
);
compilation.updateAsset(file, old => {
compilation.updateAsset(file, (old) => {
const cached = cache.get(old);
if (!cached || cached.comment !== comment) {
const source = options.footer

View File

@ -40,7 +40,7 @@ const {
* @param {(err?: Error) => void} callback callback
* @returns {(err?: Error) => void} callback
*/
const needCalls = (times, callback) => err => {
const needCalls = (times, callback) => (err) => {
if (--times === 0) {
return callback(err);
}

View File

@ -61,8 +61,8 @@ class MultiItemCache {
* @param {number} i index
* @returns {Promise<T>} promise with the data
*/
const next = i =>
this._items[i].getPromise().then(result => {
const next = (i) =>
this._items[i].getPromise().then((result) => {
if (result !== undefined) return result;
if (++i < this._items.length) return next(i);
});
@ -89,7 +89,7 @@ class MultiItemCache {
* @returns {Promise<void>} promise signals when the value is stored
*/
storePromise(data) {
return Promise.all(this._items.map(item => item.storePromise(data))).then(
return Promise.all(this._items.map((item) => item.storePromise(data))).then(
() => {}
);
}
@ -149,7 +149,7 @@ class ItemCacheFacade {
*/
storePromise(data) {
return new Promise((resolve, reject) => {
this._cache.store(this._name, this._etag, data, err => {
this._cache.store(this._name, this._etag, data, (err) => {
if (err) {
reject(err);
} else {
@ -171,7 +171,7 @@ class ItemCacheFacade {
if (cacheEntry !== undefined) return cacheEntry;
computer((err, result) => {
if (err) return callback(err);
this.store(result, err => {
this.store(result, (err) => {
if (err) return callback(err);
callback(null, result);
});
@ -297,7 +297,7 @@ class CacheFacade {
*/
storePromise(identifier, etag, data) {
return new Promise((resolve, reject) => {
this._cache.store(`${this._name}|${identifier}`, etag, data, err => {
this._cache.store(`${this._name}|${identifier}`, etag, data, (err) => {
if (err) {
reject(err);
} else {
@ -321,7 +321,7 @@ class CacheFacade {
if (cacheEntry !== undefined) return cacheEntry;
computer((err, result) => {
if (err) return callback(err);
this.store(identifier, etag, result, err => {
this.store(identifier, etag, result, (err) => {
if (err) return callback(err);
callback(null, result);
});

View File

@ -14,7 +14,7 @@ const WebpackError = require("./WebpackError");
* @param {Module[]} modules the modules to be sorted
* @returns {Module[]} sorted version of original modules
*/
const sortModules = modules =>
const sortModules = (modules) =>
modules.sort((a, b) => {
const aIdent = a.identifier();
const bIdent = b.identifier();
@ -33,7 +33,7 @@ const sortModules = modules =>
*/
const createModulesListMessage = (modules, moduleGraph) =>
modules
.map(m => {
.map((m) => {
let message = `* ${m.identifier()}`;
const validReasons = [
...moduleGraph.getIncomingConnectionsByOriginModule(m).keys()

View File

@ -583,7 +583,7 @@ class Chunk {
const chunks = new Set();
const initialChunks = intersect(
Array.from(this.groupsIterable, g => new Set(g.chunks))
Array.from(this.groupsIterable, (g) => new Set(g.chunks))
);
const initialQueue = new Set(this.groupsIterable);
@ -672,7 +672,7 @@ class Chunk {
const queue = new Set();
const initialChunks = intersect(
Array.from(this.groupsIterable, g => new Set(g.chunks))
Array.from(this.groupsIterable, (g) => new Set(g.chunks))
);
for (const chunkGroup of this.groupsIterable) {
@ -812,7 +812,7 @@ class Chunk {
* @param {Chunk} chunk a chunk
* @returns {void}
*/
const addChildIdsByOrdersToMap = chunk => {
const addChildIdsByOrdersToMap = (chunk) => {
const data = chunk.getChildIdsByOrders(chunkGraph, filterFn);
for (const key of Object.keys(data)) {
let chunkMap = chunkMaps[key];

View File

@ -76,13 +76,13 @@ class ModuleHashInfo {
* @param {SortableSet<T>} set the set
* @returns {T[]} set as array
*/
const getArray = set => [...set];
const getArray = (set) => [...set];
/**
* @param {SortableSet<Chunk>} chunks the chunks
* @returns {RuntimeSpecSet} runtimes
*/
const getModuleRuntimes = chunks => {
const getModuleRuntimes = (chunks) => {
const runtimes = new RuntimeSpecSet();
for (const chunk of chunks) {
runtimes.add(chunk.runtime);
@ -94,7 +94,7 @@ const getModuleRuntimes = chunks => {
* @param {WeakMap<Module, Set<string>> | undefined} sourceTypesByModule sourceTypesByModule
* @returns {(set: SortableSet<Module>) => Map<string, SortableSet<Module>>} modules by source type
*/
const modulesBySourceType = sourceTypesByModule => set => {
const modulesBySourceType = (sourceTypesByModule) => (set) => {
/** @type {Map<string, SortableSet<Module>>} */
const map = new Map();
for (const module of set) {
@ -136,10 +136,10 @@ const createOrderedArrayFunctionMap = new WeakMap();
* @param {ModuleComparator} comparator comparator function
* @returns {ModuleSetToArrayFunction} set as ordered array
*/
const createOrderedArrayFunction = comparator => {
const createOrderedArrayFunction = (comparator) => {
let fn = createOrderedArrayFunctionMap.get(comparator);
if (fn !== undefined) return fn;
fn = set => {
fn = (set) => {
set.sortWith(comparator);
return [...set];
};
@ -151,7 +151,7 @@ const createOrderedArrayFunction = comparator => {
* @param {Iterable<Module>} modules the modules to get the count/size of
* @returns {number} the size of the modules
*/
const getModulesSize = modules => {
const getModulesSize = (modules) => {
let size = 0;
for (const module of modules) {
for (const type of module.getSourceTypes()) {
@ -165,7 +165,7 @@ const getModulesSize = modules => {
* @param {Iterable<Module>} modules the sortable Set to get the size of
* @returns {Record<string, number>} the sizes of the modules
*/
const getModulesSizes = modules => {
const getModulesSizes = (modules) => {
const sizes = Object.create(null);
for (const module of modules) {
for (const type of module.getSourceTypes()) {
@ -311,13 +311,13 @@ class ChunkGraph {
_getGraphRoots(set) {
const { moduleGraph } = this;
return [
...findGraphRoots(set, module => {
...findGraphRoots(set, (module) => {
/** @type {Set<Module>} */
const set = new Set();
/**
* @param {Module} module module
*/
const addDependencies = module => {
const addDependencies = (module) => {
for (const connection of moduleGraph.getOutgoingConnections(module)) {
if (!connection.module) continue;
const activeState = connection.getActiveState(undefined);
@ -1272,7 +1272,10 @@ class ChunkGraph {
const array = [...cgc.runtimeModules];
array.sort(
concatComparators(
compareSelect(r => /** @type {RuntimeModule} */ (r).stage, compareIds),
compareSelect(
(r) => /** @type {RuntimeModule} */ (r).stage,
compareIds
),
compareModulesByIdentifier
)
);
@ -1401,7 +1404,7 @@ class ChunkGraph {
throw new Error(
`No unique hash info entry for unspecified runtime for ${module.identifier()} (existing runtimes: ${Array.from(
hashes.keys(),
r => runtimeToString(r)
(r) => runtimeToString(r)
).join(", ")}).
Caller might not support runtime-dependent code generation (opt-out via optimization.usedExports: "global").`
);
@ -1493,7 +1496,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
cgm.runtimeRequirements = map;
return;
}
runtimeRequirementsMap.update(runtime, runtimeRequirements => {
runtimeRequirementsMap.update(runtime, (runtimeRequirements) => {
if (runtimeRequirements === undefined) {
return transferOwnership ? items : new Set(items);
} else if (!transferOwnership || runtimeRequirements.size >= items.size) {
@ -1623,7 +1626,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
* @param {ConnectionState} state state
* @returns {"F" | "T" | "O"} result
*/
const activeStateToString = state => {
const activeStateToString = (state) => {
if (state === false) return "F";
if (state === true) return "T";
if (state === ModuleGraphConnection.TRANSITIVE_ONLY) return "O";
@ -1675,7 +1678,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
let stateInfo = "";
forEachRuntime(
runtime,
runtime => {
(runtime) => {
const state = connection.getActiveState(runtime);
states.add(state);
stateInfo += activeStateToString(state) + runtime;
@ -1702,7 +1705,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
/**
* @param {Module} module module
*/
const addModuleToHash = module => {
const addModuleToHash = (module) => {
hash.update(
this._getModuleGraphHashBigInt(
this._getChunkGraphModule(module),
@ -1714,7 +1717,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
/**
* @param {Set<Module>} modules modules
*/
const addModulesToHash = modules => {
const addModulesToHash = (modules) => {
let xor = ZERO_BIG_INT;
for (const m of modules) {
xor ^= this._getModuleGraphHashBigInt(
@ -1769,7 +1772,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
* @param {Module} module the module
* @returns {ChunkGraph} the chunk graph
*/
module => {
(module) => {
const chunkGraph = chunkGraphForModuleMap.get(module);
if (!chunkGraph) {
throw new Error(
@ -1821,7 +1824,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
* @param {Chunk} chunk the chunk
* @returns {ChunkGraph} the chunk graph
*/
chunk => {
(chunk) => {
const chunkGraph = chunkGraphForChunkMap.get(chunk);
if (!chunkGraph) {
throw new Error(

View File

@ -40,7 +40,7 @@ let debugId = 5000;
* @param {SortableSet<T>} set set to convert to array.
* @returns {T[]} the array format of existing set
*/
const getArray = set => [...set];
const getArray = (set) => [...set];
/**
* A convenience method used to sort chunks based on their id's
@ -165,7 +165,7 @@ class ChunkGroup {
* @returns {string} a unique concatenation of chunk debugId's
*/
get debugId() {
return Array.from(this.chunks, x => x.debugId).join("+");
return Array.from(this.chunks, (x) => x.debugId).join("+");
}
/**
@ -173,7 +173,7 @@ class ChunkGroup {
* @returns {string} a unique concatenation of chunk ids
*/
get id() {
return Array.from(this.chunks, x => x.id).join("+");
return Array.from(this.chunks, (x) => x.id).join("+");
}
/**
@ -533,7 +533,7 @@ class ChunkGroup {
if (cmp !== 0) return cmp;
return a.group.compareTo(chunkGraph, b.group);
});
result[name] = list.map(i => i.group);
result[name] = list.map((i) => i.group);
}
return result;
}

View File

@ -72,7 +72,7 @@ function getDirectories(assets) {
/**
* @param {string} filename asset filename
*/
const addDirectory = filename => {
const addDirectory = (filename) => {
directories.add(path.dirname(filename));
};
@ -158,7 +158,7 @@ const getDiffToFs = (fs, outputPath, currentAssets, callback) => {
callback();
});
},
err => {
(err) => {
if (err) return callback(err);
callback(null, diff);
@ -210,7 +210,7 @@ const applyDiff = (fs, outputPath, dry, logger, diff, isKept, callback) => {
/**
* @param {string} msg message
*/
const log = msg => {
const log = (msg) => {
if (dry) {
logger.info(msg);
} else {
@ -219,7 +219,7 @@ const applyDiff = (fs, outputPath, dry, logger, diff, isKept, callback) => {
};
/** @typedef {{ type: "check" | "unlink" | "rmdir", filename: string, parent: { remaining: number, job: Job } | undefined }} Job */
/** @type {Job[]} */
const jobs = Array.from(diff.keys(), filename => ({
const jobs = Array.from(diff.keys(), (filename) => ({
type: "check",
filename,
parent: undefined
@ -235,15 +235,15 @@ const applyDiff = (fs, outputPath, dry, logger, diff, isKept, callback) => {
* @param {Error & { code?: string }} err error
* @returns {void}
*/
const handleError = err => {
const handleError = (err) => {
const isAlreadyRemoved = () =>
new Promise(resolve => {
new Promise((resolve) => {
if (err.code === "ENOENT") {
resolve(true);
} else if (err.code === "EPERM") {
// https://github.com/isaacs/rimraf/blob/main/src/fix-eperm.ts#L37
// fs.existsSync(path) === false https://github.com/webpack/webpack/actions/runs/15493412975/job/43624272783?pr=19586
doStat(fs, path, err => {
doStat(fs, path, (err) => {
if (err) {
resolve(err.code === "ENOENT");
} else {
@ -255,7 +255,7 @@ const applyDiff = (fs, outputPath, dry, logger, diff, isKept, callback) => {
}
});
isAlreadyRemoved().then(isRemoved => {
isAlreadyRemoved().then((isRemoved) => {
if (isRemoved) {
log(`${filename} was removed during cleaning by something else`);
handleParent();
@ -334,7 +334,7 @@ const applyDiff = (fs, outputPath, dry, logger, diff, isKept, callback) => {
);
return process.nextTick(callback);
}
fs.rmdir(path, err => {
fs.rmdir(path, (err) => {
if (err) return handleError(err);
handleParent();
callback();
@ -352,7 +352,7 @@ const applyDiff = (fs, outputPath, dry, logger, diff, isKept, callback) => {
);
return process.nextTick(callback);
}
fs.unlink(path, err => {
fs.unlink(path, (err) => {
if (err) return handleError(err);
handleParent();
callback();
@ -360,7 +360,7 @@ const applyDiff = (fs, outputPath, dry, logger, diff, isKept, callback) => {
break;
}
},
err => {
(err) => {
if (err) return callback(err);
callback(undefined, keptAssets);
}
@ -412,9 +412,9 @@ class CleanPlugin {
typeof keep === "function"
? keep
: typeof keep === "string"
? path => path.startsWith(keep)
? (path) => path.startsWith(keep)
: typeof keep === "object" && keep.test
? path => keep.test(path)
? (path) => keep.test(path)
: () => false;
// We assume that no external modification happens while the compiler is active
@ -470,7 +470,7 @@ class CleanPlugin {
* @param {string} path path
* @returns {boolean | void} true, if needs to be kept
*/
const isKept = path => {
const isKept = (path) => {
const result = hooks.keep.call(path);
if (result !== undefined) return result;
return keepFn(path);

View File

@ -39,7 +39,7 @@ class CodeGenerationResults {
throw new Error(
`No code generation entry for ${module.identifier()} (existing entries: ${Array.from(
this.map.keys(),
m => m.identifier()
(m) => m.identifier()
).join(", ")})`
);
}
@ -50,7 +50,7 @@ class CodeGenerationResults {
throw new Error(
`No unique code generation entry for unspecified runtime for ${module.identifier()} (existing runtimes: ${Array.from(
entry.keys(),
r => runtimeToString(r)
(r) => runtimeToString(r)
).join(", ")}).
Caller might not support runtime-dependent code generation (opt-out via optimization.usedExports: "global").`
);
@ -66,7 +66,7 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
runtime
)} for ${module.identifier()} (existing runtimes: ${Array.from(
entry.keys(),
r => runtimeToString(r)
(r) => runtimeToString(r)
).join(", ")})`
);
}

View File

@ -54,7 +54,7 @@ class CompatibilityPlugin {
* @param {CallExpression} expr call expression
* @returns {boolean | void} true when need to handle
*/
expr => {
(expr) => {
// support for browserify style require delegator: "require(o, !0)"
if (expr.arguments.length !== 2) return;
const second = parser.evaluateExpression(expr.arguments[1]);
@ -90,9 +90,9 @@ class CompatibilityPlugin {
* @param {JavascriptParser} parser the parser
* @returns {void}
*/
const handler = parser => {
const handler = (parser) => {
// Handle nested requires
parser.hooks.preStatement.tap(PLUGIN_NAME, statement => {
parser.hooks.preStatement.tap(PLUGIN_NAME, (statement) => {
if (
statement.type === "FunctionDeclaration" &&
statement.id &&
@ -118,7 +118,7 @@ class CompatibilityPlugin {
});
parser.hooks.pattern
.for(RuntimeGlobals.require)
.tap(PLUGIN_NAME, pattern => {
.tap(PLUGIN_NAME, (pattern) => {
const newName = `__nested_webpack_require_${
/** @type {Range} */ (pattern.range)[0]
}__`;
@ -134,7 +134,7 @@ class CompatibilityPlugin {
});
parser.hooks.pattern
.for(RuntimeGlobals.exports)
.tap(PLUGIN_NAME, pattern => {
.tap(PLUGIN_NAME, (pattern) => {
parser.tagVariable(pattern.name, nestedWebpackIdentifierTag, {
name: "__nested_webpack_exports__",
declaration: {
@ -147,7 +147,7 @@ class CompatibilityPlugin {
});
parser.hooks.expression
.for(nestedWebpackIdentifierTag)
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
const { name, declaration } =
/** @type {TagData} */
(parser.currentTagData);

View File

@ -403,7 +403,7 @@ const deprecatedNormalModuleLoaderHook = util.deprecate(
* @param {Compilation} compilation compilation
* @returns {NormalModuleCompilationHooks["loader"]} hooks
*/
compilation =>
(compilation) =>
require("./NormalModule").getCompilationHooks(compilation).loader,
"Compilation.hooks.normalModuleLoader was moved to NormalModule.getCompilationHooks(compilation).loader",
"DEP_WEBPACK_COMPILATION_NORMAL_MODULE_LOADER_HOOK"
@ -413,7 +413,7 @@ const deprecatedNormalModuleLoaderHook = util.deprecate(
/**
* @param {ModuleTemplates | undefined} moduleTemplates module templates
*/
const defineRemovedModuleTemplates = moduleTemplates => {
const defineRemovedModuleTemplates = (moduleTemplates) => {
Object.defineProperties(moduleTemplates, {
asset: {
enumerable: false,
@ -437,21 +437,24 @@ const defineRemovedModuleTemplates = moduleTemplates => {
moduleTemplates = undefined;
};
const byId = compareSelect(c => c.id, compareIds);
const byId = compareSelect((c) => c.id, compareIds);
const byNameOrHash = concatComparators(
compareSelect(c => c.name, compareIds),
compareSelect(c => c.fullHash, compareIds)
compareSelect((c) => c.name, compareIds),
compareSelect((c) => c.fullHash, compareIds)
);
const byMessage = compareSelect(err => `${err.message}`, compareStringsNumeric);
const byModule = compareSelect(
err => (err.module && err.module.identifier()) || "",
const byMessage = compareSelect(
(err) => `${err.message}`,
compareStringsNumeric
);
const byLocation = compareSelect(err => err.loc, compareLocations);
const byModule = compareSelect(
(err) => (err.module && err.module.identifier()) || "",
compareStringsNumeric
);
const byLocation = compareSelect((err) => err.loc, compareLocations);
const compareErrors = concatComparators(byModule, byLocation, byMessage);
@ -497,7 +500,7 @@ class Compilation {
* @param {CompilationAssets} assets assets
* @returns {CompilationAssets} new assets
*/
const popNewAssets = assets => {
const popNewAssets = (assets) => {
let newAssets;
for (const file of Object.keys(assets)) {
if (savedAssets.has(file)) continue;
@ -514,7 +517,7 @@ class Compilation {
call: () => {
savedAssets = new Set(Object.keys(this.assets));
},
register: tap => {
register: (tap) => {
const { type, name } = tap;
const { fn, additionalAssets, ...remainingTap } = tap;
const additionalAssetsFn =
@ -526,7 +529,7 @@ class Compilation {
switch (type) {
case "sync":
if (additionalAssetsFn) {
this.hooks.processAdditionalAssets.tap(name, assets => {
this.hooks.processAdditionalAssets.tap(name, (assets) => {
if (
/** @type {ProcessedAssets} */
(processedAssets).has(this.assets)
@ -592,7 +595,7 @@ class Compilation {
* @param {Error} err err
* @returns {void}
*/
err => {
(err) => {
if (err) return callback(err);
if (processedAssets !== undefined) {
processedAssets.add(this.assets);
@ -612,7 +615,7 @@ class Compilation {
};
case "promise":
if (additionalAssetsFn) {
this.hooks.processAdditionalAssets.tapPromise(name, assets => {
this.hooks.processAdditionalAssets.tapPromise(name, (assets) => {
if (
/** @type {ProcessedAssets} */
(processedAssets).has(this.assets)
@ -628,7 +631,7 @@ class Compilation {
* @param {CompilationAssets} assets assets
* @returns {Promise<CompilationAssets>} result
*/
fn: assets => {
fn: (assets) => {
const p = fn(assets);
if (!p || !p.then) return p;
return p.then(() => {
@ -665,14 +668,15 @@ class Compilation {
* @param {string} reason reason
* @returns {string} error message
*/
const errorMessage =
reason => `Can't automatically convert plugin using Compilation.hooks.${name} to Compilation.hooks.processAssets because ${reason}.
const errorMessage = (
reason
) => `Can't automatically convert plugin using Compilation.hooks.${name} to Compilation.hooks.processAssets because ${reason}.
BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a single Compilation.hooks.processAssets hook.`;
/**
* @param {string | (import("tapable").TapOptions & { name: string; } & ProcessAssetsAdditionalOptions)} options hook options
* @returns {import("tapable").TapOptions & { name: string; } & ProcessAssetsAdditionalOptions} modified options
*/
const getOptions = options => {
const getOptions = (options) => {
if (typeof options === "string") options = { name: options };
if (options.stage) {
throw new Error(errorMessage("it's using the 'stage' option"));
@ -1075,7 +1079,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
this.addModuleQueue = new AsyncQueue({
name: "addModule",
parent: this.processDependenciesQueue,
getKey: module => module.identifier(),
getKey: (module) => module.identifier(),
processor: this._addModule.bind(this)
});
/** @type {AsyncQueue<FactorizeModuleOptions, string, Module | ModuleFactoryResult>} */
@ -1199,7 +1203,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @param {string} item item
* @returns {LazySet<string>} file dependencies
*/
item => this.fileDependencies.add(item),
(item) => this.fileDependencies.add(item),
"Compilation.compilationDependencies is deprecated (used Compilation.fileDependencies instead)",
"DEP_WEBPACK_COMPILATION_COMPILATION_DEPENDENCIES"
)
@ -1351,7 +1355,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
/* eslint-enable no-console */
}
},
childName => {
(childName) => {
if (typeof name === "function") {
if (typeof childName === "function") {
return this.getLogger(() => {
@ -1527,7 +1531,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
this.resolverFactory.get("normal", module.resolveOptions),
/** @type {InputFileSystem} */
(this.inputFileSystem),
err => {
(err) => {
if (currentProfile !== undefined) {
currentProfile.markBuildingEnd();
}
@ -1538,20 +1542,25 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
if (currentProfile !== undefined) {
currentProfile.markStoringStart();
}
this._modulesCache.store(module.identifier(), null, module, err => {
if (currentProfile !== undefined) {
currentProfile.markStoringEnd();
this._modulesCache.store(
module.identifier(),
null,
module,
(err) => {
if (currentProfile !== undefined) {
currentProfile.markStoringEnd();
}
if (err) {
this.hooks.failedModule.call(
module,
/** @type {WebpackError} */ (err)
);
return callback(new ModuleStoreError(module, err));
}
this.hooks.succeedModule.call(module);
return callback();
}
if (err) {
this.hooks.failedModule.call(
module,
/** @type {WebpackError} */ (err)
);
return callback(new ModuleStoreError(module, err));
}
this.hooks.succeedModule.call(module);
return callback();
});
);
}
);
}
@ -1575,7 +1584,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
/**
* @param {DependenciesBlock} block block
*/
const processDependenciesBlock = block => {
const processDependenciesBlock = (block) => {
if (block.dependencies) {
let i = 0;
for (const dep of block.dependencies) {
@ -1625,7 +1634,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @param {WebpackError=} err error
* @returns {void}
*/
const onDependenciesSorted = err => {
const onDependenciesSorted = (err) => {
if (err) return callback(err);
// early exit without changing parallelism back and forth
@ -1639,7 +1648,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
for (const item of sortedDependencies) {
inProgressTransitive++;
// eslint-disable-next-line no-loop-func
this.handleModuleCreation(item, err => {
this.handleModuleCreation(item, (err) => {
// In V8, the Error objects keep a reference to the functions on the stack. These warnings &
// errors are created inside closures that keep a reference to the Compilation, so errors are
// leaking the Compilation object.
@ -1661,7 +1670,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @param {WebpackError=} err error
* @returns {void}
*/
const onTransitiveTasksFinished = err => {
const onTransitiveTasksFinished = (err) => {
if (err) return callback(err);
this.processDependenciesQueue.decreaseParallelism();
@ -1738,7 +1747,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
module,
dep,
cachedModule,
err => {
(err) => {
if (err) {
if (inProgressTransitive <= 0) return;
inProgressTransitive = -1;
@ -1783,7 +1792,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @param {Dependency} dep dependency
* @returns {void}
*/
const processDependencyForResolving = dep => {
const processDependencyForResolving = (dep) => {
const resourceIdent = dep.getResourceIdentifier();
if (resourceIdent !== undefined && resourceIdent !== null) {
const category = dep.category;
@ -1982,7 +1991,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
};
if (err) {
if (factoryResult) applyFactoryResultDependencies();
if (dependencies.every(d => d.optional)) {
if (dependencies.every((d) => d.optional)) {
this.warnings.push(err);
return callback();
}
@ -2136,7 +2145,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
}
}
this.buildModule(module, err => {
this.buildModule(module, (err) => {
if (creatingModuleDuringBuildSet !== undefined) {
creatingModuleDuringBuildSet.delete(module);
}
@ -2160,7 +2169,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
return callback(null, module);
}
this.processModuleDependencies(module, err => {
this.processModuleDependencies(module, (err) => {
if (err) {
return callback(err);
}
@ -2238,7 +2247,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
originModule,
err,
/** @type {DependencyLocation} */
(dependencies.map(d => d.loc).find(Boolean))
(dependencies.map((d) => d.loc).find(Boolean))
);
return callback(notFoundError, factoryResult ? result : undefined);
}
@ -2453,9 +2462,9 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
const oldBlocks = [...module.blocks];
module.invalidateBuild();
this.buildQueue.invalidate(module);
this.buildModule(module, err => {
this.buildModule(module, (err) => {
if (err) {
return this.hooks.finishRebuildingModule.callAsync(module, err2 => {
return this.hooks.finishRebuildingModule.callAsync(module, (err2) => {
if (err2) {
callback(
makeWebpackError(err2, "Compilation.hooks.finishRebuildingModule")
@ -2468,13 +2477,13 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
this.processDependenciesQueue.invalidate(module);
this.moduleGraph.unfreeze();
this.processModuleDependencies(module, err => {
this.processModuleDependencies(module, (err) => {
if (err) return callback(err);
this.removeReasonsOfDependencyBlock(module, {
dependencies: oldDependencies,
blocks: oldBlocks
});
this.hooks.finishRebuildingModule.callAsync(module, err2 => {
this.hooks.finishRebuildingModule.callAsync(module, (err2) => {
if (err2) {
callback(
makeWebpackError(err2, "Compilation.hooks.finishRebuildingModule")
@ -2511,7 +2520,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @param {Module} module module
* @returns {WeakReferences | undefined} references
*/
const computeReferences = module => {
const computeReferences = (module) => {
/** @type {WeakReferences | undefined} */
let references;
for (const connection of moduleGraph.getOutgoingConnections(module)) {
@ -2604,7 +2613,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @param {readonly ModuleGraphConnection[]} connections connections
* @returns {symbol|boolean} result
*/
const reduceAffectType = connections => {
const reduceAffectType = (connections) => {
let affected = false;
for (const { dependency } of connections) {
if (!dependency) continue;
@ -2684,7 +2693,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @param {Module} module module
* @returns {References} references
*/
const computeReferences = module => {
const computeReferences = (module) => {
const id = /** @type {ModuleId} */ (chunkGraph.getModuleId(module));
/** @type {Map<Module, string | number | undefined> | undefined} */
let modules;
@ -2804,27 +2813,27 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
p.range(
profile.buildingStartTime,
profile.buildingEndTime,
f => (profile.buildingParallelismFactor = f)
(f) => (profile.buildingParallelismFactor = f)
);
p.range(
profile.factoryStartTime,
profile.factoryEndTime,
f => (profile.factoryParallelismFactor = f)
(f) => (profile.factoryParallelismFactor = f)
);
p.range(
profile.integrationStartTime,
profile.integrationEndTime,
f => (profile.integrationParallelismFactor = f)
(f) => (profile.integrationParallelismFactor = f)
);
p.range(
profile.storingStartTime,
profile.storingEndTime,
f => (profile.storingParallelismFactor = f)
(f) => (profile.storingParallelismFactor = f)
);
p.range(
profile.restoringStartTime,
profile.restoringEndTime,
f => (profile.restoringParallelismFactor = f)
(f) => (profile.restoringParallelismFactor = f)
);
if (profile.additionalFactoryTimes) {
for (const { start, end } of profile.additionalFactoryTimes) {
@ -2832,7 +2841,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
p.range(
start,
end,
f =>
(f) =>
(profile.additionalFactoriesParallelismFactor += f * influence)
);
}
@ -2955,33 +2964,33 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
};
logNormalSummary(
"resolve to new modules",
p => p.factory,
p => p.factoryParallelismFactor
(p) => p.factory,
(p) => p.factoryParallelismFactor
);
logNormalSummary(
"resolve to existing modules",
p => p.additionalFactories,
p => p.additionalFactoriesParallelismFactor
(p) => p.additionalFactories,
(p) => p.additionalFactoriesParallelismFactor
);
logNormalSummary(
"integrate modules",
p => p.restoring,
p => p.restoringParallelismFactor
(p) => p.restoring,
(p) => p.restoringParallelismFactor
);
logByLoadersSummary(
"build modules",
p => p.building,
p => p.buildingParallelismFactor
(p) => p.building,
(p) => p.buildingParallelismFactor
);
logNormalSummary(
"store modules",
p => p.storing,
p => p.storingParallelismFactor
(p) => p.storing,
(p) => p.storingParallelismFactor
);
logNormalSummary(
"restore modules",
p => p.restoring,
p => p.restoringParallelismFactor
(p) => p.restoring,
(p) => p.restoringParallelismFactor
);
this.logger.timeEnd("finish module profiles");
}
@ -2990,7 +2999,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
this.logger.timeEnd("compute affected modules");
this.logger.time("finish modules");
const { modules, moduleMemCaches } = this;
this.hooks.finishModules.callAsync(modules, err => {
this.hooks.finishModules.callAsync(modules, (err) => {
this.logger.timeEnd("finish modules");
if (err) return callback(/** @type {WebpackError} */ (err));
@ -3061,7 +3070,7 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @param {WebpackError=} err err
* @returns {void}
*/
const finalCallback = err => {
const finalCallback = (err) => {
this.factorizeQueue.clear();
this.buildQueue.clear();
this.rebuildQueue.clear();
@ -3139,11 +3148,11 @@ BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si
* @param {Dependency[]} deps deps
* @returns {Module[]} sorted deps
*/
const mapAndSort = deps =>
const mapAndSort = (deps) =>
/** @type {Module[]} */
(deps.map(dep => this.moduleGraph.getModule(dep)).filter(Boolean)).sort(
compareModulesByIdentifier
);
(
deps.map((dep) => this.moduleGraph.getModule(dep)).filter(Boolean)
).sort(compareModulesByIdentifier);
const includedModules = [
...mapAndSort(this.globalEntry.includeDependencies),
...mapAndSort(includeDependencies)
@ -3250,7 +3259,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
}
this.hooks.afterOptimizeChunks.call(this.chunks, this.chunkGroups);
this.hooks.optimizeTree.callAsync(this.chunks, this.modules, err => {
this.hooks.optimizeTree.callAsync(this.chunks, this.modules, (err) => {
if (err) {
return finalCallback(
makeWebpackError(err, "Compilation.hooks.optimizeTree")
@ -3262,7 +3271,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
this.hooks.optimizeChunkModules.callAsync(
this.chunks,
this.modules,
err => {
(err) => {
if (err) {
return finalCallback(
makeWebpackError(err, "Compilation.hooks.optimizeChunkModules")
@ -3325,7 +3334,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
this.logger.time("code generation");
this.hooks.beforeCodeGeneration.call();
this.codeGeneration(err => {
this.codeGeneration((err) => {
if (err) {
return finalCallback(err);
}
@ -3344,7 +3353,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
this.hooks.afterHash.call();
this.logger.timeEnd("hashing");
this._runCodeGenerationJobs(codeGenerationJobs, err => {
this._runCodeGenerationJobs(codeGenerationJobs, (err) => {
if (err) {
return finalCallback(err);
}
@ -3367,7 +3376,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
const cont = () => {
this.logger.time("process assets");
this.hooks.processAssets.callAsync(this.assets, err => {
this.hooks.processAssets.callAsync(this.assets, (err) => {
if (err) {
return finalCallback(
makeWebpackError(err, "Compilation.hooks.processAssets")
@ -3403,7 +3412,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
this.unseal();
return this.seal(callback);
}
return this.hooks.afterSeal.callAsync(err => {
return this.hooks.afterSeal.callAsync((err) => {
if (err) {
return finalCallback(
makeWebpackError(err, "Compilation.hooks.afterSeal")
@ -3418,7 +3427,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
this.logger.time("create chunk assets");
if (this.hooks.shouldGenerateChunkAssets.call() !== false) {
this.hooks.beforeChunkAssets.call();
this.createChunkAssets(err => {
this.createChunkAssets((err) => {
this.logger.timeEnd("create chunk assets");
if (err) {
return finalCallback(err);
@ -3542,7 +3551,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
if (
codeGenerationDependencies !== undefined &&
(notCodeGeneratedModules === undefined ||
codeGenerationDependencies.some(dep => {
codeGenerationDependencies.some((dep) => {
const referencedModule = /** @type {Module} */ (
moduleGraph.getModule(dep)
);
@ -3574,7 +3583,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
}
);
},
err => {
(err) => {
if (err) return callback(err);
if (delayedJobs.length > 0) {
if (delayedJobs.length === jobs.length) {
@ -3583,7 +3592,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
new Error(
`Unable to make progress during code generation because of circular code generation dependency: ${Array.from(
delayedModules,
m => m.identifier()
(m) => m.identifier()
).join(", ")}`
)
)
@ -3597,7 +3606,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
}
if (errors.length > 0) {
errors.sort(
compareSelect(err => err.module, compareModulesByIdentifier)
compareSelect((err) => err.module, compareModulesByIdentifier)
);
for (const error of errors) {
this.errors.push(error);
@ -3644,7 +3653,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
) {
let codeGenerated = false;
const cache = new MultiItemCache(
runtimes.map(runtime =>
runtimes.map((runtime) =>
this._codeGenerationCache.getItemCache(
`${module.identifier()}|${getRuntimeKey(runtime)}`,
`${hash}|${dependencyTemplates.getHash()}`
@ -3683,7 +3692,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
results.add(module, runtime, result);
}
if (!cachedResult) {
cache.store(result, err =>
cache.store(result, (err) =>
callback(/** @type {WebpackError} */ (err), codeGenerated)
);
} else {
@ -4031,7 +4040,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
* @param {Module} module module for processing
* @returns {void}
*/
const processModule = module => {
const processModule = (module) => {
if (!moduleGraph.setDepthIfLower(module, depth)) return;
queue.add(module);
};
@ -4157,7 +4166,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
/**
* @param {Dependency} d dependency to (maybe) patch up
*/
const iteratorDependency = d => {
const iteratorDependency = (d) => {
const depModule = this.moduleGraph.getModule(d);
if (!depModule) {
return;
@ -4190,7 +4199,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
/**
* @param {Entrypoint} ep an entrypoint
*/
const processEntrypoint = ep => {
const processEntrypoint = (ep) => {
const runtime = /** @type {string} */ (ep.options.runtime || ep.name);
const chunk = /** @type {Chunk} */ (ep.getRuntimeChunk());
chunkGraph.setRuntimeId(runtime, /** @type {ChunkId} */ (chunk.id));
@ -4273,7 +4282,9 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
}
}
if (errors.length > 0) {
errors.sort(compareSelect(err => err.module, compareModulesByIdentifier));
errors.sort(
compareSelect((err) => err.module, compareModulesByIdentifier)
);
for (const error of errors) {
this.errors.push(error);
}
@ -4410,7 +4421,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
for (const info of runtimeChunksMap.values()) {
for (const other of new Set(
[...info.chunk.getAllReferencedAsyncEntrypoints()].map(
e => e.chunks[e.chunks.length - 1]
(e) => e.chunks[e.chunks.length - 1]
)
)) {
const otherInfo =
@ -4463,11 +4474,11 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
circularRuntimeChunkInfo.push(info);
}
}
circularRuntimeChunkInfo.sort(compareSelect(i => i.chunk, byId));
circularRuntimeChunkInfo.sort(compareSelect((i) => i.chunk, byId));
const err =
new WebpackError(`Circular dependency between chunks with runtime (${Array.from(
circularRuntimeChunkInfo,
c => c.chunk.name || c.chunk.id
(c) => c.chunk.name || c.chunk.id
).join(", ")})
This prevents using hashes of each other and should be avoided.`);
err.chunk = circularRuntimeChunkInfo[0].chunk;
@ -4487,7 +4498,7 @@ This prevents using hashes of each other and should be avoided.`);
/**
* @param {Chunk} chunk chunk
*/
const processChunk = chunk => {
const processChunk = (chunk) => {
// Last minute module hash generation for modules that depend on chunk hashes
this.logger.time("hashing: hash runtime modules");
const runtime = chunk.runtime;
@ -4564,7 +4575,9 @@ This prevents using hashes of each other and should be avoided.`);
for (const chunk of runtimeChunks) processChunk(chunk);
for (const chunk of initialChunks) processChunk(chunk);
if (errors.length > 0) {
errors.sort(compareSelect(err => err.module, compareModulesByIdentifier));
errors.sort(
compareSelect((err) => err.module, compareModulesByIdentifier)
);
for (const error of errors) {
this.errors.push(error);
}
@ -4671,7 +4684,7 @@ This prevents using hashes of each other and should be avoided.`);
/**
* @param {string} name name
*/
const remove = name => {
const remove = (name) => {
const relatedIn = this._assetsRelatedIn.get(name);
if (relatedIn === undefined) return;
const entry = relatedIn.get(key);
@ -4696,7 +4709,7 @@ This prevents using hashes of each other and should be avoided.`);
/**
* @param {string} name name
*/
const add = name => {
const add = (name) => {
let relatedIn = this._assetsRelatedIn.get(name);
if (relatedIn === undefined) {
this._assetsRelatedIn.set(name, (relatedIn = new Map()));
@ -4783,7 +4796,7 @@ This prevents using hashes of each other and should be avoided.`);
const entry = related[key];
let newEntry;
if (Array.isArray(entry)) {
newEntry = entry.map(x => (x === file ? newFile : x));
newEntry = entry.map((x) => (x === file ? newFile : x));
} else if (entry === file) {
newEntry = newFile;
} else {
@ -4837,7 +4850,7 @@ This prevents using hashes of each other and should be avoided.`);
/**
* @param {string} file file
*/
const checkUsedAndDelete = file => {
const checkUsedAndDelete = (file) => {
if (!this._assetsRelatedIn.has(file)) {
this.deleteAsset(file);
}
@ -4987,7 +5000,7 @@ This prevents using hashes of each other and should be avoided.`);
* @param {Error} err error
* @returns {void}
*/
const errorAndCallback = err => {
const errorAndCallback = (err) => {
const filename =
file ||
(typeof file === "string"
@ -5068,7 +5081,7 @@ This prevents using hashes of each other and should be avoided.`);
chunk
});
if (source !== sourceFromCache) {
assetCacheItem.store(source, err => {
assetCacheItem.store(source, (err) => {
if (err) return errorAndCallback(err);
inTry = false;
return callback();
@ -5190,9 +5203,9 @@ This prevents using hashes of each other and should be avoided.`);
modules,
10,
(module, push, callback) => {
this.buildQueue.waitFor(module, err => {
this.buildQueue.waitFor(module, (err) => {
if (err) return callback(err);
this.processDependenciesQueue.waitFor(module, err => {
this.processDependenciesQueue.waitFor(module, (err) => {
if (err) return callback(err);
for (const { module: m } of this.moduleGraph.getOutgoingConnections(
module
@ -5205,7 +5218,7 @@ This prevents using hashes of each other and should be avoided.`);
});
});
},
err => {
(err) => {
if (err) return callback(/** @type {WebpackError} */ (err));
// Create new chunk graph, chunk and entrypoint for the build time execution
@ -5288,7 +5301,7 @@ This prevents using hashes of each other and should be avoided.`);
const reportErrors = () => {
if (errors.length > 0) {
errors.sort(
compareSelect(err => err.module, compareModulesByIdentifier)
compareSelect((err) => err.module, compareModulesByIdentifier)
);
for (const error of errors) {
this.errors.push(error);
@ -5298,7 +5311,7 @@ This prevents using hashes of each other and should be avoided.`);
};
// Generate code for all aggregated modules
asyncLib.eachLimit(modules, 10, codeGen, err => {
asyncLib.eachLimit(modules, 10, codeGen, (err) => {
if (err) return callback(err);
reportErrors();
@ -5334,7 +5347,7 @@ This prevents using hashes of each other and should be avoided.`);
}
// Generate code for all runtime modules
asyncLib.eachLimit(runtimeModules, 10, codeGen, err => {
asyncLib.eachLimit(runtimeModules, 10, codeGen, (err) => {
if (err) return callback(err);
reportErrors();
@ -5410,7 +5423,7 @@ This prevents using hashes of each other and should be avoided.`);
callback
);
},
err => {
(err) => {
if (err) return callback(err);
/** @type {ExecuteModuleExports | undefined} */
@ -5422,7 +5435,7 @@ This prevents using hashes of each other and should be avoided.`);
} = this.outputOptions;
/** @type {WebpackRequire} */
const __webpack_require__ = id => {
const __webpack_require__ = (id) => {
const cached = moduleCache[id];
if (cached !== undefined) {
if (cached.error) throw cached.error;
@ -5656,7 +5669,7 @@ Object.defineProperty(compilationPrototype, "cache", {
/**
* @param {EXPECTED_ANY} _v value
*/
_v => {},
(_v) => {},
"Compilation.cache was removed in favor of Compilation.getCache()",
"DEP_WEBPACK_COMPILATION_CACHE"
)

View File

@ -125,7 +125,7 @@ const webpack = require(".");
* @param {string[]} array an array
* @returns {boolean} true, if the array is sorted
*/
const isSorted = array => {
const isSorted = (array) => {
for (let i = 1; i < array.length; i++) {
if (array[i - 1] > array[i]) return false;
}
@ -154,7 +154,7 @@ const sortObject = (obj, keys) => {
const includesHash = (filename, hashes) => {
if (!hashes) return false;
if (Array.isArray(hashes)) {
return hashes.some(hash => filename.includes(hash));
return hashes.some((hash) => filename.includes(hash));
}
return filename.includes(hashes);
};
@ -388,7 +388,7 @@ class Compiler {
this.infrastructureLogger(name, type, args);
}
},
childName => {
(childName) => {
if (typeof name === "function") {
if (typeof childName === "function") {
return this.getInfrastructureLogger(() => {
@ -540,7 +540,7 @@ class Compiler {
compilation.startTime = startTime;
compilation.endTime = Date.now();
const stats = new Stats(compilation);
this.hooks.done.callAsync(stats, err => {
this.hooks.done.callAsync(stats, (err) => {
if (err) return finalCallback(err);
return finalCallback(null, stats);
});
@ -550,7 +550,7 @@ class Compiler {
process.nextTick(() => {
logger = compilation.getLogger("webpack.Compiler");
logger.time("emitAssets");
this.emitAssets(compilation, err => {
this.emitAssets(compilation, (err) => {
/** @type {Logger} */
(logger).timeEnd("emitAssets");
if (err) return finalCallback(err);
@ -563,12 +563,12 @@ class Compiler {
/** @type {Logger} */
(logger).time("done hook");
const stats = new Stats(compilation);
this.hooks.done.callAsync(stats, err => {
this.hooks.done.callAsync(stats, (err) => {
/** @type {Logger} */
(logger).timeEnd("done hook");
if (err) return finalCallback(err);
this.hooks.additionalPass.callAsync(err => {
this.hooks.additionalPass.callAsync((err) => {
if (err) return finalCallback(err);
this.compile(onCompiled);
});
@ -578,7 +578,7 @@ class Compiler {
/** @type {Logger} */
(logger).time("emitRecords");
this.emitRecords(err => {
this.emitRecords((err) => {
/** @type {Logger} */
(logger).timeEnd("emitRecords");
if (err) return finalCallback(err);
@ -588,13 +588,13 @@ class Compiler {
/** @type {Logger} */
(logger).time("done hook");
const stats = new Stats(compilation);
this.hooks.done.callAsync(stats, err => {
this.hooks.done.callAsync(stats, (err) => {
/** @type {Logger} */
(logger).timeEnd("done hook");
if (err) return finalCallback(err);
this.cache.storeBuildDependencies(
compilation.buildDependencies,
err => {
(err) => {
if (err) return finalCallback(err);
return finalCallback(null, stats);
}
@ -606,13 +606,13 @@ class Compiler {
};
const run = () => {
this.hooks.beforeRun.callAsync(this, err => {
this.hooks.beforeRun.callAsync(this, (err) => {
if (err) return finalCallback(err);
this.hooks.run.callAsync(this, err => {
this.hooks.run.callAsync(this, (err) => {
if (err) return finalCallback(err);
this.readRecords(err => {
this.readRecords((err) => {
if (err) return finalCallback(err);
this.compile(onCompiled);
@ -622,7 +622,7 @@ class Compiler {
};
if (this.idle) {
this.cache.endIdle(err => {
this.cache.endIdle((err) => {
if (err) return finalCallback(err);
this.idle = false;
@ -706,7 +706,7 @@ class Compiler {
* @param {Error=} err error
* @returns {void}
*/
const emitFiles = err => {
const emitFiles = (err) => {
if (err) return callback(err);
const assets = compilation.getAssets();
@ -739,7 +739,7 @@ class Compiler {
* @param {Error=} err error
* @returns {void}
*/
const writeOut = err => {
const writeOut = (err) => {
if (err) return callback(err);
const targetPath = join(
/** @type {OutputFileSystem} */
@ -838,9 +838,9 @@ ${other}`);
* @param {Buffer} content content to be written
* @returns {void}
*/
const doWrite = content => {
const doWrite = (content) => {
/** @type {OutputFileSystem} */
(this.outputFileSystem).writeFile(targetPath, content, err => {
(this.outputFileSystem).writeFile(targetPath, content, (err) => {
if (err) return callback(err);
// information marker that the asset has been emitted
@ -871,7 +871,7 @@ ${other}`);
/**
* @param {number} size size
*/
const updateWithReplacementSource = size => {
const updateWithReplacementSource = (size) => {
updateFileWithReplacementSource(
file,
/** @type {CacheEntry} */ (cacheEntry),
@ -915,7 +915,7 @@ ${other}`);
* @param {IStats} stats stats
* @returns {void}
*/
const processExistingFile = stats => {
const processExistingFile = (stats) => {
// skip emitting if it's already there and an immutable file
if (immutable) {
updateWithReplacementSource(/** @type {number} */ (stats.size));
@ -1015,7 +1015,7 @@ ${other}`);
writeOut();
}
},
err => {
(err) => {
// Clear map to free up memory
caseInsensitiveMap.clear();
if (err) {
@ -1025,7 +1025,7 @@ ${other}`);
this._assetEmittingPreviousFiles = allTargetPaths;
this.hooks.afterEmit.callAsync(compilation, err => {
this.hooks.afterEmit.callAsync(compilation, (err) => {
if (err) return callback(err);
return callback();
@ -1034,7 +1034,7 @@ ${other}`);
);
};
this.hooks.emit.callAsync(compilation, err => {
this.hooks.emit.callAsync(compilation, (err) => {
if (err) return callback(err);
outputPath = compilation.getPath(this.outputPath, {});
mkdirp(
@ -1054,10 +1054,10 @@ ${other}`);
if (this.recordsOutputPath) {
asyncLib.parallel(
[
cb => this.hooks.emitRecords.callAsync(cb),
(cb) => this.hooks.emitRecords.callAsync(cb),
this._emitRecords.bind(this)
],
err => callback(err)
(err) => callback(err)
);
} else {
this.hooks.emitRecords.callAsync(callback);
@ -1111,7 +1111,7 @@ ${other}`);
mkdirp(
/** @type {OutputFileSystem} */ (this.outputFileSystem),
recordsOutputPathDirectory,
err => {
(err) => {
if (err) return callback(err);
writeFile();
}
@ -1127,10 +1127,10 @@ ${other}`);
if (this.recordsInputPath) {
asyncLib.parallel(
[
cb => this.hooks.readRecords.callAsync(cb),
(cb) => this.hooks.readRecords.callAsync(cb),
this._readRecords.bind(this)
],
err => callback(err)
(err) => callback(err)
);
} else {
this.records = {};
@ -1154,7 +1154,7 @@ ${other}`);
return callback();
}
/** @type {InputFileSystem} */
(this.inputFileSystem).stat(this.recordsInputPath, err => {
(this.inputFileSystem).stat(this.recordsInputPath, (err) => {
// It doesn't exist
// We can ignore this.
if (err) return callback();
@ -1339,7 +1339,7 @@ ${other}`);
*/
compile(callback) {
const params = this.newCompilationParams();
this.hooks.beforeCompile.callAsync(params, err => {
this.hooks.beforeCompile.callAsync(params, (err) => {
if (err) return callback(err);
this.hooks.compile.call(params);
@ -1349,28 +1349,28 @@ ${other}`);
const logger = compilation.getLogger("webpack.Compiler");
logger.time("make hook");
this.hooks.make.callAsync(compilation, err => {
this.hooks.make.callAsync(compilation, (err) => {
logger.timeEnd("make hook");
if (err) return callback(err);
logger.time("finish make hook");
this.hooks.finishMake.callAsync(compilation, err => {
this.hooks.finishMake.callAsync(compilation, (err) => {
logger.timeEnd("finish make hook");
if (err) return callback(err);
process.nextTick(() => {
logger.time("finish compilation");
compilation.finish(err => {
compilation.finish((err) => {
logger.timeEnd("finish compilation");
if (err) return callback(err);
logger.time("seal compilation");
compilation.seal(err => {
compilation.seal((err) => {
logger.timeEnd("seal compilation");
if (err) return callback(err);
logger.time("afterCompile hook");
this.hooks.afterCompile.callAsync(compilation, err => {
this.hooks.afterCompile.callAsync(compilation, (err) => {
logger.timeEnd("afterCompile hook");
if (err) return callback(err);
@ -1391,12 +1391,12 @@ ${other}`);
close(callback) {
if (this.watching) {
// When there is still an active watching, close this first
this.watching.close(_err => {
this.watching.close((_err) => {
this.close(callback);
});
return;
}
this.hooks.shutdown.callAsync(err => {
this.hooks.shutdown.callAsync((err) => {
if (err) return callback(err);
// Get rid of reference to last compilation to avoid leaking memory
// We can't run this._cleanupLastCompilation() as the Stats to this compilation

@ -159,9 +159,9 @@ class ConstPlugin {
/**
* @param {JavascriptParser} parser the parser
*/
const handler = parser => {
parser.hooks.terminate.tap(PLUGIN_NAME, _statement => true);
parser.hooks.statementIf.tap(PLUGIN_NAME, statement => {
const handler = (parser) => {
parser.hooks.terminate.tap(PLUGIN_NAME, (_statement) => true);
parser.hooks.statementIf.tap(PLUGIN_NAME, (statement) => {
if (parser.scope.isAsmJs) return;
const param = parser.evaluateExpression(statement.test);
const bool = param.asBool();
@ -185,7 +185,7 @@ class ConstPlugin {
return bool;
}
});
parser.hooks.unusedStatement.tap(PLUGIN_NAME, statement => {
parser.hooks.unusedStatement.tap(PLUGIN_NAME, (statement) => {
if (
parser.scope.isAsmJs ||
// Check top level scope here again
@ -198,7 +198,7 @@ class ConstPlugin {
});
parser.hooks.expressionConditionalOperator.tap(
PLUGIN_NAME,
expression => {
(expression) => {
if (parser.scope.isAsmJs) return;
const param = parser.evaluateExpression(expression.test);
const bool = param.asBool();
@ -239,7 +239,7 @@ class ConstPlugin {
);
parser.hooks.expressionLogicalOperator.tap(
PLUGIN_NAME,
expression => {
(expression) => {
if (parser.scope.isAsmJs) return;
if (
expression.operator === "&&" ||
@ -381,7 +381,7 @@ class ConstPlugin {
}
}
);
parser.hooks.optionalChaining.tap(PLUGIN_NAME, expr => {
parser.hooks.optionalChaining.tap(PLUGIN_NAME, (expr) => {
/** @type {Expression[]} */
const optionalExpressionsStack = [];
/** @type {Expression | Super} */
@ -441,7 +441,7 @@ class ConstPlugin {
});
parser.hooks.evaluateIdentifier
.for("__resourceQuery")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
if (parser.scope.isAsmJs) return;
if (!parser.state.module) return;
return evaluateToString(
@ -450,7 +450,7 @@ class ConstPlugin {
});
parser.hooks.expression
.for("__resourceQuery")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
if (parser.scope.isAsmJs) return;
if (!parser.state.module) return;
const dep = new CachedConstDependency(
@ -467,7 +467,7 @@ class ConstPlugin {
parser.hooks.evaluateIdentifier
.for("__resourceFragment")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
if (parser.scope.isAsmJs) return;
if (!parser.state.module) return;
return evaluateToString(
@ -476,7 +476,7 @@ class ConstPlugin {
});
parser.hooks.expression
.for("__resourceFragment")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
if (parser.scope.isAsmJs) return;
if (!parser.state.module) return;
const dep = new CachedConstDependency(

@ -23,9 +23,9 @@ class ContextExclusionPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.contextModuleFactory.tap(PLUGIN_NAME, cmf => {
cmf.hooks.contextModuleFiles.tap(PLUGIN_NAME, files =>
files.filter(filePath => !this.negativeMatcher.test(filePath))
compiler.hooks.contextModuleFactory.tap(PLUGIN_NAME, (cmf) => {
cmf.hooks.contextModuleFiles.tap(PLUGIN_NAME, (files) =>
files.filter((filePath) => !this.negativeMatcher.test(filePath))
);
});
}

@ -283,7 +283,7 @@ class ContextModule extends Module {
identifier = `${requestShortener.shorten(`${this.options.resource}`)}/`;
} else {
identifier = this.options.resource
.map(r => `${requestShortener.shorten(r)}/`)
.map((r) => `${requestShortener.shorten(r)}/`)
.join(" ");
}
if (this.options.resourceQuery) {
@ -309,7 +309,7 @@ class ContextModule extends Module {
}
if (this.options.referencedExports) {
identifier += ` referencedExports: ${this.options.referencedExports
.map(e => e.join("."))
.map((e) => e.join("."))
.join(", ")}`;
}
if (this.options.chunkName) {
@ -355,7 +355,7 @@ class ContextModule extends Module {
identifier = "false";
} else {
identifier = this.options.resource
.map(res =>
.map((res) =>
contextify(options.context, res, options.associatedObjectForCache)
)
.join(" ");
@ -386,7 +386,7 @@ class ContextModule extends Module {
}
if (this.options.referencedExports) {
identifier += ` referencedExports: ${this.options.referencedExports
.map(e => e.join("."))
.map((e) => e.join("."))
.join(", ")}`;
}
@ -466,7 +466,7 @@ class ContextModule extends Module {
}
dependencies.sort(
concatComparators(
compareSelect(a => a.loc, compareLocations),
compareSelect((a) => a.loc, compareLocations),
keepOriginalOrder(this.dependencies)
)
);
@ -588,7 +588,7 @@ class ContextModule extends Module {
const sortedDependencies =
/** @type {ContextElementDependency[]} */
(dependencies)
.filter(dependency => moduleGraph.getModule(dependency))
.filter((dependency) => moduleGraph.getModule(dependency))
.sort((a, b) => {
if (a.userRequest === b.userRequest) {
return 0;
@ -621,7 +621,8 @@ class ContextModule extends Module {
// therefore the order of this is !important!
const sortedModules = dependencies
.map(
dependency => /** @type {Module} */ (moduleGraph.getModule(dependency))
(dependency) =>
/** @type {Module} */ (moduleGraph.getModule(dependency))
)
.filter(Boolean)
.sort(comparator);
@ -945,7 +946,7 @@ module.exports = webpackAsyncContext;`;
let hasMultipleOrNoChunks = false;
let hasNoChunk = true;
const fakeMap = this.getFakeMap(
blocks.map(b => b.dependencies[0]),
blocks.map((b) => b.dependencies[0]),
chunkGraph
);
const hasFakeMap = typeof fakeMap === "object";
@ -954,7 +955,7 @@ module.exports = webpackAsyncContext;`;
* @type {Item[]}
*/
const items = blocks
.map(block => {
.map((block) => {
const dependency =
/** @type {ContextElementDependency} */
(block.dependencies[0]);
@ -966,7 +967,7 @@ module.exports = webpackAsyncContext;`;
chunks: undefined
};
})
.filter(item => item.module);
.filter((item) => item.module);
for (const item of items) {
const chunkGroup = chunkGraph.getBlockChunkGroup(item.block);
const chunks = (chunkGroup && chunkGroup.chunks) || [];
@ -1000,7 +1001,7 @@ module.exports = webpackAsyncContext;`;
map[item.userRequest] = [
...arrayStart,
.../** @type {Chunk[]} */
(item.chunks).map(chunk => /** @type {ChunkId} */ (chunk.id))
(item.chunks).map((chunk) => /** @type {ChunkId} */ (chunk.id))
];
}
}

@ -56,7 +56,7 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
{
name: "alternatives",
/** @type {AsyncSeriesWaterfallHook<[ContextAlternativeRequest[]]>["intercept"]} */
intercept: interceptor => {
intercept: (interceptor) => {
throw new Error(
"Intercepting fake hook ContextModuleFactory.hooks.alternatives is not possible, use ContextModuleFactory.hooks.alternativeRequests instead"
);
@ -169,13 +169,13 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
asyncLib.parallel(
[
callback => {
(callback) => {
const results = /** @type ResolveRequest[] */ ([]);
/**
* @param {ResolveRequest} obj obj
* @returns {void}
*/
const yield_ = obj => {
const yield_ = (obj) => {
results.push(obj);
};
@ -189,13 +189,13 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
contextDependencies,
yield: yield_
},
err => {
(err) => {
if (err) return callback(err);
callback(null, results);
}
);
},
callback => {
(callback) => {
asyncLib.map(
loaders,
(loader, callback) => {
@ -230,7 +230,7 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
/** @type {[ResolveRequest[], string[]]} */ (result);
if (contextResult.length > 1) {
const first = contextResult[0];
contextResult = contextResult.filter(r => r.path);
contextResult = contextResult.filter((r) => r.path);
if (contextResult.length === 0) contextResult.push(first);
}
this.hooks.afterResolve.callAsync(
@ -241,7 +241,7 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
(loaderResult.length > 0 ? "!" : ""),
resource:
contextResult.length > 1
? contextResult.map(r => r.path)
? contextResult.map((r) => r.path)
: contextResult[0].path,
resolveDependencies: this.resolveDependencies.bind(this),
resourceQuery: contextResult[0].query,
@ -342,13 +342,13 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
fs.readdir(directory, (err, files) => {
if (err) return callback(err);
const processedFiles = cmf.hooks.contextModuleFiles.call(
/** @type {string[]} */ (files).map(file => file.normalize("NFC"))
/** @type {string[]} */ (files).map((file) => file.normalize("NFC"))
);
if (!processedFiles || processedFiles.length === 0) {
return callback(null, []);
}
asyncLib.map(
processedFiles.filter(p => p.indexOf(".") !== 0),
processedFiles.filter((p) => p.indexOf(".") !== 0),
(segment, callback) => {
const subResource = join(fs, directory, segment);
@ -387,10 +387,10 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
null,
/** @type {ContextAlternativeRequest[]} */
(alternatives)
.filter(obj =>
.filter((obj) =>
regExp.test(/** @type {string} */ (obj.request))
)
.map(obj => {
.map((obj) => {
const dep = new ContextElementDependency(
`${obj.request}${resourceQuery}${resourceFragment}`,
obj.request,

@ -97,8 +97,8 @@ class ContextReplacementPlugin {
const newContentRegExp = this.newContentRegExp;
const newContentCreateContextMap = this.newContentCreateContextMap;
compiler.hooks.contextModuleFactory.tap(PLUGIN_NAME, cmf => {
cmf.hooks.beforeResolve.tap(PLUGIN_NAME, result => {
compiler.hooks.contextModuleFactory.tap(PLUGIN_NAME, (cmf) => {
cmf.hooks.beforeResolve.tap(PLUGIN_NAME, (result) => {
if (!result) return;
if (resourceRegExp.test(result.request)) {
if (newContentResource !== undefined) {
@ -120,7 +120,7 @@ class ContextReplacementPlugin {
}
return result;
});
cmf.hooks.afterResolve.tap(PLUGIN_NAME, result => {
cmf.hooks.afterResolve.tap(PLUGIN_NAME, (result) => {
if (!result) return;
if (resourceRegExp.test(result.resource)) {
if (newContentResource !== undefined) {
@ -183,11 +183,11 @@ class ContextReplacementPlugin {
* @returns {(fs: InputFileSystem, options: ContextModuleOptions, callback: (err: null | Error, dependencies?: ContextElementDependency[]) => void) => void} resolve resolve dependencies from context map function
*/
const createResolveDependenciesFromContextMap =
createContextMap => (fs, options, callback) => {
(createContextMap) => (fs, options, callback) => {
createContextMap(fs, (err, map) => {
if (err) return callback(err);
const dependencies = Object.keys(map).map(
key =>
(key) =>
new ContextElementDependency(
map[key] + options.resourceQuery + options.resourceFragment,
key,

@ -130,7 +130,7 @@ class RuntimeValue {
*/
function getObjKeys(properties) {
if (!properties) return;
return new Set([...properties].map(p => p.id));
return new Set([...properties].map((p) => p.id));
}
/** @typedef {Set<string> | null} ObjKeys */
@ -161,7 +161,7 @@ const stringifyObj = (
const arr = Array.isArray(obj);
if (arr) {
code = `[${obj
.map(code =>
.map((code) =>
toCode(
code,
parser,
@ -176,10 +176,10 @@ const stringifyObj = (
} else {
let keys = Object.keys(obj);
if (objKeys) {
keys = objKeys.size === 0 ? [] : keys.filter(k => objKeys.has(k));
keys = objKeys.size === 0 ? [] : keys.filter((k) => objKeys.has(k));
}
code = `{${keys
.map(key => {
.map((key) => {
const code = obj[key];
return `${JSON.stringify(key)}:${toCode(
code,
@ -286,7 +286,7 @@ const toCode = (
* @param {CodeValue} code code
* @returns {string | undefined} result
*/
const toCacheVersion = code => {
const toCacheVersion = (code) => {
if (code === null) {
return "null";
}
@ -306,7 +306,7 @@ const toCacheVersion = code => {
return `(${code.toString()})`;
}
if (typeof code === "object") {
const items = Object.keys(code).map(key => ({
const items = Object.keys(code).map((key) => ({
key,
value: toCacheVersion(
/** @type {Record<string, EXPECTED_ANY>} */
@ -390,7 +390,7 @@ class DefinePlugin {
* @param {JavascriptParser} parser Parser
* @returns {void}
*/
const handler = parser => {
const handler = (parser) => {
const hooked = new Set();
const mainValue =
/** @type {ValueCacheVersion} */
@ -408,7 +408,7 @@ class DefinePlugin {
/**
* @param {string} key key
*/
const addValueDependency = key => {
const addValueDependency = (key) => {
const buildInfo =
/** @type {BuildInfo} */
(parser.state.module.buildInfo);
@ -495,7 +495,7 @@ class DefinePlugin {
// why 100? Ensures it runs after object define
stage: 100
},
expr => {
(expr) => {
const destructed =
parser.destructuringAssignmentPropertiesFor(expr);
if (destructed === undefined) {
@ -557,7 +557,7 @@ class DefinePlugin {
});
parser.hooks.evaluateIdentifier
.for(key)
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
/**
* this is needed in case there is a recursion in the DefinePlugin
* to prevent an endless recursion
@ -584,7 +584,7 @@ class DefinePlugin {
res.setRange(/** @type {Range} */ (expr.range));
return res;
});
parser.hooks.expression.for(key).tap(PLUGIN_NAME, expr => {
parser.hooks.expression.for(key).tap(PLUGIN_NAME, (expr) => {
addValueDependency(originalKey);
let strCode = toCode(
code,
@ -613,7 +613,7 @@ class DefinePlugin {
return toConstantDependency(parser, strCode)(expr);
});
}
parser.hooks.evaluateTypeof.for(key).tap(PLUGIN_NAME, expr => {
parser.hooks.evaluateTypeof.for(key).tap(PLUGIN_NAME, (expr) => {
/**
* this is needed in case there is a recursion in the DefinePlugin
* to prevent an endless recursion
@ -640,7 +640,7 @@ class DefinePlugin {
res.setRange(/** @type {Range} */ (expr.range));
return res;
});
parser.hooks.typeof.for(key).tap(PLUGIN_NAME, expr => {
parser.hooks.typeof.for(key).tap(PLUGIN_NAME, (expr) => {
addValueDependency(originalKey);
const codeCode = toCode(
code,
@ -672,20 +672,22 @@ class DefinePlugin {
addValueDependency(key);
return true;
});
parser.hooks.evaluateIdentifier.for(key).tap(PLUGIN_NAME, expr => {
addValueDependency(key);
return new BasicEvaluatedExpression()
.setTruthy()
.setSideEffects(false)
.setRange(/** @type {Range} */ (expr.range));
});
parser.hooks.evaluateIdentifier
.for(key)
.tap(PLUGIN_NAME, (expr) => {
addValueDependency(key);
return new BasicEvaluatedExpression()
.setTruthy()
.setSideEffects(false)
.setRange(/** @type {Range} */ (expr.range));
});
parser.hooks.evaluateTypeof
.for(key)
.tap(
PLUGIN_NAME,
withValueDependency(key, evaluateToString("object"))
);
parser.hooks.expression.for(key).tap(PLUGIN_NAME, expr => {
parser.hooks.expression.for(key).tap(PLUGIN_NAME, (expr) => {
addValueDependency(key);
let strCode = stringifyObj(
obj,
@ -778,7 +780,7 @@ class DefinePlugin {
* @param {Record<string, CodeValue>} definitions Definitions map
* @returns {void}
*/
const walkDefinitionsForKeys = definitions => {
const walkDefinitionsForKeys = (definitions) => {
/**
* @param {Map<string, Set<string>>} map Map
* @param {string} key key

@ -92,7 +92,7 @@ class DelegatedModuleFactoryPlugin {
}
);
} else {
normalModuleFactory.hooks.module.tap(PLUGIN_NAME, module => {
normalModuleFactory.hooks.module.tap(PLUGIN_NAME, (module) => {
const request = module.libIdent(this.options);
if (request && request in this.options.content) {
const resolved = this.options.content[request];

@ -64,7 +64,7 @@ class DllEntryPlugin {
this.options.name
),
this.options,
error => {
(error) => {
if (error) return callback(error);
callback();
}

@ -102,7 +102,7 @@ class DllReferencePlugin {
return callback();
});
compiler.hooks.compile.tap(PLUGIN_NAME, params => {
compiler.hooks.compile.tap(PLUGIN_NAME, (params) => {
let name = this.options.name;
let sourceType = this.options.sourceType;
let resolvedContent =

@ -43,9 +43,9 @@ class DynamicEntryPlugin {
}
);
compiler.hooks.make.tapPromise(PLUGIN_NAME, compilation =>
compiler.hooks.make.tapPromise(PLUGIN_NAME, (compilation) =>
Promise.resolve(this.entry())
.then(entry => {
.then((entry) => {
const promises = [];
for (const name of Object.keys(entry)) {
const desc = entry[name];
@ -68,7 +68,7 @@ class DynamicEntryPlugin {
this.context,
EntryPlugin.createDependency(entry, options),
options,
err => {
(err) => {
if (err) return reject(err);
resolve();
}

@ -45,7 +45,7 @@ class EntryPlugin {
const dep = EntryPlugin.createDependency(entry, options);
compiler.hooks.make.tapAsync(PLUGIN_NAME, (compilation, callback) => {
compilation.addEntry(context, dep, options, err => {
compilation.addEntry(context, dep, options, (err) => {
callback(err);
});
});

@ -48,7 +48,7 @@ class EnvironmentPlugin {
: this.defaultValues[key];
if (value === undefined) {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, (compilation) => {
const error = new WebpackError(
`${PLUGIN_NAME} - ${key} environment variable is undefined.\n\n` +
"You can pass an object with default values to suppress this warning.\n" +

@ -28,13 +28,13 @@ const cutOffByFlag = (stack, flag) => {
* @param {string} stack stack trace
* @returns {string} stack trace without the loader execution flag included
*/
const cutOffLoaderExecution = stack => cutOffByFlag(stack, loaderFlag);
const cutOffLoaderExecution = (stack) => cutOffByFlag(stack, loaderFlag);
/**
* @param {string} stack stack trace
* @returns {string} stack trace without the webpack options flag included
*/
const cutOffWebpackOptions = stack => cutOffByFlag(stack, webpackOptionsFlag);
const cutOffWebpackOptions = (stack) => cutOffByFlag(stack, webpackOptionsFlag);
/**
* @param {string} stack stack trace

@ -55,7 +55,7 @@ class EvalDevToolModulePlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
const hooks = JavascriptModulesPlugin.getCompilationHooks(compilation);
hooks.renderModuleContent.tap(
PLUGIN_NAME,
@ -110,7 +110,7 @@ class EvalDevToolModulePlugin {
);
hooks.render.tap(
PLUGIN_NAME,
source => new ConcatSource(devtoolWarning, source)
(source) => new ConcatSource(devtoolWarning, source)
);
hooks.chunkHash.tap(PLUGIN_NAME, (chunk, hash) => {
hash.update(PLUGIN_NAME);

@ -69,7 +69,7 @@ class EvalSourceMapDevToolPlugin {
*/
apply(compiler) {
const options = this.options;
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
const hooks = JavascriptModulesPlugin.getCompilationHooks(compilation);
new SourceMapDevToolModuleOptionsPlugin(options).apply(compilation);
const matchModule = ModuleFilenameHelpers.matchObject.bind(
@ -88,7 +88,7 @@ class EvalSourceMapDevToolPlugin {
* @param {Source} r result
* @returns {Source} result
*/
const result = r => {
const result = (r) => {
cache.set(source, r);
return r;
};
@ -136,13 +136,13 @@ class EvalSourceMapDevToolPlugin {
sourceMap = { ...sourceMap };
const context = /** @type {string} */ (compiler.options.context);
const root = compiler.root;
const modules = sourceMap.sources.map(source => {
const modules = sourceMap.sources.map((source) => {
if (!source.startsWith("webpack://")) return source;
source = makePathsAbsolute(context, source.slice(10), root);
const module = compilation.findModule(source);
return module || source;
});
let moduleFilenames = modules.map(module =>
let moduleFilenames = modules.map((module) =>
ModuleFilenameHelpers.createFilename(
module,
{
@ -205,7 +205,7 @@ class EvalSourceMapDevToolPlugin {
);
hooks.render.tap(
PLUGIN_NAME,
source => new ConcatSource(devtoolWarning, source)
(source) => new ConcatSource(devtoolWarning, source)
);
hooks.chunkHash.tap(PLUGIN_NAME, (chunk, hash) => {
hash.update(PLUGIN_NAME);

@ -142,7 +142,7 @@ class ExportsInfo {
if (this._redirectTo !== undefined) {
/** @type {Exports} */
const map = new Map(
Array.from(this._redirectTo.orderedExports, item => [item.name, item])
Array.from(this._redirectTo.orderedExports, (item) => [item.name, item])
);
for (const [key, value] of this._exports) {
map.set(key, value);
@ -402,7 +402,7 @@ class ExportsInfo {
} else {
if (
this._otherExportsInfo.setUsedConditionally(
used => used < UsageState.Unknown,
(used) => used < UsageState.Unknown,
UsageState.Unknown,
runtime
)
@ -465,7 +465,7 @@ class ExportsInfo {
*/
setUsedForSideEffectsOnly(runtime) {
return this._sideEffectsOnlyInfo.setUsedConditionally(
used => used === UsageState.Unused,
(used) => used === UsageState.Unused,
UsageState.Used,
runtime
);
@ -988,7 +988,7 @@ class ExportInfo {
let changed = false;
if (
this.setUsedConditionally(
used => used < UsageState.Unknown,
(used) => used < UsageState.Unknown,
UsageState.Unknown,
runtime
)
@ -1050,7 +1050,7 @@ class ExportInfo {
} else if (this._usedInRuntime === undefined) {
if (newValue !== UsageState.Unused && condition(UsageState.Unused)) {
this._usedInRuntime = new Map();
forEachRuntime(runtime, runtime =>
forEachRuntime(runtime, (runtime) =>
/** @type {UsedInRuntime} */
(this._usedInRuntime).set(/** @type {string} */ (runtime), newValue)
);
@ -1058,7 +1058,7 @@ class ExportInfo {
}
} else {
let changed = false;
forEachRuntime(runtime, _runtime => {
forEachRuntime(runtime, (_runtime) => {
const runtime = /** @type {string} */ (_runtime);
const usedInRuntime =
/** @type {UsedInRuntime} */
@ -1098,7 +1098,7 @@ class ExportInfo {
} else if (this._usedInRuntime === undefined) {
if (newValue !== UsageState.Unused) {
this._usedInRuntime = new Map();
forEachRuntime(runtime, runtime =>
forEachRuntime(runtime, (runtime) =>
/** @type {UsedInRuntime} */
(this._usedInRuntime).set(/** @type {string} */ (runtime), newValue)
);
@ -1106,7 +1106,7 @@ class ExportInfo {
}
} else {
let changed = false;
forEachRuntime(runtime, _runtime => {
forEachRuntime(runtime, (_runtime) => {
const runtime = /** @type {string} */ (_runtime);
const usedInRuntime =
/** @type {UsedInRuntime} */
@ -1247,7 +1247,7 @@ class ExportInfo {
} else if (
runtime !== undefined &&
[...runtime].every(
runtime =>
(runtime) =>
!(/** @type {UsedInRuntime} */ (this._usedInRuntime).has(runtime))
)
) {

@ -38,7 +38,7 @@ class ExportsInfoApiPlugin {
* @param {JavascriptParser} parser the parser
* @returns {void}
*/
const handler = parser => {
const handler = (parser) => {
parser.hooks.expressionMemberChain
.for("__webpack_exports_info__")
.tap(PLUGIN_NAME, (expr, members) => {
@ -60,7 +60,7 @@ class ExportsInfoApiPlugin {
});
parser.hooks.expression
.for("__webpack_exports_info__")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
const dep = new ConstDependency(
"true",
/** @type {Range} */ (expr.range)

@ -96,7 +96,9 @@ const getSourceForGlobalVariableExternal = (variableName, type) => {
}
// needed for e.g. window["some"]["thing"]
const objectLookup = variableName.map(r => `[${JSON.stringify(r)}]`).join("");
const objectLookup = variableName
.map((r) => `[${JSON.stringify(r)}]`)
.join("");
return {
iife: type === "this",
expression: `${type}${objectLookup}`
@ -107,7 +109,7 @@ const getSourceForGlobalVariableExternal = (variableName, type) => {
* @param {string|string[]} moduleAndSpecifiers the module request
* @returns {SourceData} the generated source
*/
const getSourceForCommonJsExternal = moduleAndSpecifiers => {
const getSourceForCommonJsExternal = (moduleAndSpecifiers) => {
if (!Array.isArray(moduleAndSpecifiers)) {
return {
expression: `require(${JSON.stringify(moduleAndSpecifiers)})`

@ -264,7 +264,7 @@ class ExternalModuleFactoryPlugin {
request: dependency.request,
dependencyType,
contextInfo,
getResolve: options => (context, request, callback) => {
getResolve: (options) => (context, request, callback) => {
const resolveContext = {
fileDependencies: data.fileDependencies,
missingDependencies: data.missingDependencies,
@ -307,7 +307,7 @@ class ExternalModuleFactoryPlugin {
},
cb
);
if (promise && promise.then) promise.then(r => cb(null, r), cb);
if (promise && promise.then) promise.then((r) => cb(null, r), cb);
}
return;
} else if (typeof externals === "object") {

@ -550,7 +550,7 @@ class Snapshot {
*/
getFileIterable() {
if (this._cachedFileIterable === undefined) {
this._cachedFileIterable = this._createIterable(s => [
this._cachedFileIterable = this._createIterable((s) => [
s.fileTimestamps,
s.fileHashes,
s.fileTshs,
@ -565,7 +565,7 @@ class Snapshot {
*/
getContextIterable() {
if (this._cachedContextIterable === undefined) {
this._cachedContextIterable = this._createIterable(s => [
this._cachedContextIterable = this._createIterable((s) => [
s.contextTimestamps,
s.contextHashes,
s.contextTshs,
@ -580,7 +580,7 @@ class Snapshot {
*/
getMissingIterable() {
if (this._cachedMissingIterable === undefined) {
this._cachedMissingIterable = this._createIterable(s => [
this._cachedMissingIterable = this._createIterable((s) => [
s.missingExistence,
s.managedMissing
]);
@ -661,7 +661,7 @@ class SnapshotOptimization {
* @param {SnapshotOptimizationEntry} entry optimization entry
* @returns {void}
*/
const increaseSharedAndStoreOptimizationEntry = entry => {
const increaseSharedAndStoreOptimizationEntry = (entry) => {
if (entry.children !== undefined) {
for (const child of entry.children) {
increaseSharedAndStoreOptimizationEntry(child);
@ -674,7 +674,7 @@ class SnapshotOptimization {
* @param {SnapshotOptimizationEntry} entry optimization entry
* @returns {void}
*/
const storeOptimizationEntry = entry => {
const storeOptimizationEntry = (entry) => {
for (const path of /** @type {SnapshotContent} */ (
entry.snapshotContent
)) {
@ -872,7 +872,7 @@ class SnapshotOptimization {
* @param {string} str input
* @returns {string} result
*/
const parseString = str => {
const parseString = (str) => {
if (str[0] === "'" || str[0] === "`") {
str = `"${str.slice(1, -1).replace(/"/g, '\\"')}"`;
}
@ -883,7 +883,7 @@ const parseString = str => {
/**
* @param {number} mtime mtime
*/
const applyMtime = mtime => {
const applyMtime = (mtime) => {
if (FS_ACCURACY > 1 && mtime % 2 !== 0) FS_ACCURACY = 1;
else if (FS_ACCURACY > 10 && mtime % 20 !== 0) FS_ACCURACY = 10;
else if (FS_ACCURACY > 100 && mtime % 200 !== 0) FS_ACCURACY = 100;
@ -1014,7 +1014,7 @@ const getManagedItem = (managedPath, path) => {
* @param {T | null} entry entry
* @returns {T["resolved"] | null | undefined} the resolved entry
*/
const getResolvedTimestamp = entry => {
const getResolvedTimestamp = (entry) => {
if (entry === null) return null;
if (entry.resolved !== undefined) return entry.resolved;
return entry.symlinks === undefined ? entry : undefined;
@ -1024,7 +1024,7 @@ const getResolvedTimestamp = entry => {
* @param {ContextHash | null} entry entry
* @returns {string | null | undefined} the resolved entry
*/
const getResolvedHash = entry => {
const getResolvedHash = (entry) => {
if (entry === null) return null;
if (entry.resolved !== undefined) return entry.resolved;
return entry.symlinks === undefined ? entry.hash : undefined;
@ -1079,66 +1079,66 @@ class FileSystemInfo {
/** @type {WeakMap<Snapshot, boolean | CheckSnapshotValidCallback[]>} */
this._snapshotCache = new WeakMap();
this._fileTimestampsOptimization = new SnapshotOptimization(
s => s.hasFileTimestamps(),
s => s.fileTimestamps,
(s) => s.hasFileTimestamps(),
(s) => s.fileTimestamps,
(s, v) => s.setFileTimestamps(v)
);
this._fileHashesOptimization = new SnapshotOptimization(
s => s.hasFileHashes(),
s => s.fileHashes,
(s) => s.hasFileHashes(),
(s) => s.fileHashes,
(s, v) => s.setFileHashes(v),
false
);
this._fileTshsOptimization = new SnapshotOptimization(
s => s.hasFileTshs(),
s => s.fileTshs,
(s) => s.hasFileTshs(),
(s) => s.fileTshs,
(s, v) => s.setFileTshs(v)
);
this._contextTimestampsOptimization = new SnapshotOptimization(
s => s.hasContextTimestamps(),
s => s.contextTimestamps,
(s) => s.hasContextTimestamps(),
(s) => s.contextTimestamps,
(s, v) => s.setContextTimestamps(v)
);
this._contextHashesOptimization = new SnapshotOptimization(
s => s.hasContextHashes(),
s => s.contextHashes,
(s) => s.hasContextHashes(),
(s) => s.contextHashes,
(s, v) => s.setContextHashes(v),
false
);
this._contextTshsOptimization = new SnapshotOptimization(
s => s.hasContextTshs(),
s => s.contextTshs,
(s) => s.hasContextTshs(),
(s) => s.contextTshs,
(s, v) => s.setContextTshs(v)
);
this._missingExistenceOptimization = new SnapshotOptimization(
s => s.hasMissingExistence(),
s => s.missingExistence,
(s) => s.hasMissingExistence(),
(s) => s.missingExistence,
(s, v) => s.setMissingExistence(v),
false
);
this._managedItemInfoOptimization = new SnapshotOptimization(
s => s.hasManagedItemInfo(),
s => s.managedItemInfo,
(s) => s.hasManagedItemInfo(),
(s) => s.managedItemInfo,
(s, v) => s.setManagedItemInfo(v),
false
);
this._managedFilesOptimization = new SnapshotOptimization(
s => s.hasManagedFiles(),
s => s.managedFiles,
(s) => s.hasManagedFiles(),
(s) => s.managedFiles,
(s, v) => s.setManagedFiles(v),
false,
true
);
this._managedContextsOptimization = new SnapshotOptimization(
s => s.hasManagedContexts(),
s => s.managedContexts,
(s) => s.hasManagedContexts(),
(s) => s.managedContexts,
(s, v) => s.setManagedContexts(v),
false,
true
);
this._managedMissingOptimization = new SnapshotOptimization(
s => s.hasManagedMissing(),
s => s.managedMissing,
(s) => s.hasManagedMissing(),
(s) => s.managedMissing,
(s, v) => s.setManagedMissing(v),
false,
true
@ -1202,32 +1202,32 @@ class FileSystemInfo {
const _unmanagedPaths = [...unmanagedPaths];
this.unmanagedPathsWithSlash =
/** @type {string[]} */
(_unmanagedPaths.filter(p => typeof p === "string")).map(p =>
(_unmanagedPaths.filter((p) => typeof p === "string")).map((p) =>
join(fs, p, "_").slice(0, -1)
);
this.unmanagedPathsRegExps =
/** @type {RegExp[]} */
(_unmanagedPaths.filter(p => typeof p !== "string"));
(_unmanagedPaths.filter((p) => typeof p !== "string"));
this.managedPaths = [...managedPaths];
this.managedPathsWithSlash =
/** @type {string[]} */
(this.managedPaths.filter(p => typeof p === "string")).map(p =>
(this.managedPaths.filter((p) => typeof p === "string")).map((p) =>
join(fs, p, "_").slice(0, -1)
);
this.managedPathsRegExps =
/** @type {RegExp[]} */
(this.managedPaths.filter(p => typeof p !== "string"));
(this.managedPaths.filter((p) => typeof p !== "string"));
this.immutablePaths = [...immutablePaths];
this.immutablePathsWithSlash =
/** @type {string[]} */
(this.immutablePaths.filter(p => typeof p === "string")).map(p =>
(this.immutablePaths.filter((p) => typeof p === "string")).map((p) =>
join(fs, p, "_").slice(0, -1)
);
this.immutablePathsRegExps =
/** @type {RegExp[]} */
(this.immutablePaths.filter(p => typeof p !== "string"));
(this.immutablePaths.filter((p) => typeof p !== "string"));
this._cachedDeprecatedFileTimestamps = undefined;
this._cachedDeprecatedContextTimestamps = undefined;
@ -1605,7 +1605,7 @@ class FileSystemInfo {
* @param {undefined | boolean | string} expected expected result
* @returns {string} expected result
*/
const expectedToString = expected =>
const expectedToString = (expected) =>
expected ? ` (expected ${expected})` : "";
/** @typedef {{ type: JobType, context: string | undefined, path: string, issuer: Job | undefined, expected: undefined | boolean | string }} Job */
@ -1613,7 +1613,7 @@ class FileSystemInfo {
* @param {Job} job job
* @returns {`resolve commonjs file ${string}${string}`|`resolve esm file ${string}${string}`|`resolve esm ${string}${string}`|`resolve directory ${string}`|`file ${string}`|`unknown ${string} ${string}`|`resolve commonjs ${string}${string}`|`directory ${string}`|`file dependencies ${string}`|`directory dependencies ${string}`} result
*/
const jobToString = job => {
const jobToString = (job) => {
switch (job.type) {
case RBDT_RESOLVE_CJS:
return `resolve commonjs ${job.path}${expectedToString(
@ -1646,7 +1646,7 @@ class FileSystemInfo {
* @param {Job} job job
* @returns {string} string value
*/
const pathToString = job => {
const pathToString = (job) => {
let result = ` at ${jobToString(job)}`;
/** @type {Job | undefined} */
(job) = job.issuer;
@ -1660,7 +1660,7 @@ class FileSystemInfo {
processAsyncTree(
Array.from(
deps,
dep =>
(dep) =>
/** @type {Job} */ ({
type: RBDT_RESOLVE_CJS,
context,
@ -1676,7 +1676,7 @@ class FileSystemInfo {
* @param {string} path path
* @returns {void}
*/
const resolveDirectory = path => {
const resolveDirectory = (path) => {
const key = `d\n${context}\n${path}`;
if (resolveResults.has(key)) {
return callback();
@ -2071,7 +2071,7 @@ class FileSystemInfo {
}
}
},
err => {
(err) => {
if (err) return callback(err);
for (const l of fileSymlinks) files.delete(l);
for (const l of directorySymlinks) directories.delete(l);
@ -2158,7 +2158,7 @@ class FileSystemInfo {
* @param {Error | typeof INVALID=} err error or invalid flag
* @returns {void}
*/
err => {
(err) => {
if (err === INVALID) {
return callback(null, false);
}
@ -2332,7 +2332,7 @@ class FileSystemInfo {
/**
* @param {ManagedFiles} capturedFiles captured files
*/
const processCapturedFiles = capturedFiles => {
const processCapturedFiles = (capturedFiles) => {
switch (mode) {
case 3:
this._fileTshsOptimization.optimize(snapshot, capturedFiles);
@ -2420,7 +2420,7 @@ class FileSystemInfo {
/**
* @param {ManagedContexts} capturedDirectories captured directories
*/
const processCapturedDirectories = capturedDirectories => {
const processCapturedDirectories = (capturedDirectories) => {
switch (mode) {
case 3:
this._contextTshsOptimization.optimize(snapshot, capturedDirectories);
@ -2565,7 +2565,7 @@ class FileSystemInfo {
/**
* @param {ManagedMissing} capturedMissing captured missing
*/
const processCapturedMissing = capturedMissing => {
const processCapturedMissing = (capturedMissing) => {
this._missingExistenceOptimization.optimize(snapshot, capturedMissing);
for (const path of capturedMissing) {
const cache = this._fileTimestamps.get(path);
@ -3345,7 +3345,7 @@ class FileSystemInfo {
* @param {string} hash hash
* @returns {void}
*/
const continueWithHash = hash => {
const continueWithHash = (hash) => {
const cache = this._fileTimestamps.get(path);
if (cache !== undefined) {
if (cache !== "ignore") {
@ -3421,8 +3421,8 @@ class FileSystemInfo {
return callback(err);
}
const files = /** @type {string[]} */ (_files)
.map(file => file.normalize("NFC"))
.filter(file => !/^\./.test(file))
.map((file) => file.normalize("NFC"))
.filter((file) => !/^\./.test(file))
.sort();
asyncLib.map(
files,
@ -3510,7 +3510,7 @@ class FileSystemInfo {
fromImmutablePath: () =>
/** @type {ContextFileSystemInfoEntry | FileSystemInfoEntry | "ignore" | null} */
(null),
fromManagedItem: info => ({
fromManagedItem: (info) => ({
safeTime: 0,
timestampHash: info
}),
@ -3643,7 +3643,7 @@ class FileSystemInfo {
callback();
});
},
err => {
(err) => {
if (err) return callback(/** @type {WebpackError} */ (err));
const hash = createHash(this._hashFunction);
hash.update(/** @type {string} */ (entry.timestampHash));
@ -3674,7 +3674,7 @@ class FileSystemInfo {
{
path,
fromImmutablePath: () => /** @type {ContextHash | ""} */ (""),
fromManagedItem: info => info || "",
fromManagedItem: (info) => info || "",
fromSymlink: (file, target, callback) => {
callback(
null,
@ -3759,7 +3759,7 @@ class FileSystemInfo {
callback();
});
},
err => {
(err) => {
if (err) return callback(/** @type {WebpackError} */ (err));
const hash = createHash(this._hashFunction);
hash.update(entry.hash);
@ -3817,7 +3817,7 @@ class FileSystemInfo {
path,
fromImmutablePath: () =>
/** @type {ContextTimestampAndHash | null} */ (null),
fromManagedItem: info => ({
fromManagedItem: (info) => ({
safeTime: 0,
timestampHash: info,
hash: info || ""
@ -3937,7 +3937,7 @@ class FileSystemInfo {
callback();
});
},
err => {
(err) => {
if (err) return callback(/** @type {WebpackError} */ (err));
const hash = createHash(this._hashFunction);
const tsHash = createHash(this._hashFunction);
@ -3979,7 +3979,7 @@ class FileSystemInfo {
return callback(/** @type {WebpackError} */ (err));
}
const set = new Set(
/** @type {string[]} */ (elements).map(element =>
/** @type {string[]} */ (elements).map((element) =>
join(this.fs, path, element)
)
);

@ -26,9 +26,9 @@ class FlagAllModulesAsUsedPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
const moduleGraph = compilation.moduleGraph;
compilation.hooks.optimizeDependencies.tap(PLUGIN_NAME, modules => {
compilation.hooks.optimizeDependencies.tap(PLUGIN_NAME, (modules) => {
/** @type {RuntimeSpec} */
let runtime;
for (const [name, { options }] of compilation.entries) {

@ -28,7 +28,7 @@ class FlagDependencyExportsPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
const moduleGraph = compilation.moduleGraph;
const cache = compilation.getCache(PLUGIN_NAME);
compilation.hooks.finishModules.tapAsync(
@ -103,7 +103,7 @@ class FlagDependencyExportsPlugin {
}
);
},
err => {
(err) => {
logger.timeEnd("restore cached provided exports");
if (err) return callback(err);
@ -129,7 +129,7 @@ class FlagDependencyExportsPlugin {
* @param {DependenciesBlock} depBlock the dependencies block
* @returns {void}
*/
const processDependenciesBlock = depBlock => {
const processDependenciesBlock = (depBlock) => {
for (const dep of depBlock.dependencies) {
processDependency(dep);
}
@ -142,7 +142,7 @@ class FlagDependencyExportsPlugin {
* @param {Dependency} dep the dependency
* @returns {void}
*/
const processDependency = dep => {
const processDependency = (dep) => {
const exportDesc = dep.getExports(moduleGraph);
if (!exportDesc) return;
exportsSpecsFromDependencies.set(dep, exportDesc);
@ -399,7 +399,7 @@ class FlagDependencyExportsPlugin {
callback
);
},
err => {
(err) => {
logger.timeEnd("store provided exports into cache");
callback(err);
}
@ -411,13 +411,13 @@ class FlagDependencyExportsPlugin {
/** @type {WeakMap<Module, RestoreProvidedData>} */
const providedExportsCache = new WeakMap();
compilation.hooks.rebuildModule.tap(PLUGIN_NAME, module => {
compilation.hooks.rebuildModule.tap(PLUGIN_NAME, (module) => {
providedExportsCache.set(
module,
moduleGraph.getExportsInfo(module).getRestoreProvidedData()
);
});
compilation.hooks.finishRebuildingModule.tap(PLUGIN_NAME, module => {
compilation.hooks.finishRebuildingModule.tap(PLUGIN_NAME, (module) => {
moduleGraph.getExportsInfo(module).restoreProvided(
/** @type {RestoreProvidedData} */
(providedExportsCache.get(module))

@ -41,11 +41,11 @@ class FlagDependencyUsagePlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
const moduleGraph = compilation.moduleGraph;
compilation.hooks.optimizeDependencies.tap(
{ name: PLUGIN_NAME, stage: STAGE_DEFAULT },
modules => {
(modules) => {
if (compilation.moduleMemCaches) {
throw new Error(
"optimization.usedExports can't be used with cacheUnaffected as export usage is a global effect"
@ -108,7 +108,7 @@ class FlagDependencyUsagePlugin {
if (nestedInfo) {
if (
exportInfo.setUsedConditionally(
used => used === UsageState.Unused,
(used) => used === UsageState.Unused,
UsageState.OnlyPropertiesUsed,
runtime
)
@ -127,7 +127,7 @@ class FlagDependencyUsagePlugin {
}
if (
exportInfo.setUsedConditionally(
v => v !== UsageState.Used,
(v) => v !== UsageState.Used,
UsageState.Used,
runtime
)

@ -27,7 +27,7 @@ class FlagEntryExportAsUsedPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, (compilation) => {
const moduleGraph = compilation.moduleGraph;
compilation.hooks.seal.tap(PLUGIN_NAME, () => {
for (const [

@ -123,7 +123,7 @@ class HotModuleReplacementPlugin {
const { hotAcceptCallback, hotAcceptWithoutCallback } =
HotModuleReplacementPlugin.getParserHooks(parser);
return expr => {
return (expr) => {
const module = parser.state.module;
const dep = new ConstDependency(
`${module.moduleArgument}.hot.accept`,
@ -145,7 +145,7 @@ class HotModuleReplacementPlugin {
} else if (arg.isArray()) {
params =
/** @type {BasicEvaluatedExpression[]} */
(arg.items).filter(param => param.isString());
(arg.items).filter((param) => param.isString());
}
/** @type {string[]} */
const requests = [];
@ -185,7 +185,7 @@ class HotModuleReplacementPlugin {
* @param {typeof ModuleHotDeclineDependency} ParamDependency dependency
* @returns {(expr: CallExpression) => boolean | undefined} callback
*/
const createDeclineHandler = (parser, ParamDependency) => expr => {
const createDeclineHandler = (parser, ParamDependency) => (expr) => {
const module = parser.state.module;
const dep = new ConstDependency(
`${module.moduleArgument}.hot.decline`,
@ -205,7 +205,7 @@ class HotModuleReplacementPlugin {
} else if (arg.isArray()) {
params =
/** @type {BasicEvaluatedExpression[]} */
(arg.items).filter(param => param.isString());
(arg.items).filter((param) => param.isString());
}
for (const [idx, param] of params.entries()) {
const dep = new ParamDependency(
@ -225,7 +225,7 @@ class HotModuleReplacementPlugin {
* @param {JavascriptParser} parser the parser
* @returns {(expr: Expression) => boolean | undefined} callback
*/
const createHMRExpressionHandler = parser => expr => {
const createHMRExpressionHandler = (parser) => (expr) => {
const module = parser.state.module;
const dep = new ConstDependency(
`${module.moduleArgument}.hot`,
@ -243,13 +243,13 @@ class HotModuleReplacementPlugin {
* @param {JavascriptParser} parser the parser
* @returns {void}
*/
const applyModuleHot = parser => {
const applyModuleHot = (parser) => {
parser.hooks.evaluateIdentifier.for("module.hot").tap(
{
name: PLUGIN_NAME,
before: "NodeStuffPlugin"
},
expr =>
(expr) =>
evaluateToIdentifier(
"module.hot",
"module",
@ -278,10 +278,10 @@ class HotModuleReplacementPlugin {
* @param {JavascriptParser} parser the parser
* @returns {void}
*/
const applyImportMetaHot = parser => {
const applyImportMetaHot = (parser) => {
parser.hooks.evaluateIdentifier
.for("import.meta.webpackHot")
.tap(PLUGIN_NAME, expr =>
.tap(PLUGIN_NAME, (expr) =>
evaluateToIdentifier(
"import.meta.webpackHot",
"import.meta",
@ -381,7 +381,7 @@ class HotModuleReplacementPlugin {
chunk,
compareModulesById(chunkGraph)
),
m => /** @type {ModuleId} */ (chunkGraph.getModuleId(m))
(m) => /** @type {ModuleId} */ (chunkGraph.getModuleId(m))
);
}
});
@ -391,7 +391,7 @@ class HotModuleReplacementPlugin {
const fullHashModules = new TupleSet();
/** @type {TupleSet<Module, RuntimeSpec>} */
const nonCodeGeneratedModules = new TupleSet();
compilation.hooks.fullHash.tap(PLUGIN_NAME, hash => {
compilation.hooks.fullHash.tap(PLUGIN_NAME, (hash) => {
const chunkGraph = compilation.chunkGraph;
const records = /** @type {Records} */ (compilation.records);
for (const chunk of compilation.chunks) {
@ -399,7 +399,7 @@ class HotModuleReplacementPlugin {
* @param {Module} module module
* @returns {string} module hash
*/
const getModuleHash = module => {
const getModuleHash = (module) => {
if (
compilation.codeGenerationResults.has(module, chunk.runtime)
) {
@ -524,7 +524,7 @@ class HotModuleReplacementPlugin {
const runtime = keyToRuntime(chunkRuntime[key]);
allOldRuntime = mergeRuntimeOwned(allOldRuntime, runtime);
}
forEachRuntime(allOldRuntime, runtime => {
forEachRuntime(allOldRuntime, (runtime) => {
const { path: filename, info: assetInfo } =
compilation.getPathWithInfo(
/** @type {NonNullable<OutputNormalized["hotUpdateMainFilename"]>} */
@ -588,7 +588,7 @@ class HotModuleReplacementPlugin {
let removedFromRuntime;
const currentChunk = find(
compilation.chunks,
chunk => `${chunk.id}` === key
(chunk) => `${chunk.id}` === key
);
if (currentChunk) {
chunkId = currentChunk.id;
@ -599,22 +599,22 @@ class HotModuleReplacementPlugin {
if (newRuntime === undefined) continue;
newModules = chunkGraph
.getChunkModules(currentChunk)
.filter(module => updatedModules.has(module, currentChunk));
.filter((module) => updatedModules.has(module, currentChunk));
newRuntimeModules = [
...chunkGraph.getChunkRuntimeModulesIterable(currentChunk)
].filter(module => updatedModules.has(module, currentChunk));
].filter((module) => updatedModules.has(module, currentChunk));
const fullHashModules =
chunkGraph.getChunkFullHashModulesIterable(currentChunk);
newFullHashModules =
fullHashModules &&
[...fullHashModules].filter(module =>
[...fullHashModules].filter((module) =>
updatedModules.has(module, currentChunk)
);
const dependentHashModules =
chunkGraph.getChunkDependentHashModulesIterable(currentChunk);
newDependentHashModules =
dependentHashModules &&
[...dependentHashModules].filter(module =>
[...dependentHashModules].filter((module) =>
updatedModules.has(module, currentChunk)
);
removedFromRuntime = subtractRuntime(oldRuntime, newRuntime);
@ -626,7 +626,7 @@ class HotModuleReplacementPlugin {
}
if (removedFromRuntime) {
// chunk was removed from some runtimes
forEachRuntime(removedFromRuntime, runtime => {
forEachRuntime(removedFromRuntime, (runtime) => {
const item =
/** @type {HotUpdateMainContentByRuntimeItem} */
(
@ -665,7 +665,7 @@ class HotModuleReplacementPlugin {
// module is no longer in this runtime combination
// We (incorrectly) assume that it's not in an overlapping runtime combination
// and dispose it from the main runtimes the chunk was removed from
forEachRuntime(removedFromRuntime, runtime => {
forEachRuntime(removedFromRuntime, (runtime) => {
// If the module is still used in this runtime, do not dispose it
// This could create a bad runtime state where the module is still loaded,
// but no chunk which contains it. This means we don't receive further HMR updates
@ -764,7 +764,7 @@ class HotModuleReplacementPlugin {
compilation.hooks.chunkAsset.call(currentChunk, filename);
}
}
forEachRuntime(newRuntime, runtime => {
forEachRuntime(newRuntime, (runtime) => {
const item =
/** @type {HotUpdateMainContentByRuntimeItem} */ (
hotUpdateMainContentByRuntime.get(
@ -829,7 +829,7 @@ To fix this, make sure to include [runtime] in the output.hotUpdateMainFilename
...completelyRemovedModulesArray,
...Array.from(
removedModules,
m =>
(m) =>
/** @type {ModuleId} */ (chunkGraph.getModuleId(m))
)
]
@ -863,28 +863,28 @@ To fix this, make sure to include [runtime] in the output.hotUpdateMainFilename
normalModuleFactory.hooks.parser
.for(JAVASCRIPT_MODULE_TYPE_AUTO)
.tap(PLUGIN_NAME, parser => {
.tap(PLUGIN_NAME, (parser) => {
applyModuleHot(parser);
applyImportMetaHot(parser);
});
normalModuleFactory.hooks.parser
.for(JAVASCRIPT_MODULE_TYPE_DYNAMIC)
.tap(PLUGIN_NAME, parser => {
.tap(PLUGIN_NAME, (parser) => {
applyModuleHot(parser);
});
normalModuleFactory.hooks.parser
.for(JAVASCRIPT_MODULE_TYPE_ESM)
.tap(PLUGIN_NAME, parser => {
.tap(PLUGIN_NAME, (parser) => {
applyImportMetaHot(parser);
});
normalModuleFactory.hooks.module.tap(PLUGIN_NAME, module => {
normalModuleFactory.hooks.module.tap(PLUGIN_NAME, (module) => {
module.hot = true;
return module;
});
NormalModule.getCompilationHooks(compilation).loader.tap(
PLUGIN_NAME,
context => {
(context) => {
context.hot = true;
}
);

@ -71,8 +71,8 @@ class IgnorePlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.normalModuleFactory.tap(PLUGIN_NAME, nmf => {
nmf.hooks.beforeResolve.tap(PLUGIN_NAME, resolveData => {
compiler.hooks.normalModuleFactory.tap(PLUGIN_NAME, (nmf) => {
nmf.hooks.beforeResolve.tap(PLUGIN_NAME, (resolveData) => {
const result = this.checkIgnore(resolveData);
if (
@ -93,7 +93,7 @@ class IgnorePlugin {
return result;
});
});
compiler.hooks.contextModuleFactory.tap(PLUGIN_NAME, cmf => {
compiler.hooks.contextModuleFactory.tap(PLUGIN_NAME, (cmf) => {
cmf.hooks.beforeResolve.tap(PLUGIN_NAME, this.checkIgnore);
});
}

@ -24,11 +24,11 @@ class IgnoreWarningsPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compilation.hooks.processWarnings.tap(PLUGIN_NAME, warnings =>
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
compilation.hooks.processWarnings.tap(PLUGIN_NAME, (warnings) =>
warnings.filter(
warning =>
!this._ignoreWarnings.some(ignore => ignore(warning, compilation))
(warning) =>
!this._ignoreWarnings.some((ignore) => ignore(warning, compilation))
)
);
});

@ -18,7 +18,7 @@ class InvalidDependenciesModuleWarning extends WebpackError {
*/
constructor(module, deps) {
const orderedDeps = deps ? [...deps].sort() : [];
const depsList = orderedDeps.map(dep => ` * ${JSON.stringify(dep)}`);
const depsList = orderedDeps.map((dep) => ` * ${JSON.stringify(dep)}`);
super(`Invalid dependencies have been reported by plugins or loaders for this module. All reported dependencies need to be absolute paths.
Invalid dependencies may lead to broken watching and caching.
As best effort we try to convert all invalid values to absolute paths and converting globs into context dependencies, but this is deprecated behavior.

@ -32,7 +32,7 @@ class JavascriptMetaInfoPlugin {
* @param {JavascriptParser} parser the parser
* @returns {void}
*/
const handler = parser => {
const handler = (parser) => {
parser.hooks.call.for("eval").tap(PLUGIN_NAME, () => {
const buildInfo =
/** @type {BuildInfo} */

@ -86,7 +86,7 @@ class LibManifestPlugin {
this.options.entryOnly &&
!someInIterable(
moduleGraph.getIncomingConnections(module),
c => c.dependency instanceof EntryDependency
(c) => c.dependency instanceof EntryDependency
)
) {
continue;
@ -129,7 +129,7 @@ class LibManifestPlugin {
mkdirp(
intermediateFileSystem,
dirname(intermediateFileSystem, targetPath),
err => {
(err) => {
if (err) return callback(err);
intermediateFileSystem.writeFile(targetPath, buffer, callback);
}

@ -59,7 +59,7 @@ class LoaderOptionsPlugin {
*/
apply(compiler) {
const options = this.options;
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
NormalModule.getCompilationHooks(compilation).loader.tap(
PLUGIN_NAME,
(context, module) => {

@ -25,10 +25,10 @@ class LoaderTargetPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
NormalModule.getCompilationHooks(compilation).loader.tap(
PLUGIN_NAME,
loaderContext => {
(loaderContext) => {
loaderContext.target = this.target;
}
);

@ -323,7 +323,7 @@ class MainTemplate {
/**
* @param {PathData} options context data
* @returns {string} interpolated path
*/ options =>
*/ (options) =>
compilation.getAssetPath(
/** @type {string} */
(compilation.outputOptions.publicPath),

@ -775,7 +775,7 @@ class Module extends DependenciesBlock {
fromModule,
connections
] of moduleGraph.getIncomingConnectionsByOriginModule(this)) {
if (!connections.some(c => c.isTargetActive(chunk.runtime))) continue;
if (!connections.some((c) => c.isTargetActive(chunk.runtime))) continue;
for (const originChunk of chunkGraph.getModuleChunksIterable(
/** @type {Module} */ (fromModule)
)) {

@ -96,13 +96,13 @@ const getHash =
* @param {Record<string, () => T>} obj the object to convert to a lazy access object
* @returns {T} the lazy access object
*/
const lazyObject = obj => {
const lazyObject = (obj) => {
const newObj = /** @type {T} */ ({});
for (const key of Object.keys(obj)) {
const fn = obj[key];
Object.defineProperty(newObj, key, {
get: () => fn(),
set: v => {
set: (v) => {
Object.defineProperty(newObj, key, {
value: v,
enumerable: true,
@ -320,7 +320,7 @@ ModuleFilenameHelpers.replaceDuplicates = (array, fn, comparator) => {
const matchPart = (str, test) => {
if (!test) return true;
if (Array.isArray(test)) {
return test.some(test => matchPart(str, test));
return test.some((test) => matchPart(str, test));
}
if (typeof test === "string") {
return str.startsWith(test);

@ -36,7 +36,7 @@ const EMPTY_SET = new Set();
* @param {SortableSet<ModuleGraphConnection>} set input
* @returns {readonly Map<Module | undefined, readonly ModuleGraphConnection[]>} mapped by origin module
*/
const getConnectionsByOriginModule = set => {
const getConnectionsByOriginModule = (set) => {
const map = new Map();
/** @type {Module | 0} */
let lastModule = 0;
@ -67,7 +67,7 @@ const getConnectionsByOriginModule = set => {
* @param {SortableSet<ModuleGraphConnection>} set input
* @returns {readonly Map<Module | undefined, readonly ModuleGraphConnection[]>} mapped by module
*/
const getConnectionsByModule = set => {
const getConnectionsByModule = (set) => {
const map = new Map();
/** @type {Module | 0} */
let lastModule = 0;
@ -956,7 +956,7 @@ class ModuleGraph {
* @param {Module} module the module
* @returns {ModuleGraph} the module graph
*/
module => {
(module) => {
const moduleGraph = moduleGraphForModuleMap.get(module);
if (!moduleGraph) {
throw new Error(

@ -26,7 +26,7 @@ const JavascriptModulesPlugin = require("./javascript/JavascriptModulesPlugin");
* @param {Iterable<T>} iterable iterable
* @returns {string} joined with comma
*/
const joinIterableWithComma = iterable => {
const joinIterableWithComma = (iterable) => {
// This is more performant than Array.from().join(", ")
// as it doesn't create an array
let str = "";
@ -94,7 +94,7 @@ const printExportsInfoToSource = (
? ` -> ${target.module.readableIdentifier(requestShortener)}${
target.export
? ` .${target.export
.map(e => JSON.stringify(e).slice(1, -1))
.map((e) => JSON.stringify(e).slice(1, -1))
.join(".")}`
: ""
}`
@ -165,7 +165,7 @@ class ModuleInfoHeaderPlugin {
*/
apply(compiler) {
const { _verbose: verbose } = this;
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
const javascriptHooks =
JavascriptModulesPlugin.getCompilationHooks(compilation);
javascriptHooks.renderModulePackage.tap(

View File

@ -43,7 +43,7 @@ class ModuleParseError extends WebpackError {
"\nYou may need an appropriate loader to handle this file type.";
} else if (loaders.length >= 1) {
message += `\nFile was processed with these loaders:${loaders
.map(loader => `\n * ${loader}`)
.map((loader) => `\n * ${loader}`)
.join("")}`;
message +=
"\nYou may need an additional loader to handle the result of these loaders.";
@ -75,8 +75,10 @@ class ModuleParseError extends WebpackError {
const linesAfter = sourceLines.slice(lineNumber, lineNumber + 2);
message += `${linesBefore
.map(l => `\n| ${l}`)
.join("")}\n> ${theLine}${linesAfter.map(l => `\n| ${l}`).join("")}`;
.map((l) => `\n| ${l}`)
.join(
""
)}\n> ${theLine}${linesAfter.map((l) => `\n| ${l}`).join("")}`;
}
loc = { start: err.loc };

View File

@ -54,7 +54,7 @@ module.exports = class MultiCompiler {
constructor(compilers, options) {
if (!Array.isArray(compilers)) {
/** @type {Compiler[]} */
compilers = Object.keys(compilers).map(name => {
compilers = Object.keys(compilers).map((name) => {
/** @type {Record<string, Compiler>} */
(compilers)[name].name = name;
return /** @type {Record<string, Compiler>} */ (compilers)[name];
@ -65,16 +65,16 @@ module.exports = class MultiCompiler {
/** @type {SyncHook<[MultiStats]>} */
done: new SyncHook(["stats"]),
/** @type {MultiHook<SyncHook<[string | null, number]>>} */
invalid: new MultiHook(compilers.map(c => c.hooks.invalid)),
invalid: new MultiHook(compilers.map((c) => c.hooks.invalid)),
/** @type {MultiHook<AsyncSeriesHook<[Compiler]>>} */
run: new MultiHook(compilers.map(c => c.hooks.run)),
run: new MultiHook(compilers.map((c) => c.hooks.run)),
/** @type {SyncHook<[]>} */
watchClose: new SyncHook([]),
/** @type {MultiHook<AsyncSeriesHook<[Compiler]>>} */
watchRun: new MultiHook(compilers.map(c => c.hooks.watchRun)),
watchRun: new MultiHook(compilers.map((c) => c.hooks.watchRun)),
/** @type {MultiHook<SyncBailHook<[string, string, EXPECTED_ANY[] | undefined], true | void>>} */
infrastructureLog: new MultiHook(
compilers.map(c => c.hooks.infrastructureLog)
compilers.map((c) => c.hooks.infrastructureLog)
)
});
this.compilers = compilers;
@ -94,7 +94,7 @@ module.exports = class MultiCompiler {
const compilerIndex = index;
let compilerDone = false;
// eslint-disable-next-line no-loop-func
compiler.hooks.done.tap(CLASS_NAME, stats => {
compiler.hooks.done.tap(CLASS_NAME, (stats) => {
if (!compilerDone) {
compilerDone = true;
doneCompilers++;
@ -124,7 +124,7 @@ module.exports = class MultiCompiler {
* @param {WebpackError} warning warning
*/
const addWarning = (compiler, warning) => {
compiler.hooks.thisCompilation.tap(CLASS_NAME, compilation => {
compiler.hooks.thisCompilation.tap(CLASS_NAME, (compilation) => {
compilation.warnings.push(warning);
});
};
@ -152,7 +152,7 @@ module.exports = class MultiCompiler {
get options() {
return Object.assign(
this.compilers.map(c => c.options),
this.compilers.map((c) => c.options),
this._options
);
}
@ -254,7 +254,7 @@ module.exports = class MultiCompiler {
* @param {Compiler} compiler compiler
* @returns {boolean} target was found
*/
const targetFound = compiler => {
const targetFound = (compiler) => {
for (const edge of edges) {
if (edge.target === compiler) {
return true;
@ -276,7 +276,7 @@ module.exports = class MultiCompiler {
const dependencies = this.dependencies.get(source);
if (dependencies) {
for (const dep of dependencies) {
const target = this.compilers.find(c => c.name === dep);
const target = this.compilers.find((c) => c.name === dep);
if (!target) {
missing.push(dep);
} else {
@ -289,8 +289,10 @@ module.exports = class MultiCompiler {
}
}
/** @type {string[]} */
const errors = missing.map(m => `Compiler dependency \`${m}\` not found.`);
const stack = this.compilers.filter(c => !targetFound(c));
const errors = missing.map(
(m) => `Compiler dependency \`${m}\` not found.`
);
const stack = this.compilers.filter((c) => !targetFound(c));
while (stack.length > 0) {
const current = stack.pop();
for (const edge of edges) {
@ -307,7 +309,7 @@ module.exports = class MultiCompiler {
/** @type {string[]} */
const lines = [...edges]
.sort(sortEdges)
.map(edge => `${edge.source.name} -> ${edge.target.name}`);
.map((edge) => `${edge.source.name} -> ${edge.target.name}`);
lines.unshift("Circular dependency found in compiler dependencies.");
errors.unshift(lines.join("\n"));
}
@ -334,7 +336,7 @@ module.exports = class MultiCompiler {
* @param {string} d dependency
* @returns {boolean} when dependency was fulfilled
*/
const isDependencyFulfilled = d => fulfilledNames.has(d);
const isDependencyFulfilled = (d) => fulfilledNames.has(d);
/**
* @returns {Compiler[]} compilers
*/
@ -358,12 +360,12 @@ module.exports = class MultiCompiler {
* @param {Callback<Stats[]>} callback callback
* @returns {void}
*/
const runCompilers = callback => {
const runCompilers = (callback) => {
if (remainingCompilers.length === 0) return callback(null);
asyncLib.map(
getReadyCompilers(),
(compiler, callback) => {
fn(compiler, err => {
fn(compiler, (err) => {
if (err) return callback(err);
fulfilledNames.add(compiler.name);
runCompilers(callback);
@ -400,7 +402,7 @@ module.exports = class MultiCompiler {
// running-outdated -> blocked [running--] (when compilation is done)
/** @type {Node[]} */
const nodes = this.compilers.map(compiler => ({
const nodes = this.compilers.map((compiler) => ({
compiler,
setupResult: undefined,
result: undefined,
@ -472,7 +474,7 @@ module.exports = class MultiCompiler {
* @param {Node} node node
* @returns {void}
*/
const nodeInvalidFromParent = node => {
const nodeInvalidFromParent = (node) => {
if (node.state === "done") {
node.state = "blocked";
} else if (node.state === "running") {
@ -486,7 +488,7 @@ module.exports = class MultiCompiler {
* @param {Node} node node
* @returns {void}
*/
const nodeInvalid = node => {
const nodeInvalid = (node) => {
if (node.state === "done") {
node.state = "pending";
} else if (node.state === "running") {
@ -500,7 +502,7 @@ module.exports = class MultiCompiler {
* @param {Node} node node
* @returns {void}
*/
const nodeChange = node => {
const nodeChange = (node) => {
nodeInvalid(node);
if (node.state === "pending") {
node.state = "blocked";
@ -538,7 +540,7 @@ module.exports = class MultiCompiler {
if (
node.state === "queued" ||
(node.state === "blocked" &&
node.parents.every(p => p.state === "done"))
node.parents.every((p) => p.state === "done"))
) {
running++;
node.state = "starting";
@ -554,7 +556,7 @@ module.exports = class MultiCompiler {
if (
!errored &&
running === 0 &&
nodes.every(node => node.state === "done")
nodes.every((node) => node.state === "done")
) {
const stats = [];
for (const node of nodes) {
@ -653,7 +655,7 @@ module.exports = class MultiCompiler {
(compiler, callback) => {
compiler.close(callback);
},
error => {
(error) => {
callback(error);
}
);

View File

@ -36,21 +36,21 @@ class MultiStats {
}
get hash() {
return this.stats.map(stat => stat.hash).join("");
return this.stats.map((stat) => stat.hash).join("");
}
/**
* @returns {boolean} true if a child compilation encountered an error
*/
hasErrors() {
return this.stats.some(stat => stat.hasErrors());
return this.stats.some((stat) => stat.hasErrors());
}
/**
* @returns {boolean} true if a child compilation had a warning
*/
hasWarnings() {
return this.stats.some(stat => stat.hasWarnings());
return this.stats.some((stat) => stat.hasWarnings());
}
/**
@ -93,12 +93,12 @@ class MultiStats {
);
});
return {
version: children.every(o => o.version),
hash: children.every(o => o.hash),
errorsCount: children.every(o => o.errorsCount),
warningsCount: children.every(o => o.warningsCount),
errors: children.every(o => o.errors),
warnings: children.every(o => o.warnings),
version: children.every((o) => o.version),
hash: children.every((o) => o.hash),
errorsCount: children.every((o) => o.errorsCount),
warningsCount: children.every((o) => o.warningsCount),
errors: children.every((o) => o.errors),
warnings: children.every((o) => o.warnings),
children
};
}
@ -130,7 +130,7 @@ class MultiStats {
obj.version = obj.children[0].version;
}
if (childOptions.hash) {
obj.hash = obj.children.map(j => j.hash).join("");
obj.hash = obj.children.map((j) => j.hash).join("");
}
/**
* @param {StatsCompilation} j stats error

View File

@ -67,7 +67,7 @@ class MultiWatching {
(watching, finishedCallback) => {
watching.close(finishedCallback);
},
err => {
(err) => {
this.compiler.hooks.watchClose.call();
if (typeof callback === "function") {
this.compiler.running = false;

View File

@ -16,10 +16,10 @@ class NoEmitOnErrorsPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.shouldEmit.tap(PLUGIN_NAME, compilation => {
compiler.hooks.shouldEmit.tap(PLUGIN_NAME, (compilation) => {
if (compilation.getStats().hasErrors()) return false;
});
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
compilation.hooks.shouldRecord.tap(PLUGIN_NAME, () => {
if (compilation.getStats().hasErrors()) return false;
});

View File

@ -74,7 +74,7 @@ class NodeStuffPlugin {
if (localOptions.global !== false) {
const withWarning = localOptions.global === "warn";
parser.hooks.expression.for("global").tap(PLUGIN_NAME, expr => {
parser.hooks.expression.for("global").tap(PLUGIN_NAME, (expr) => {
const dep = new ConstDependency(
RuntimeGlobals.global,
/** @type {Range} */ (expr.range),
@ -94,7 +94,7 @@ class NodeStuffPlugin {
);
}
});
parser.hooks.rename.for("global").tap(PLUGIN_NAME, expr => {
parser.hooks.rename.for("global").tap(PLUGIN_NAME, (expr) => {
const dep = new ConstDependency(
RuntimeGlobals.global,
/** @type {Range} */ (expr.range),
@ -115,7 +115,7 @@ class NodeStuffPlugin {
const setModuleConstant = (expressionName, fn, warning) => {
parser.hooks.expression
.for(expressionName)
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
const dep = new CachedConstDependency(
JSON.stringify(fn(parser.state.module)),
/** @type {Range} */ (expr.range),
@ -143,7 +143,7 @@ class NodeStuffPlugin {
const setUrlModuleConstant = (expressionName, fn) => {
parser.hooks.expression
.for(expressionName)
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
const dep = new ExternalModuleDependency(
"url",
[
@ -189,11 +189,11 @@ class NodeStuffPlugin {
case "node-module":
setUrlModuleConstant(
"__filename",
functionName => `${functionName}(import.meta.url)`
(functionName) => `${functionName}(import.meta.url)`
);
break;
case true:
setModuleConstant("__filename", module =>
setModuleConstant("__filename", (module) =>
relative(
/** @type {InputFileSystem} */ (compiler.inputFileSystem),
context,
@ -205,7 +205,7 @@ class NodeStuffPlugin {
parser.hooks.evaluateIdentifier
.for("__filename")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
if (!parser.state.module) return;
const resource = parseResource(parser.state.module.resource);
return evaluateToString(resource.path)(expr);
@ -226,12 +226,12 @@ class NodeStuffPlugin {
case "node-module":
setUrlModuleConstant(
"__dirname",
functionName =>
(functionName) =>
`${functionName}(import.meta.url + "/..").slice(0, -1)`
);
break;
case true:
setModuleConstant("__dirname", module =>
setModuleConstant("__dirname", (module) =>
relative(
/** @type {InputFileSystem} */ (compiler.inputFileSystem),
context,
@ -243,7 +243,7 @@ class NodeStuffPlugin {
parser.hooks.evaluateIdentifier
.for("__dirname")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
if (!parser.state.module) return;
return evaluateToString(
/** @type {string} */ (parser.state.module.context)

View File

@ -163,17 +163,17 @@ const contextifySourceMap = (context, sourceMap, associatedObjectForCache) => {
const { sourceRoot } = sourceMap;
/** @type {(source: string) => string} */
const mapper = !sourceRoot
? source => source
? (source) => source
: sourceRoot.endsWith("/")
? source =>
? (source) =>
source.startsWith("/")
? `${sourceRoot.slice(0, -1)}${source}`
: `${sourceRoot}${source}`
: source =>
: (source) =>
source.startsWith("/")
? `${sourceRoot}${source}`
: `${sourceRoot}/${source}`;
const newSources = sourceMap.sources.map(source =>
const newSources = sourceMap.sources.map((source) =>
contextifySourceUrl(context, mapper(source), associatedObjectForCache)
);
return {
@ -188,7 +188,7 @@ const contextifySourceMap = (context, sourceMap, associatedObjectForCache) => {
* @param {string | Buffer} input the input
* @returns {string} the converted string
*/
const asString = input => {
const asString = (input) => {
if (Buffer.isBuffer(input)) {
return input.toString("utf8");
}
@ -199,7 +199,7 @@ const asString = input => {
* @param {string | Buffer} input the input
* @returns {Buffer} the converted buffer
*/
const asBuffer = input => {
const asBuffer = (input) => {
if (!Buffer.isBuffer(input)) {
return Buffer.from(input, "utf8");
}
@ -279,14 +279,14 @@ class NormalModule extends Module {
beforeParse: new SyncHook(["module"]),
beforeSnapshot: new SyncHook(["module"]),
// TODO webpack 6 deprecate
readResourceForScheme: new HookMap(scheme => {
readResourceForScheme: new HookMap((scheme) => {
const hook =
/** @type {NormalModuleCompilationHooks} */
(hooks).readResource.for(scheme);
return createFakeHook(
/** @type {AsyncSeriesBailHook<[string, NormalModule], string | Buffer | null>} */ ({
tap: (options, fn) =>
hook.tap(options, loaderContext =>
hook.tap(options, (loaderContext) =>
fn(
loaderContext.resource,
/** @type {NormalModule} */ (loaderContext._module)
@ -301,7 +301,7 @@ class NormalModule extends Module {
)
),
tapPromise: (options, fn) =>
hook.tapPromise(options, loaderContext =>
hook.tapPromise(options, (loaderContext) =>
fn(
loaderContext.resource,
/** @type {NormalModule} */ (loaderContext._module)
@ -599,19 +599,19 @@ class NormalModule extends Module {
*/
const getResolveContext = () => ({
fileDependencies: {
add: d =>
add: (d) =>
/** @type {LoaderContext<EXPECTED_ANY>} */ (
loaderContext
).addDependency(d)
},
contextDependencies: {
add: d =>
add: (d) =>
/** @type {LoaderContext<EXPECTED_ANY>} */ (
loaderContext
).addContextDependency(d)
},
missingDependencies: {
add: d =>
add: (d) =>
/** @type {LoaderContext<EXPECTED_ANY>} */ (
loaderContext
).addMissingDependency(d)
@ -660,7 +660,7 @@ class NormalModule extends Module {
* @param {HashFunction=} type type
* @returns {Hash} hash
*/
createHash: type =>
createHash: (type) =>
createHash(
type ||
/** @type {HashFunction} */
@ -674,7 +674,7 @@ class NormalModule extends Module {
* @param {import("../declarations/LoaderContext").Schema=} schema schema
* @returns {T} options
*/
getOptions: schema => {
getOptions: (schema) => {
const loader = this.getCurrentLoader(
/** @type {LoaderContext<EXPECTED_ANY>} */ (loaderContext)
);
@ -716,7 +716,7 @@ class NormalModule extends Module {
return /** @type {T} */ (options);
},
emitWarning: warning => {
emitWarning: (warning) => {
if (!(warning instanceof Error)) {
warning = new NonErrorEmittedError(warning);
}
@ -726,7 +726,7 @@ class NormalModule extends Module {
})
);
},
emitError: error => {
emitError: (error) => {
if (!(error instanceof Error)) {
error = new NonErrorEmittedError(error);
}
@ -736,7 +736,7 @@ class NormalModule extends Module {
})
);
},
getLogger: name => {
getLogger: (name) => {
const currentLoader = this.getCurrentLoader(
/** @type {LoaderContext<EXPECTED_ANY>} */ (loaderContext)
);
@ -802,7 +802,7 @@ class NormalModule extends Module {
);
assetsInfo.set(name, assetInfo);
},
addBuildDependency: dep => {
addBuildDependency: (dep) => {
const buildInfo = /** @type {BuildInfo} */ (this.buildInfo);
if (buildInfo.buildDependencies === undefined) {
@ -1190,7 +1190,7 @@ class NormalModule extends Module {
const hooks = NormalModule.getCompilationHooks(compilation);
return this._doBuild(options, compilation, resolver, fs, hooks, err => {
return this._doBuild(options, compilation, resolver, fs, hooks, (err) => {
// if we have an error mark module as failed and exit
if (err) {
this.markModuleAsErrored(err);
@ -1202,9 +1202,9 @@ class NormalModule extends Module {
* @param {Error} e error
* @returns {void}
*/
const handleParseError = e => {
const handleParseError = (e) => {
const source = /** @type {Source} */ (this._source).source();
const loaders = this.loaders.map(item =>
const loaders = this.loaders.map((item) =>
contextify(
/** @type {string} */ (options.context),
item.loader,
@ -1220,7 +1220,7 @@ class NormalModule extends Module {
const handleParseResult = () => {
this.dependencies.sort(
concatComparators(
compareSelect(a => a.loc, compareLocations),
compareSelect((a) => a.loc, compareLocations),
keepOriginalOrder(this.dependencies)
)
);
@ -1252,7 +1252,7 @@ class NormalModule extends Module {
/**
* @param {LazySet<string>} deps deps
*/
const checkDependencies = deps => {
const checkDependencies = (deps) => {
for (const dep of deps) {
if (!ABSOLUTE_PATH_REGEX.test(dep)) {
if (nonAbsoluteDependencies === undefined) {

View File

@ -124,7 +124,7 @@ const LEADING_DOT_EXTENSION_REGEX = /^[^.]/;
* @param {LoaderItem} data data
* @returns {string} ident
*/
const loaderToIdent = data => {
const loaderToIdent = (data) => {
if (!data.options) {
return data.loader;
}
@ -158,7 +158,7 @@ const stringifyLoadersAndResource = (loaders, resource) => {
* @param {(err?: null | Error) => void} callback callback
* @returns {(err?: null | Error) => void} callback
*/
const needCalls = (times, callback) => err => {
const needCalls = (times, callback) => (err) => {
if (--times === 0) {
return callback(err);
}
@ -204,7 +204,7 @@ const mergeGlobalOptions = (globalOptions, type, localOptions) => {
* @returns {string} result
*/
const deprecationChangedHookMessage = (name, hook) => {
const names = hook.taps.map(tapped => tapped.name).join(", ");
const names = hook.taps.map((tapped) => tapped.name).join(", ");
return (
`NormalModuleFactory.${name} (${names}) is no longer a waterfall hook, but a bailing hook instead. ` +
@ -227,7 +227,7 @@ const ruleSetCompiler = new RuleSetCompiler([
new BasicMatcherRulePlugin("issuer"),
new BasicMatcherRulePlugin("compiler"),
new BasicMatcherRulePlugin("issuerLayer"),
new ObjectMatcherRulePlugin("assert", "assertions", value => {
new ObjectMatcherRulePlugin("assert", "assertions", (value) => {
if (value) {
return (
/** @type {ImportAttributes} */ (value)._isLegacyAssert !== undefined
@ -236,7 +236,7 @@ const ruleSetCompiler = new RuleSetCompiler([
return false;
}),
new ObjectMatcherRulePlugin("with", "assertions", value => {
new ObjectMatcherRulePlugin("with", "assertions", (value) => {
if (value) {
return !(/** @type {ImportAttributes} */ (value)._isLegacyAssert);
}
@ -498,7 +498,7 @@ class NormalModuleFactory extends ModuleFactory {
)
.split(/!+/);
unresolvedResource = /** @type {string} */ (rawElements.pop());
elements = rawElements.map(el => {
elements = rawElements.map((el) => {
const { path, query } = cachedParseResourceWithoutFragment(el);
return {
loader: path,
@ -528,7 +528,7 @@ class NormalModuleFactory extends ModuleFactory {
/** @type {undefined | LoaderItem[]} */
let loaders;
const continueCallback = needCalls(2, err => {
const continueCallback = needCalls(2, (err) => {
if (err) return callback(err);
// translate option idents
@ -656,7 +656,7 @@ class NormalModuleFactory extends ModuleFactory {
/** @type {undefined | LoaderItem[]} */
let preLoaders;
const continueCallback = needCalls(3, err => {
const continueCallback = needCalls(3, (err) => {
if (err) {
return callback(err);
}
@ -775,7 +775,7 @@ class NormalModuleFactory extends ModuleFactory {
/**
* @param {string} context context
*/
const defaultResolve = context => {
const defaultResolve = (context) => {
if (/^($|\?)/.test(unresolvedResource)) {
resourceData = {
...cacheParseResource(unresolvedResource),
@ -835,7 +835,7 @@ class NormalModuleFactory extends ModuleFactory {
};
this.hooks.resolveForScheme
.for(scheme)
.callAsync(resourceData, data, err => {
.callAsync(resourceData, data, (err) => {
if (err) return continueCallback(err);
continueCallback();
});
@ -1016,13 +1016,15 @@ ${hints.join("\n\n")}`;
// Check if the extension is missing a leading dot (e.g. "js" instead of ".js")
let appendResolveExtensionsHint = false;
const specifiedExtensions = [...resolver.options.extensions];
const expectedExtensions = specifiedExtensions.map(extension => {
if (LEADING_DOT_EXTENSION_REGEX.test(extension)) {
appendResolveExtensionsHint = true;
return `.${extension}`;
const expectedExtensions = specifiedExtensions.map(
(extension) => {
if (LEADING_DOT_EXTENSION_REGEX.test(extension)) {
appendResolveExtensionsHint = true;
return `.${extension}`;
}
return extension;
}
return extension;
});
);
if (appendResolveExtensionsHint) {
err.message += `\nDid you miss the leading dot in 'resolve.extensions'? Did you mean '${JSON.stringify(
expectedExtensions
@ -1059,7 +1061,7 @@ ${hints.join("\n\n")}`;
) {
asyncLib.parallel(
[
callback => {
(callback) => {
if (!resolver.options.fullySpecified) return callback();
resolver
.withOptions({
@ -1089,7 +1091,7 @@ Add the extension to the request.`
}
);
},
callback => {
(callback) => {
if (!resolver.options.enforceExtension) return callback();
resolver
.withOptions({
@ -1128,7 +1130,7 @@ Including the extension in the request is no longer possible. Did you mean to en
}
);
},
callback => {
(callback) => {
if (
/^\.\.?\//.test(unresolvedResource) ||
resolver.options.preferRelative
@ -1143,7 +1145,7 @@ Including the extension in the request is no longer possible. Did you mean to en
(err, resolvedResource) => {
if (err || !resolvedResource) return callback();
const moduleDirectories = resolver.options.modules
.map(m => (Array.isArray(m) ? m.join(", ") : m))
.map((m) => (Array.isArray(m) ? m.join(", ") : m))
.join(", ");
callback(
null,
@ -1201,7 +1203,7 @@ If changing the source code is not an option there is also a resolve options cal
context,
`${item.loader}-loader`,
resolveContext,
err2 => {
(err2) => {
if (!err2) {
err.message =
`${err.message}\n` +

View File

@ -34,8 +34,8 @@ class NormalModuleReplacementPlugin {
apply(compiler) {
const resourceRegExp = this.resourceRegExp;
const newResource = this.newResource;
compiler.hooks.normalModuleFactory.tap(PLUGIN_NAME, nmf => {
nmf.hooks.beforeResolve.tap(PLUGIN_NAME, result => {
compiler.hooks.normalModuleFactory.tap(PLUGIN_NAME, (nmf) => {
nmf.hooks.beforeResolve.tap(PLUGIN_NAME, (result) => {
if (resourceRegExp.test(result.request)) {
if (typeof newResource === "function") {
newResource(result);
@ -44,7 +44,7 @@ class NormalModuleReplacementPlugin {
}
}
});
nmf.hooks.afterResolve.tap(PLUGIN_NAME, result => {
nmf.hooks.afterResolve.tap(PLUGIN_NAME, (result) => {
const createData = result.createData;
if (resourceRegExp.test(/** @type {string} */ (createData.resource))) {
if (typeof newResource === "function") {

View File

@ -45,7 +45,7 @@ class PrefetchPlugin {
compilation.addModuleChain(
this.context || compiler.context,
new PrefetchDependency(this.request),
err => {
(err) => {
callback(err);
}
);

View File

@ -74,7 +74,7 @@ const createDefaultHandler = (profile, logger) => {
lastStateInfo.length = 0;
}
const fullState = [msg, ...args];
const state = fullState.map(s => s.replace(/\d+\/\d+ /g, ""));
const state = fullState.map((s) => s.replace(/\d+\/\d+ /g, ""));
const now = Date.now();
const len = Math.max(state.length, lastStateInfo.length);
for (let i = len; i >= 0; i--) {
@ -354,7 +354,7 @@ class ProgressPlugin {
/**
* @param {Module} module the module
*/
const moduleBuild = module => {
const moduleBuild = (module) => {
const ident = module.identifier();
if (ident) {
activeModules.add(ident);
@ -375,7 +375,7 @@ class ProgressPlugin {
/**
* @param {Module} module the module
*/
const moduleDone = module => {
const moduleDone = (module) => {
doneModules++;
if (showActiveModules) {
const ident = module.identifier();
@ -411,7 +411,7 @@ class ProgressPlugin {
compiler.hooks.beforeCompile.tap(PLUGIN_NAME, () => {
if (!cacheGetPromise) {
cacheGetPromise = cache.getPromise().then(
data => {
(data) => {
if (data) {
lastModulesCount = lastModulesCount || data.modulesCount;
lastDependenciesCount =
@ -419,17 +419,17 @@ class ProgressPlugin {
}
return data;
},
_err => {
(_err) => {
// Ignore error
}
);
}
});
compiler.hooks.afterCompile.tapPromise(PLUGIN_NAME, compilation => {
compiler.hooks.afterCompile.tapPromise(PLUGIN_NAME, (compilation) => {
if (compilation.compiler.isChild()) return Promise.resolve();
return /** @type {Promise<CountsData>} */ (cacheGetPromise).then(
async oldData => {
async (oldData) => {
const realModulesCount = modulesCount - skippedModulesCount;
const realDependenciesCount =
dependenciesCount - skippedDependenciesCount;
@ -448,7 +448,7 @@ class ProgressPlugin {
);
});
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
if (compilation.compiler.isChild()) return;
lastModulesCount = modulesCount;
lastEntriesCount = entriesCount;
@ -461,12 +461,12 @@ class ProgressPlugin {
0;
doneModules = doneDependencies = doneEntries = 0;
compilation.factorizeQueue.hooks.added.tap(PLUGIN_NAME, item =>
compilation.factorizeQueue.hooks.added.tap(PLUGIN_NAME, (item) =>
factorizeAdd(compilation.factorizeQueue, item)
);
compilation.factorizeQueue.hooks.result.tap(PLUGIN_NAME, factorizeDone);
compilation.addModuleQueue.hooks.added.tap(PLUGIN_NAME, item =>
compilation.addModuleQueue.hooks.added.tap(PLUGIN_NAME, (item) =>
moduleAdd(compilation.addModuleQueue, item)
);
compilation.processDependenciesQueue.hooks.result.tap(
@ -487,7 +487,7 @@ class ProgressPlugin {
const requiredLoaders = new Set();
NormalModule.getCompilationHooks(compilation).beforeLoaders.tap(
PLUGIN_NAME,
loaders => {
(loaders) => {
for (const loader of loaders) {
if (
loader.type !== "module" &&

View File

@ -74,7 +74,7 @@ class ProvidePlugin {
}
}
parser.hooks.expression.for(name).tap(PLUGIN_NAME, expr => {
parser.hooks.expression.for(name).tap(PLUGIN_NAME, (expr) => {
const nameIdentifier = name.includes(".")
? `__webpack_provided_${name.replace(/\./g, "_dot_")}`
: name;
@ -89,7 +89,7 @@ class ProvidePlugin {
return true;
});
parser.hooks.call.for(name).tap(PLUGIN_NAME, expr => {
parser.hooks.call.for(name).tap(PLUGIN_NAME, (expr) => {
const nameIdentifier = name.includes(".")
? `__webpack_provided_${name.replace(/\./g, "_dot_")}`
: name;

View File

@ -63,14 +63,14 @@ class RecordIdsPlugin {
* @param {Module} module the module
* @returns {string} the (portable) identifier
*/
const getModuleIdentifier = module => {
const getModuleIdentifier = (module) => {
if (portableIds) {
return makePathsRelative(module.identifier());
}
return module.identifier();
};
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
compilation.hooks.recordModules.tap(PLUGIN_NAME, (modules, records) => {
const chunkGraph = compilation.chunkGraph;
if (!records.modules) records.modules = {};
@ -112,7 +112,7 @@ class RecordIdsPlugin {
* @param {Chunk} chunk the chunk
* @returns {string[]} sources of the chunk
*/
const getChunkSources = chunk => {
const getChunkSources = (chunk) => {
/** @type {string[]} */
const sources = [];
for (const chunkGroup of chunk.groupsIterable) {

View File

@ -36,7 +36,7 @@ const EMPTY_RESOLVE_OPTIONS = {};
* @param {ResolveOptionsWithDependencyType} resolveOptionsWithDepType enhanced options
* @returns {ResolveOptions} merged options
*/
const convertToResolveOptions = resolveOptionsWithDepType => {
const convertToResolveOptions = (resolveOptionsWithDepType) => {
const { dependencyType, plugins, ...remaining } = resolveOptionsWithDepType;
// check type compat
@ -46,7 +46,7 @@ const convertToResolveOptions = resolveOptionsWithDepType => {
plugins:
plugins &&
/** @type {ResolvePluginInstance[]} */ (
plugins.filter(item => item !== "...")
plugins.filter((item) => item !== "...")
)
};
@ -142,7 +142,7 @@ module.exports = class ResolverFactory {
}
/** @type {WeakMap<Partial<ResolveOptionsWithDependencyType>, ResolverWithOptions>} */
const childCache = new WeakMap();
resolver.withOptions = options => {
resolver.withOptions = (options) => {
const cacheEntry = childCache.get(options);
if (cacheEntry !== undefined) return cacheEntry;
const mergedOptions = cachedCleverMerge(originalResolveOptions, options);

View File

@ -118,13 +118,13 @@ class RuntimePlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
const globalChunkLoading = compilation.outputOptions.chunkLoading;
/**
* @param {Chunk} chunk chunk
* @returns {boolean} true, when chunk loading is disabled for the chunk
*/
const isChunkLoadingDisabledForChunk = chunk => {
const isChunkLoadingDisabledForChunk = (chunk) => {
const options = chunk.getEntryOptions();
const chunkLoading =
options && options.chunkLoading !== undefined
@ -168,7 +168,7 @@ class RuntimePlugin {
}
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.definePropertyGetters)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
compilation.addRuntimeModule(
chunk,
new DefinePropertyGettersRuntimeModule()
@ -177,7 +177,7 @@ class RuntimePlugin {
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.makeNamespaceObject)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
compilation.addRuntimeModule(
chunk,
new MakeNamespaceObjectRuntimeModule()
@ -186,7 +186,7 @@ class RuntimePlugin {
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.createFakeNamespaceObject)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
compilation.addRuntimeModule(
chunk,
new CreateFakeNamespaceObjectRuntimeModule()
@ -206,7 +206,7 @@ class RuntimePlugin {
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.hasOwnProperty)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
compilation.addRuntimeModule(
chunk,
new HasOwnPropertyRuntimeModule()
@ -215,7 +215,7 @@ class RuntimePlugin {
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.compatGetDefaultExport)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
compilation.addRuntimeModule(
chunk,
new CompatGetDefaultExportRuntimeModule()
@ -224,7 +224,7 @@ class RuntimePlugin {
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.runtimeId)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
compilation.addRuntimeModule(chunk, new RuntimeIdRuntimeModule());
return true;
});
@ -259,13 +259,13 @@ class RuntimePlugin {
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.global)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
compilation.addRuntimeModule(chunk, new GlobalRuntimeModule());
return true;
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.asyncModule)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
const experiments = compilation.options.experiments;
compilation.addRuntimeModule(
chunk,
@ -275,7 +275,7 @@ class RuntimePlugin {
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.systemContext)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
const entryOptions = chunk.getEntryOptions();
const libraryType =
entryOptions && entryOptions.library !== undefined
@ -308,7 +308,7 @@ class RuntimePlugin {
"javascript",
"javascript",
RuntimeGlobals.getChunkScriptFilename,
chunk =>
(chunk) =>
getJavascriptModulesPlugin().chunkHasJs(chunk, chunkGraph) &&
/** @type {TemplatePath} */ (
chunk.filenameTemplate ||
@ -338,7 +338,7 @@ class RuntimePlugin {
"css",
"css",
RuntimeGlobals.getChunkCssFilename,
chunk =>
(chunk) =>
getCssModulesPlugin().chunkHasCss(chunk, chunkGraph) &&
getChunkFilenameTemplate(chunk, compilation.outputOptions),
set.has(RuntimeGlobals.hmrDownloadUpdateHandlers)
@ -363,7 +363,7 @@ class RuntimePlugin {
"javascript",
"javascript update",
RuntimeGlobals.getChunkUpdateScriptFilename,
_chunk =>
(_chunk) =>
/** @type {NonNullable<OutputNormalized["hotUpdateChunkFilename"]>} */
(compilation.outputOptions.hotUpdateChunkFilename),
true
@ -480,7 +480,7 @@ class RuntimePlugin {
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.baseURI)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
if (isChunkLoadingDisabledForChunk(chunk)) {
compilation.addRuntimeModule(chunk, new BaseUriRuntimeModule());
return true;
@ -488,7 +488,7 @@ class RuntimePlugin {
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.scriptNonce)
.tap(PLUGIN_NAME, chunk => {
.tap(PLUGIN_NAME, (chunk) => {
compilation.addRuntimeModule(chunk, new NonceRuntimeModule());
return true;
});

View File

@ -49,12 +49,12 @@ This should not happen.
It's in these chunks: ${
Array.from(
chunkGraph.getModuleChunksIterable(module),
c => c.name || c.id || c.debugId
(c) => c.name || c.id || c.debugId
).join(", ") || "none"
} (If module is in no chunk this indicates a bug in some chunk/module optimization logic)
Module has these incoming connections: ${Array.from(
chunkGraph.moduleGraph.getIncomingConnections(module),
connection =>
(connection) =>
`\n - ${
connection.originModule && connection.originModule.identifier()
} ${connection.dependency && connection.dependency.type} ${
@ -229,7 +229,7 @@ class RuntimeTemplate {
if (concatenationCost <= templateCost) return this._es5Concatenation(args);
return `\`${args
.map(arg => (typeof arg === "string" ? arg : `\${${arg.expr}}`))
.map((arg) => (typeof arg === "string" ? arg : `\${${arg.expr}}`))
.join("")}\``;
}
@ -240,7 +240,7 @@ class RuntimeTemplate {
*/
_es5Concatenation(args) {
const str = args
.map(arg => (typeof arg === "string" ? JSON.stringify(arg) : arg.expr))
.map((arg) => (typeof arg === "string" ? JSON.stringify(arg) : arg.expr))
.join(" + ");
// when the first two args are expression, we need to prepend "" + to force string
@ -290,7 +290,9 @@ class RuntimeTemplate {
return this.supportsDestructuring()
? `var {${items.join(", ")}} = ${value};`
: Template.asString(
items.map(item => `var ${item} = ${value}${propertyAccess([item])};`)
items.map(
(item) => `var ${item} = ${value}${propertyAccess([item])};`
)
);
}
@ -332,12 +334,12 @@ class RuntimeTemplate {
if (this.outputOptions.pathinfo) {
content = [message, request, chunkName, chunkReason]
.filter(Boolean)
.map(item => this.requestShortener.shorten(item))
.map((item) => this.requestShortener.shorten(item))
.join(" | ");
} else {
content = [message, chunkName, chunkReason]
.filter(Boolean)
.map(item => this.requestShortener.shorten(item))
.map((item) => this.requestShortener.shorten(item))
.join(" | ");
}
if (!content) return "";
@ -763,14 +765,14 @@ class RuntimeTemplate {
if (typeof runtimeCondition === "boolean") return `${runtimeCondition}`;
/** @type {Set<string>} */
const positiveRuntimeIds = new Set();
forEachRuntime(runtimeCondition, runtime =>
forEachRuntime(runtimeCondition, (runtime) =>
positiveRuntimeIds.add(
`${chunkGraph.getRuntimeId(/** @type {string} */ (runtime))}`
)
);
/** @type {Set<string>} */
const negativeRuntimeIds = new Set();
forEachRuntime(subtractRuntime(runtime, runtimeCondition), runtime =>
forEachRuntime(subtractRuntime(runtime, runtimeCondition), (runtime) =>
negativeRuntimeIds.add(
`${chunkGraph.getRuntimeId(/** @type {string} */ (runtime))}`
)
@ -865,7 +867,7 @@ class RuntimeTemplate {
this,
exportsType,
moduleId,
Array.from(outgoingAsyncModules, mod => chunkGraph.getModuleId(mod))
Array.from(outgoingAsyncModules, (mod) => chunkGraph.getModuleId(mod))
)};\n`;
return [importContent, ""];
@ -1087,7 +1089,7 @@ class RuntimeTemplate {
return `Promise.resolve(${comment.trim()})`;
}
const chunks = chunkGroup.chunks.filter(
chunk => !chunk.hasRuntime() && chunk.id !== null
(chunk) => !chunk.hasRuntime() && chunk.id !== null
);
const comment = this.comment({
message,
@ -1119,7 +1121,7 @@ class RuntimeTemplate {
* @param {Chunk} chunk chunk
* @returns {string} require chunk id code
*/
const requireChunkId = chunk =>
const requireChunkId = (chunk) =>
`${RuntimeGlobals.ensureChunk}(${JSON.stringify(chunk.id)}${
fetchPriority ? `, ${JSON.stringify(fetchPriority)}` : ""
})`;

View File

@ -9,7 +9,7 @@
* @param {number} size the size in bytes
* @returns {string} the formatted size
*/
module.exports.formatSize = size => {
module.exports.formatSize = (size) => {
if (typeof size !== "number" || Number.isNaN(size) === true) {
return "unknown size";
}

View File

@ -27,17 +27,17 @@ class SourceMapDevToolModuleOptionsPlugin {
apply(compilation) {
const options = this.options;
if (options.module !== false) {
compilation.hooks.buildModule.tap(PLUGIN_NAME, module => {
compilation.hooks.buildModule.tap(PLUGIN_NAME, (module) => {
module.useSourceMap = true;
});
compilation.hooks.runtimeModule.tap(PLUGIN_NAME, module => {
compilation.hooks.runtimeModule.tap(PLUGIN_NAME, (module) => {
module.useSourceMap = true;
});
} else {
compilation.hooks.buildModule.tap(PLUGIN_NAME, module => {
compilation.hooks.buildModule.tap(PLUGIN_NAME, (module) => {
module.useSimpleSourceMap = true;
});
compilation.hooks.runtimeModule.tap(PLUGIN_NAME, module => {
compilation.hooks.runtimeModule.tap(PLUGIN_NAME, (module) => {
module.useSimpleSourceMap = true;
});
}

View File

@ -65,7 +65,7 @@ const URL_FORMATTING_REGEXP = /^\n\/\/(.*)$/;
* @param {RegExp} regexp Stateful Regular Expression to be reset
* @returns {void}
*/
const resetRegexpState = regexp => {
const resetRegexpState = (regexp) => {
regexp.lastIndex = -1;
};
@ -74,7 +74,7 @@ const resetRegexpState = regexp => {
* @param {string} str String to quote
* @returns {string} Escaped string
*/
const quoteMeta = str => str.replace(METACHARACTERS_REGEXP, "\\$&");
const quoteMeta = (str) => str.replace(METACHARACTERS_REGEXP, "\\$&");
/**
* Creating {@link SourceMapTask} for given file
@ -112,7 +112,7 @@ const getTaskForFile = (
const context = /** @type {string} */ (compilation.options.context);
const root = compilation.compiler.root;
const cachedAbsolutify = makePathsAbsolute.bindContextCache(context, root);
const modules = sourceMap.sources.map(source => {
const modules = sourceMap.sources.map((source) => {
if (!source.startsWith("webpack://")) return source;
source = cachedAbsolutify(source.slice(10));
const module = compilation.findModule(source);
@ -179,7 +179,7 @@ class SourceMapDevToolPlugin {
options
);
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
new SourceMapDevToolModuleOptionsPlugin(options).apply(compilation);
compilation.hooks.processAssets.tapAsync(
@ -348,7 +348,7 @@ class SourceMapDevToolPlugin {
callback();
});
},
err => {
(err) => {
if (err) {
return callback(err);
}
@ -431,7 +431,7 @@ class SourceMapDevToolPlugin {
"attach SourceMap"
);
const moduleFilenames = modules.map(m =>
const moduleFilenames = modules.map((m) =>
moduleToSourceNameMapping.get(m)
);
sourceMap.sources = /** @type {string[]} */ (moduleFilenames);
@ -454,7 +454,7 @@ class SourceMapDevToolPlugin {
: quoteMeta(contenthash);
sourceMap.file = sourceMap.file.replace(
new RegExp(pattern, "g"),
m => "x".repeat(m.length)
(m) => "x".repeat(m.length)
);
}
@ -584,7 +584,7 @@ class SourceMapDevToolPlugin {
compilation.updateAsset(file, asset);
}
task.cacheItem.store({ assets, assetsInfo }, err => {
task.cacheItem.store({ assets, assetsInfo }, (err) => {
reportProgress(
0.5 + (0.5 * ++taskIndex) / tasks.length,
task.file,
@ -597,7 +597,7 @@ class SourceMapDevToolPlugin {
callback();
});
},
err => {
(err) => {
reportProgress(1);
callback(err);
}

View File

@ -36,7 +36,7 @@ class Stats {
hasWarnings() {
return (
this.compilation.getWarnings().length > 0 ||
this.compilation.children.some(child => child.getStats().hasWarnings())
this.compilation.children.some((child) => child.getStats().hasWarnings())
);
}
@ -46,7 +46,7 @@ class Stats {
hasErrors() {
return (
this.compilation.errors.length > 0 ||
this.compilation.children.some(child => child.getStats().hasErrors())
this.compilation.children.some((child) => child.getStats().hasErrors())
);
}

View File

@ -296,7 +296,7 @@ class Template {
return null;
}
/** @type {{id: string|number, source: Source|string}[]} */
const allModules = modules.map(module => ({
const allModules = modules.map((module) => ({
id: /** @type {ModuleId} */ (chunkGraph.getModuleId(module)),
source: renderModule(module) || "false"
}));

View File

@ -24,7 +24,7 @@ const REGEXP = /\[\\*([\w:]+)\\*\]/gi;
* @param {string | number} id id
* @returns {string | number} result
*/
const prepareId = id => {
const prepareId = (id) => {
if (typeof id !== "string") return id;
if (/^"\s\+*.*\+\s*"$/.test(id)) {
@ -389,7 +389,7 @@ class TemplatedPathPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(plugin, compilation => {
compiler.hooks.compilation.tap(plugin, (compilation) => {
compilation.hooks.assetPath.tap(plugin, replacePathVariables);
});
}

View File

@ -36,7 +36,7 @@ class UseStrictPlugin {
* @param {JavascriptParserOptions} parserOptions the javascript parser options
*/
const handler = (parser, parserOptions) => {
parser.hooks.program.tap(PLUGIN_NAME, ast => {
parser.hooks.program.tap(PLUGIN_NAME, (ast) => {
const firstNode = ast.body[0];
if (
firstNode &&

View File

@ -20,7 +20,7 @@ class WarnCaseSensitiveModulesPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
compilation.hooks.seal.tap(PLUGIN_NAME, () => {
/** @type {Map<string, Map<string, Module>>} */
const moduleWithoutCase = new Map();

View File

@ -30,7 +30,7 @@ class WarnDeprecatedOptionPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, (compilation) => {
compilation.warnings.push(
new DeprecatedOptionWarning(this.option, this.value, this.suggestion)
);

View File

@ -18,7 +18,7 @@ class WarnNoModeSetPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, (compilation) => {
compilation.warnings.push(new NoModeWarning());
});
}

View File

@ -45,8 +45,8 @@ class IgnoringWatchFileSystem {
* @param {string} path path to check
* @returns {boolean} true, if path is ignored
*/
const ignored = path =>
this.paths.some(p =>
const ignored = (path) =>
this.paths.some((p) =>
p instanceof RegExp ? p.test(path) : path.indexOf(p) === 0
);

View File

@ -159,14 +159,14 @@ class Watching {
const run = () => {
if (this.compiler.idle) {
return this.compiler.cache.endIdle(err => {
return this.compiler.cache.endIdle((err) => {
if (err) return this._done(err);
this.compiler.idle = false;
run();
});
}
if (this._needRecords) {
return this.compiler.readRecords(err => {
return this.compiler.readRecords((err) => {
if (err) return this._done(err);
this._needRecords = false;
@ -175,7 +175,7 @@ class Watching {
}
this.invalid = false;
this._invalidReported = false;
this.compiler.hooks.watchRun.callAsync(this.compiler, err => {
this.compiler.hooks.watchRun.callAsync(this.compiler, (err) => {
if (err) return this._done(err);
/**
* @param {Error | null} err error
@ -196,13 +196,13 @@ class Watching {
process.nextTick(() => {
const logger = compilation.getLogger("webpack.Compiler");
logger.time("emitAssets");
this.compiler.emitAssets(compilation, err => {
this.compiler.emitAssets(compilation, (err) => {
logger.timeEnd("emitAssets");
if (err) return this._done(err, compilation);
if (this.invalid) return this._done(null, compilation);
logger.time("emitRecords");
this.compiler.emitRecords(err => {
this.compiler.emitRecords((err) => {
logger.timeEnd("emitRecords");
if (err) return this._done(err, compilation);
@ -215,11 +215,11 @@ class Watching {
compilation.endTime = Date.now();
logger.time("done hook");
const stats = new Stats(compilation);
this.compiler.hooks.done.callAsync(stats, err => {
this.compiler.hooks.done.callAsync(stats, (err) => {
logger.timeEnd("done hook");
if (err) return this._done(err, compilation);
this.compiler.hooks.additionalPass.callAsync(err => {
this.compiler.hooks.additionalPass.callAsync((err) => {
if (err) return this._done(err, compilation);
this.compiler.compile(onCompiled);
});
@ -288,7 +288,7 @@ class Watching {
logger.time("storeBuildDependencies");
this.compiler.cache.storeBuildDependencies(
compilation.buildDependencies,
err => {
(err) => {
logger.timeEnd("storeBuildDependencies");
if (err) return handleError(err);
this._go();
@ -311,7 +311,7 @@ class Watching {
const cbs = this.callbacks;
this.callbacks = [];
logger.time("done hook");
this.compiler.hooks.done.callAsync(/** @type {Stats} */ (stats), err => {
this.compiler.hooks.done.callAsync(/** @type {Stats} */ (stats), (err) => {
logger.timeEnd("done hook");
if (err) return handleError(err, cbs);
this.handler(null, stats);
@ -319,7 +319,7 @@ class Watching {
this.compiler.cache.storeBuildDependencies(
/** @type {Compilation} */
(compilation).buildDependencies,
err => {
(err) => {
logger.timeEnd("storeBuildDependencies");
if (err) return handleError(err, cbs);
logger.time("beginIdle");
@ -480,7 +480,7 @@ class Watching {
/**
* @param {WebpackError | null} err error if any
*/
const shutdown = err => {
const shutdown = (err) => {
this.compiler.hooks.watchClose.call();
const closeCallbacks =
/** @type {Callback<void>[]} */
@ -493,7 +493,7 @@ class Watching {
logger.time("storeBuildDependencies");
this.compiler.cache.storeBuildDependencies(
compilation.buildDependencies,
err2 => {
(err2) => {
logger.timeEnd("storeBuildDependencies");
shutdown(err || err2);
}

View File

@ -47,10 +47,10 @@ class WebpackIsIncludedPlugin {
* @param {JavascriptParser} parser the parser
* @returns {void}
*/
const handler = parser => {
const handler = (parser) => {
parser.hooks.call
.for("__webpack_is_included__")
.tap(PLUGIN_NAME, expr => {
.tap(PLUGIN_NAME, (expr) => {
if (
expr.type !== "CallExpression" ||
expr.arguments.length !== 1 ||

View File

@ -858,7 +858,7 @@ class WebpackOptionsApply extends OptionsApply {
}
compiler.resolverFactory.hooks.resolveOptions
.for("normal")
.tap(CLASS_NAME, resolveOptions => {
.tap(CLASS_NAME, (resolveOptions) => {
resolveOptions = cleverMerge(options.resolve, resolveOptions);
resolveOptions.fileSystem =
/** @type {InputFileSystem} */
@ -867,7 +867,7 @@ class WebpackOptionsApply extends OptionsApply {
});
compiler.resolverFactory.hooks.resolveOptions
.for("context")
.tap(CLASS_NAME, resolveOptions => {
.tap(CLASS_NAME, (resolveOptions) => {
resolveOptions = cleverMerge(options.resolve, resolveOptions);
resolveOptions.fileSystem =
/** @type {InputFileSystem} */
@ -877,7 +877,7 @@ class WebpackOptionsApply extends OptionsApply {
});
compiler.resolverFactory.hooks.resolveOptions
.for("loader")
.tap(CLASS_NAME, resolveOptions => {
.tap(CLASS_NAME, (resolveOptions) => {
resolveOptions = cleverMerge(options.resolveLoader, resolveOptions);
resolveOptions.fileSystem =
/** @type {InputFileSystem} */

View File

@ -152,7 +152,7 @@ const encodeDataUri = (encoding, source) => {
(encodedContent)
).replace(
/[!'()*]/g,
character =>
(character) =>
`%${/** @type {number} */ (character.codePointAt(0)).toString(16)}`
);
break;

View File

@ -31,7 +31,7 @@ const memoize = require("../util/memoize");
* @param {string} name name of definitions
* @returns {Schema} definition
*/
const getSchema = name => {
const getSchema = (name) => {
const { definitions } = require("../../schemas/WebpackOptions.json");
return {
@ -93,7 +93,7 @@ class AssetModulesPlugin {
(compilation, { normalModuleFactory }) => {
normalModuleFactory.hooks.createParser
.for(ASSET_MODULE_TYPE)
.tap(PLUGIN_NAME, parserOptions => {
.tap(PLUGIN_NAME, (parserOptions) => {
validateParserOptions(parserOptions);
parserOptions = cleverMerge(
/** @type {AssetParserOptions} */
@ -115,21 +115,21 @@ class AssetModulesPlugin {
});
normalModuleFactory.hooks.createParser
.for(ASSET_MODULE_TYPE_INLINE)
.tap(PLUGIN_NAME, _parserOptions => {
.tap(PLUGIN_NAME, (_parserOptions) => {
const AssetParser = getAssetParser();
return new AssetParser(true);
});
normalModuleFactory.hooks.createParser
.for(ASSET_MODULE_TYPE_RESOURCE)
.tap(PLUGIN_NAME, _parserOptions => {
.tap(PLUGIN_NAME, (_parserOptions) => {
const AssetParser = getAssetParser();
return new AssetParser(false);
});
normalModuleFactory.hooks.createParser
.for(ASSET_MODULE_TYPE_SOURCE)
.tap(PLUGIN_NAME, _parserOptions => {
.tap(PLUGIN_NAME, (_parserOptions) => {
const AssetSourceParser = getAssetSourceParser();
return new AssetSourceParser();
@ -142,7 +142,7 @@ class AssetModulesPlugin {
]) {
normalModuleFactory.hooks.createGenerator
.for(type)
.tap(PLUGIN_NAME, generatorOptions => {
.tap(PLUGIN_NAME, (generatorOptions) => {
validateGeneratorOptions[type](generatorOptions);
let dataUrl;

View File

@ -32,7 +32,7 @@ const getOutgoingAsyncModules = (moduleGraph, module) => {
for (const [module, connections] of outgoingConnectionMap) {
if (
connections.some(
c =>
(c) =>
c.dependency instanceof HarmonyImportDependency &&
c.isTargetActive(undefined)
) &&

View File

@ -19,9 +19,9 @@ class InferAsyncModulesPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
const { moduleGraph } = compilation;
compilation.hooks.finishModules.tap(PLUGIN_NAME, modules => {
compilation.hooks.finishModules.tap(PLUGIN_NAME, (modules) => {
/** @type {Set<Module>} */
const queue = new Set();
for (const module of modules) {
@ -37,7 +37,7 @@ class InferAsyncModulesPlugin {
] of moduleGraph.getIncomingConnectionsByOriginModule(module)) {
if (
connections.some(
c =>
(c) =>
c.dependency instanceof HarmonyImportDependency &&
c.isTargetActive(undefined)
)

View File

@ -269,7 +269,7 @@ const visitModules = (
* @param {Module} module The module to look up
* @returns {number} The ordinal of the module in masks
*/
const getModuleOrdinal = module => {
const getModuleOrdinal = (module) => {
let ordinal = ordinalByModule.get(module);
if (ordinal === undefined) {
ordinal = ordinalByModule.size;
@ -486,7 +486,7 @@ const visitModules = (
* @param {AsyncDependenciesBlock} b iterating over each Async DepBlock
* @returns {void}
*/
const iteratorBlock = b => {
const iteratorBlock = (b) => {
// 1. We create a chunk group with single chunk in it for this Block
// but only once (blockChunkGroups map)
/** @type {ChunkGroupInfo | undefined} */
@ -673,7 +673,7 @@ const visitModules = (
* @param {DependenciesBlock} block the block
* @returns {void}
*/
const processBlock = block => {
const processBlock = (block) => {
statProcessedBlocks++;
// get prepared block info
const blockModules = getBlockModules(block, chunkGroupInfo.runtime);
@ -765,7 +765,7 @@ const visitModules = (
* @param {DependenciesBlock} block the block
* @returns {void}
*/
const processEntryBlock = block => {
const processEntryBlock = (block) => {
statProcessedBlocks++;
// get prepared block info
const blockModules = getBlockModules(block, chunkGroupInfo.runtime);
@ -892,7 +892,7 @@ const visitModules = (
* @param {ChunkGroupInfo} chunkGroupInfo The info object for the chunk group
* @returns {bigint} The mask of available modules after the chunk group
*/
const calculateResultingAvailableModules = chunkGroupInfo => {
const calculateResultingAvailableModules = (chunkGroupInfo) => {
if (chunkGroupInfo.resultingAvailableModules !== undefined) {
return chunkGroupInfo.resultingAvailableModules;
}

View File

@ -23,7 +23,7 @@ class AddBuildDependenciesPlugin {
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
compilation.buildDependencies.addAll(this.buildDependencies);
});
}

View File

@ -68,7 +68,7 @@ class IdleFileCachePlugin {
{ name: PLUGIN_NAME, stage: Cache.STAGE_DISK },
(identifier, etag, gotHandlers) => {
const restore = () =>
strategy.restore(identifier, etag).then(cacheEntry => {
strategy.restore(identifier, etag).then((cacheEntry) => {
if (cacheEntry === undefined) {
gotHandlers.push((result, callback) => {
if (result !== undefined) {
@ -93,7 +93,7 @@ class IdleFileCachePlugin {
compiler.cache.hooks.storeBuildDependencies.tap(
{ name: PLUGIN_NAME, stage: Cache.STAGE_DISK },
dependencies => {
(dependencies) => {
pendingIdleTasks.set(BUILD_DEPENDENCIES_KEY, () =>
Promise.resolve().then(() =>
strategy.storeBuildDependencies(dependencies)
@ -113,7 +113,7 @@ class IdleFileCachePlugin {
const reportProgress = ProgressPlugin.getReporter(compiler);
const jobs = [...pendingIdleTasks.values()];
if (reportProgress) reportProgress(0, "process pending cache items");
const promises = jobs.map(fn => fn());
const promises = jobs.map((fn) => fn());
pendingIdleTasks.clear();
promises.push(currentIdlePromise);
const promise = Promise.all(promises);
@ -168,7 +168,7 @@ class IdleFileCachePlugin {
timeSpendInStore = 0;
timeSpendInBuild = 0;
})
.catch(err => {
.catch((err) => {
const logger = compiler.getInfrastructureLogger(PLUGIN_NAME);
logger.warn(`Background tasks during idle failed: ${err.message}`);
logger.debug(err.stack);
@ -229,7 +229,7 @@ class IdleFileCachePlugin {
isIdle = false;
}
);
compiler.hooks.done.tap(PLUGIN_NAME, stats => {
compiler.hooks.done.tap(PLUGIN_NAME, (stats) => {
// 10% build overhead is ignored, as it's not cacheable
timeSpendInBuild *= 0.9;
timeSpendInBuild +=

View File

@ -330,7 +330,7 @@ class Pack {
`${itemsCount} fresh items in cache put into pack ${
packs.length > 1
? packs
.map(pack => `${pack.loc} (${pack.items.size} items)`)
.map((pack) => `${pack.loc} (${pack.items.size} items)`)
.join(", ")
: packs[0].loc
}`
@ -407,7 +407,7 @@ class Pack {
for (const identifier of content.used) {
mergedUsedItems.add(identifier);
}
addToMergedMap.push(async map => {
addToMergedMap.push(async (map) => {
// unpack existing content
// after that values are accessible in .content
await content.unpack(
@ -433,7 +433,7 @@ class Pack {
memoize(async () => {
/** @type {Content} */
const map = new Map();
await Promise.all(addToMergedMap.map(fn => fn(map)));
await Promise.all(addToMergedMap.map((fn) => fn(map)));
return new PackContentItems(map);
})
);
@ -603,7 +603,7 @@ class Pack {
const content = this.content[i];
if (content !== undefined) {
write(content.items);
content.writeLazy(lazy =>
content.writeLazy((lazy) =>
/** @type {NonNullable<ObjectSerializerContext["writeSeparate"]>} */
(writeSeparate)(lazy, { name: `${i}` })
);
@ -627,7 +627,7 @@ class Pack {
item = read();
}
this.itemInfo.clear();
const infoItems = items.map(identifier => {
const infoItems = items.map((identifier) => {
const info = new PackItemInfo(identifier, undefined, undefined);
this.itemInfo.set(identifier, info);
return info;
@ -875,7 +875,7 @@ class PackContent {
}
const value = /** @type {LazyFunction} */ (this.lazy)();
if ("then" in value) {
return value.then(data => {
return value.then((data) => {
const map = data.map;
if (timeMessage) {
logger.timeEnd(timeMessage);
@ -927,7 +927,7 @@ class PackContent {
/** @type {PackContentItems | Promise<PackContentItems>} */
(this.lazy());
if ("then" in value) {
return value.then(data => {
return value.then((data) => {
if (timeMessage) {
logger.timeEnd(timeMessage);
}
@ -1022,7 +1022,7 @@ class PackContent {
if ("then" in value) {
// Move to state B1
this.lazy = write(() =>
value.then(data => {
value.then((data) => {
if (timeMessage) {
logger.timeEnd(timeMessage);
}
@ -1060,7 +1060,7 @@ class PackContent {
* @param {Buffer} buf buffer
* @returns {Buffer} buffer that can be collected
*/
const allowCollectingMemory = buf => {
const allowCollectingMemory = (buf) => {
const wasted = buf.buffer.byteLength - buf.byteLength;
if (wasted > 8192 && (wasted > 1048576 || wasted > buf.byteLength)) {
return Buffer.from(buf);
@ -1178,7 +1178,7 @@ class PackFileCacheStrategy {
? allowCollectingMemory
: undefined
})
.catch(err => {
.catch((err) => {
if (err.code !== "ENOENT") {
logger.warn(
`Restoring pack failed from ${cacheLocation}${this._extension}: ${err}`
@ -1191,7 +1191,7 @@ class PackFileCacheStrategy {
}
return undefined;
})
.then(packContainer => {
.then((packContainer) => {
logger.timeEnd("restore cache container");
if (!packContainer) return;
if (!(packContainer instanceof PackContainer)) {
@@ -1277,7 +1277,7 @@ class PackFileCacheStrategy {
);
})
])
.catch(err => {
.catch((err) => {
logger.timeEnd("check build dependencies");
throw err;
})
@@ -1292,7 +1292,7 @@ class PackFileCacheStrategy {
return undefined;
});
})
.then(pack => {
.then((pack) => {
if (pack) {
pack.maxAge = this.maxAge;
this.buildSnapshot = buildSnapshot;
@@ -1307,7 +1307,7 @@ class PackFileCacheStrategy {
}
return new Pack(logger, this.maxAge);
})
.catch(err => {
.catch((err) => {
this.logger.warn(
`Restoring pack from ${cacheLocation}${this._extension} failed: ${err}`
);
@@ -1325,7 +1325,7 @@ class PackFileCacheStrategy {
store(identifier, etag, data) {
if (this.readonly) return Promise.resolve();
return this._getPack().then(pack => {
return this._getPack().then((pack) => {
pack.set(identifier, etag === null ? null : etag.toString(), data);
});
}
@@ -1337,10 +1337,10 @@ class PackFileCacheStrategy {
*/
restore(identifier, etag) {
return this._getPack()
.then(pack =>
.then((pack) =>
pack.get(identifier, etag === null ? null : etag.toString())
)
.catch(err => {
.catch((err) => {
if (err && err.code !== "ENOENT") {
this.logger.warn(
`Restoring failed for ${identifier} from pack: ${err}`
@@ -1363,7 +1363,7 @@ class PackFileCacheStrategy {
if (packPromise === undefined) return Promise.resolve();
const reportProgress = ProgressPlugin.getReporter(this.compiler);
return (this.storePromise = packPromise
.then(pack => {
.then((pack) => {
pack.stopCapturingRequests();
if (!pack.invalid) return;
this.packPromise = undefined;
@@ -1526,14 +1526,14 @@ class PackFileCacheStrategy {
Math.round(stats.size / 1024 / 1024)
);
})
.catch(err => {
.catch((err) => {
this.logger.timeEnd("store pack");
this.logger.warn(`Caching failed for pack: ${err}`);
this.logger.debug(err.stack);
});
});
})
.catch(err => {
.catch((err) => {
this.logger.warn(`Caching failed for pack: ${err}`);
this.logger.debug(err.stack);
}));


@@ -115,7 +115,7 @@ class ResolverCachePlugin {
let cachedResolves = 0;
let cacheInvalidResolves = 0;
let concurrentResolves = 0;
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, compilation => {
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, (compilation) => {
snapshotOptions = compilation.options.snapshot.resolve;
fileSystemInfo = compilation.fileSystemInfo;
compilation.hooks.finishModules.tap(PLUGIN_NAME, () => {
@@ -176,14 +176,14 @@ class ResolverCachePlugin {
if (typeof newResolveContext.yield === "function") {
yieldResult = [];
withYield = true;
newResolveContext.yield = obj =>
newResolveContext.yield = (obj) =>
/** @type {ResolveRequest[]} */
(yieldResult).push(obj);
}
/**
* @param {"fileDependencies" | "contextDependencies" | "missingDependencies"} key key
*/
const propagate = key => {
const propagate = (key) => {
if (resolveContext[key]) {
addAllToSet(
/** @type {Set<string>} */ (resolveContext[key]),
@@ -239,7 +239,7 @@ class ResolverCachePlugin {
(resolveResult),
snapshot
),
storeErr => {
(storeErr) => {
if (storeErr) return callback(storeErr);
if (resolveResult) {
return callback(


@@ -82,7 +82,7 @@ const getArguments = (schema = webpackSchema) => {
* @param {string} input input
* @returns {string} result
*/
const pathToArgumentName = input =>
const pathToArgumentName = (input) =>
input
.replace(/\./g, "-")
.replace(/\[\]/g, "")
@@ -97,7 +97,7 @@ const getArguments = (schema = webpackSchema) => {
* @param {string} path path
* @returns {Schema} schema part
*/
const getSchemaPart = path => {
const getSchemaPart = (path) => {
const newPath = path.split("/");
let schemaPart = schema;
@@ -119,7 +119,7 @@ const getArguments = (schema = webpackSchema) => {
* @param {PathItem[]} path path in the schema
* @returns {string | undefined} description
*/
const getDescription = path => {
const getDescription = (path) => {
for (const { schema } of path) {
if (schema.cli) {
if (schema.cli.helper) continue;
@@ -133,7 +133,7 @@ const getArguments = (schema = webpackSchema) => {
* @param {PathItem[]} path path in the schema
* @returns {string | undefined} negative description
*/
const getNegatedDescription = path => {
const getNegatedDescription = (path) => {
for (const { schema } of path) {
if (schema.cli) {
if (schema.cli.helper) continue;
@@ -146,7 +146,7 @@ const getArguments = (schema = webpackSchema) => {
* @param {PathItem[]} path path in the schema
* @returns {string | undefined} reset description
*/
const getResetDescription = path => {
const getResetDescription = (path) => {
for (const { schema } of path) {
if (schema.cli) {
if (schema.cli.helper) continue;
@@ -159,7 +159,7 @@ const getArguments = (schema = webpackSchema) => {
* @param {Schema} schemaPart schema
* @returns {Pick<ArgumentConfig, "type" | "values"> | undefined} partial argument config
*/
const schemaToArgumentConfig = schemaPart => {
const schemaToArgumentConfig = (schemaPart) => {
if (schemaPart.enum) {
return {
type: "enum",
@@ -192,7 +192,7 @@ const getArguments = (schema = webpackSchema) => {
* @param {PathItem[]} path path in the schema
* @returns {void}
*/
const addResetFlag = path => {
const addResetFlag = (path) => {
const schemaPath = path[0].path;
const name = pathToArgumentName(`${schemaPath}.reset`);
const description =
@@ -253,7 +253,7 @@ const getArguments = (schema = webpackSchema) => {
if (
flags[name].configs.some(
item => JSON.stringify(item) === JSON.stringify(argConfig)
(item) => JSON.stringify(item) === JSON.stringify(argConfig)
)
) {
return 0;
@@ -261,7 +261,7 @@ const getArguments = (schema = webpackSchema) => {
if (
flags[name].configs.some(
item => item.type === argConfig.type && item.multiple !== multiple
(item) => item.type === argConfig.type && item.multiple !== multiple
)
) {
if (multiple) {
@@ -411,8 +411,8 @@ const getArguments = (schema = webpackSchema) => {
/** @type {NonNullable<ArgumentConfig["values"]>} */
(argConfig.values);
if (values.every(v => typeof v === "boolean")) type = "boolean";
if (values.every(v => typeof v === "number")) type = "number";
if (values.every((v) => typeof v === "boolean")) type = "boolean";
if (values.every((v) => typeof v === "number")) type = "number";
break;
}
}
@@ -420,7 +420,7 @@ const getArguments = (schema = webpackSchema) => {
return t === type ? t : "string";
}, /** @type {SimpleType | undefined} */ (undefined))
);
argument.multiple = argument.configs.some(c => c.multiple);
argument.multiple = argument.configs.some((c) => c.multiple);
}
return flags;
@@ -580,7 +580,7 @@ const processArgumentConfig = (argConfig, config, value, index) => {
* @param {ArgumentConfig} argConfig processing instructions
* @returns {string | undefined} expected message
*/
const getExpectedValue = argConfig => {
const getExpectedValue = (argConfig) => {
switch (argConfig.type) {
case "boolean":
return "true | false";
@@ -590,7 +590,7 @@ const getExpectedValue = argConfig => {
return /** @type {NonNullable<ArgumentConfig["values"]>} */ (
argConfig.values
)
.map(v => `${v}`)
.map((v) => `${v}`)
.join(" | ");
case "reset":
return "true (will reset the previous value to an empty array)";


@@ -74,14 +74,14 @@ const load = (input, context) => {
* @param {string[]} browsers supported browsers list
* @returns {EcmaTargetProperties & PlatformTargetProperties & ApiTargetProperties} target properties
*/
const resolve = browsers => {
const resolve = (browsers) => {
/**
* Checks all against a version number
* @param {Record<string, number | [number, number]>} versions first supported version
* @returns {boolean} true if supports
*/
const rawChecker = versions =>
browsers.every(v => {
const rawChecker = (versions) =>
browsers.every((v) => {
const [name, parsedVersion] = v.split(" ");
if (!name) return false;
const requiredVersion = versions[name];
@@ -100,8 +100,8 @@ const resolve = browsers => {
? Number(parserMinor) >= requiredVersion[1]
: Number(parsedMajor) > requiredVersion[0];
});
const anyNode = browsers.some(b => b.startsWith("node "));
const anyBrowser = browsers.some(b => /^(?!node)/.test(b));
const anyNode = browsers.some((b) => b.startsWith("node "));
const anyBrowser = browsers.some((b) => /^(?!node)/.test(b));
const browserProperty = !anyBrowser ? false : anyNode ? null : true;
const nodeProperty = !anyNode ? false : anyBrowser ? null : true;
// Internet Explorer Mobile, Blackberry browser and Opera Mini are very old browsers, they do not support new features
@@ -349,7 +349,7 @@ const resolve = browsers => {
nodeBuiltins: nodeProperty,
nodePrefixForCoreModules:
nodeProperty &&
!browsers.some(b => b.startsWith("node 15")) &&
!browsers.some((b) => b.startsWith("node 15")) &&
rawChecker({
node: [14, 18]
}),


@@ -159,7 +159,7 @@ const A = (obj, prop, factory) => {
* @param {WebpackOptionsNormalized} options options to be modified
* @returns {void}
*/
const applyWebpackOptionsBaseDefaults = options => {
const applyWebpackOptionsBaseDefaults = (options) => {
F(options, "context", () => process.cwd());
applyInfrastructureLoggingDefaults(options.infrastructureLogging);
};
@@ -242,7 +242,7 @@ const applyWebpackOptionsDefaults = (options, compilerIndex) => {
target === undefined ||
(typeof target === "string" && target.startsWith("browserslist")) ||
(Array.isArray(target) &&
target.some(target => target.startsWith("browserslist"))),
target.some((target) => target.startsWith("browserslist"))),
outputModule:
/** @type {NonNullable<ExperimentsNormalized["outputModule"]>} */
(options.experiments.outputModule),
@@ -594,7 +594,7 @@ const applyJavascriptParserOptionsDefaults = (
* @param {JsonGeneratorOptions} generatorOptions generator options
* @returns {void}
*/
const applyJsonGeneratorOptionsDefaults = generatorOptions => {
const applyJsonGeneratorOptionsDefaults = (generatorOptions) => {
D(generatorOptions, "JSONParse", true);
};
@@ -652,7 +652,7 @@ const applyModuleDefaults = (
* @param {Module} module module
* @returns {boolean} true, if we want to cache the module
*/
module => {
(module) => {
const name = module.nameForCondition();
if (!name) {
return false;
@@ -964,7 +964,7 @@ const applyOutputDefaults = (
* @param {Library=} library the library option
* @returns {string} a readable library name
*/
const getLibraryName = library => {
const getLibraryName = (library) => {
const libraryName =
typeof library === "object" &&
library &&
@@ -1015,7 +1015,7 @@ const applyOutputDefaults = (
* @param {boolean | undefined} v value
* @returns {boolean} true, when v is truthy or undefined
*/
const optimistic = v => v || v === undefined;
const optimistic = (v) => v || v === undefined;
/**
* @param {boolean | undefined} v value
* @param {boolean | undefined} c condition
@@ -1322,7 +1322,7 @@ const applyOutputDefaults = (
* @param {(entryDescription: EntryDescription) => void} fn iterator
* @returns {void}
*/
const forEachEntry = fn => {
const forEachEntry = (fn) => {
for (const name of Object.keys(entry)) {
fn(/** @type {{[k: string] : EntryDescription}} */ (entry)[name]);
}
@@ -1333,7 +1333,7 @@ const applyOutputDefaults = (
if (output.library) {
enabledLibraryTypes.push(output.library.type);
}
forEachEntry(desc => {
forEachEntry((desc) => {
if (desc.library) {
enabledLibraryTypes.push(desc.library.type);
}
@@ -1349,7 +1349,7 @@ const applyOutputDefaults = (
if (output.workerChunkLoading) {
enabledChunkLoadingTypes.add(output.workerChunkLoading);
}
forEachEntry(desc => {
forEachEntry((desc) => {
if (desc.chunkLoading) {
enabledChunkLoadingTypes.add(desc.chunkLoading);
}
@@ -1365,7 +1365,7 @@ const applyOutputDefaults = (
if (output.workerWasmLoading) {
enabledWasmLoadingTypes.add(output.workerWasmLoading);
}
forEachEntry(desc => {
forEachEntry((desc) => {
if (desc.wasmLoading) {
enabledWasmLoadingTypes.add(desc.wasmLoading);
}
@@ -1553,7 +1553,7 @@ const applyOptimizationDefaults = (
D(optimization, "minimize", production);
A(optimization, "minimizer", () => [
{
apply: compiler => {
apply: (compiler) => {
// Lazy load the Terser plugin
const TerserPlugin = require("terser-webpack-plugin");
@@ -1735,7 +1735,7 @@ const getResolveLoaderDefaults = ({ cache }) => {
* @param {InfrastructureLogging} infrastructureLogging options
* @returns {void}
*/
const applyInfrastructureLoggingDefaults = infrastructureLogging => {
const applyInfrastructureLoggingDefaults = (infrastructureLogging) => {
F(infrastructureLogging, "stream", () => process.stderr);
const tty =
/** @type {NonNullable<InfrastructureLogging["stream"]>} */


@@ -57,7 +57,7 @@ const nestedConfig = (value, fn) =>
* @param {T|undefined} value value or not
* @returns {T} result value
*/
const cloneObject = value => /** @type {T} */ ({ ...value });
const cloneObject = (value) => /** @type {T} */ ({ ...value });
/**
* @template T
* @template R
@@ -124,13 +124,13 @@ const keyedNestedConfig = (value, fn, customKeys) => {
* @param {WebpackOptions} config input config
* @returns {WebpackOptionsNormalized} normalized options
*/
const getNormalizedWebpackOptions = config => ({
const getNormalizedWebpackOptions = (config) => ({
amd: config.amd,
bail: config.bail,
cache:
/** @type {NonNullable<CacheOptions>} */
(
optionalNestedConfig(config.cache, cache => {
optionalNestedConfig(config.cache, (cache) => {
if (cache === false) return false;
if (cache === true) {
return {
@@ -173,7 +173,7 @@ const getNormalizedWebpackOptions = config => ({
),
context: config.context,
dependencies: config.dependencies,
devServer: optionalNestedConfig(config.devServer, devServer => {
devServer: optionalNestedConfig(config.devServer, (devServer) => {
if (devServer === false) return false;
return { ...devServer };
}),
@@ -183,25 +183,25 @@ const getNormalizedWebpackOptions = config => ({
? { main: {} }
: typeof config.entry === "function"
? (
fn => () =>
(fn) => () =>
Promise.resolve().then(fn).then(getNormalizedEntryStatic)
)(config.entry)
: getNormalizedEntryStatic(config.entry),
experiments: nestedConfig(config.experiments, experiments => ({
experiments: nestedConfig(config.experiments, (experiments) => ({
...experiments,
buildHttp: optionalNestedConfig(experiments.buildHttp, options =>
buildHttp: optionalNestedConfig(experiments.buildHttp, (options) =>
Array.isArray(options) ? { allowedUris: options } : options
),
lazyCompilation: optionalNestedConfig(
experiments.lazyCompilation,
options => (options === true ? {} : options)
(options) => (options === true ? {} : options)
)
})),
externals: /** @type {NonNullable<Externals>} */ (config.externals),
externalsPresets: cloneObject(config.externalsPresets),
externalsType: config.externalsType,
ignoreWarnings: config.ignoreWarnings
? config.ignoreWarnings.map(ignore => {
? config.ignoreWarnings.map((ignore) => {
if (typeof ignore === "function") return ignore;
const i = ignore instanceof RegExp ? { message: ignore } : ignore;
return (warning, { requestShortener }) => {
@@ -236,11 +236,11 @@ const getNormalizedWebpackOptions = config => ({
module:
/** @type {ModuleOptionsNormalized} */
(
nestedConfig(config.module, module => ({
nestedConfig(config.module, (module) => ({
noParse: module.noParse,
unsafeCache: module.unsafeCache,
parser: keyedNestedConfig(module.parser, cloneObject, {
javascript: parserOptions => ({
javascript: (parserOptions) => ({
unknownContextRequest: module.unknownContextRequest,
unknownContextRegExp: module.unknownContextRegExp,
unknownContextRecursive: module.unknownContextRecursive,
@@ -259,26 +259,26 @@ const getNormalizedWebpackOptions = config => ({
})
}),
generator: cloneObject(module.generator),
defaultRules: optionalNestedArray(module.defaultRules, r => [...r]),
rules: nestedArray(module.rules, r => [...r])
defaultRules: optionalNestedArray(module.defaultRules, (r) => [...r]),
rules: nestedArray(module.rules, (r) => [...r])
}))
),
name: config.name,
node: nestedConfig(
config.node,
node =>
(node) =>
node && {
...node
}
),
optimization: nestedConfig(config.optimization, optimization => ({
optimization: nestedConfig(config.optimization, (optimization) => ({
...optimization,
runtimeChunk: getNormalizedOptimizationRuntimeChunk(
optimization.runtimeChunk
),
splitChunks: nestedConfig(
optimization.splitChunks,
splitChunks =>
(splitChunks) =>
splitChunks && {
...splitChunks,
defaultSizeTypes: splitChunks.defaultSizeTypes
@@ -295,7 +295,7 @@ const getNormalizedWebpackOptions = config => ({
)
: optimization.emitOnErrors
})),
output: nestedConfig(config.output, output => {
output: nestedConfig(config.output, (output) => {
const { library } = output;
const libraryAsName = /** @type {LibraryName} */ (library);
const libraryBase =
@@ -384,13 +384,16 @@ const getNormalizedWebpackOptions = config => ({
sourcePrefix: output.sourcePrefix,
strictModuleErrorHandling: output.strictModuleErrorHandling,
strictModuleExceptionHandling: output.strictModuleExceptionHandling,
trustedTypes: optionalNestedConfig(output.trustedTypes, trustedTypes => {
if (trustedTypes === true) return {};
if (typeof trustedTypes === "string") {
return { policyName: trustedTypes };
trustedTypes: optionalNestedConfig(
output.trustedTypes,
(trustedTypes) => {
if (trustedTypes === true) return {};
if (typeof trustedTypes === "string") {
return { policyName: trustedTypes };
}
return { ...trustedTypes };
}
return { ...trustedTypes };
}),
),
uniqueName: output.uniqueName,
wasmLoading: output.wasmLoading,
webassemblyModuleFilename: output.webassemblyModuleFilename,
@@ -401,13 +404,13 @@ const getNormalizedWebpackOptions = config => ({
return result;
}),
parallelism: config.parallelism,
performance: optionalNestedConfig(config.performance, performance => {
performance: optionalNestedConfig(config.performance, (performance) => {
if (performance === false) return false;
return {
...performance
};
}),
plugins: /** @type {Plugins} */ (nestedArray(config.plugins, p => [...p])),
plugins: /** @type {Plugins} */ (nestedArray(config.plugins, (p) => [...p])),
profile: config.profile,
recordsInputPath:
config.recordsInputPath !== undefined
@@ -417,39 +420,39 @@ const getNormalizedWebpackOptions = config => ({
config.recordsOutputPath !== undefined
? config.recordsOutputPath
: config.recordsPath,
resolve: nestedConfig(config.resolve, resolve => ({
resolve: nestedConfig(config.resolve, (resolve) => ({
...resolve,
byDependency: keyedNestedConfig(resolve.byDependency, cloneObject)
})),
resolveLoader: cloneObject(config.resolveLoader),
snapshot: nestedConfig(config.snapshot, snapshot => ({
snapshot: nestedConfig(config.snapshot, (snapshot) => ({
resolveBuildDependencies: optionalNestedConfig(
snapshot.resolveBuildDependencies,
resolveBuildDependencies => ({
(resolveBuildDependencies) => ({
timestamp: resolveBuildDependencies.timestamp,
hash: resolveBuildDependencies.hash
})
),
buildDependencies: optionalNestedConfig(
snapshot.buildDependencies,
buildDependencies => ({
(buildDependencies) => ({
timestamp: buildDependencies.timestamp,
hash: buildDependencies.hash
})
),
resolve: optionalNestedConfig(snapshot.resolve, resolve => ({
resolve: optionalNestedConfig(snapshot.resolve, (resolve) => ({
timestamp: resolve.timestamp,
hash: resolve.hash
})),
module: optionalNestedConfig(snapshot.module, module => ({
module: optionalNestedConfig(snapshot.module, (module) => ({
timestamp: module.timestamp,
hash: module.hash
})),
immutablePaths: optionalNestedArray(snapshot.immutablePaths, p => [...p]),
managedPaths: optionalNestedArray(snapshot.managedPaths, p => [...p]),
unmanagedPaths: optionalNestedArray(snapshot.unmanagedPaths, p => [...p])
immutablePaths: optionalNestedArray(snapshot.immutablePaths, (p) => [...p]),
managedPaths: optionalNestedArray(snapshot.managedPaths, (p) => [...p]),
unmanagedPaths: optionalNestedArray(snapshot.unmanagedPaths, (p) => [...p])
})),
stats: nestedConfig(config.stats, stats => {
stats: nestedConfig(config.stats, (stats) => {
if (stats === false) {
return {
preset: "none"
@@ -478,7 +481,7 @@ const getNormalizedWebpackOptions = config => ({
* @param {EntryStatic} entry static entry options
* @returns {EntryStaticNormalized} normalized static entry options
*/
const getNormalizedEntryStatic = entry => {
const getNormalizedEntryStatic = (entry) => {
if (typeof entry === "string") {
return {
main: {
@@ -540,7 +543,7 @@ const getNormalizedEntryStatic = entry => {
* @param {OptimizationRuntimeChunk=} runtimeChunk runtimeChunk option
* @returns {OptimizationRuntimeChunkNormalized=} normalized runtimeChunk option
*/
const getNormalizedOptimizationRuntimeChunk = runtimeChunk => {
const getNormalizedOptimizationRuntimeChunk = (runtimeChunk) => {
if (runtimeChunk === undefined) return;
if (runtimeChunk === false) return false;
if (runtimeChunk === "single") {
@@ -550,7 +553,7 @@ const getNormalizedOptimizationRuntimeChunk = runtimeChunk => {
}
if (runtimeChunk === true || runtimeChunk === "multiple") {
return {
name: entrypoint => `runtime~${entrypoint.name}`
name: (entrypoint) => `runtime~${entrypoint.name}`
};
}
const { name } = runtimeChunk;


@@ -15,7 +15,7 @@ const getBrowserslistTargetHandler = memoize(() =>
* @param {string} context the context directory
* @returns {string} default target
*/
const getDefaultTarget = context => {
const getDefaultTarget = (context) => {
const browsers = getBrowserslistTargetHandler().load(null, context);
return browsers ? "browserslist" : "web";
};
@@ -287,7 +287,7 @@ You can also more options via the 'target' option: 'browserslist' / 'browserslis
"esX",
"EcmaScript in this version. Examples: es2020, es5.",
/^es(\d+)$/,
version => {
(version) => {
let v = Number(version);
if (v < 1000) v += 2009;
return {
@@ -333,7 +333,7 @@ const getTargetProperties = (target, context) => {
* @param {TargetProperties[]} targetProperties array of target properties
* @returns {TargetProperties} merged target properties
*/
const mergeTargetProperties = targetProperties => {
const mergeTargetProperties = (targetProperties) => {
/** @type {Set<keyof TargetProperties>} */
const keys = new Set();
for (const tp of targetProperties) {
@@ -371,7 +371,7 @@ const mergeTargetProperties = targetProperties => {
* @returns {TargetProperties} target properties
*/
const getTargetsProperties = (targets, context) =>
mergeTargetProperties(targets.map(t => getTargetProperties(t, context)));
mergeTargetProperties(targets.map((t) => getTargetProperties(t, context)));
module.exports.getDefaultTarget = getDefaultTarget;
module.exports.getTargetProperties = getTargetProperties;


@@ -159,7 +159,7 @@ class ContainerEntryModule extends Module {
for (const block of this.blocks) {
const { dependencies } = block;
const modules = dependencies.map(dependency => {
const modules = dependencies.map((dependency) => {
const dep = /** @type {ContainerExposedDependency} */ (dependency);
return {
name: dep.exposedName,
@@ -170,9 +170,9 @@ class ContainerEntryModule extends Module {
let str;
if (modules.some(m => !m.module)) {
if (modules.some((m) => !m.module)) {
str = runtimeTemplate.throwMissingModuleErrorBlock({
request: modules.map(m => m.request).join(", ")
request: modules.map((m) => m.request).join(", ")
});
} else {
str = `return ${runtimeTemplate.blockPromise({


@@ -51,11 +51,11 @@ class ContainerPlugin {
exposes: /** @type {ExposesList} */ (
parseOptions(
options.exposes,
item => ({
(item) => ({
import: Array.isArray(item) ? item : [item],
name: undefined
}),
item => ({
(item) => ({
import: Array.isArray(item.import) ? item.import : [item.import],
name: item.name || undefined
})
@@ -91,7 +91,7 @@ class ContainerPlugin {
runtime,
library
},
error => {
(error) => {
if (error) return callback(error);
hooks.addContainerEntryDependency.call(dep);
callback();
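
The change applied in every hunk above is mechanical: an arrow function with exactly one parameter gains parentheses around that parameter, matching the form already used for multi-parameter arrows. A minimal before/after sketch of the convention (the `arrowParens: "always"` option named in the comment is an assumption about a Prettier-compatible formatter; the configuration file itself is not among the hunks shown):

// Old style: a single arrow parameter written without parentheses
const doubledOld = [1, 2, 3].map(n => n * 2);

// New style: the parameter is always wrapped, so adding a second
// parameter or a type annotation later does not reshape the call
const doubledNew = [1, 2, 3].map((n) => n * 2);

// Assumed Prettier-compatible setting that enforces the new style:
// { "arrowParens": "always" }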

Some files were not shown because too many files have changed in this diff.