Merge branch 'master' into dependabot/npm_and_yarn/mime-types-2.1.30

Tobias Koppers 2021-06-21 10:37:33 +02:00
commit f57551dd0b
598 changed files with 10654 additions and 4014 deletions


@ -5,11 +5,14 @@ node_modules
benchmark
coverage
# Ignore not support files
# Ignore not supported files
!.*.js
.eslintrc.js
*.d.ts
# Ignore precompiled schemas
schemas/**/*.check.js
# Ignore some test files
test/*
!test/*Cases

.github/dependabot.yml (new file, 12 lines)

@ -0,0 +1,12 @@
version: 2
updates:
- package-ecosystem: npm
directory: "/"
schedule:
interval: daily
time: "04:00"
timezone: Europe/Berlin
open-pull-requests-limit: 20
labels:
- dependencies
versioning-strategy: widen


@ -21,7 +21,7 @@ jobs:
- name: Use Node.js
uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 16.x
- id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v1
@ -43,7 +43,7 @@ jobs:
- name: Use Node.js
uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 16.x
- id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v1
@ -54,7 +54,7 @@ jobs:
- run: yarn --frozen-lockfile
- run: yarn link --frozen-lockfile || true
- run: yarn link webpack --frozen-lockfile
- run: yarn test:basic --ci --reporters=default --reporters=jest-junit
- run: yarn test:basic --ci
- uses: codecov/codecov-action@v1
with:
flags: basic
@ -66,7 +66,7 @@ jobs:
- name: Use Node.js
uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 16.x
- id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v1
@ -82,7 +82,7 @@ jobs:
path: .jest-cache
key: jest-unit-${{ env.GITHUB_SHA }}
restore-keys: jest-unit-
- run: yarn cover:unit --ci --cacheDirectory .jest-cache --reporters=default --reporters=jest-junit
- run: yarn cover:unit --ci --cacheDirectory .jest-cache
- uses: codecov/codecov-action@v1
with:
flags: unit
@ -93,10 +93,10 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
node-version: [10.x, 14.x]
node-version: [10.x, 16.x]
include:
- os: ubuntu-latest
node-version: 15.x
node-version: 14.x
- os: ubuntu-latest
node-version: 12.x
runs-on: ${{ matrix.os }}
@ -121,9 +121,9 @@ jobs:
path: .jest-cache
key: jest-integration-${{ env.GITHUB_SHA }}
restore-keys: jest-integration-
- run: yarn cover:integration --ci --cacheDirectory .jest-cache --reporters=default --reporters=jest-junit
- if: ${{ matrix.os != 'windows-latest' }}
uses: codecov/codecov-action@v1
- run: yarn cover:integration --ci --cacheDirectory .jest-cache
- run: yarn cover:merge
- uses: codecov/codecov-action@v1
with:
flags: integration
functionalities: gcov

.gitignore (2 changed lines)

@ -8,6 +8,8 @@
/benchmark/fixtures
/examples/**/dist
/coverage
/.nyc_output
/.jest-cache
.DS_Store
*.log
.idea


@ -40,7 +40,7 @@
</a>
<h1>webpack</h1>
<p>
webpack is a module bundler. Its main purpose is to bundle JavaScript files for usage in a browser, yet it is also capable of transforming, bundling, or packaging just about any resource or asset.
Webpack is a module bundler. Its main purpose is to bundle JavaScript files for usage in a browser, yet it is also capable of transforming, bundling, or packaging just about any resource or asset.
</p>
</div>
@ -77,7 +77,7 @@ yarn add webpack --dev
<h2 align="center">Introduction</h2>
webpack is a bundler for modules. The main purpose is to bundle JavaScript
Webpack is a bundler for modules. The main purpose is to bundle JavaScript
files for usage in a browser, yet it is also capable of transforming, bundling,
or packaging just about any resource or asset.
@ -95,14 +95,14 @@ Check out webpack's quick [**Get Started**](https://webpack.js.org/guides/gettin
### Browser Compatibility
webpack supports all browsers that are [ES5-compliant](https://kangax.github.io/compat-table/es5/) (IE8 and below are not supported).
webpack also needs `Promise` for `import()` and `require.ensure()`. If you want to support older browsers, you will need to [load a polyfill](https://webpack.js.org/guides/shimming/) before using these expressions.
Webpack supports all browsers that are [ES5-compliant](https://kangax.github.io/compat-table/es5/) (IE8 and below are not supported).
Webpack also needs `Promise` for `import()` and `require.ensure()`. If you want to support older browsers, you will need to [load a polyfill](https://webpack.js.org/guides/shimming/) before using these expressions.
<h2 align="center">Concepts</h2>
### [Plugins](https://webpack.js.org/plugins/)
webpack has a [rich plugin
Webpack has a [rich plugin
interface](https://webpack.js.org/plugins/). Most of the features
within webpack itself use this plugin interface. This makes webpack very
**flexible**.
@ -129,7 +129,7 @@ within webpack itself use this plugin interface. This makes webpack very
### [Loaders](https://webpack.js.org/loaders/)
webpack enables the use of loaders to preprocess files. This allows you to bundle
Webpack enables the use of loaders to preprocess files. This allows you to bundle
**any static resource** way beyond JavaScript. You can easily [write your own
loaders](https://webpack.js.org/api/loaders/) using Node.js.
@ -169,17 +169,14 @@ or are automatically applied via regex from your webpack configuration.
#### Transpiling
| Name | Status | Install Size | Description |
| :--------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------: | :-------------: | :--------------------------------------------------------------------------------------------------- |
| <a href="https://github.com/babel/babel-loader"><img width="48" height="48" title="babel-loader" src="https://worldvectorlogo.com/logos/babel-10.svg"></a> | ![babel-npm] | ![babel-size] | Loads ES2015+ code and transpiles to ES5 using <a href="https://github.com/babel/babel">Babel</a> |
| <a href="https://github.com/jupl/traceur-loader"><img width="48" height="48" src="https://google.github.com/traceur-compiler/logo/tc.svg"></a> | ![traceur-npm] | ![traceur-size] | Loads ES2015+ code and transpiles to ES5 using [Traceur](https://github.com/google/traceur-compiler) |
| <a href="https://github.com/TypeStrong/ts-loader"><img width="48" height="48" src="https://cdn.rawgit.com/Microsoft/TypeScript/master/doc/logo.svg"></a> | ![type-npm] | ![type-size] | Loads TypeScript like JavaScript |
| <a href="https://github.com/webpack-contrib/coffee-loader"><img width="48" height="48" src="https://worldvectorlogo.com/logos/coffeescript.svg"></a> | ![coffee-npm] | ![coffee-size] | Loads CoffeeScript like JavaScript |
| Name | Status | Install Size | Description |
| :--------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------: | :------------: | :------------------------------------------------------------------------------------------------ |
| <a href="https://github.com/babel/babel-loader"><img width="48" height="48" title="babel-loader" src="https://worldvectorlogo.com/logos/babel-10.svg"></a> | ![babel-npm] | ![babel-size] | Loads ES2015+ code and transpiles to ES5 using <a href="https://github.com/babel/babel">Babel</a> |
| <a href="https://github.com/TypeStrong/ts-loader"><img width="48" height="48" src="https://cdn.rawgit.com/Microsoft/TypeScript/master/doc/logo.svg"></a> | ![type-npm] | ![type-size] | Loads TypeScript like JavaScript |
| <a href="https://github.com/webpack-contrib/coffee-loader"><img width="48" height="48" src="https://worldvectorlogo.com/logos/coffeescript.svg"></a> | ![coffee-npm] | ![coffee-size] | Loads CoffeeScript like JavaScript |
[babel-npm]: https://img.shields.io/npm/v/babel-loader.svg
[babel-size]: https://packagephobia.com/badge?p=babel-loader
[traceur-npm]: https://img.shields.io/npm/v/traceur-loader.svg
[traceur-size]: https://packagephobia.com/badge?p=traceur-loader
[coffee-npm]: https://img.shields.io/npm/v/coffee-loader.svg
[coffee-size]: https://packagephobia.com/badge?p=coffee-loader
[type-npm]: https://img.shields.io/npm/v/ts-loader.svg
@ -252,23 +249,23 @@ or are automatically applied via regex from your webpack configuration.
### Performance
webpack uses async I/O and has multiple caching levels. This makes webpack fast
Webpack uses async I/O and has multiple caching levels. This makes webpack fast
and incredibly **fast** on incremental compilations.
### Module Formats
webpack supports ES2015+, CommonJS and AMD modules **out of the box**. It performs clever static
Webpack supports ES2015+, CommonJS and AMD modules **out of the box**. It performs clever static
analysis on the AST of your code. It even has an evaluation engine to evaluate
simple expressions. This allows you to **support most existing libraries** out of the box.
### [Code Splitting](https://webpack.js.org/guides/code-splitting/)
webpack allows you to split your codebase into multiple chunks. Chunks are
Webpack allows you to split your codebase into multiple chunks. Chunks are
loaded asynchronously at runtime. This reduces the initial loading time.
### [Optimizations](https://webpack.js.org/guides/production-build/)
webpack can do many optimizations to **reduce the output size of your
Webpack can do many optimizations to **reduce the output size of your
JavaScript** by deduplicating frequently used modules, minifying, and giving
you full control of what is loaded initially and what is loaded at runtime
through code splitting. It can also make your code chunks **cache


@ -31,7 +31,7 @@ test_script:
- yarn --version
- cmd: set JEST=--maxWorkers=2 --cacheDirectory .jest-cache
- cmd: yarn appveyor:integration
- cmd: yarn istanbul report --report lcovonly
- cmd: yarn cover:report --reporter=lcovonly
- cmd: yarn unlink webpack
- cmd: yarn global add codecov && codecov -F integration --disable=gcov
- cmd: del /F /Q .jest-cache\\haste-map* .jest-cache\\perf-cache* 2> null || Ver > null


@ -8,7 +8,7 @@ jobs:
steps:
- task: NodeTool@0
inputs:
versionSpec: "^14.0.0"
versionSpec: "^16.0.0"
displayName: "Install Node.js"
- script: |
curl -o- -L https://yarnpkg.com/install.sh | bash
@ -35,9 +35,9 @@ jobs:
set -e
export PATH="$HOME/.yarn/bin:$HOME/.config/yarn/global/node_modules/.bin:$PATH"
export JEST_JUNIT_OUTPUT_NAME=basic-junit.xml
yarn test:basic --ci --reporters=jest-junit
yarn test:basic --ci --reporters=default --reporters=jest-junit
export JEST_JUNIT_OUTPUT_NAME=unit-junit.xml
yarn test:unit --ci --reporters=jest-junit
yarn test:unit --ci --reporters=default --reporters=jest-junit
displayName: "Run basic tests"
- task: PublishTestResults@2
inputs:
@ -111,8 +111,8 @@ jobs:
node_version: ^10.13.0
node-12:
node_version: ^12.4.0
node-14:
node_version: ^14.0.0
node-16:
node_version: ^16.0.0
steps:
- task: NodeTool@0
inputs:
@ -138,7 +138,8 @@ jobs:
- script: yarn link webpack --frozen-lockfile
displayName: "Link webpack into node_modules"
- script: |
yarn cover:integration --ci --maxWorkers=2 --reporters=jest-junit
yarn cover:integration --ci --maxWorkers=2 --reporters=default --reporters=jest-junit
yarn cover:merge
displayName: "Run tests with coverage"
- task: PublishTestResults@2
inputs:
@ -161,8 +162,8 @@ jobs:
node_version: ^12.4.0
node-14:
node_version: ^14.0.0
node-15:
node_version: ^15.0.0
node-16:
node_version: ^16.0.0
steps:
- task: NodeTool@0
inputs:
@ -192,7 +193,8 @@ jobs:
- script: |
set -e
export PATH="$HOME/.yarn/bin:$HOME/.config/yarn/global/node_modules/.bin:$PATH"
yarn cover:integration --ci --maxWorkers=2 --reporters=jest-junit
yarn cover:integration --ci --maxWorkers=2 --reporters=default --reporters=jest-junit
yarn cover:merge
displayName: "Run tests with coverage"
- task: PublishTestResults@2
inputs:
@ -212,8 +214,8 @@ jobs:
matrix:
node-12:
node_version: ^12.4.0
node-14:
node_version: ^14.0.0
node-16:
node_version: ^16.0.0
steps:
- task: NodeTool@0
inputs:
@ -243,7 +245,8 @@ jobs:
- script: |
set -e
export PATH="$HOME/.yarn/bin:$HOME/.config/yarn/global/node_modules/.bin:$PATH"
yarn cover:integration --ci --reporters=jest-junit
yarn cover:integration --ci --reporters=default --reporters=jest-junit
yarn cover:merge
displayName: "Run tests with coverage"
- task: PublishTestResults@2
inputs:


@ -32,13 +32,28 @@ const runCommand = (command, args) => {
* @returns {boolean} is the package installed?
*/
const isInstalled = packageName => {
try {
require.resolve(packageName);
if (process.versions.pnp) {
return true;
} catch (err) {
return false;
}
const path = require("path");
const fs = require("graceful-fs");
let dir = __dirname;
do {
try {
if (
fs.statSync(path.join(dir, "node_modules", packageName)).isDirectory()
) {
return true;
}
} catch (_error) {
// Nothing
}
} while (dir !== (dir = path.dirname(dir)));
return false;
};
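The rewritten isInstalled above short-circuits under Yarn PnP (process.versions.pnp) and otherwise probes node_modules/<packageName> in each parent directory instead of calling require.resolve. A minimal TypeScript sketch of that upward walk, for illustration only (findPackageDir is not part of this commit):

import * as fs from "fs";
import * as path from "path";

// Climb from a starting directory toward the filesystem root, returning the
// first directory that contains node_modules/<packageName>. The loop stops
// once path.dirname() no longer changes the path, i.e. at the root.
function findPackageDir(start: string, packageName: string): string | null {
	let dir = start;
	do {
		try {
			if (fs.statSync(path.join(dir, "node_modules", packageName)).isDirectory()) {
				return dir;
			}
		} catch {
			// not present here; keep climbing
		}
	} while (dir !== (dir = path.dirname(dir)));
	return null;
}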
/**

declarations.d.ts (21 changed lines)

@ -251,9 +251,7 @@ declare module "webpack-sources" {
map(options?: MapOptions): Object;
sourceAndMap(
options?: MapOptions
): {
sourceAndMap(options?: MapOptions): {
source: string | Buffer;
map: Object;
};
@ -373,6 +371,23 @@ declare module "browserslist" {
export = browserslist;
}
// TODO remove that when @types/estree is updated
declare type PrivateIdentifierNode = {
type: "PrivateIdentifier";
name: string;
loc?: import("estree").SourceLocation | null;
range?: [number, number];
};
declare type PropertyDefinitionNode = {
type: "PropertyDefinition";
key: import("estree").Expression | PrivateIdentifierNode;
value: import("estree").Expression | null;
computed: boolean;
static: boolean;
loc?: import("estree").SourceLocation | null;
range?: [number, number];
};
type TODO = any;
type RecursiveArrayOrRecord<T> =

declarations/LoaderContext.d.ts (new file, 275 lines)

@ -0,0 +1,275 @@
import type { SourceMap } from "../lib/NormalModule";
import type { validate } from "schema-utils";
import type { AssetInfo } from "../lib/Compilation";
import type { ResolveOptionsWithDependencyType } from "../lib/ResolverFactory";
import type Compilation from "../lib/Compilation";
import type Compiler from "../lib/Compiler";
import type NormalModule from "../lib/NormalModule";
import type { InputFileSystem } from "../lib/util/fs";
import type { Logger } from "../lib/logging/Logger";
import type {
ImportModuleCallback,
ImportModuleOptions
} from "../lib/dependencies/LoaderPlugin";
import type { Resolver } from "enhanced-resolve";
type ResolveCallback = Parameters<Resolver["resolve"]>[4];
type Schema = Parameters<typeof validate>[0];
/** These properties are added by the NormalModule */
export interface NormalModuleLoaderContext<OptionsType> {
version: number;
getOptions(): OptionsType;
getOptions(schema: Schema): OptionsType;
emitWarning(warning: Error): void;
emitError(error: Error): void;
getLogger(name?: string): Logger;
resolve(context: string, request: string, callback: ResolveCallback): any;
getResolve(
options?: ResolveOptionsWithDependencyType
): ((context: string, request: string, callback: ResolveCallback) => void) &
((context: string, request: string) => Promise<string>);
emitFile(
name: string,
content: string | Buffer,
sourceMap?: string,
assetInfo?: AssetInfo
): void;
addBuildDependency(dep: string): void;
utils: {
absolutify: (context: string, request: string) => string;
contextify: (context: string, request: string) => string;
};
rootContext: string;
fs: InputFileSystem;
sourceMap?: boolean;
mode: "development" | "production" | "none";
webpack?: boolean;
_module?: NormalModule;
_compilation?: Compilation;
_compiler?: Compiler;
}
/** These properties are added by the HotModuleReplacementPlugin */
export interface HotModuleReplacementPluginLoaderContext {
hot?: boolean;
}
/** These properties are added by the LoaderPlugin */
export interface LoaderPluginLoaderContext {
/**
* Resolves the given request to a module, applies all configured loaders and calls
* back with the generated source, the sourceMap and the module instance (usually an
* instance of NormalModule). Use this function if you need to know the source code
* of another module to generate the result.
*/
loadModule(
request: string,
callback: (
err: Error | null,
source: string,
sourceMap: any,
module: NormalModule
) => void
): void;
importModule(
request: string,
options: ImportModuleOptions,
callback: ImportModuleCallback
): void;
importModule(request: string, options?: ImportModuleOptions): Promise<any>;
}
/** The properties are added by https://github.com/webpack/loader-runner */
export interface LoaderRunnerLoaderContext<OptionsType> {
/**
* Add a directory as dependency of the loader result.
*/
addContextDependency(context: string): void;
/**
* Adds a file as dependency of the loader result in order to make them watchable.
* For example, html-loader uses this technique as it finds src and src-set attributes.
* Then, it sets the url's for those attributes as dependencies of the html file that is parsed.
*/
addDependency(file: string): void;
addMissingDependency(context: string): void;
/**
* Make this loader async.
*/
async(): WebpackLoaderContextCallback;
/**
* Make this loader result cacheable. By default it's cacheable.
* A cacheable loader must have a deterministic result, when inputs and dependencies haven't changed.
* This means the loader shouldn't have other dependencies than specified with this.addDependency.
* Most loaders are deterministic and cacheable.
*/
cacheable(flag?: boolean): void;
callback: WebpackLoaderContextCallback;
/**
* Remove all dependencies of the loader result. Even initial dependencies and these of other loaders.
*/
clearDependencies(): void;
/**
* The directory of the module. Can be used as context for resolving other stuff.
* eg '/workspaces/ts-loader/examples/vanilla/src'
*/
context: string;
readonly currentRequest: string;
readonly data: any;
/**
* alias of addDependency
* Adds a file as dependency of the loader result in order to make them watchable.
* For example, html-loader uses this technique as it finds src and src-set attributes.
* Then, it sets the url's for those attributes as dependencies of the html file that is parsed.
*/
dependency(file: string): void;
getContextDependencies(): string[];
getDependencies(): string[];
getMissingDependencies(): string[];
/**
* The index in the loaders array of the current loader.
* In the example: in loader1: 0, in loader2: 1
*/
loaderIndex: number;
readonly previousRequest: string;
readonly query: string | OptionsType;
readonly remainingRequest: string;
readonly request: string;
/**
* An array of all the loaders. It is writeable in the pitch phase.
* loaders = [{request: string, path: string, query: string, module: function}]
*
* In the example:
* [
* { request: "/abc/loader1.js?xyz",
* path: "/abc/loader1.js",
* query: "?xyz",
* module: [Function]
* },
* { request: "/abc/node_modules/loader2/index.js",
* path: "/abc/node_modules/loader2/index.js",
* query: "",
* module: [Function]
* }
* ]
*/
loaders: {
request: string;
path: string;
query: string;
fragment: string;
options: object | string | undefined;
ident: string;
normal: Function | undefined;
pitch: Function | undefined;
raw: boolean | undefined;
data: object | undefined;
pitchExecuted: boolean;
normalExecuted: boolean;
}[];
/**
* The resource path.
* In the example: "/abc/resource.js"
*/
resourcePath: string;
/**
* The resource query string.
* Example: "?query"
*/
resourceQuery: string;
/**
* The resource fragment.
* Example: "#frag"
*/
resourceFragment: string;
/**
* The resource inclusive query and fragment.
* Example: "/abc/resource.js?query#frag"
*/
resource: string;
}
type AdditionalData = {
webpackAST: object;
[index: string]: any;
};
type WebpackLoaderContextCallback = (
err: Error | undefined | null,
content?: string | Buffer,
sourceMap?: string | SourceMap,
additionalData?: AdditionalData
) => void;
type LoaderContext<OptionsType> = NormalModuleLoaderContext<OptionsType> &
LoaderRunnerLoaderContext<OptionsType> &
LoaderPluginLoaderContext &
HotModuleReplacementPluginLoaderContext;
type PitchLoaderDefinitionFunction<OptionsType = {}, ContextAdditions = {}> = (
this: LoaderContext<OptionsType> & ContextAdditions,
remainingRequest: string,
previousRequest: string,
data: object
) => string | Buffer | Promise<string | Buffer> | void;
type LoaderDefinitionFunction<OptionsType = {}, ContextAdditions = {}> = (
this: LoaderContext<OptionsType> & ContextAdditions,
content: string,
sourceMap?: string | SourceMap,
additionalData?: AdditionalData
) => string | Buffer | Promise<string | Buffer> | void;
type RawLoaderDefinitionFunction<OptionsType = {}, ContextAdditions = {}> = (
this: LoaderContext<OptionsType> & ContextAdditions,
content: Buffer,
sourceMap?: string | SourceMap,
additionalData?: AdditionalData
) => string | Buffer | Promise<string | Buffer> | void;
export type LoaderDefinition<
OptionsType = {},
ContextAdditions = {}
> = LoaderDefinitionFunction<OptionsType, ContextAdditions> & {
raw?: false;
pitch?: PitchLoaderDefinitionFunction<OptionsType, ContextAdditions>;
};
export type RawLoaderDefinition<
OptionsType = {},
ContextAdditions = {}
> = RawLoaderDefinitionFunction<OptionsType, ContextAdditions> & {
raw: true;
pitch?: PitchLoaderDefinitionFunction<OptionsType, ContextAdditions>;
};
export interface LoaderModule<OptionsType = {}, ContextAdditions = {}> {
default?:
| RawLoaderDefinitionFunction<OptionsType, ContextAdditions>
| LoaderDefinitionFunction<OptionsType, ContextAdditions>;
raw?: false;
pitch?: PitchLoaderDefinitionFunction<OptionsType, ContextAdditions>;
}
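Taken together, these declarations describe the `this` context a loader function receives. A minimal sketch of a loader typed against them, assuming the types are re-exported by the webpack package as the new declarations/index.d.ts in this commit suggests (the banner option and the loader itself are invented for the example):

import type { LoaderDefinitionFunction } from "webpack";

interface BannerOptions {
	banner: string;
}

const bannerLoader: LoaderDefinitionFunction<BannerOptions> = function (
	content,
	sourceMap,
	additionalData
) {
	const options = this.getOptions(); // from NormalModuleLoaderContext
	this.cacheable(true); // from LoaderRunnerLoaderContext
	const callback = this.async();
	callback(null, `/* ${options.banner} */\n${content}`, sourceMap, additionalData);
};

export default bannerLoader;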


@ -122,6 +122,19 @@ export type LibraryType =
* If `output.libraryTarget` is set to umd and `output.library` is set, setting this to true will name the AMD module.
*/
export type UmdNamedDefine = boolean;
/**
* The 'publicPath' specifies the public URL address of the output files when referenced in a browser.
*/
export type PublicPath = "auto" | RawPublicPath;
/**
* The 'publicPath' specifies the public URL address of the output files when referenced in a browser.
*/
export type RawPublicPath =
| string
| ((
pathData: import("../lib/Compilation").PathData,
assetInfo?: import("../lib/Compilation").AssetInfo
) => string);
/**
* The name of the runtime chunk. If set a runtime chunk with this name is created or an existing entrypoint is used as runtime.
*/
@ -227,21 +240,8 @@ export type RuleSetConditionOrConditions = RuleSetCondition | RuleSetConditions;
export type RuleSetCondition =
| RegExp
| string
| {
/**
* Logical AND.
*/
and?: RuleSetConditions;
/**
* Logical NOT.
*/
not?: RuleSetConditions;
/**
* Logical OR.
*/
or?: RuleSetConditions;
}
| ((value: string) => boolean)
| RuleSetLogicalConditions
| RuleSetConditions;
/**
* A list of rule conditions.
@ -259,21 +259,8 @@ export type RuleSetConditionOrConditionsAbsolute =
export type RuleSetConditionAbsolute =
| RegExp
| string
| {
/**
* Logical AND.
*/
and?: RuleSetConditionsAbsolute;
/**
* Logical NOT.
*/
not?: RuleSetConditionsAbsolute;
/**
* Logical OR.
*/
or?: RuleSetConditionsAbsolute;
}
| ((value: string) => boolean)
| RuleSetLogicalConditionsAbsolute
| RuleSetConditionsAbsolute;
/**
* A list of rule conditions matching an absolute path.
@ -537,19 +524,6 @@ export type Path = string;
* Include comments with information about the modules.
*/
export type Pathinfo = "verbose" | boolean;
/**
* The 'publicPath' specifies the public URL address of the output files when referenced in a browser.
*/
export type PublicPath = "auto" | RawPublicPath;
/**
* The 'publicPath' specifies the public URL address of the output files when referenced in a browser.
*/
export type RawPublicPath =
| string
| ((
pathData: import("../lib/Compilation").PathData,
assetInfo?: import("../lib/Compilation").AssetInfo
) => string);
/**
* This option enables loading async chunks via a custom script type, such as script type="module".
*/
@ -916,6 +890,10 @@ export interface MemoryCacheOptions {
* Options object for persistent file-based caching.
*/
export interface FileCacheOptions {
/**
* Allows to collect unused memory allocated during deserialization. This requires copying data into smaller buffers and has a performance cost.
*/
allowCollectingMemory?: boolean;
/**
* Dependencies the build depends on (in multiple categories, default categories: 'defaultWebpack').
*/
@ -938,11 +916,11 @@ export interface FileCacheOptions {
*/
hashAlgorithm?: string;
/**
* Time in ms after which idle period the cache storing should happen (only for store: 'pack' or 'idle').
* Time in ms after which idle period the cache storing should happen (only for store: 'pack').
*/
idleTimeout?: number;
/**
* Time in ms after which idle period the initial cache storing should happen (only for store: 'pack' or 'idle').
* Time in ms after which idle period the initial cache storing should happen (only for store: 'pack').
*/
idleTimeoutForInitialStore?: number;
/**
@ -965,6 +943,10 @@ export interface FileCacheOptions {
* Name for the cache. Different names will lead to different coexisting caches.
*/
name?: string;
/**
* Track and log detailed timing information for individual cache items.
*/
profile?: boolean;
/**
* When to store data to the filesystem. (pack: Store data when compiler is idle in a single file).
*/
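For context, a filesystem cache configuration exercising the options documented above might look like the following sketch (values are illustrative; allowCollectingMemory and profile are the options introduced in this diff):

import type { Configuration } from "webpack";

const config: Configuration = {
	cache: {
		type: "filesystem",
		allowCollectingMemory: true, // copy data into smaller buffers to free memory
		idleTimeout: 60000, // ms of idle time before the pack is stored
		idleTimeoutForInitialStore: 5000,
		profile: true, // log timing information per cache item
		name: "default-development"
	}
};

export default config;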
@ -1021,6 +1003,10 @@ export interface EntryDescription {
* Options for library.
*/
library?: LibraryOptions;
/**
* The 'publicPath' specifies the public URL address of the output files when referenced in a browser.
*/
publicPath?: PublicPath;
/**
* The name of the runtime chunk. If set a runtime chunk with this name is created or an existing entrypoint is used as runtime.
*/
@ -1105,6 +1091,10 @@ export interface Experiments {
* Support WebAssembly as asynchronous EcmaScript Module.
*/
asyncWebAssembly?: boolean;
/**
* Enable build-time execution of modules from the module graph for plugins and loaders.
*/
executeModule?: boolean;
/**
* Enable module and chunk layers.
*/
@ -1199,6 +1189,18 @@ export interface ExternalsPresets {
* Options for infrastructure level logging.
*/
export interface InfrastructureLogging {
/**
* Only appends lines to the output. Avoids updating existing output e. g. for status messages. This option is only used when no custom console is provided.
*/
appendOnly?: boolean;
/**
* Enables/Disables colorful output. This option is only used when no custom console is provided.
*/
colors?: boolean;
/**
* Custom console used for logging.
*/
console?: Console;
/**
* Enable debug logging for specific loggers.
*/
@ -1207,6 +1209,10 @@ export interface InfrastructureLogging {
* Log level.
*/
level?: "none" | "error" | "warn" | "info" | "log" | "verbose";
/**
* Stream used for logging output. Defaults to process.stderr. This option is only used when no custom console is provided.
*/
stream?: NodeJS.WritableStream;
}
/**
* Custom values available in the loader context.
@ -1389,6 +1395,10 @@ export interface RuleSetRule {
* Match and execute these rules when this rule is matched.
*/
rules?: RuleSetRule[];
/**
* Match module scheme.
*/
scheme?: RuleSetConditionOrConditions;
/**
* Flags a module as with or without side effects.
*/
@ -1406,6 +1416,40 @@ export interface RuleSetRule {
*/
use?: RuleSetUse;
}
/**
* Logic operators used in a condition matcher.
*/
export interface RuleSetLogicalConditions {
/**
* Logical AND.
*/
and?: RuleSetConditions;
/**
* Logical NOT.
*/
not?: RuleSetCondition;
/**
* Logical OR.
*/
or?: RuleSetConditions;
}
/**
* Logic operators used in a condition matcher.
*/
export interface RuleSetLogicalConditionsAbsolute {
/**
* Logical AND.
*/
and?: RuleSetConditionsAbsolute;
/**
* Logical NOT.
*/
not?: RuleSetConditionAbsolute;
/**
* Logical OR.
*/
or?: RuleSetConditionsAbsolute;
}
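The extracted RuleSetLogicalConditions interfaces above correspond to the and/or/not object form of a rule condition. A hedged sketch (paths and loader are made up):

import type { RuleSetRule } from "webpack";

const rule: RuleSetRule = {
	test: /\.js$/i,
	include: {
		and: [/[\\/]src[\\/]/], // every listed condition must match
		not: /\.test\.js$/i // `not` takes a single condition, which may itself be nested
	},
	use: "babel-loader"
};

export default rule;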
/**
* Options object for resolving requests.
*/
@ -2053,6 +2097,10 @@ export interface Output {
* Handles exceptions in module loading correctly at a performance cost (Deprecated). This will handle module error compatible with the Node.js CommonJS way.
*/
strictModuleExceptionHandling?: StrictModuleExceptionHandling;
/**
* Use a Trusted Types policy to create urls for chunks. 'output.uniqueName' is used as the default policy name. Passing a string sets a custom policy name.
*/
trustedTypes?: true | string | TrustedTypes;
/**
* If `output.libraryTarget` is set to umd and `output.library` is set, setting this to true will name the AMD module.
*/
@ -2124,6 +2172,15 @@ export interface Environment {
*/
module?: boolean;
}
/**
* Use a Trusted Types policy to create urls for chunks.
*/
export interface TrustedTypes {
/**
* The name of the Trusted Types policy created by webpack to serve bundle chunks.
*/
policyName?: string;
}
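In configuration terms, the new trustedTypes output option accepts `true` (reuse output.uniqueName as the policy name), a policy-name string, or the object form above. A short sketch with made-up names:

import type { Configuration } from "webpack";

const config: Configuration = {
	output: {
		uniqueName: "my-app",
		trustedTypes: { policyName: "my-app#webpack" }
	}
};

export default config;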
/**
* Configuration object for web performance recommendations.
*/
@ -2652,6 +2709,10 @@ export interface EntryDescriptionNormalized {
* Options for library.
*/
library?: LibraryOptions;
/**
* The 'publicPath' specifies the public URL address of the output files when referenced in a browser.
*/
publicPath?: PublicPath;
/**
* The name of the runtime chunk. If set a runtime chunk with this name is created or an existing entrypoint is used as runtime.
*/
@ -3002,6 +3063,10 @@ export interface OutputNormalized {
* Handles exceptions in module loading correctly at a performance cost (Deprecated). This will handle module error compatible with the Node.js CommonJS way.
*/
strictModuleExceptionHandling?: StrictModuleExceptionHandling;
/**
* Use a Trusted Types policy to create urls for chunks.
*/
trustedTypes?: TrustedTypes;
/**
* A unique name of the webpack build to avoid multiple webpack runtimes to conflict when using globals.
*/


@ -1,78 +0,0 @@
/*
* This file was automatically generated.
* DO NOT MODIFY BY HAND.
* Run `yarn special-lint-fix` to update
*/
/**
* Modules that should be exposed by this container. When provided, property name is used as public name, otherwise public name is automatically inferred from request.
*/
export type Exposes = (ExposesItem | ExposesObject)[] | ExposesObject;
/**
* Module that should be exposed by this container.
*/
export type ExposesItem = string;
/**
* Modules that should be exposed by this container.
*/
export type ExposesItems = ExposesItem[];
/**
* Container locations and request scopes from which modules should be resolved and loaded at runtime. When provided, property name is used as request scope, otherwise request scope is automatically inferred from container location.
*/
export type Remotes = (RemotesItem | RemotesObject)[] | RemotesObject;
/**
* Container location from which modules should be resolved and loaded at runtime.
*/
export type RemotesItem = string;
/**
* Container locations from which modules should be resolved and loaded at runtime.
*/
export type RemotesItems = RemotesItem[];
export interface _Container {
[k: string]: any;
}
/**
* Modules that should be exposed by this container. Property names are used as public paths.
*/
export interface ExposesObject {
/**
* Modules that should be exposed by this container.
*/
[k: string]: ExposesConfig | ExposesItem | ExposesItems;
}
/**
* Advanced configuration for modules that should be exposed by this container.
*/
export interface ExposesConfig {
/**
* Request to a module that should be exposed by this container.
*/
import: ExposesItem | ExposesItems;
/**
* Custom chunk name for the exposed module.
*/
name?: string;
}
/**
* Container locations from which modules should be resolved and loaded at runtime. Property names are used as request scopes.
*/
export interface RemotesObject {
/**
* Container locations from which modules should be resolved and loaded at runtime.
*/
[k: string]: RemotesConfig | RemotesItem | RemotesItems;
}
/**
* Advanced configuration for container locations from which modules should be resolved and loaded at runtime.
*/
export interface RemotesConfig {
/**
* Container locations from which modules should be resolved and loaded at runtime.
*/
external: RemotesItem | RemotesItems;
/**
* The name of the share scope shared with this remote.
*/
shareScope?: string;
}


@ -1,68 +0,0 @@
/*
* This file was automatically generated.
* DO NOT MODIFY BY HAND.
* Run `yarn special-lint-fix` to update
*/
/**
* Modules that should be shared in the share scope. When provided, property names are used to match requested modules in this compilation.
*/
export type Shared = (SharedItem | SharedObject)[] | SharedObject;
/**
* A module that should be shared in the share scope.
*/
export type SharedItem = string;
export interface _Sharing {
[k: string]: any;
}
/**
* Modules that should be shared in the share scope. Property names are used to match requested modules in this compilation. Relative requests are resolved, module requests are matched unresolved, absolute paths will match resolved requests. A trailing slash will match all requests with this prefix. In this case shareKey must also have a trailing slash.
*/
export interface SharedObject {
/**
* Modules that should be shared in the share scope.
*/
[k: string]: SharedConfig | SharedItem;
}
/**
* Advanced configuration for modules that should be shared in the share scope.
*/
export interface SharedConfig {
/**
* Include the provided and fallback module directly instead behind an async request. This allows to use this shared module in initial load too. All possible shared modules need to be eager too.
*/
eager?: boolean;
/**
* Provided module that should be provided to share scope. Also acts as fallback module if no shared module is found in share scope or version isn't valid. Defaults to the property name.
*/
import?: false | SharedItem;
/**
* Package name to determine required version from description file. This is only needed when package name can't be automatically determined from request.
*/
packageName?: string;
/**
* Version requirement from module in share scope.
*/
requiredVersion?: false | string;
/**
* Module is looked up under this key from the share scope.
*/
shareKey?: string;
/**
* Share scope name.
*/
shareScope?: string;
/**
* Allow only a single version of the shared module in share scope (disabled by default).
*/
singleton?: boolean;
/**
* Do not accept shared module if version is not valid (defaults to yes, if local fallback module is available and shared module is not a singleton, otherwise no, has no effect if there is no required version specified).
*/
strictVersion?: boolean;
/**
* Version of the provided module. Will replace lower matching versions, but not higher.
*/
version?: false | string;
}

declarations/index.d.ts (new file, 9 lines)

@ -0,0 +1,9 @@
export type {
LoaderModule,
RawLoaderDefinition,
LoaderDefinition,
LoaderDefinitionFunction,
PitchLoaderDefinitionFunction,
RawLoaderDefinitionFunction,
LoaderContext
} from "./LoaderContext";


@ -1,7 +1,9 @@
module.exports = {
nameMapping: {
FsStats: /^Stats Import fs/,
validateFunction: /^validate Import/,
Configuration: /^WebpackOptions /
},
exclude: [/^devServer in WebpackOptions /]
exclude: [/^devServer in WebpackOptions /],
include: [/^(_module|_compilation|_compiler) in NormalModuleLoaderContext /]
};


@ -25,8 +25,6 @@ class AsyncDependencyToInitialChunkError extends WebpackError {
this.name = "AsyncDependencyToInitialChunkError";
this.module = module;
this.loc = loc;
Error.captureStackTrace(this, this.constructor);
}
}


@ -5,18 +5,25 @@
"use strict";
const { validate } = require("schema-utils");
const { ConcatSource } = require("webpack-sources");
const Compilation = require("./Compilation");
const ModuleFilenameHelpers = require("./ModuleFilenameHelpers");
const Template = require("./Template");
const schema = require("../schemas/plugins/BannerPlugin.json");
const createSchemaValidation = require("./util/create-schema-validation");
/** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginArgument} BannerPluginArgument */
/** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginOptions} BannerPluginOptions */
/** @typedef {import("./Compiler")} Compiler */
const validate = createSchemaValidation(
require("../schemas/plugins/BannerPlugin.check.js"),
() => require("../schemas/plugins/BannerPlugin.json"),
{
name: "Banner Plugin",
baseDataPath: "options"
}
);
const wrapComment = str => {
if (!str.includes("\n")) {
return Template.toComment(str);
@ -40,10 +47,7 @@ class BannerPlugin {
};
}
validate(schema, options, {
name: "Banner Plugin",
baseDataPath: "options"
});
validate(options);
this.options = options;
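The call sites above (and in CleanPlugin further down) imply a helper that runs a cheap precompiled check first and only loads and runs the full JSON-schema validation when that check is absent or fails. A rough TypeScript sketch of that shape, inferred from this diff rather than taken from lib/util/create-schema-validation:

import { validate } from "schema-utils";

type ValidationOptions = { name: string; baseDataPath: string };

const createSchemaValidation =
	(
		check: ((value: unknown) => boolean) | undefined,
		getSchema: () => Parameters<typeof validate>[0],
		options: ValidationOptions
	) =>
	(value: unknown): void => {
		// Fast path: the generated *.check.js accepted the options.
		if (check && check(value)) return;
		// Slow path: load the schema lazily and produce a detailed error.
		validate(getSchema(), value as object, options);
	};

CleanPlugin below follows the same pattern, passing undefined for the precompiled check and building its schema lazily from WebpackOptions.json.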


@ -90,9 +90,9 @@ class MultiItemCache {
* @returns {Promise<void>} promise signals when the value is stored
*/
storePromise(data) {
return Promise.all(
this._items.map(item => item.storePromise(data))
).then(() => {});
return Promise.all(this._items.map(item => item.storePromise(data))).then(
() => {}
);
}
}


@ -15,7 +15,7 @@ const WebpackError = require("./WebpackError");
* @returns {Module[]} sorted version of original modules
*/
const sortModules = modules => {
return modules.slice().sort((a, b) => {
return modules.sort((a, b) => {
const aIdent = a.identifier();
const bIdent = b.identifier();
/* istanbul ignore next */
@ -52,11 +52,11 @@ const createModulesListMessage = (modules, moduleGraph) => {
class CaseSensitiveModulesWarning extends WebpackError {
/**
* Creates an instance of CaseSensitiveModulesWarning.
* @param {Module[]} modules modules that were detected
* @param {Iterable<Module>} modules modules that were detected
* @param {ModuleGraph} moduleGraph the module graph
*/
constructor(modules, moduleGraph) {
const sortedModules = sortModules(modules);
const sortedModules = sortModules(Array.from(modules));
const modulesList = createModulesListMessage(sortedModules, moduleGraph);
super(`There are multiple modules with names that only differ in casing.
This can lead to unexpected behavior when compiling on a filesystem with other case-semantic.
@ -65,8 +65,6 @@ ${modulesList}`);
this.name = "CaseSensitiveModulesWarning";
this.module = sortedModules[0];
Error.captureStackTrace(this, this.constructor);
}
}


@ -547,9 +547,8 @@ class Chunk {
xor.add(chunkGraph.getModuleHash(m, this.runtime));
}
xor.updateHash(hash);
const entryModules = chunkGraph.getChunkEntryModulesWithChunkGroupIterable(
this
);
const entryModules =
chunkGraph.getChunkEntryModulesWithChunkGroupIterable(this);
for (const [m, chunkGroup] of entryModules) {
hash.update("entry");
hash.update(`${chunkGraph.getModuleId(m)}`);
@ -568,9 +567,15 @@ class Chunk {
Array.from(this.groupsIterable, g => new Set(g.chunks))
);
for (const chunkGroup of this.groupsIterable) {
const initialQueue = new Set(this.groupsIterable);
for (const chunkGroup of initialQueue) {
for (const child of chunkGroup.childrenIterable) {
queue.add(child);
if (child instanceof Entrypoint) {
initialQueue.add(child);
} else {
queue.add(child);
}
}
}
@ -593,8 +598,12 @@ class Chunk {
*/
getAllInitialChunks() {
const chunks = new Set();
for (const group of this.groupsIterable) {
for (const c of group.chunks) chunks.add(c);
const queue = new Set(this.groupsIterable);
for (const group of queue) {
if (group.isInitial()) {
for (const c of group.chunks) chunks.add(c);
for (const g of group.childrenIterable) queue.add(g);
}
}
return chunks;
}


@ -6,10 +6,10 @@
"use strict";
const util = require("util");
const Entrypoint = require("./Entrypoint");
const ModuleGraphConnection = require("./ModuleGraphConnection");
const { first } = require("./util/SetHelpers");
const SortableSet = require("./util/SortableSet");
const StringXor = require("./util/StringXor");
const {
compareModulesById,
compareIterables,
@ -31,7 +31,6 @@ const {
/** @typedef {import("./AsyncDependenciesBlock")} AsyncDependenciesBlock */
/** @typedef {import("./Chunk")} Chunk */
/** @typedef {import("./ChunkGroup")} ChunkGroup */
/** @typedef {import("./Entrypoint")} Entrypoint */
/** @typedef {import("./Module")} Module */
/** @typedef {import("./ModuleGraph")} ModuleGraph */
/** @typedef {import("./RuntimeModule")} RuntimeModule */
@ -40,6 +39,8 @@ const {
/** @type {ReadonlySet<string>} */
const EMPTY_SET = new Set();
const ZERO_BIG_INT = BigInt(0);
const compareModuleIterables = compareIterables(compareModulesByIdentifier);
/** @typedef {(c: Chunk, chunkGraph: ChunkGraph) => boolean} ChunkFilterPredicate */
@ -1042,16 +1043,22 @@ class ChunkGraph {
* @returns {Iterable<Chunk>} iterable of chunks
*/
getChunkEntryDependentChunksIterable(chunk) {
const cgc = this._getChunkGraphChunk(chunk);
/** @type {Set<Chunk>} */
const set = new Set();
for (const chunkGroup of cgc.entryModules.values()) {
for (const c of chunkGroup.chunks) {
if (c !== chunk && !c.hasRuntime()) {
set.add(c);
for (const chunkGroup of chunk.groupsIterable) {
if (chunkGroup instanceof Entrypoint) {
const entrypointChunk = chunkGroup.getEntrypointChunk();
const cgc = this._getChunkGraphChunk(entrypointChunk);
for (const chunkGroup of cgc.entryModules.values()) {
for (const c of chunkGroup.chunks) {
if (c !== chunk && c !== entrypointChunk && !c.hasRuntime()) {
set.add(c);
}
}
}
}
}
return set;
}
@ -1367,8 +1374,41 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
return runtimeRequirements === undefined ? EMPTY_SET : runtimeRequirements;
}
/**
* @param {Module} module the module
* @param {RuntimeSpec} runtime the runtime
* @param {boolean} withConnections include connections
* @returns {string} hash
*/
getModuleGraphHash(module, runtime, withConnections = true) {
const cgm = this._getChunkGraphModule(module);
return withConnections
? this._getModuleGraphHashWithConnections(cgm, module, runtime)
: this._getModuleGraphHashBigInt(cgm, module, runtime).toString(16);
}
/**
* @param {Module} module the module
* @param {RuntimeSpec} runtime the runtime
* @param {boolean} withConnections include connections
* @returns {bigint} hash
*/
getModuleGraphHashBigInt(module, runtime, withConnections = true) {
const cgm = this._getChunkGraphModule(module);
return withConnections
? BigInt(
`0x${this._getModuleGraphHashWithConnections(cgm, module, runtime)}`
)
: this._getModuleGraphHashBigInt(cgm, module, runtime);
}
/**
* @param {ChunkGraphModule} cgm the ChunkGraphModule
* @param {Module} module the module
* @param {RuntimeSpec} runtime the runtime
* @returns {bigint} hash as big int
*/
_getModuleGraphHashBigInt(cgm, module, runtime) {
if (cgm.graphHashes === undefined) {
cgm.graphHashes = new RuntimeSpecMap();
}
@ -1377,38 +1417,72 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
hash.update(`${cgm.id}`);
hash.update(`${this.moduleGraph.isAsync(module)}`);
this.moduleGraph.getExportsInfo(module).updateHash(hash, runtime);
return /** @type {string} */ (hash.digest("hex"));
return BigInt(`0x${/** @type {string} */ (hash.digest("hex"))}`);
});
if (!withConnections) return graphHash;
return graphHash;
}
/**
* @param {ChunkGraphModule} cgm the ChunkGraphModule
* @param {Module} module the module
* @param {RuntimeSpec} runtime the runtime
* @returns {string} hash
*/
_getModuleGraphHashWithConnections(cgm, module, runtime) {
if (cgm.graphHashesWithConnections === undefined) {
cgm.graphHashesWithConnections = new RuntimeSpecMap();
}
const activeStateToString = state => {
if (state === false) return "false";
if (state === true) return "true";
if (state === ModuleGraphConnection.TRANSITIVE_ONLY) return "transitive";
if (state === false) return "F";
if (state === true) return "T";
if (state === ModuleGraphConnection.TRANSITIVE_ONLY) return "O";
throw new Error("Not implemented active state");
};
const strict = module.buildMeta && module.buildMeta.strictHarmonyModule;
return cgm.graphHashesWithConnections.provide(runtime, () => {
const graphHash = this._getModuleGraphHashBigInt(
cgm,
module,
runtime
).toString(16);
const connections = this.moduleGraph.getOutgoingConnections(module);
/** @type {Set<Module>} */
const activeNamespaceModules = new Set();
/** @type {Map<string, Module | Set<Module>>} */
const connectedModules = new Map();
for (const connection of connections) {
let stateInfo;
if (typeof runtime === "string") {
const processConnection = (connection, stateInfo) => {
const module = connection.module;
stateInfo += module.getExportsType(this.moduleGraph, strict);
// cspell:word Tnamespace
if (stateInfo === "Tnamespace") activeNamespaceModules.add(module);
else {
const oldModule = connectedModules.get(stateInfo);
if (oldModule === undefined) {
connectedModules.set(stateInfo, module);
} else if (oldModule instanceof Set) {
oldModule.add(module);
} else if (oldModule !== module) {
connectedModules.set(stateInfo, new Set([oldModule, module]));
}
}
};
if (runtime === undefined || typeof runtime === "string") {
for (const connection of connections) {
const state = connection.getActiveState(runtime);
if (state === false) continue;
stateInfo = activeStateToString(state);
} else {
processConnection(connection, state === true ? "T" : "O");
}
} else {
// cspell:word Tnamespace
for (const connection of connections) {
const states = new Set();
stateInfo = "";
let stateInfo = "";
forEachRuntime(
runtime,
runtime => {
const state = connection.getActiveState(runtime);
states.add(state);
stateInfo += runtime + activeStateToString(state);
stateInfo += activeStateToString(state) + runtime;
},
true
);
@ -1417,34 +1491,49 @@ Caller might not support runtime-dependent code generation (opt-out via optimiza
if (state === false) continue;
stateInfo = activeStateToString(state);
}
}
const module = connection.module;
stateInfo += module.getExportsType(this.moduleGraph, strict);
const oldModule = connectedModules.get(stateInfo);
if (oldModule === undefined) {
connectedModules.set(stateInfo, module);
} else if (oldModule instanceof Set) {
oldModule.add(module);
} else if (oldModule !== module) {
connectedModules.set(stateInfo, new Set([oldModule, module]));
processConnection(connection, stateInfo);
}
}
if (connectedModules.size === 0) return graphHash;
// cspell:word Tnamespace
if (activeNamespaceModules.size === 0 && connectedModules.size === 0)
return graphHash;
const connectedModulesInOrder =
connectedModules.size > 1
? Array.from(connectedModules).sort(([a], [b]) => (a < b ? -1 : 1))
: connectedModules;
const hash = createHash("md4");
const addModuleToHash = module => {
hash.update(
this._getModuleGraphHashBigInt(
this._getChunkGraphModule(module),
module,
runtime
).toString(16)
);
};
const addModulesToHash = modules => {
let xor = ZERO_BIG_INT;
for (const m of modules) {
xor =
xor ^
this._getModuleGraphHashBigInt(
this._getChunkGraphModule(m),
m,
runtime
);
}
hash.update(xor.toString(16));
};
if (activeNamespaceModules.size === 1)
addModuleToHash(activeNamespaceModules.values().next().value);
else if (activeNamespaceModules.size > 1)
addModulesToHash(activeNamespaceModules);
for (const [stateInfo, modules] of connectedModulesInOrder) {
hash.update(stateInfo);
if (modules instanceof Set) {
const xor = new StringXor();
for (const m of modules) {
xor.add(this.getModuleGraphHash(m, runtime, false));
}
xor.updateHash(hash);
addModulesToHash(modules);
} else {
hash.update(this.getModuleGraphHash(modules, runtime, false));
addModuleToHash(modules);
}
}
hash.update(graphHash);
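The refactor above folds per-module graph hashes together as BigInt values with XOR, so a set of connected modules hashes to the same value regardless of iteration order. A standalone illustration of that property (not webpack code; md5 stands in for the hash used here):

import { createHash } from "crypto";

const hashToBigInt = (s: string): bigint =>
	BigInt("0x" + createHash("md5").update(s).digest("hex"));

// XOR is commutative and associative, so the combined value does not depend
// on the order in which the items are visited.
const combine = (items: Iterable<string>): bigint => {
	let xor = 0n;
	for (const item of items) xor ^= hashToBigInt(item);
	return xor;
};

// combine(["a", "b", "c"]) === combine(["c", "a", "b"]) -> true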


@ -25,8 +25,6 @@ class ChunkRenderError extends WebpackError {
this.details = error.stack;
this.file = file;
this.chunk = chunk;
Error.captureStackTrace(this, this.constructor);
}
}


@ -6,11 +6,10 @@
"use strict";
const asyncLib = require("neo-async");
const { validate } = require("schema-utils");
const { SyncBailHook } = require("tapable");
const Compilation = require("../lib/Compilation");
const createSchemaValidation = require("./util/create-schema-validation");
const { join } = require("./util/fs");
const memoize = require("./util/memoize");
const processAsyncTree = require("./util/processAsyncTree");
/** @typedef {import("../declarations/WebpackOptions").CleanOptions} CleanOptions */
@ -26,13 +25,20 @@ const processAsyncTree = require("./util/processAsyncTree");
* @property {SyncBailHook<[string], boolean>} keep when returning true the file/directory will be kept during cleaning, returning false will clean it and ignore the following plugins and config
*/
const getSchema = memoize(() => {
const { definitions } = require("../schemas/WebpackOptions.json");
return {
definitions,
oneOf: [{ $ref: "#/definitions/CleanOptions" }]
};
});
const validate = createSchemaValidation(
undefined,
() => {
const { definitions } = require("../schemas/WebpackOptions.json");
return {
definitions,
oneOf: [{ $ref: "#/definitions/CleanOptions" }]
};
},
{
name: "Clean Plugin",
baseDataPath: "options"
}
);
/**
* @param {OutputFileSystem} fs filesystem
@ -255,13 +261,9 @@ class CleanPlugin {
return hooks;
}
/** @param {CleanOptions} [options] options */
/** @param {CleanOptions} options options */
constructor(options = {}) {
validate(getSchema(), options, {
name: "Clean Plugin",
baseDataPath: "options"
});
validate(options);
this.options = { dry: false, ...options };
}


@ -23,8 +23,6 @@ class CodeGenerationError extends WebpackError {
this.message = error.message;
this.details = error.stack;
this.module = module;
Error.captureStackTrace(this, this.constructor);
}
}


@ -22,8 +22,6 @@ class CommentCompilationWarning extends WebpackError {
this.name = "CommentCompilationWarning";
this.loc = loc;
Error.captureStackTrace(this, this.constructor);
}
}

File diff suppressed because it is too large.


@ -230,6 +230,8 @@ class Compiler {
this.fileTimestamps = undefined;
/** @type {Map<string, FileSystemInfoEntry | "ignore" | null>} */
this.contextTimestamps = undefined;
/** @type {number} */
this.fsStartTime = undefined;
/** @type {ResolverFactory} */
this.resolverFactory = new ResolverFactory();
@ -263,6 +265,8 @@ class Compiler {
this._assetEmittingSourceCache = new WeakMap();
/** @private @type {Map<string, number>} */
this._assetEmittingWrittenFiles = new Map();
/** @private @type {Set<string>} */
this._assetEmittingPreviousFiles = new Set();
}
/**
@ -556,6 +560,8 @@ class Compiler {
compilation.assets = { ...compilation.assets };
/** @type {Map<string, { path: string, source: Source, size: number, waiting: { cacheEntry: any, file: string }[] }>} */
const caseInsensitiveMap = new Map();
/** @type {Set<string>} */
const allTargetPaths = new Set();
asyncLib.forEachLimit(
assets,
15,
@ -583,11 +589,11 @@ class Compiler {
outputPath,
targetFile
);
allTargetPaths.add(targetPath);
// check if the target file has already been written by this Compiler
const targetFileGeneration = this._assetEmittingWrittenFiles.get(
targetPath
);
const targetFileGeneration =
this._assetEmittingWrittenFiles.get(targetPath);
// create an cache entry for this Source if not already existing
let cacheEntry = this._assetEmittingSourceCache.get(source);
@ -617,7 +623,8 @@ class Compiler {
}
alreadyWritten();
} else {
const err = new WebpackError(`Prevent writing to file that only differs in casing or query string from already written file.
const err =
new WebpackError(`Prevent writing to file that only differs in casing or query string from already written file.
This will lead to a race-condition and corrupted files on case-insensitive file systems.
${targetPath}
${other}`);
@ -775,18 +782,22 @@ ${other}`);
// check if the Source has been written to this target file
const writtenGeneration = cacheEntry.writtenTo.get(targetPath);
if (writtenGeneration === targetFileGeneration) {
// if yes, we skip writing the file
// as it's already there
// (we assume one doesn't remove files while the Compiler is running)
// if yes, we may skip writing the file
// if it's already there
// (we assume one doesn't modify files while the Compiler is running, other than removing them)
compilation.updateAsset(file, cacheEntry.sizeOnlySource, {
size: cacheEntry.sizeOnlySource.size()
});
if (this._assetEmittingPreviousFiles.has(targetPath)) {
// We assume that assets from the last compilation stay intact on disk (they are not removed)
compilation.updateAsset(file, cacheEntry.sizeOnlySource, {
size: cacheEntry.sizeOnlySource.size()
});
return callback();
}
if (!immutable) {
return callback();
} else {
// Setting immutable will make it accept file content without comparing when the file exists
immutable = true;
}
} else if (!immutable) {
if (checkSimilarFile()) return;
// We wrote to this file before which has very likely a different content
// skip comparing and assume content is different for performance
@ -820,7 +831,14 @@ ${other}`);
}
},
err => {
if (err) return callback(err);
// Clear map to free up memory
caseInsensitiveMap.clear();
if (err) {
this._assetEmittingPreviousFiles.clear();
return callback(err);
}
this._assetEmittingPreviousFiles = allTargetPaths;
this.hooks.afterEmit.callAsync(compilation, err => {
if (err) return callback(err);
@ -915,8 +933,8 @@ ${other}`);
* @param {Compilation} compilation the compilation
* @param {string} compilerName the compiler's name
* @param {number} compilerIndex the compiler's index
* @param {OutputOptions} outputOptions the output options
* @param {WebpackPluginInstance[]} plugins the plugins to apply
* @param {OutputOptions=} outputOptions the output options
* @param {WebpackPluginInstance[]=} plugins the plugins to apply
* @returns {Compiler} a child compiler
*/
createChildCompiler(
@ -936,6 +954,7 @@ ${other}`);
childCompiler.removedFiles = this.removedFiles;
childCompiler.fileTimestamps = this.fileTimestamps;
childCompiler.contextTimestamps = this.contextTimestamps;
childCompiler.fsStartTime = this.fsStartTime;
childCompiler.cache = this.cache;
childCompiler.compilerPath = `${this.compilerPath}${compilerName}|${compilerIndex}|`;


@ -7,7 +7,8 @@
/** @typedef {import("./Module")} Module */
const MODULE_REFERENCE_REGEXP = /^__WEBPACK_MODULE_REFERENCE__(\d+)_([\da-f]+|ns)(_call)?(_directImport)?(?:_asiSafe(\d))?__$/;
const MODULE_REFERENCE_REGEXP =
/^__WEBPACK_MODULE_REFERENCE__(\d+)_([\da-f]+|ns)(_call)?(_directImport)?(?:_asiSafe(\d))?__$/;
const DEFAULT_EXPORT = "__WEBPACK_DEFAULT_EXPORT__";
const NAMESPACE_OBJECT_EXPORT = "__WEBPACK_NAMESPACE_OBJECT__";


@ -14,7 +14,5 @@ module.exports = class ConcurrentCompilationError extends WebpackError {
this.name = "ConcurrentCompilationError";
this.message =
"You ran Webpack twice. Each instance only supports a single concurrent compilation at a time.";
Error.captureStackTrace(this, this.constructor);
}
};


@ -54,6 +54,7 @@ const makeSerializable = require("./util/makeSerializable");
* @property {RegExp=} include
* @property {RegExp=} exclude
* @property {RawChunkGroupOptions=} groupOptions
* @property {string=} typePrefix
* @property {string=} category
* @property {string[][]=} referencedExports exports referenced from modules (won't be mangled)
*/
@ -577,7 +578,7 @@ class ContextModule extends Module {
fakeMapDataExpression = "fakeMap[id]"
) {
if (typeof fakeMap === "number") {
return `return ${this.getReturn(fakeMap)};`;
return `return ${this.getReturn(fakeMap, asyncModule)};`;
}
return `return ${
RuntimeGlobals.createFakeNamespaceObject
@ -1016,9 +1017,9 @@ module.exports = webpackEmptyAsyncContext;`;
this.getSource(this.getSourceString(this.options.mode, context))
);
const set = new Set();
const allDeps = /** @type {ContextElementDependency[]} */ (this.dependencies.concat(
this.blocks.map(b => b.dependencies[0])
));
const allDeps = /** @type {ContextElementDependency[]} */ (
this.dependencies.concat(this.blocks.map(b => b.dependencies[0]))
);
set.add(RuntimeGlobals.module);
set.add(RuntimeGlobals.hasOwnProperty);
if (allDeps.length > 0) {


@ -10,6 +10,7 @@ const { AsyncSeriesWaterfallHook, SyncWaterfallHook } = require("tapable");
const ContextModule = require("./ContextModule");
const ModuleFactory = require("./ModuleFactory");
const ContextElementDependency = require("./dependencies/ContextElementDependency");
const LazySet = require("./util/LazySet");
const { cachedSetProperty } = require("./util/cleverMerge");
const { createFakeHook } = require("./util/deprecation");
const { join } = require("./util/fs");
@ -87,9 +88,9 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
const dependencies = data.dependencies;
const resolveOptions = data.resolveOptions;
const dependency = /** @type {ContextDependency} */ (dependencies[0]);
const fileDependencies = new Set();
const missingDependencies = new Set();
const contextDependencies = new Set();
const fileDependencies = new LazySet();
const missingDependencies = new LazySet();
const contextDependencies = new LazySet();
this.hooks.beforeResolve.callAsync(
{
context: context,
@ -272,7 +273,8 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
include,
exclude,
referencedExports,
category
category,
typePrefix
} = options;
if (!regExp || !resource) return callback(null, []);
@ -345,6 +347,7 @@ module.exports = class ContextModuleFactory extends ModuleFactory {
const dep = new ContextElementDependency(
obj.request + resourceQuery + resourceFragment,
obj.request,
typePrefix,
category,
referencedExports
);
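
ContextModuleFactory above switches its fileDependencies/missingDependencies/contextDependencies collections from Set to LazySet. The point of a lazy set here is that whole sets contributed by resolvers can be appended in O(1) and only flattened when the result is actually iterated; a rough illustration of that idea follows (LazyUnion is a made-up name — webpack's LazySet is more elaborate):

class LazyUnion {
	constructor() {
		this._sets = [];
	}
	addAll(iterable) {
		this._sets.push(iterable); // defer copying until iteration
		return this;
	}
	*[Symbol.iterator]() {
		const seen = new Set();
		for (const set of this._sets) {
			for (const item of set) {
				if (!seen.has(item)) {
					seen.add(item);
					yield item;
				}
			}
		}
	}
}

const deps = new LazyUnion();
deps.addAll(new Set(["/src/a.js"]));
deps.addAll(["/src/a.js", "/src/b.js"]);
console.log([...deps]); // [ '/src/a.js', '/src/b.js' ]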

View File

@ -104,9 +104,10 @@ class ContextReplacementPlugin {
result.regExp = newContentRegExp;
}
if (typeof newContentCreateContextMap === "function") {
result.resolveDependencies = createResolveDependenciesFromContextMap(
newContentCreateContextMap
);
result.resolveDependencies =
createResolveDependenciesFromContextMap(
newContentCreateContextMap
);
}
if (typeof newContentCallback === "function") {
const origResource = result.resource;

View File

@ -13,6 +13,7 @@ const {
evaluateToString,
toConstantDependency
} = require("./javascript/JavascriptParserHelpers");
const { provide } = require("./util/MapHelpers");
/** @typedef {import("estree").Expression} Expression */
/** @typedef {import("./Compiler")} Compiler */
@ -53,7 +54,7 @@ class RuntimeValue {
/**
* @param {JavascriptParser} parser the parser
* @param {Map<string, string>} valueCacheVersions valueCacheVersions
* @param {Map<string, string | Set<string>>} valueCacheVersions valueCacheVersions
* @param {string} key the defined key
* @returns {CodeValuePrimitive} code
*/
@ -88,7 +89,9 @@ class RuntimeValue {
module: parser.state.module,
key,
get version() {
return valueCacheVersions.get(VALUE_DEP_PREFIX + key);
return /** @type {string} */ (
valueCacheVersions.get(VALUE_DEP_PREFIX + key)
);
}
});
}
@ -105,7 +108,7 @@ class RuntimeValue {
/**
* @param {any[]|{[k: string]: any}} obj obj
* @param {JavascriptParser} parser Parser
* @param {Map<string, string>} valueCacheVersions valueCacheVersions
* @param {Map<string, string | Set<string>>} valueCacheVersions valueCacheVersions
* @param {string} key the defined key
* @param {RuntimeTemplate} runtimeTemplate the runtime template
* @param {boolean|undefined|null=} asiSafe asi safe (undefined: unknown, null: unneeded)
@ -156,7 +159,7 @@ const stringifyObj = (
* Convert code to a string that evaluates
* @param {CodeValue} code Code to evaluate
* @param {JavascriptParser} parser Parser
* @param {Map<string, string>} valueCacheVersions valueCacheVersions
* @param {Map<string, string | Set<string>>} valueCacheVersions valueCacheVersions
* @param {string} key the defined key
* @param {RuntimeTemplate} runtimeTemplate the runtime template
* @param {boolean|undefined|null=} asiSafe asi safe (undefined: unknown, null: unneeded)
@ -247,6 +250,7 @@ const toCacheVersion = code => {
};
const VALUE_DEP_PREFIX = "webpack/DefinePlugin ";
const VALUE_DEP_MAIN = "webpack/DefinePlugin";
class DefinePlugin {
/**
@ -282,26 +286,41 @@ class DefinePlugin {
);
const { runtimeTemplate } = compilation;
const mainValue = /** @type {Set<string>} */ (
provide(
compilation.valueCacheVersions,
VALUE_DEP_MAIN,
() => new Set()
)
);
/**
* Handler
* @param {JavascriptParser} parser Parser
* @returns {void}
*/
const handler = parser => {
const addValueDependency = key => {
parser.hooks.program.tap("DefinePlugin", () => {
const { buildInfo } = parser.state.module;
if (!buildInfo.valueDependencies)
buildInfo.valueDependencies = new Map();
buildInfo.valueDependencies.set(VALUE_DEP_MAIN, mainValue);
});
const addValueDependency = key => {
const { buildInfo } = parser.state.module;
buildInfo.valueDependencies.set(
VALUE_DEP_PREFIX + key,
compilation.valueCacheVersions.get(VALUE_DEP_PREFIX + key)
);
};
const withValueDependency = (key, fn) => (...args) => {
addValueDependency(key);
return fn(...args);
};
const withValueDependency =
(key, fn) =>
(...args) => {
addValueDependency(key);
return fn(...args);
};
/**
* Walk definitions
@ -546,6 +565,7 @@ class DefinePlugin {
const code = definitions[key];
const version = toCacheVersion(code);
const name = VALUE_DEP_PREFIX + prefix + key;
mainValue.add(name);
const oldVersion = compilation.valueCacheVersions.get(name);
if (oldVersion === undefined) {
compilation.valueCacheVersions.set(name, version);
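
The hunks above make DefinePlugin record a version string per defined key in compilation.valueCacheVersions and attach those versions to each module's buildInfo.valueDependencies, so a module is rebuilt when a define's value changes. A minimal standalone sketch of that version bookkeeping (plain Map, illustrative names, not webpack's API):

const valueCacheVersions = new Map(); // "webpack/DefinePlugin <key>" -> version

// Record the version computed for a defined key; return true when it changed
// and builds that depend on it are therefore stale.
function setVersion(key, version) {
	const oldVersion = valueCacheVersions.get(key);
	if (oldVersion === undefined) {
		valueCacheVersions.set(key, version);
		return false; // first build, nothing to invalidate yet
	}
	if (oldVersion !== version) {
		valueCacheVersions.set(key, version);
		return true; // changed value -> dependents must be rebuilt
	}
	return false;
}

console.log(setVersion("process.env.NODE_ENV", '"development"')); // false
console.log(setVersion("process.env.NODE_ENV", '"production"')); // true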

View File

@ -9,6 +9,7 @@ const memoize = require("./util/memoize");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("./ChunkGraph")} ChunkGraph */
/** @typedef {import("./DependenciesBlock")} DependenciesBlock */
/** @typedef {import("./DependencyTemplates")} DependencyTemplates */
/** @typedef {import("./Module")} Module */
/** @typedef {import("./ModuleGraph")} ModuleGraph */
@ -55,6 +56,7 @@ const memoize = require("./util/memoize");
* @property {(string | ExportSpec)[]=} exports nested exports
* @property {ModuleGraphConnection=} from when reexported: from which module
* @property {string[] | null=} export when reexported: from which export
* @property {number=} priority when reexported: with which priority
* @property {boolean=} hidden export is not visible, because another export blends over it
*/
@ -64,6 +66,7 @@ const memoize = require("./util/memoize");
* @property {Set<string>=} excludeExports when exports = true, list of unaffected exports
* @property {Set<string>=} hideExports list of maybe prior exposed, but now hidden exports
* @property {ModuleGraphConnection=} from when reexported: from which module
* @property {number=} priority when reexported: with which priority
* @property {boolean=} canMangle can the export be renamed (defaults to true)
* @property {boolean=} terminalBinding are the exports terminal bindings that should be checked for export star conflicts
* @property {Module[]=} dependencies module on which the result depends on
@ -82,14 +85,23 @@ const getIgnoredModule = memoize(() => {
class Dependency {
constructor() {
/** @type {Module} */
this._parentModule = undefined;
/** @type {DependenciesBlock} */
this._parentDependenciesBlock = undefined;
// TODO check if this can be moved into ModuleDependency
/** @type {boolean} */
this.weak = false;
// TODO check if this can be moved into ModuleDependency
/** @type {boolean} */
this.optional = false;
/** @type {DependencyLocation} */
this.loc = undefined;
this._locSL = 0;
this._locSC = 0;
this._locEL = 0;
this._locEC = 0;
this._locI = undefined;
this._locN = undefined;
this._loc = undefined;
}
/**
@ -106,6 +118,56 @@ class Dependency {
return "unknown";
}
/**
* @returns {DependencyLocation} location
*/
get loc() {
if (this._loc !== undefined) return this._loc;
/** @type {SyntheticDependencyLocation & RealDependencyLocation} */
const loc = {};
if (this._locSL > 0) {
loc.start = { line: this._locSL, column: this._locSC };
}
if (this._locEL > 0) {
loc.end = { line: this._locEL, column: this._locEC };
}
if (this._locN !== undefined) {
loc.name = this._locN;
}
if (this._locI !== undefined) {
loc.index = this._locI;
}
return (this._loc = loc);
}
set loc(loc) {
if ("start" in loc && typeof loc.start === "object") {
this._locSL = loc.start.line || 0;
this._locSC = loc.start.column || 0;
} else {
this._locSL = 0;
this._locSC = 0;
}
if ("end" in loc && typeof loc.end === "object") {
this._locEL = loc.end.line || 0;
this._locEC = loc.end.column || 0;
} else {
this._locEL = 0;
this._locEC = 0;
}
if ("index" in loc) {
this._locI = loc.index;
} else {
this._locI = undefined;
}
if ("name" in loc) {
this._locN = loc.name;
} else {
this._locN = undefined;
}
this._loc = loc;
}
/**
* @returns {string | null} an identifier to merge equal requests
*/
@ -205,13 +267,23 @@ class Dependency {
serialize({ write }) {
write(this.weak);
write(this.optional);
write(this.loc);
write(this._locSL);
write(this._locSC);
write(this._locEL);
write(this._locEC);
write(this._locI);
write(this._locN);
}
deserialize({ read }) {
this.weak = read();
this.optional = read();
this.loc = read();
this._locSL = read();
this._locSC = read();
this._locEL = read();
this._locEC = read();
this._locI = read();
this._locN = read();
}
}
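
The Dependency changes above replace the plain `loc` object with flat `_locSL`/`_locSC`/... fields plus a lazy getter, so serialize()/deserialize() write a handful of scalars instead of a nested object. A reduced sketch of the same pack-and-rebuild idea, start and end lines only (field names are illustrative):

class PackedLoc {
	constructor() {
		this._startLine = 0;
		this._endLine = 0;
		this._cached = undefined;
	}
	set loc(loc) {
		this._startLine = (loc.start && loc.start.line) || 0;
		this._endLine = (loc.end && loc.end.line) || 0;
		this._cached = loc;
	}
	get loc() {
		if (this._cached !== undefined) return this._cached;
		const loc = {};
		if (this._startLine > 0) loc.start = { line: this._startLine };
		if (this._endLine > 0) loc.end = { line: this._endLine };
		return (this._cached = loc); // rebuilt on demand from the flat fields
	}
}

const a = new PackedLoc();
a.loc = { start: { line: 3 }, end: { line: 5 } };

// simulate (de)serializing only the flat fields, as the diff's serialize() does
const b = new PackedLoc();
b._startLine = a._startLine;
b._endLine = a._endLine;
console.log(b.loc); // { start: { line: 3 }, end: { line: 5 } }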

View File

@ -8,22 +8,26 @@
const DllEntryPlugin = require("./DllEntryPlugin");
const FlagAllModulesAsUsedPlugin = require("./FlagAllModulesAsUsedPlugin");
const LibManifestPlugin = require("./LibManifestPlugin");
const { validate } = require("schema-utils");
const schema = require("../schemas/plugins/DllPlugin.json");
const createSchemaValidation = require("./util/create-schema-validation");
/** @typedef {import("../declarations/plugins/DllPlugin").DllPluginOptions} DllPluginOptions */
/** @typedef {import("./Compiler")} Compiler */
const validate = createSchemaValidation(
require("../schemas/plugins/DllPlugin.check.js"),
() => require("../schemas/plugins/DllPlugin.json"),
{
name: "Dll Plugin",
baseDataPath: "options"
}
);
class DllPlugin {
/**
* @param {DllPluginOptions} options options object
*/
constructor(options) {
validate(schema, options, {
name: "Dll Plugin",
baseDataPath: "options"
});
validate(options);
this.options = {
...options,
entryOnly: options.entryOnly !== false

View File

@ -10,24 +10,28 @@ const DelegatedModuleFactoryPlugin = require("./DelegatedModuleFactoryPlugin");
const ExternalModuleFactoryPlugin = require("./ExternalModuleFactoryPlugin");
const WebpackError = require("./WebpackError");
const DelegatedSourceDependency = require("./dependencies/DelegatedSourceDependency");
const createSchemaValidation = require("./util/create-schema-validation");
const makePathsRelative = require("./util/identifier").makePathsRelative;
const { validate } = require("schema-utils");
const schema = require("../schemas/plugins/DllReferencePlugin.json");
/** @typedef {import("../declarations/WebpackOptions").Externals} Externals */
/** @typedef {import("../declarations/plugins/DllReferencePlugin").DllReferencePluginOptions} DllReferencePluginOptions */
/** @typedef {import("../declarations/plugins/DllReferencePlugin").DllReferencePluginOptionsManifest} DllReferencePluginOptionsManifest */
const validate = createSchemaValidation(
require("../schemas/plugins/DllReferencePlugin.check.js"),
() => require("../schemas/plugins/DllReferencePlugin.json"),
{
name: "Dll Reference Plugin",
baseDataPath: "options"
}
);
class DllReferencePlugin {
/**
* @param {DllReferencePluginOptions} options options object
*/
constructor(options) {
validate(schema, options, {
name: "Dll Reference Plugin",
baseDataPath: "options"
});
validate(options);
this.options = options;
/** @type {WeakMap<Object, {path: string, data: DllReferencePluginOptionsManifest?, error: Error?}>} */
this._compilationData = new WeakMap();
@ -152,8 +156,6 @@ class DllManifestError extends WebpackError {
this.name = "DllManifestError";
this.message = `Dll manifest ${filename}\n${message}`;
Error.captureStackTrace(this, this.constructor);
}
}

View File

@ -62,6 +62,7 @@ class EntryOptionPlugin {
runtime: desc.runtime,
layer: desc.layer,
dependOn: desc.dependOn,
publicPath: desc.publicPath,
chunkLoading: desc.chunkLoading,
wasmLoading: desc.wasmLoading,
library: desc.library

View File

@ -17,7 +17,7 @@ class EntryPlugin {
*
* @param {string} context context path
* @param {string} entry entry path
* @param {EntryOptions | string} options entry options (passing a string is deprecated)
* @param {EntryOptions | string=} options entry options (passing a string is deprecated)
*/
constructor(context, entry, options) {
this.context = context;
@ -41,10 +41,10 @@ class EntryPlugin {
}
);
compiler.hooks.make.tapAsync("EntryPlugin", (compilation, callback) => {
const { entry, options, context } = this;
const { entry, options, context } = this;
const dep = EntryPlugin.createDependency(entry, options);
const dep = EntryPlugin.createDependency(entry, options);
compiler.hooks.make.tapAsync("EntryPlugin", (compilation, callback) => {
compilation.addEntry(context, dep, options, err => {
callback(err);
});

View File

@ -91,7 +91,9 @@ class EvalSourceMapDevToolPlugin {
} else if (m instanceof ConcatenatedModule) {
const concatModule = /** @type {ConcatenatedModule} */ (m);
if (concatModule.rootModule instanceof NormalModule) {
const module = /** @type {NormalModule} */ (concatModule.rootModule);
const module = /** @type {NormalModule} */ (
concatModule.rootModule
);
if (!matchModule(module.resource)) {
return result(source);
}

View File

@ -137,18 +137,22 @@ class ExportsInfo {
_sortExportsMap(exports) {
if (exports.size > 1) {
const entriesInOrder = Array.from(exports.values());
if (
entriesInOrder.length !== 2 ||
entriesInOrder[0].name > entriesInOrder[1].name
) {
entriesInOrder.sort((a, b) => {
return a.name < b.name ? -1 : 1;
});
exports.clear();
for (const entry of entriesInOrder) {
exports.set(entry.name, entry);
}
const namesInOrder = [];
for (const entry of exports.values()) {
namesInOrder.push(entry.name);
}
namesInOrder.sort();
let i = 0;
for (const entry of exports.values()) {
const name = namesInOrder[i];
if (entry.name !== name) break;
i++;
}
for (; i < namesInOrder.length; i++) {
const name = namesInOrder[i];
const correctEntry = exports.get(name);
exports.delete(name);
exports.set(name, correctEntry);
}
}
}
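
The rewritten _sortExportsMap above relies on a JS Map preserving insertion order and on delete-then-set moving an entry to the end; it re-inserts only the keys that are out of order. A tiny sketch of the underlying trick, without the already-sorted-prefix optimization:

function sortMapByKey(map) {
	const keys = Array.from(map.keys()).sort();
	for (const key of keys) {
		const value = map.get(key);
		map.delete(key); // remove from its old position...
		map.set(key, value); // ...and re-append, now in sorted order
	}
}

const m = new Map([["c", 3], ["a", 1], ["b", 2]]);
sortMapByKey(m);
console.log([...m.keys()]); // [ 'a', 'b', 'c' ]
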
@ -269,13 +273,15 @@ class ExportsInfo {
* @param {Set<string>=} excludeExports list of unaffected exports
* @param {any=} targetKey use this as key for the target
* @param {ModuleGraphConnection=} targetModule set this module as target
* @param {number=} priority priority
* @returns {boolean} true, if this call changed something
*/
setUnknownExportsProvided(
canMangle,
excludeExports,
targetKey,
targetModule
targetModule,
priority
) {
let changed = false;
if (excludeExports) {
@ -295,7 +301,7 @@ class ExportsInfo {
changed = true;
}
if (targetKey) {
exportInfo.setTarget(targetKey, targetModule, [exportInfo.name]);
exportInfo.setTarget(targetKey, targetModule, [exportInfo.name], -1);
}
}
if (this._redirectTo !== undefined) {
@ -304,7 +310,8 @@ class ExportsInfo {
canMangle,
excludeExports,
targetKey,
targetModule
targetModule,
priority
)
) {
changed = true;
@ -322,7 +329,12 @@ class ExportsInfo {
changed = true;
}
if (targetKey) {
this._otherExportsInfo.setTarget(targetKey, targetModule, undefined);
this._otherExportsInfo.setTarget(
targetKey,
targetModule,
undefined,
priority
);
}
}
return changed;
@ -714,7 +726,7 @@ class ExportsInfo {
const otherCanMangleProvide = this._otherExportsInfo.canMangleProvide;
const otherTerminalBinding = this._otherExportsInfo.terminalBinding;
const exports = [];
for (const exportInfo of this._exports.values()) {
for (const exportInfo of this.orderedExports) {
if (
exportInfo.provided !== otherProvided ||
exportInfo.canMangleProvide !== otherCanMangleProvide ||
@ -746,7 +758,9 @@ class ExportsInfo {
otherTerminalBinding,
exports
}) {
let wasEmpty = true;
for (const exportInfo of this._exports.values()) {
wasEmpty = false;
exportInfo.provided = otherProvided;
exportInfo.canMangleProvide = otherCanMangleProvide;
exportInfo.terminalBinding = otherTerminalBinding;
@ -764,6 +778,7 @@ class ExportsInfo {
exportsInfo.restoreProvided(exp.exportsInfo);
}
}
if (wasEmpty) this._exportsAreOrdered = true;
}
}
@ -819,17 +834,20 @@ class ExportInfo {
this.exportsInfoOwned = false;
/** @type {ExportsInfo=} */
this.exportsInfo = undefined;
/** @type {Map<any, { connection: ModuleGraphConnection, export: string[] } | null>=} */
/** @type {Map<any, { connection: ModuleGraphConnection | null, export: string[], priority: number }>=} */
this._target = undefined;
if (initFrom && initFrom._target) {
this._target = new Map();
for (const [key, value] of initFrom._target) {
this._target.set(
key,
value ? { connection: value.connection, export: [name] } : null
);
this._target.set(key, {
connection: value.connection,
export: value.export || [name],
priority: value.priority
});
}
}
/** @type {Map<any, { connection: ModuleGraphConnection | null, export: string[], priority: number }>=} */
this._maxTarget = undefined;
}
// TODO webpack 5 remove
@ -1023,46 +1041,45 @@ class ExportInfo {
*/
unsetTarget(key) {
if (!this._target) return false;
return this._target.delete(key);
if (this._target.delete(key)) {
this._maxTarget = undefined;
return true;
}
return false;
}
/**
* @param {any} key the key
* @param {ModuleGraphConnection=} connection the target module if a single one
* @param {ModuleGraphConnection} connection the target module if a single one
* @param {string[]=} exportName the exported name
* @param {number=} priority priority
* @returns {boolean} true, if something has changed
*/
setTarget(key, connection, exportName) {
setTarget(key, connection, exportName, priority = 0) {
if (exportName) exportName = [...exportName];
if (!this._target) {
this._target = new Map();
this._target.set(
key,
connection ? { connection, export: exportName } : null
);
this._target.set(key, { connection, export: exportName, priority });
return true;
}
const oldTarget = this._target.get(key);
if (!oldTarget) {
if (oldTarget === null && !connection) return false;
this._target.set(
key,
connection ? { connection, export: exportName } : null
);
return true;
}
if (!connection) {
this._target.set(key, null);
this._target.set(key, { connection, export: exportName, priority });
this._maxTarget = undefined;
return true;
}
if (
oldTarget.connection !== connection ||
oldTarget.priority !== priority ||
(exportName
? !oldTarget.export || !equals(oldTarget.export, exportName)
: oldTarget.export)
) {
oldTarget.connection = connection;
oldTarget.export = exportName;
oldTarget.priority = priority;
this._maxTarget = undefined;
return true;
}
return false;
@ -1171,6 +1188,29 @@ class ExportInfo {
return !this.terminalBinding && this._target && this._target.size > 0;
}
_getMaxTarget() {
if (this._maxTarget !== undefined) return this._maxTarget;
if (this._target.size <= 1) return (this._maxTarget = this._target);
let maxPriority = -Infinity;
let minPriority = Infinity;
for (const { priority } of this._target.values()) {
if (maxPriority < priority) maxPriority = priority;
if (minPriority > priority) minPriority = priority;
}
// This should be very common
if (maxPriority === minPriority) return (this._maxTarget = this._target);
// This is an edge case
const map = new Map();
for (const [key, value] of this._target) {
if (maxPriority === value.priority) {
map.set(key, value);
}
}
this._maxTarget = map;
return map;
}
/**
* @param {ModuleGraph} moduleGraph the module graph
* @param {function(Module): boolean} validTargetModuleFilter a valid target module
@ -1188,7 +1228,7 @@ class ExportInfo {
*/
_findTarget(moduleGraph, validTargetModuleFilter, alreadyVisited) {
if (!this._target || this._target.size === 0) return undefined;
let rawTarget = this._target.values().next().value;
let rawTarget = this._getMaxTarget().values().next().value;
if (!rawTarget) return undefined;
/** @type {{ module: Module, export: string[] | undefined }} */
let target = {
@ -1296,7 +1336,7 @@ class ExportInfo {
if (alreadyVisited && alreadyVisited.has(this)) return CIRCULAR;
const newAlreadyVisited = new Set(alreadyVisited);
newAlreadyVisited.add(this);
const values = this._target.values();
const values = this._getMaxTarget().values();
const target = resolveTarget(values.next().value, newAlreadyVisited);
if (target === CIRCULAR) return CIRCULAR;
if (target === null) return undefined;
@ -1324,7 +1364,7 @@ class ExportInfo {
const target = this._getTarget(moduleGraph, resolveTargetFilter, undefined);
if (target === CIRCULAR) return undefined;
if (!target) return undefined;
const originalTarget = this._target.values().next().value;
const originalTarget = this._getMaxTarget().values().next().value;
if (
originalTarget.connection === target.connection &&
originalTarget.export === target.export
@ -1336,7 +1376,8 @@ class ExportInfo {
connection: updateOriginalConnection
? updateOriginalConnection(target)
: target.connection,
export: target.export
export: target.export,
priority: 0
});
return target;
}
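
Several hunks above thread a `priority` through setTarget/setUnknownExportsProvided and add _getMaxTarget, so that when multiple re-exports provide the same name only the highest-priority targets are considered. A standalone sketch of that selection step (data shapes are illustrative):

function maxPriorityEntries(targets) {
	let max = -Infinity;
	for (const { priority } of targets.values()) {
		if (priority > max) max = priority;
	}
	const result = new Map();
	for (const [key, value] of targets) {
		if (value.priority === max) result.set(key, value); // keep winners only
	}
	return result;
}

const targets = new Map([
	["reexport-a", { module: "a", priority: 0 }],
	["reexport-b", { module: "b", priority: 1 }]
]);
console.log([...maxPriorityEntries(targets).keys()]); // [ 'reexport-b' ]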

View File

@ -27,11 +27,12 @@ const RBDT_RESOLVE_CJS = 0;
const RBDT_RESOLVE_ESM = 1;
const RBDT_RESOLVE_DIRECTORY = 2;
const RBDT_RESOLVE_CJS_FILE = 3;
const RBDT_RESOLVE_ESM_FILE = 4;
const RBDT_DIRECTORY = 5;
const RBDT_FILE = 6;
const RBDT_DIRECTORY_DEPENDENCIES = 7;
const RBDT_FILE_DEPENDENCIES = 8;
const RBDT_RESOLVE_CJS_FILE_AS_CHILD = 4;
const RBDT_RESOLVE_ESM_FILE = 5;
const RBDT_DIRECTORY = 6;
const RBDT_FILE = 7;
const RBDT_DIRECTORY_DEPENDENCIES = 8;
const RBDT_FILE_DEPENDENCIES = 9;
const INVALID = Symbol("invalid");
@ -63,7 +64,7 @@ const INVALID = Symbol("invalid");
* @property {Set<string>} files list of files
* @property {Set<string>} directories list of directories
* @property {Set<string>} missing list of missing entries
* @property {Map<string, string>} resolveResults stored resolve results
* @property {Map<string, string | false>} resolveResults stored resolve results
* @property {Object} resolveDependencies dependencies of the resolving
* @property {Set<string>} resolveDependencies.files list of files
* @property {Set<string>} resolveDependencies.directories list of directories
@ -76,6 +77,92 @@ const DONE_ITERATOR_RESULT = new Set().keys().next();
// Tsh = Timestamp + Hash
// Tshs = Timestamp + Hash combinations
class SnapshotIterator {
constructor(next) {
this.next = next;
}
}
class SnapshotIterable {
constructor(snapshot, getMaps) {
this.snapshot = snapshot;
this.getMaps = getMaps;
}
[Symbol.iterator]() {
let state = 0;
/** @type {IterableIterator<string>} */
let it;
/** @type {(Snapshot) => (Map<string, any> | Set<string>)[]} */
let getMaps;
/** @type {(Map<string, any> | Set<string>)[]} */
let maps;
/** @type {Snapshot} */
let snapshot;
let queue;
return new SnapshotIterator(() => {
for (;;) {
switch (state) {
case 0:
snapshot = this.snapshot;
getMaps = this.getMaps;
maps = getMaps(snapshot);
state = 1;
/* falls through */
case 1:
if (maps.length > 0) {
const map = maps.pop();
if (map !== undefined) {
it = map.keys();
state = 2;
} else {
break;
}
} else {
state = 3;
break;
}
/* falls through */
case 2: {
const result = it.next();
if (!result.done) return result;
state = 1;
break;
}
case 3: {
const children = snapshot.children;
if (children !== undefined) {
if (children.size === 1) {
// shortcut for a single child
// avoids allocation of queue
for (const child of children) snapshot = child;
maps = getMaps(snapshot);
state = 1;
break;
}
if (queue === undefined) queue = [];
for (const child of children) {
queue.push(child);
}
}
if (queue !== undefined && queue.length > 0) {
snapshot = queue.pop();
maps = getMaps(snapshot);
state = 1;
break;
} else {
state = 4;
}
}
/* falls through */
case 4:
return DONE_ITERATOR_RESULT;
}
}
});
}
}
class Snapshot {
constructor() {
this._flags = 0;
@ -282,63 +369,7 @@ class Snapshot {
* @returns {Iterable<string>} iterable
*/
_createIterable(getMaps) {
let snapshot = this;
return {
[Symbol.iterator]() {
let state = 0;
/** @type {IterableIterator<string>} */
let it;
let maps = getMaps(snapshot);
const queue = [];
return {
next() {
for (;;) {
switch (state) {
case 0:
if (maps.length > 0) {
const map = maps.pop();
if (map !== undefined) {
it = map.keys();
state = 1;
} else {
break;
}
} else {
state = 2;
break;
}
/* falls through */
case 1: {
const result = it.next();
if (!result.done) return result;
state = 0;
break;
}
case 2: {
const children = snapshot.children;
if (children !== undefined) {
for (const child of children) {
queue.push(child);
}
}
if (queue.length > 0) {
snapshot = queue.pop();
maps = getMaps(snapshot);
state = 0;
break;
} else {
state = 3;
}
}
/* falls through */
case 3:
return DONE_ITERATOR_RESULT;
}
}
}
};
}
};
return new SnapshotIterable(this, getMaps);
}
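
The SnapshotIterator/SnapshotIterable classes above replace the previous inline iterator object with a reusable, allocation-light state machine that walks a set of maps and then the child snapshots. For readability, a generator-based equivalent of that traversal (the production code deliberately avoids generators and the queue allocation; field names here are illustrative):

function* iterateKeys(snapshot, getMaps) {
	for (const map of getMaps(snapshot)) {
		if (map !== undefined) yield* map.keys();
	}
	if (snapshot.children !== undefined) {
		for (const child of snapshot.children) yield* iterateKeys(child, getMaps);
	}
}

const child = { files: new Map([["b.js", 1]]), children: undefined };
const root = { files: new Map([["a.js", 1]]), children: new Set([child]) };
console.log([...iterateKeys(root, s => [s.files])]); // [ 'a.js', 'b.js' ]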
/**
@ -1103,15 +1134,23 @@ class FileSystemInfo {
const resolveCjs = createResolver({
extensions: [".js", ".json", ".node"],
conditionNames: ["require", "node"],
exportsFields: ["exports"],
fileSystem: this.fs
});
const resolveCjsAsChild = createResolver({
extensions: [".js", ".json", ".node"],
conditionNames: ["require", "node"],
exportsFields: [],
fileSystem: this.fs
});
const resolveEsm = createResolver({
extensions: [".js", ".json", ".node"],
fullySpecified: true,
conditionNames: ["import", "node"],
exportsFields: ["exports"],
fileSystem: this.fs
});
return { resolveContext, resolveEsm, resolveCjs };
return { resolveContext, resolveEsm, resolveCjs, resolveCjsAsChild };
}
/**
@ -1121,11 +1160,8 @@ class FileSystemInfo {
* @returns {void}
*/
resolveBuildDependencies(context, deps, callback) {
const {
resolveContext,
resolveEsm,
resolveCjs
} = this._createBuildDependenciesResolvers();
const { resolveContext, resolveEsm, resolveCjs, resolveCjsAsChild } =
this._createBuildDependenciesResolvers();
/** @type {Set<string>} */
const files = new Set();
@ -1143,7 +1179,7 @@ class FileSystemInfo {
const resolveDirectories = new Set();
/** @type {Set<string>} */
const resolveMissing = new Set();
/** @type {Map<string, string>} */
/** @type {Map<string, string | false>} */
const resolveResults = new Map();
const invalidResolveResults = new Set();
const resolverContext = {
@ -1209,23 +1245,22 @@ class FileSystemInfo {
return callback();
}
resolveResults.set(key, undefined);
resolveContext(context, path, resolverContext, (err, result) => {
resolveContext(context, path, resolverContext, (err, _, result) => {
if (err) {
invalidResolveResults.add(key);
if (
err.code === "ENOENT" ||
err.code === "UNDECLARED_DEPENDENCY"
) {
if (expected === false) {
resolveResults.set(key, false);
return callback();
}
invalidResolveResults.add(key);
err.message += `\nwhile resolving '${path}' in ${context} to a directory`;
return callback(err);
}
resolveResults.set(key, result);
const resultPath = result.path;
resolveResults.set(key, resultPath);
push({
type: RBDT_DIRECTORY,
context: undefined,
path: result,
path: resultPath,
expected: undefined,
issuer: job
});
@ -1238,37 +1273,38 @@ class FileSystemInfo {
return callback();
}
resolveResults.set(key, undefined);
resolve(context, path, resolverContext, (err, result) => {
if (expected) {
if (result === expected) {
resolveResults.set(key, result);
resolve(context, path, resolverContext, (err, _, result) => {
if (typeof expected === "string") {
if (!err && result && result.path === expected) {
resolveResults.set(key, result.path);
} else {
invalidResolveResults.add(key);
this.logger.debug(
`Resolving '${path}' in ${context} for build dependencies doesn't lead to expected result '${expected}', but to '${result}' instead. Resolving dependencies are ignored for this path.\n${pathToString(
this.logger.warn(
`Resolving '${path}' in ${context} for build dependencies doesn't lead to expected result '${expected}', but to '${
err || (result && result.path)
}' instead. Resolving dependencies are ignored for this path.\n${pathToString(
job
)}`
);
}
} else {
if (err) {
invalidResolveResults.add(key);
if (
err.code === "ENOENT" ||
err.code === "UNDECLARED_DEPENDENCY"
) {
if (expected === false) {
resolveResults.set(key, false);
return callback();
}
invalidResolveResults.add(key);
err.message += `\nwhile resolving '${path}' in ${context} as file\n${pathToString(
job
)}`;
return callback(err);
}
resolveResults.set(key, result);
const resultPath = result.path;
resolveResults.set(key, resultPath);
push({
type: RBDT_FILE,
context: undefined,
path: result,
path: resultPath,
expected: undefined,
issuer: job
});
@ -1303,6 +1339,10 @@ class FileSystemInfo {
resolveFile(path, "f", resolveCjs);
break;
}
case RBDT_RESOLVE_CJS_FILE_AS_CHILD: {
resolveFile(path, "c", resolveCjsAsChild);
break;
}
case RBDT_RESOLVE_ESM_FILE: {
resolveFile(path, "e", resolveEsm);
break;
@ -1382,11 +1422,29 @@ class FileSystemInfo {
const context = dirname(this.fs, path);
for (const modulePath of module.paths) {
if (childPath.startsWith(modulePath)) {
let request = childPath.slice(modulePath.length + 1);
let subPath = childPath.slice(modulePath.length + 1);
const packageMatch = /^(@[^\\/]+[\\/])[^\\/]+/.exec(
subPath
);
if (packageMatch) {
push({
type: RBDT_FILE,
context: undefined,
path:
modulePath +
childPath[modulePath.length] +
packageMatch[0] +
childPath[modulePath.length] +
"package.json",
expected: false,
issuer: job
});
}
let request = subPath.replace(/\\/g, "/");
if (request.endsWith(".js"))
request = request.slice(0, -3);
push({
type: RBDT_RESOLVE_CJS_FILE,
type: RBDT_RESOLVE_CJS_FILE_AS_CHILD,
context,
path: request,
expected: child.filename,
@ -1410,7 +1468,7 @@ class FileSystemInfo {
}
} else if (supportsEsm && /\.m?js$/.test(path)) {
if (!this._warnAboutExperimentalEsmTracking) {
this.logger.info(
this.logger.log(
"Node.js doesn't offer a (nice) way to introspect the ESM dependency graph yet.\n" +
"Until a full solution is available webpack uses an experimental ESM tracking based on parsing.\n" +
"As best effort webpack parses the ESM files to guess dependencies. But this can lead to expensive and incorrect tracking."
@ -1485,9 +1543,8 @@ class FileSystemInfo {
break;
}
case RBDT_DIRECTORY_DEPENDENCIES: {
const match = /(^.+[\\/]node_modules[\\/](?:@[^\\/]+[\\/])?[^\\/]+)/.exec(
path
);
const match =
/(^.+[\\/]node_modules[\\/](?:@[^\\/]+[\\/])?[^\\/]+)/.exec(path);
const packagePath = match ? match[1] : path;
const packageJson = join(this.fs, packagePath, "package.json");
this.fs.readFile(packageJson, (err, content) => {
@ -1517,17 +1574,32 @@ class FileSystemInfo {
return callback(e);
}
const depsObject = packageData.dependencies;
const optionalDepsObject = packageData.optionalDependencies;
const allDeps = new Set();
const optionalDeps = new Set();
if (typeof depsObject === "object" && depsObject) {
for (const dep of Object.keys(depsObject)) {
push({
type: RBDT_RESOLVE_DIRECTORY,
context: packagePath,
path: dep,
expected: undefined,
issuer: job
});
allDeps.add(dep);
}
}
if (
typeof optionalDepsObject === "object" &&
optionalDepsObject
) {
for (const dep of Object.keys(optionalDepsObject)) {
allDeps.add(dep);
optionalDeps.add(dep);
}
}
for (const dep of allDeps) {
push({
type: RBDT_RESOLVE_DIRECTORY,
context: packagePath,
path: dep,
expected: !optionalDeps.has(dep),
issuer: job
});
}
callback();
});
break;
@ -1555,16 +1627,13 @@ class FileSystemInfo {
}
/**
* @param {Map<string, string>} resolveResults results from resolving
* @param {Map<string, string | false>} resolveResults results from resolving
* @param {function(Error=, boolean=): void} callback callback with true when resolveResults resolve the same way
* @returns {void}
*/
checkResolveResultsValid(resolveResults, callback) {
const {
resolveCjs,
resolveEsm,
resolveContext
} = this._createBuildDependenciesResolvers();
const { resolveCjs, resolveCjsAsChild, resolveEsm, resolveContext } =
this._createBuildDependenciesResolvers();
asyncLib.eachLimit(
resolveResults,
20,
@ -1572,23 +1641,42 @@ class FileSystemInfo {
const [type, context, path] = key.split("\n");
switch (type) {
case "d":
resolveContext(context, path, {}, (err, result) => {
resolveContext(context, path, {}, (err, _, result) => {
if (expectedResult === false)
return callback(err ? undefined : INVALID);
if (err) return callback(err);
if (result !== expectedResult) return callback(INVALID);
const resultPath = result.path;
if (resultPath !== expectedResult) return callback(INVALID);
callback();
});
break;
case "f":
resolveCjs(context, path, {}, (err, result) => {
resolveCjs(context, path, {}, (err, _, result) => {
if (expectedResult === false)
return callback(err ? undefined : INVALID);
if (err) return callback(err);
if (result !== expectedResult) return callback(INVALID);
const resultPath = result.path;
if (resultPath !== expectedResult) return callback(INVALID);
callback();
});
break;
case "c":
resolveCjsAsChild(context, path, {}, (err, _, result) => {
if (expectedResult === false)
return callback(err ? undefined : INVALID);
if (err) return callback(err);
const resultPath = result.path;
if (resultPath !== expectedResult) return callback(INVALID);
callback();
});
break;
case "e":
resolveEsm(context, path, {}, (err, result) => {
resolveEsm(context, path, {}, (err, _, result) => {
if (expectedResult === false)
return callback(err ? undefined : INVALID);
if (err) return callback(err);
if (result !== expectedResult) return callback(INVALID);
const resultPath = result.path;
if (resultPath !== expectedResult) return callback(INVALID);
callback();
});
break;
@ -1747,11 +1835,12 @@ class FileSystemInfo {
unsharedManagedFiles
);
}
const unsharedManagedContexts = this._managedContextsOptimization.optimize(
managedContexts,
undefined,
children
);
const unsharedManagedContexts =
this._managedContextsOptimization.optimize(
managedContexts,
undefined,
children
);
if (managedContexts.size !== 0) {
snapshot.setManagedContexts(managedContexts);
this._managedContextsOptimization.storeUnsharedSnapshot(
@ -1759,11 +1848,12 @@ class FileSystemInfo {
unsharedManagedContexts
);
}
const unsharedManagedMissing = this._managedMissingOptimization.optimize(
managedMissing,
undefined,
children
);
const unsharedManagedMissing =
this._managedMissingOptimization.optimize(
managedMissing,
undefined,
children
);
if (managedMissing.size !== 0) {
snapshot.setManagedMissing(managedMissing);
this._managedMissingOptimization.storeUnsharedSnapshot(
@ -1967,11 +2057,12 @@ class FileSystemInfo {
}
break;
case 1:
unsharedContextTimestamps = this._contextTimestampsOptimization.optimize(
capturedDirectories,
startTime,
children
);
unsharedContextTimestamps =
this._contextTimestampsOptimization.optimize(
capturedDirectories,
startTime,
children
);
for (const path of capturedDirectories) {
const cache = this._contextTimestamps.get(path);
if (cache !== undefined) {
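
The package.json handling above now collects optionalDependencies as well, marks them with `expected: false`, and later records a failed resolution of such an entry as `false` instead of raising an error. A rough standalone sketch of that rule (not webpack's actual resolver API):

function recordResolve(results, key, required, resolve) {
	try {
		results.set(key, resolve()); // remember the resolved path
	} catch (err) {
		if (!required) {
			results.set(key, false); // optional and missing -> expected, not an error
			return;
		}
		throw err;
	}
}

const results = new Map();
recordResolve(results, "some-optional-native-addon", false, () => {
	throw new Error("ENOENT");
});
console.log(results.get("some-optional-native-addon")); // false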

View File

@ -12,6 +12,7 @@ const Queue = require("./util/Queue");
/** @typedef {import("./DependenciesBlock")} DependenciesBlock */
/** @typedef {import("./Dependency")} Dependency */
/** @typedef {import("./Dependency").ExportSpec} ExportSpec */
/** @typedef {import("./Dependency").ExportsSpec} ExportsSpec */
/** @typedef {import("./ExportsInfo")} ExportsInfo */
/** @typedef {import("./Module")} Module */
@ -34,6 +35,7 @@ class FlagDependencyExportsPlugin {
"webpack.FlagDependencyExportsPlugin"
);
let statRestoredFromCache = 0;
let statNoExports = 0;
let statFlaggedUncached = 0;
let statNotCached = 0;
let statQueueItemsProcessed = 0;
@ -46,6 +48,16 @@ class FlagDependencyExportsPlugin {
asyncLib.each(
modules,
(module, callback) => {
const exportsInfo = moduleGraph.getExportsInfo(module);
if (!module.buildMeta || !module.buildMeta.exportsType) {
if (exportsInfo.otherExportsInfo.provided !== null) {
// It's a module without declared exports
statNoExports++;
exportsInfo.setHasProvideInfo();
exportsInfo.setUnknownExportsProvided();
return callback();
}
}
if (
module.buildInfo.cacheable !== true ||
typeof module.buildInfo.hash !== "string"
@ -53,7 +65,7 @@ class FlagDependencyExportsPlugin {
statFlaggedUncached++;
// Enqueue uncacheable module for determining the exports
queue.enqueue(module);
moduleGraph.getExportsInfo(module).setHasProvideInfo();
exportsInfo.setHasProvideInfo();
return callback();
}
cache.get(
@ -71,7 +83,7 @@ class FlagDependencyExportsPlugin {
statNotCached++;
// Without cached info enqueue module for determining the exports
queue.enqueue(module);
moduleGraph.getExportsInfo(module).setHasProvideInfo();
exportsInfo.setHasProvideInfo();
}
callback();
}
@ -93,6 +105,9 @@ class FlagDependencyExportsPlugin {
/** @type {ExportsInfo} */
let exportsInfo;
/** @type {Map<Dependency, ExportsSpec>} */
const exportsSpecsFromDependencies = new Map();
let cacheable = true;
let changed = false;
@ -116,9 +131,19 @@ class FlagDependencyExportsPlugin {
const processDependency = dep => {
const exportDesc = dep.getExports(moduleGraph);
if (!exportDesc) return;
exportsSpecsFromDependencies.set(dep, exportDesc);
};
/**
* @param {Dependency} dep dependency
* @param {ExportsSpec} exportDesc info
* @returns {void}
*/
const processExportsSpec = (dep, exportDesc) => {
const exports = exportDesc.exports;
const globalCanMangle = exportDesc.canMangle;
const globalFrom = exportDesc.from;
const globalPriority = exportDesc.priority;
const globalTerminalBinding =
exportDesc.terminalBinding || false;
const exportDeps = exportDesc.dependencies;
@ -135,7 +160,8 @@ class FlagDependencyExportsPlugin {
globalCanMangle,
exportDesc.excludeExports,
globalFrom && dep,
globalFrom
globalFrom,
globalPriority
)
) {
changed = true;
@ -154,6 +180,7 @@ class FlagDependencyExportsPlugin {
let exports = undefined;
let from = globalFrom;
let fromExport = undefined;
let priority = globalPriority;
let hidden = false;
if (typeof exportNameOrSpec === "string") {
name = exportNameOrSpec;
@ -167,6 +194,8 @@ class FlagDependencyExportsPlugin {
exports = exportNameOrSpec.exports;
if (exportNameOrSpec.from !== undefined)
from = exportNameOrSpec.from;
if (exportNameOrSpec.priority !== undefined)
priority = exportNameOrSpec.priority;
if (exportNameOrSpec.terminalBinding !== undefined)
terminalBinding = exportNameOrSpec.terminalBinding;
if (exportNameOrSpec.hidden !== undefined)
@ -174,7 +203,10 @@ class FlagDependencyExportsPlugin {
}
const exportInfo = exportsInfo.getExportInfo(name);
if (exportInfo.provided === false) {
if (
exportInfo.provided === false ||
exportInfo.provided === null
) {
exportInfo.provided = true;
changed = true;
}
@ -193,7 +225,8 @@ class FlagDependencyExportsPlugin {
}
if (exports) {
const nestedExportsInfo = exportInfo.createNestedExportsInfo();
const nestedExportsInfo =
exportInfo.createNestedExportsInfo();
mergeExports(nestedExportsInfo, exports);
}
@ -204,7 +237,8 @@ class FlagDependencyExportsPlugin {
: exportInfo.setTarget(
dep,
from,
fromExport === undefined ? [name] : fromExport
fromExport === undefined ? [name] : fromExport,
priority
))
) {
changed = true;
@ -214,12 +248,12 @@ class FlagDependencyExportsPlugin {
const target = exportInfo.getTarget(moduleGraph);
let targetExportsInfo = undefined;
if (target) {
const targetModuleExportsInfo = moduleGraph.getExportsInfo(
target.module
);
targetExportsInfo = targetModuleExportsInfo.getNestedExportsInfo(
target.export
);
const targetModuleExportsInfo =
moduleGraph.getExportsInfo(target.module);
targetExportsInfo =
targetModuleExportsInfo.getNestedExportsInfo(
target.export
);
// add dependency for this module
const set = dependencies.get(target.module);
if (set === undefined) {
@ -278,40 +312,39 @@ class FlagDependencyExportsPlugin {
statQueueItemsProcessed++;
exportsInfo = moduleGraph.getExportsInfo(module);
if (!module.buildMeta || !module.buildMeta.exportsType) {
if (exportsInfo.otherExportsInfo.provided !== null) {
// It's a module without declared exports
exportsInfo.setUnknownExportsProvided();
modulesToStore.add(module);
notifyDependencies();
}
} else {
// It's a module with declared exports
cacheable = true;
changed = false;
cacheable = true;
changed = false;
processDependenciesBlock(module);
exportsSpecsFromDependencies.clear();
moduleGraph.freeze();
processDependenciesBlock(module);
moduleGraph.unfreeze();
for (const [
dep,
exportsSpec
] of exportsSpecsFromDependencies) {
processExportsSpec(dep, exportsSpec);
}
if (cacheable) {
modulesToStore.add(module);
}
if (cacheable) {
modulesToStore.add(module);
}
if (changed) {
notifyDependencies();
}
if (changed) {
notifyDependencies();
}
}
logger.timeEnd("figure out provided exports");
logger.log(
`${Math.round(
100 -
(100 * statRestoredFromCache) /
(statRestoredFromCache +
statNotCached +
statFlaggedUncached)
)}% of exports of modules have been determined (${statNotCached} not cached, ${statFlaggedUncached} flagged uncacheable, ${statRestoredFromCache} from cache, ${
(100 * (statFlaggedUncached + statNotCached)) /
(statRestoredFromCache +
statNotCached +
statFlaggedUncached +
statNoExports)
)}% of exports of modules have been determined (${statNoExports} no declared exports, ${statNotCached} not cached, ${statFlaggedUncached} flagged uncacheable, ${statRestoredFromCache} from cache, ${
statQueueItemsProcessed -
statNotCached -
statFlaggedUncached

View File

@ -201,10 +201,8 @@ class FlagDependencyUsagePlugin {
if (oldReferencedExports === EXPORTS_OBJECT_REFERENCED) {
continue;
}
const referencedExports = compilation.getDependencyReferencedExports(
dep,
runtime
);
const referencedExports =
compilation.getDependencyReferencedExports(dep, runtime);
if (
oldReferencedExports === undefined ||
oldReferencedExports === NO_EXPORTS_REFERENCED ||

View File

@ -12,7 +12,5 @@ module.exports = class HarmonyLinkingError extends WebpackError {
super(message);
this.name = "HarmonyLinkingError";
this.hideStack = true;
Error.captureStackTrace(this, this.constructor);
}
};

View File

@ -32,7 +32,6 @@ class HookWebpackError extends WebpackError {
this.hideStack = true;
this.details = `caused by plugins in ${hook}\n${error.stack}`;
Error.captureStackTrace(this, this.constructor);
this.stack += `\n-- inner error --\n${error.stack}`;
}
}

View File

@ -31,7 +31,8 @@ const {
keyToRuntime,
forEachRuntime,
mergeRuntimeOwned,
subtractRuntime
subtractRuntime,
intersectRuntime
} = require("./util/runtime");
/** @typedef {import("./Chunk")} Chunk */
@ -87,10 +88,8 @@ class HotModuleReplacementPlugin {
const runtimeRequirements = [RuntimeGlobals.module];
const createAcceptHandler = (parser, ParamDependency) => {
const {
hotAcceptCallback,
hotAcceptWithoutCallback
} = HotModuleReplacementPlugin.getParserHooks(parser);
const { hotAcceptCallback, hotAcceptWithoutCallback } =
HotModuleReplacementPlugin.getParserHooks(parser);
return expr => {
const module = parser.state.module;
@ -306,9 +305,7 @@ class HotModuleReplacementPlugin {
}
records.chunkModuleIds = {};
for (const chunk of compilation.chunks) {
records.chunkModuleIds[
chunk.id
] = Array.from(
records.chunkModuleIds[chunk.id] = Array.from(
chunkGraph.getOrderedChunkModulesIterable(
chunk,
compareModulesById(chunkGraph)
@ -341,9 +338,8 @@ class HotModuleReplacementPlugin {
return chunkGraph.getModuleHash(module, chunk.runtime);
}
};
const fullHashModulesInThisChunk = chunkGraph.getChunkFullHashModulesSet(
chunk
);
const fullHashModulesInThisChunk =
chunkGraph.getChunkFullHashModulesSet(chunk);
if (fullHashModulesInThisChunk !== undefined) {
for (const module of fullHashModulesInThisChunk) {
fullHashModules.add(module, chunk);
@ -451,16 +447,14 @@ class HotModuleReplacementPlugin {
allOldRuntime = mergeRuntimeOwned(allOldRuntime, runtime);
}
forEachRuntime(allOldRuntime, runtime => {
const {
path: filename,
info: assetInfo
} = compilation.getPathWithInfo(
compilation.outputOptions.hotUpdateMainFilename,
{
hash: records.hash,
runtime
}
);
const { path: filename, info: assetInfo } =
compilation.getPathWithInfo(
compilation.outputOptions.hotUpdateMainFilename,
{
hash: records.hash,
runtime
}
);
hotUpdateMainContentByRuntime.set(runtime, {
updatedChunkIds: new Set(),
removedChunkIds: new Set(),
@ -509,16 +503,19 @@ class HotModuleReplacementPlugin {
);
if (currentChunk) {
chunkId = currentChunk.id;
newRuntime = currentChunk.runtime;
newRuntime = intersectRuntime(
currentChunk.runtime,
allOldRuntime
);
if (newRuntime === undefined) continue;
newModules = chunkGraph
.getChunkModules(currentChunk)
.filter(module => updatedModules.has(module, currentChunk));
newRuntimeModules = Array.from(
chunkGraph.getChunkRuntimeModulesIterable(currentChunk)
).filter(module => updatedModules.has(module, currentChunk));
const fullHashModules = chunkGraph.getChunkFullHashModulesIterable(
currentChunk
);
const fullHashModules =
chunkGraph.getChunkFullHashModulesIterable(currentChunk);
newFullHashModules =
fullHashModules &&
Array.from(fullHashModules).filter(module =>
@ -631,13 +628,11 @@ class HotModuleReplacementPlugin {
filename = entry.filename;
assetInfo = entry.info;
} else {
({
path: filename,
info: assetInfo
} = compilation.getPathWithInfo(
entry.filenameTemplate,
entry.pathOptions
));
({ path: filename, info: assetInfo } =
compilation.getPathWithInfo(
entry.filenameTemplate,
entry.pathOptions
));
}
const source = entry.render();
compilation.additionalChunkAssets.push(filename);

View File

@ -5,22 +5,27 @@
"use strict";
const { validate } = require("schema-utils");
const schema = require("../schemas/plugins/IgnorePlugin.json");
const createSchemaValidation = require("./util/create-schema-validation");
/** @typedef {import("../declarations/plugins/IgnorePlugin").IgnorePluginOptions} IgnorePluginOptions */
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("./NormalModuleFactory").ResolveData} ResolveData */
const validate = createSchemaValidation(
require("../schemas/plugins/IgnorePlugin.check.js"),
() => require("../schemas/plugins/IgnorePlugin.json"),
{
name: "Ignore Plugin",
baseDataPath: "options"
}
);
class IgnorePlugin {
/**
* @param {IgnorePluginOptions} options IgnorePlugin options
*/
constructor(options) {
validate(schema, options, {
name: "Ignore Plugin",
baseDataPath: "options"
});
validate(options);
this.options = options;
/** @private @type {Function} */

View File

@ -73,13 +73,25 @@ class InitFragment {
// Deduplicate fragments. If a fragment has no key, it is always included.
const keyedFragments = new Map();
for (const [fragment] of sortedFragments) {
if (typeof fragment.merge === "function") {
if (typeof fragment.mergeAll === "function") {
if (!fragment.key) {
throw new Error(
`InitFragment with mergeAll function must have a valid key: ${fragment.constructor.name}`
);
}
const oldValue = keyedFragments.get(fragment.key);
if (oldValue === undefined) {
keyedFragments.set(fragment.key, fragment);
} else if (Array.isArray(oldValue)) {
oldValue.push(fragment);
} else {
keyedFragments.set(fragment.key, [oldValue, fragment]);
}
continue;
} else if (typeof fragment.merge === "function") {
const oldValue = keyedFragments.get(fragment.key);
if (oldValue !== undefined) {
keyedFragments.set(
fragment.key || Symbol(),
fragment.merge(oldValue)
);
keyedFragments.set(fragment.key, fragment.merge(oldValue));
continue;
}
}
@ -88,7 +100,10 @@ class InitFragment {
const concatSource = new ConcatSource();
const endContents = [];
for (const fragment of keyedFragments.values()) {
for (let fragment of keyedFragments.values()) {
if (Array.isArray(fragment)) {
fragment = fragment[0].mergeAll(fragment);
}
concatSource.add(fragment.getContent(generateContext));
const endContent = fragment.getEndContent(generateContext);
if (endContent) {
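
The InitFragment change above adds a `mergeAll` path: fragments sharing a key are collected into an array and merged in one pass instead of pairwise. A simplified sketch of that keyed de-duplication, assuming every fragment has a key and a mergeAll:

function dedupe(fragments) {
	const byKey = new Map();
	for (const fragment of fragments) {
		const prev = byKey.get(fragment.key);
		if (prev === undefined) byKey.set(fragment.key, fragment);
		else if (Array.isArray(prev)) prev.push(fragment);
		else byKey.set(fragment.key, [prev, fragment]);
	}
	return Array.from(byKey.values(), entry =>
		Array.isArray(entry) ? entry[0].mergeAll(entry) : entry
	);
}

const frag = content => ({
	key: "imports",
	content,
	mergeAll(all) {
		return frag(all.map(f => f.content).join(""));
	}
});
console.log(dedupe([frag("a;"), frag("b;")])[0].content); // a;b;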

View File

@ -33,8 +33,6 @@ ${depsList.slice(0, 3).join("\n")}${
this.name = "InvalidDependenciesModuleWarning";
this.details = depsList.slice(3).join("\n");
this.module = module;
Error.captureStackTrace(this, this.constructor);
}
}

View File

@ -34,7 +34,8 @@ class JavascriptMetaInfoPlugin {
let topLevelDeclarations =
parser.state.module.buildInfo.topLevelDeclarations;
if (topLevelDeclarations === undefined) {
topLevelDeclarations = parser.state.module.buildInfo.topLevelDeclarations = new Set();
topLevelDeclarations =
parser.state.module.buildInfo.topLevelDeclarations = new Set();
}
for (const name of parser.scope.definitions.asSet()) {
const freeInfo = parser.getFreeInfoFromVariable(name);

View File

@ -7,22 +7,25 @@
const ModuleFilenameHelpers = require("./ModuleFilenameHelpers");
const NormalModule = require("./NormalModule");
const { validate } = require("schema-utils");
const schema = require("../schemas/plugins/LoaderOptionsPlugin.json");
const createSchemaValidation = require("./util/create-schema-validation");
/** @typedef {import("../declarations/plugins/LoaderOptionsPlugin").LoaderOptionsPluginOptions} LoaderOptionsPluginOptions */
/** @typedef {import("./Compiler")} Compiler */
const validate = createSchemaValidation(
require("../schemas/plugins/LoaderOptionsPlugin.check.js"),
() => require("../schemas/plugins/LoaderOptionsPlugin.json"),
{
name: "Loader Options Plugin",
baseDataPath: "options"
}
);
class LoaderOptionsPlugin {
/**
* @param {LoaderOptionsPluginOptions} options options object
*/
constructor(options = {}) {
validate(schema, options, {
name: "Loader Options Plugin",
baseDataPath: "options"
});
validate(options);
if (typeof options !== "object") options = {};
if (!options.test) {
options.test = {

View File

@ -239,9 +239,8 @@ class MainTemplate {
"chunkIdExpression"
]),
get jsonpScript() {
const hooks = getLoadScriptRuntimeModule().getCompilationHooks(
compilation
);
const hooks =
getLoadScriptRuntimeModule().getCompilationHooks(compilation);
return hooks.createScript;
},
get linkPrefetch() {

View File

@ -94,7 +94,7 @@ const makeSerializable = require("./util/makeSerializable");
/**
* @typedef {Object} NeedBuildContext
* @property {FileSystemInfo} fileSystemInfo
* @property {Map<string, string>} valueCacheVersions
* @property {Map<string, string | Set<string>>} valueCacheVersions
*/
/** @typedef {KnownBuildMeta & Record<string, any>} BuildMeta */

View File

@ -53,8 +53,6 @@ class ModuleBuildError extends WebpackError {
this.name = "ModuleBuildError";
this.details = details;
this.error = err;
Error.captureStackTrace(this, this.constructor);
}
serialize(context) {

View File

@ -30,8 +30,6 @@ class ModuleDependencyError extends WebpackError {
/** error is not (de)serialized, so it might be undefined after deserialization */
this.error = err;
Error.captureStackTrace(this, this.constructor);
if (err && /** @type {any} */ (err).hideStack) {
this.stack =
err.stack.split("\n").slice(1).join("\n") + "\n\n" + this.stack;

View File

@ -30,8 +30,6 @@ class ModuleDependencyWarning extends WebpackError {
/** error is not (de)serialized, so it might be undefined after deserialization */
this.error = err;
Error.captureStackTrace(this, this.constructor);
if (err && /** @type {any} */ (err).hideStack) {
this.stack =
err.stack.split("\n").slice(1).join("\n") + "\n\n" + this.stack;

View File

@ -37,8 +37,6 @@ class ModuleError extends WebpackError {
err && typeof err === "object" && err.stack
? cleanUp(err.stack, this.message)
: undefined;
Error.captureStackTrace(this, this.constructor);
}
serialize(context) {

View File

@ -12,14 +12,16 @@ const ModuleFilenameHelpers = exports;
// TODO webpack 6: consider removing these
ModuleFilenameHelpers.ALL_LOADERS_RESOURCE = "[all-loaders][resource]";
ModuleFilenameHelpers.REGEXP_ALL_LOADERS_RESOURCE = /\[all-?loaders\]\[resource\]/gi;
ModuleFilenameHelpers.REGEXP_ALL_LOADERS_RESOURCE =
/\[all-?loaders\]\[resource\]/gi;
ModuleFilenameHelpers.LOADERS_RESOURCE = "[loaders][resource]";
ModuleFilenameHelpers.REGEXP_LOADERS_RESOURCE = /\[loaders\]\[resource\]/gi;
ModuleFilenameHelpers.RESOURCE = "[resource]";
ModuleFilenameHelpers.REGEXP_RESOURCE = /\[resource\]/gi;
ModuleFilenameHelpers.ABSOLUTE_RESOURCE_PATH = "[absolute-resource-path]";
// cSpell:words olute
ModuleFilenameHelpers.REGEXP_ABSOLUTE_RESOURCE_PATH = /\[abs(olute)?-?resource-?path\]/gi;
ModuleFilenameHelpers.REGEXP_ABSOLUTE_RESOURCE_PATH =
/\[abs(olute)?-?resource-?path\]/gi;
ModuleFilenameHelpers.RESOURCE_PATH = "[resource-path]";
ModuleFilenameHelpers.REGEXP_RESOURCE_PATH = /\[resource-?path\]/gi;
ModuleFilenameHelpers.ALL_LOADERS = "[all-loaders]";

View File

@ -9,6 +9,7 @@ const util = require("util");
const ExportsInfo = require("./ExportsInfo");
const ModuleGraphConnection = require("./ModuleGraphConnection");
const SortableSet = require("./util/SortableSet");
const WeakTupleMap = require("./util/WeakTupleMap");
/** @typedef {import("./DependenciesBlock")} DependenciesBlock */
/** @typedef {import("./Dependency")} Dependency */
@ -24,7 +25,7 @@ const SortableSet = require("./util/SortableSet");
* @returns {string}
*/
const EMPTY_ARRAY = [];
const EMPTY_SET = new Set();
/**
* @param {SortableSet<ModuleGraphConnection>} set input
@ -81,20 +82,9 @@ class ModuleGraphModule {
}
}
class ModuleGraphDependency {
constructor() {
/** @type {ModuleGraphConnection} */
this.connection = undefined;
/** @type {Module} */
this.parentModule = undefined;
/** @type {DependenciesBlock} */
this.parentBlock = undefined;
}
}
class ModuleGraph {
constructor() {
/** @type {Map<Dependency, ModuleGraphDependency>} */
/** @type {Map<Dependency, ModuleGraphConnection>} */
this._dependencyMap = new Map();
/** @type {Map<Module, ModuleGraphModule>} */
this._moduleMap = new Map();
@ -110,6 +100,9 @@ class ModuleGraph {
this._cacheModuleGraphModuleValue2 = undefined;
this._cacheModuleGraphDependencyKey = undefined;
this._cacheModuleGraphDependencyValue = undefined;
/** @type {WeakTupleMap<any[], any>} */
this._cache = undefined;
}
/**
@ -133,23 +126,6 @@ class ModuleGraph {
return mgm;
}
/**
* @param {Dependency} dependency the dependency
* @returns {ModuleGraphDependency} the internal dependency
*/
_getModuleGraphDependency(dependency) {
if (this._cacheModuleGraphDependencyKey === dependency)
return this._cacheModuleGraphDependencyValue;
let mgd = this._dependencyMap.get(dependency);
if (mgd === undefined) {
mgd = new ModuleGraphDependency();
this._dependencyMap.set(dependency, mgd);
}
this._cacheModuleGraphDependencyKey = dependency;
this._cacheModuleGraphDependencyValue = mgd;
return mgd;
}
/**
* @param {Dependency} dependency the dependency
* @param {DependenciesBlock} block parent block
@ -157,9 +133,8 @@ class ModuleGraph {
* @returns {void}
*/
setParents(dependency, block, module) {
const mgd = this._getModuleGraphDependency(dependency);
mgd.parentBlock = block;
mgd.parentModule = module;
dependency._parentDependenciesBlock = block;
dependency._parentModule = module;
}
/**
@ -167,8 +142,7 @@ class ModuleGraph {
* @returns {Module} parent module
*/
getParentModule(dependency) {
const mgd = this._getModuleGraphDependency(dependency);
return mgd.parentModule;
return dependency._parentModule;
}
/**
@ -176,8 +150,7 @@ class ModuleGraph {
* @returns {DependenciesBlock} parent block
*/
getParentBlock(dependency) {
const mgd = this._getModuleGraphDependency(dependency);
return mgd.parentBlock;
return dependency._parentDependenciesBlock;
}
/**
@ -195,8 +168,7 @@ class ModuleGraph {
dependency.weak,
dependency.getCondition(this)
);
const mgd = this._getModuleGraphDependency(dependency);
mgd.connection = connection;
this._dependencyMap.set(dependency, connection);
const connections = this._getModuleGraphModule(module).incomingConnections;
connections.add(connection);
const mgm = this._getModuleGraphModule(originModule);
@ -212,12 +184,11 @@ class ModuleGraph {
* @returns {void}
*/
updateModule(dependency, module) {
const mgd = this._getModuleGraphDependency(dependency);
if (mgd.connection.module === module) return;
const { connection } = mgd;
const connection = this._dependencyMap.get(dependency);
if (connection.module === module) return;
const newConnection = connection.clone();
newConnection.module = module;
mgd.connection = newConnection;
this._dependencyMap.set(dependency, newConnection);
connection.setActive(false);
const originMgm = this._getModuleGraphModule(connection.originModule);
originMgm.outgoingConnections.add(newConnection);
@ -230,13 +201,12 @@ class ModuleGraph {
* @returns {void}
*/
removeConnection(dependency) {
const mgd = this._getModuleGraphDependency(dependency);
const { connection } = mgd;
const connection = this._dependencyMap.get(dependency);
const targetMgm = this._getModuleGraphModule(connection.module);
targetMgm.incomingConnections.delete(connection);
const originMgm = this._getModuleGraphModule(connection.originModule);
originMgm.outgoingConnections.delete(connection);
mgd.connection = undefined;
this._dependencyMap.delete(dependency);
}
/**
@ -245,7 +215,7 @@ class ModuleGraph {
* @returns {void}
*/
addExplanation(dependency, explanation) {
const { connection } = this._getModuleGraphDependency(dependency);
const connection = this._dependencyMap.get(dependency);
connection.addExplanation(explanation);
}
@ -371,7 +341,7 @@ class ModuleGraph {
* @returns {Module} the referenced module
*/
getResolvedModule(dependency) {
const { connection } = this._getModuleGraphDependency(dependency);
const connection = this._dependencyMap.get(dependency);
return connection !== undefined ? connection.resolvedModule : null;
}
@ -380,7 +350,7 @@ class ModuleGraph {
* @returns {ModuleGraphConnection | undefined} the connection
*/
getConnection(dependency) {
const { connection } = this._getModuleGraphDependency(dependency);
const connection = this._dependencyMap.get(dependency);
return connection;
}
@ -389,7 +359,7 @@ class ModuleGraph {
* @returns {Module} the referenced module
*/
getModule(dependency) {
const { connection } = this._getModuleGraphDependency(dependency);
const connection = this._dependencyMap.get(dependency);
return connection !== undefined ? connection.module : null;
}
@ -398,7 +368,7 @@ class ModuleGraph {
* @returns {Module} the referencing module
*/
getOrigin(dependency) {
const { connection } = this._getModuleGraphDependency(dependency);
const connection = this._dependencyMap.get(dependency);
return connection !== undefined ? connection.originModule : null;
}
@ -407,7 +377,7 @@ class ModuleGraph {
* @returns {Module} the original referencing module
*/
getResolvedOrigin(dependency) {
const { connection } = this._getModuleGraphDependency(dependency);
const connection = this._dependencyMap.get(dependency);
return connection !== undefined ? connection.resolvedOriginModule : null;
}
@ -426,7 +396,7 @@ class ModuleGraph {
*/
getOutgoingConnections(module) {
const connections = this._getModuleGraphModule(module).outgoingConnections;
return connections === undefined ? EMPTY_ARRAY : connections;
return connections === undefined ? EMPTY_SET : connections;
}
/**
@ -699,6 +669,26 @@ class ModuleGraph {
return this._metaMap.get(thing);
}
freeze() {
this._cache = new WeakTupleMap();
}
unfreeze() {
this._cache = undefined;
}
/**
* @template {any[]} T
* @template V
* @param {(moduleGraph: ModuleGraph, ...args: T) => V} fn computer
* @param {T} args arguments
* @returns {V} computed value or cached
*/
cached(fn, ...args) {
if (this._cache === undefined) return fn(this, ...args);
return this._cache.provide(fn, ...args, () => fn(this, ...args));
}
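
The new freeze()/unfreeze()/cached() trio memoizes expensive graph queries while the graph is frozen, keyed on the computing function plus its argument tuple. Below is a minimal sketch of that idea, assuming the simplified provide(...args, computer) contract visible in the call above; SimpleTupleMap is a hypothetical stand-in, not webpack's WeakTupleMap (which also handles weakly held object keys).

const VALUE = Symbol("value");

// Minimal stand-in for a tuple-keyed memo cache (assumption: the real
// WeakTupleMap uses weak references for object keys; this sketch uses plain Maps).
class SimpleTupleMap {
	constructor() {
		this._root = new Map();
	}
	// provide(...args, computer): return the cached value for this exact
	// argument tuple, computing it once via `computer` on first access.
	provide(...args) {
		const computer = args.pop();
		let node = this._root;
		for (const arg of args) {
			let next = node.get(arg);
			if (next === undefined) {
				next = new Map();
				node.set(arg, next);
			}
			node = next;
		}
		if (!node.has(VALUE)) node.set(VALUE, computer());
		return node.get(VALUE);
	}
}

// Usage mirroring ModuleGraph.cached(): repeated calls with the same
// (fn, ...args) tuple hit the cache while the graph is "frozen".
const cache = new SimpleTupleMap();
const expensive = (graph, x) => ({ graph, x });
const a = cache.provide(expensive, "graph", 1, () => expensive("graph", 1));
const b = cache.provide(expensive, "graph", 1, () => expensive("graph", 1));
console.log(a === b); // true
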
// TODO remove in webpack 6
/**
* @param {Module} module the module

View File

@ -183,5 +183,9 @@ class ModuleGraphConnection {
module.exports = ModuleGraphConnection;
module.exports.addConnectionStates = addConnectionStates;
module.exports.TRANSITIVE_ONLY = /** @type {typeof TRANSITIVE_ONLY} */ (TRANSITIVE_ONLY);
module.exports.CIRCULAR_CONNECTION = /** @type {typeof CIRCULAR_CONNECTION} */ (CIRCULAR_CONNECTION);
module.exports.TRANSITIVE_ONLY = /** @type {typeof TRANSITIVE_ONLY} */ (
TRANSITIVE_ONLY
);
module.exports.CIRCULAR_CONNECTION = /** @type {typeof CIRCULAR_CONNECTION} */ (
CIRCULAR_CONNECTION
);

View File

@ -222,9 +222,8 @@ class ModuleInfoHeaderPlugin {
)}`
) + "\n"
);
const optimizationBailout = moduleGraph.getOptimizationBailout(
module
);
const optimizationBailout =
moduleGraph.getOptimizationBailout(module);
if (optimizationBailout) {
for (const text of optimizationBailout) {
let code;

View File

@ -80,8 +80,6 @@ class ModuleNotFoundError extends WebpackError {
this.module = module;
this.error = err;
this.loc = loc;
Error.captureStackTrace(this, this.constructor);
}
}

View File

@ -85,8 +85,6 @@ class ModuleParseError extends WebpackError {
this.name = "ModuleParseError";
this.loc = loc;
this.error = err;
Error.captureStackTrace(this, this.constructor);
}
serialize(context) {

View File

@ -36,8 +36,6 @@ class ModuleRestoreError extends WebpackError {
this.details = details;
this.module = module;
this.error = err;
Error.captureStackTrace(this, this.constructor);
}
}

View File

@ -36,8 +36,6 @@ class ModuleStoreError extends WebpackError {
this.details = details;
this.module = module;
this.error = err;
Error.captureStackTrace(this, this.constructor);
}
}

View File

@ -37,8 +37,6 @@ class ModuleWarning extends WebpackError {
warning && typeof warning === "object" && warning.stack
? cleanUp(warning.stack, this.message)
: undefined;
Error.captureStackTrace(this, this.constructor);
}
serialize(context) {

View File

@ -318,16 +318,17 @@ module.exports = class MultiCompiler {
* @returns {SetupResult[]} result of setup
*/
_runGraph(setup, run, callback) {
/** @typedef {{ compiler: Compiler, result: Stats, state: "pending" | "blocked" | "queued" | "running" | "running-outdated" | "done", children: Node[], parents: Node[] }} Node */
/** @typedef {{ compiler: Compiler, result: Stats, state: "pending" | "blocked" | "queued" | "starting" | "running" | "running-outdated" | "done", children: Node[], parents: Node[] }} Node */
// State transitions for nodes:
// -> blocked (initial)
// blocked -> queued [add to queue] (when all parents done)
// queued -> running [running++] (when processing the queue)
// blocked -> starting [running++] (when all parents done)
// queued -> starting [running++] (when processing the queue)
// starting -> running (when run has been called)
// running -> done [running--] (when compilation is done)
// done -> pending (when invalidated from file change)
// pending -> blocked (when invalidated from aggregated changes)
// done -> blocked (when invalidated, from parent invalidation)
// pending -> blocked [add to queue] (when invalidated from aggregated changes)
// done -> blocked [add to queue] (when invalidated, from parent invalidation)
// running -> running-outdated (when invalidated, either from change or parent invalidation)
// running-outdated -> blocked [running--] (when compilation is done)
@ -351,6 +352,7 @@ module.exports = class MultiCompiler {
parent.children.push(node);
}
}
/** @type {ArrayQueue<Node>} */
const queue = new ArrayQueue();
for (const node of nodes) {
if (node.parents.length === 0) {
@ -388,13 +390,13 @@ module.exports = class MultiCompiler {
if (node.state === "running") {
node.state = "done";
for (const child of node.children) {
checkUnblocked(child);
if (child.state === "blocked") queue.enqueue(child);
}
} else if (node.state === "running-outdated") {
node.state = "blocked";
checkUnblocked(node);
queue.enqueue(node);
}
process.nextTick(processQueue);
processQueue();
};
/**
* @param {Node} node node
@ -433,20 +435,9 @@ module.exports = class MultiCompiler {
if (node.state === "pending") {
node.state = "blocked";
}
checkUnblocked(node);
processQueue();
};
/**
* @param {Node} node node
* @returns {void}
*/
const checkUnblocked = node => {
if (
node.state === "blocked" &&
node.parents.every(p => p.state === "done")
) {
node.state = "queued";
if (node.state === "blocked") {
queue.enqueue(node);
processQueue();
}
};
@ -457,20 +448,33 @@ module.exports = class MultiCompiler {
node.compiler,
i,
nodeDone.bind(null, node),
() => node.state !== "done" && node.state !== "running",
() => node.state !== "starting" && node.state !== "running",
() => nodeChange(node),
() => nodeInvalid(node)
)
);
});
let processing = true;
const processQueue = () => {
if (processing) return;
processing = true;
process.nextTick(processQueueWorker);
};
const processQueueWorker = () => {
while (running < parallelism && queue.length > 0 && !errored) {
const node = queue.dequeue();
if (node.state !== "queued") continue;
running++;
node.state = "running";
run(node.compiler, nodeDone.bind(null, node));
if (
node.state === "queued" ||
(node.state === "blocked" &&
node.parents.every(p => p.state === "done"))
) {
running++;
node.state = "starting";
run(node.compiler, nodeDone.bind(null, node));
node.state = "running";
}
}
processing = false;
if (
!errored &&
running === 0 &&
@ -489,7 +493,7 @@ module.exports = class MultiCompiler {
}
}
};
processQueue();
processQueueWorker();
return setupResults;
}
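
The reworked _runGraph introduces a separate "starting" state and funnels all wake-ups through processQueue(), which coalesces them onto a single process.nextTick before processQueueWorker() drains the queue. A standalone sketch of that coalescing pattern follows; the names and the trivial queue are illustrative, not the real MultiCompiler API.

// Sketch of the "coalesce wake-ups into one tick" pattern used above.
const makeWorkLoop = worker => {
	let scheduled = false;
	return () => {
		if (scheduled) return; // a drain pass is already scheduled
		scheduled = true;
		process.nextTick(() => {
			scheduled = false;
			worker();
		});
	};
};

// Usage: many synchronous enqueue() calls trigger a single drain pass.
const queue = [];
const drain = makeWorkLoop(() => {
	while (queue.length > 0) console.log("processing", queue.shift());
});
const enqueue = item => {
	queue.push(item);
	drain();
};
enqueue("a");
enqueue("b"); // both handled in one worker run on the next tick
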

View File

@ -18,7 +18,5 @@ module.exports = class NoModeWarning extends WebpackError {
"Set 'mode' option to 'development' or 'production' to enable defaults for each environment.\n" +
"You can also set it to 'none' to disable any default behavior. " +
"Learn more: https://webpack.js.org/configuration/mode/";
Error.captureStackTrace(this, this.constructor);
}
};

View File

@ -8,7 +8,6 @@
const parseJson = require("json-parse-better-errors");
const { getContext, runLoaders } = require("loader-runner");
const querystring = require("querystring");
const { validate } = require("schema-utils");
const { HookMap, SyncHook, AsyncSeriesBailHook } = require("tapable");
const {
CachedSource,
@ -28,6 +27,7 @@ const UnhandledSchemeError = require("./UnhandledSchemeError");
const WebpackError = require("./WebpackError");
const formatLocation = require("./formatLocation");
const LazySet = require("./util/LazySet");
const { isSubset } = require("./util/SetHelpers");
const { getScheme } = require("./util/URLAbsoluteSpecifier");
const {
compareLocations,
@ -41,10 +41,12 @@ const { contextify, absolutify } = require("./util/identifier");
const makeSerializable = require("./util/makeSerializable");
const memoize = require("./util/memoize");
/** @typedef {import("source-map").RawSourceMap} SourceMap */
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../declarations/LoaderContext").NormalModuleLoaderContext} NormalModuleLoaderContext */
/** @typedef {import("../declarations/WebpackOptions").Mode} Mode */
/** @typedef {import("../declarations/WebpackOptions").WebpackOptionsNormalized} WebpackOptions */
/** @typedef {import("./ChunkGraph")} ChunkGraph */
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("./Dependency").UpdateHashContext} UpdateHashContext */
/** @typedef {import("./DependencyTemplates")} DependencyTemplates */
/** @typedef {import("./Generator")} Generator */
@ -60,13 +62,26 @@ const memoize = require("./util/memoize");
/** @typedef {import("./RequestShortener")} RequestShortener */
/** @typedef {import("./ResolverFactory").ResolverWithOptions} ResolverWithOptions */
/** @typedef {import("./RuntimeTemplate")} RuntimeTemplate */
/** @typedef {import("./logging/Logger").Logger} WebpackLogger */
/** @typedef {import("./util/Hash")} Hash */
/** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */
/** @typedef {import("./util/runtime").RuntimeSpec} RuntimeSpec */
/**
* @typedef {Object} SourceMap
* @property {number} version
* @property {string[]} sources
* @property {string} mappings
* @property {string=} file
* @property {string=} sourceRoot
* @property {string[]=} sourcesContent
* @property {string[]=} names
*/
const getInvalidDependenciesModuleWarning = memoize(() =>
require("./InvalidDependenciesModuleWarning")
);
const getValidate = memoize(() => require("schema-utils").validate);
const ABSOLUTE_PATH_REGEX = /^([a-zA-Z]:\\|\\\\|\/)/;
@ -149,8 +164,6 @@ class NonErrorEmittedError extends WebpackError {
this.name = "NonErrorEmittedError";
this.message = "(Emitted value instead of an instance of Error) " + error;
Error.captureStackTrace(this, this.constructor);
}
}
@ -204,6 +217,7 @@ class NormalModule extends Module {
* @param {string} options.rawRequest request without resolving
* @param {LoaderItem[]} options.loaders list of loaders
* @param {string} options.resource path + query of the real resource
* @param {Record<string, any>=} options.resourceResolveData resource resolve data
* @param {string | undefined} options.matchResource path + query of the matched resource (virtual)
* @param {Parser} options.parser the parser used
* @param {object} options.parserOptions the options of the parser used
@ -219,6 +233,7 @@ class NormalModule extends Module {
rawRequest,
loaders,
resource,
resourceResolveData,
matchResource,
parser,
parserOptions,
@ -245,6 +260,7 @@ class NormalModule extends Module {
this.generatorOptions = generatorOptions;
/** @type {string} */
this.resource = resource;
this.resourceResolveData = resourceResolveData;
/** @type {string | undefined} */
this.matchResource = matchResource;
/** @type {LoaderItem[]} */
@ -261,6 +277,8 @@ class NormalModule extends Module {
this._source = null;
/** @private @type {Map<string, number> | undefined} **/
this._sourceSizes = undefined;
/** @private @type {Set<string>} */
this._sourceTypes = undefined;
// Cache
this._lastSuccessfulBuildMeta = {};
@ -332,12 +350,19 @@ class NormalModule extends Module {
this.resource = m.resource;
this.matchResource = m.matchResource;
this.loaders = m.loaders;
this._sourceTypes = m._sourceTypes;
this._sourceSizes = m._sourceSizes;
}
/**
* Assuming this module is in the cache. Remove internal references to allow freeing some memory.
*/
cleanupForCache() {
// Make sure to cache types and sizes before cleanup
if (this._sourceTypes === undefined) this.getSourceTypes();
for (const type of this._sourceTypes) {
this.size(type);
}
super.cleanupForCache();
this.parser = undefined;
this.parserOptions = undefined;
@ -375,6 +400,7 @@ class NormalModule extends Module {
this.type,
this.generatorOptions
);
// we assume the generator behaves identically and keep cached sourceTypes/Sizes
}
/**
@ -420,7 +446,7 @@ class NormalModule extends Module {
* @param {WebpackOptions} options webpack options
* @param {Compilation} compilation the compilation
* @param {InputFileSystem} fs file system for reading
* @returns {any} loader context
* @returns {NormalModuleLoaderContext} loader context
*/
createLoaderContext(resolver, options, compilation, fs) {
const { requestShortener } = compilation.runtimeTemplate;
@ -498,7 +524,7 @@ class NormalModule extends Module {
if (schema.title && (match = /^(.+) (.+)$/.exec(schema.title))) {
[, name, baseDataPath] = match;
}
validate(schema, options, {
getValidate()(schema, options, {
name,
baseDataPath
});
@ -750,7 +776,11 @@ class NormalModule extends Module {
},
(err, result) => {
// Cleanup loaderContext to avoid leaking memory in ICs
loaderContext._compilation = loaderContext._compiler = loaderContext._module = loaderContext.fs = undefined;
loaderContext._compilation =
loaderContext._compiler =
loaderContext._module =
loaderContext.fs =
undefined;
if (!result) {
return processResult(
@ -854,6 +884,7 @@ class NormalModule extends Module {
this._forceBuild = false;
this._source = null;
if (this._sourceSizes !== undefined) this._sourceSizes.clear();
this._sourceTypes = undefined;
this._ast = null;
this.error = null;
this.clearWarningsAndErrors();
@ -872,7 +903,7 @@ class NormalModule extends Module {
assetsInfo: undefined
};
const startTime = Date.now();
const startTime = compilation.compiler.fsStartTime || Date.now();
return this.doBuild(options, compilation, resolver, fs, err => {
// if we have an error mark module as failed and exit
@ -943,7 +974,8 @@ class NormalModule extends Module {
checkDependencies(this.buildInfo.missingDependencies);
checkDependencies(this.buildInfo.contextDependencies);
if (nonAbsoluteDependencies !== undefined) {
const InvalidDependenciesModuleWarning = getInvalidDependenciesModuleWarning();
const InvalidDependenciesModuleWarning =
getInvalidDependenciesModuleWarning();
this.addWarning(
new InvalidDependenciesModuleWarning(this, nonAbsoluteDependencies)
);
@ -1055,7 +1087,10 @@ class NormalModule extends Module {
* @returns {Set<string>} types available (do not mutate)
*/
getSourceTypes() {
return this.generator.getTypes(this);
if (this._sourceTypes === undefined) {
this._sourceTypes = this.generator.getTypes(this);
}
return this._sourceTypes;
}
/**
@ -1151,12 +1186,22 @@ class NormalModule extends Module {
if (!this.buildInfo.snapshot) return callback(null, true);
// build when valueDependencies have changed
if (this.buildInfo.valueDependencies) {
/** @type {Map<string, string | Set<string>>} */
const valueDependencies = this.buildInfo.valueDependencies;
if (valueDependencies) {
if (!valueCacheVersions) return callback(null, true);
for (const [key, value] of this.buildInfo.valueDependencies) {
for (const [key, value] of valueDependencies) {
if (value === undefined) return callback(null, true);
const current = valueCacheVersions.get(key);
if (value !== current) return callback(null, true);
if (
value !== current &&
(typeof value === "string" ||
typeof current === "string" ||
current === undefined ||
!isSubset(value, current))
) {
return callback(null, true);
}
}
}
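
needBuild now records value dependencies that may be Sets (for example values collected per runtime) and only forces a rebuild when a recorded set is no longer a subset of the current one; plain strings still require exact equality. A hedged standalone version of that decision, with isSubset assumed to have the usual "every element of a is also in b" semantics of lib/util/SetHelpers:

// Sketch of the cache-validity check above.
const valueDependencyChanged = (recorded, current) => {
	if (recorded === undefined) return true; // nothing recorded -> rebuild
	if (recorded === current) return false; // identical string or same Set instance
	if (
		typeof recorded === "string" ||
		typeof current === "string" ||
		current === undefined
	) {
		return true; // strings must match exactly; missing current value -> rebuild
	}
	return !isSubset(recorded, current); // Sets: every recorded value must still exist
};

const isSubset = (a, b) => {
	for (const item of a) if (!b.has(item)) return false;
	return true;
};

// Example: DefinePlugin-style runtime values collected into Sets.
console.log(valueDependencyChanged(new Set(["v1"]), new Set(["v1", "v2"]))); // false
console.log(valueDependencyChanged("1.0.0", "1.0.1")); // true
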
@ -1234,7 +1279,6 @@ class NormalModule extends Module {
const { write } = context;
// deserialize
write(this._source);
write(this._sourceSizes);
write(this.error);
write(this._lastSuccessfulBuildMeta);
write(this._forceBuild);
@ -1266,7 +1310,6 @@ class NormalModule extends Module {
deserialize(context) {
const { read } = context;
this._source = read();
this._sourceSizes = read();
this.error = read();
this._lastSuccessfulBuildMeta = read();
this._forceBuild = read();

View File

@ -168,6 +168,7 @@ const unsafeCacheData = new WeakMap();
const ruleSetCompiler = new RuleSetCompiler([
new BasicMatcherRulePlugin("test", "resource"),
new BasicMatcherRulePlugin("scheme"),
new BasicMatcherRulePlugin("mimetype"),
new BasicMatcherRulePlugin("dependency"),
new BasicMatcherRulePlugin("include", "resource"),
@ -443,6 +444,7 @@ class NormalModuleFactory extends ModuleFactory {
realResource: resourceData.path,
resourceQuery: resourceDataForRules.query,
resourceFragment: resourceDataForRules.fragment,
scheme,
mimetype: matchResourceData ? "" : resourceData.data.mimetype || "",
dependency: dependencyType,
descriptionData: matchResourceData
@ -496,7 +498,21 @@ class NormalModuleFactory extends ModuleFactory {
for (const loader of loaders) allLoaders.push(loader);
}
for (const loader of preLoaders) allLoaders.push(loader);
const type = settings.type;
let type = settings.type;
if (!type) {
const resource =
(matchResourceData && matchResourceData.resource) ||
resourceData.resource;
let match;
if (
typeof resource === "string" &&
(match = /\.webpack\[([^\]]+)\]$/.exec(resource))
) {
type = match[1];
} else {
type = "javascript/auto";
}
}
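
When no rule sets a module type explicitly, the factory now derives it from a trailing .webpack[<type>] marker on the (match)resource and otherwise falls back to "javascript/auto". A small illustration of that extraction; the resource names are made up:

// Illustration of the default-type extraction above (example resources are hypothetical).
const defaultModuleType = resource => {
	const match = /\.webpack\[([^\]]+)\]$/.exec(resource);
	return match ? match[1] : "javascript/auto";
};

console.log(defaultModuleType("logo.svg.webpack[asset/resource]")); // "asset/resource"
console.log(defaultModuleType("src/index.js")); // "javascript/auto"
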
const resolveOptions = settings.resolve;
const layer = settings.layer;
if (layer !== undefined && !layers) {
@ -506,29 +522,33 @@ class NormalModuleFactory extends ModuleFactory {
)
);
}
Object.assign(data.createData, {
layer:
layer === undefined ? contextInfo.issuerLayer || null : layer,
request: stringifyLoadersAndResource(
allLoaders,
resourceData.resource
),
userRequest,
rawRequest: request,
loaders: allLoaders,
resource: resourceData.resource,
matchResource: matchResourceData
? matchResourceData.resource
: undefined,
resourceResolveData: resourceData.data,
settings,
type,
parser: this.getParser(type, settings.parser),
parserOptions: settings.parser,
generator: this.getGenerator(type, settings.generator),
generatorOptions: settings.generator,
resolveOptions
});
try {
Object.assign(data.createData, {
layer:
layer === undefined ? contextInfo.issuerLayer || null : layer,
request: stringifyLoadersAndResource(
allLoaders,
resourceData.resource
),
userRequest,
rawRequest: request,
loaders: allLoaders,
resource: resourceData.resource,
matchResource: matchResourceData
? matchResourceData.resource
: undefined,
resourceResolveData: resourceData.data,
settings,
type,
parser: this.getParser(type, settings.parser),
parserOptions: settings.parser,
generator: this.getGenerator(type, settings.generator),
generatorOptions: settings.generator,
resolveOptions
});
} catch (e) {
return callback(e);
}
callback();
});
this.resolveRequestArray(

View File

@ -5,17 +5,24 @@
"use strict";
const { validate } = require("schema-utils");
const schema = require("../schemas/plugins/ProgressPlugin.json");
const Compiler = require("./Compiler");
const MultiCompiler = require("./MultiCompiler");
const NormalModule = require("./NormalModule");
const createSchemaValidation = require("./util/create-schema-validation");
const { contextify } = require("./util/identifier");
/** @typedef {import("../declarations/plugins/ProgressPlugin").HandlerFunction} HandlerFunction */
/** @typedef {import("../declarations/plugins/ProgressPlugin").ProgressPluginArgument} ProgressPluginArgument */
/** @typedef {import("../declarations/plugins/ProgressPlugin").ProgressPluginOptions} ProgressPluginOptions */
const validate = createSchemaValidation(
require("../schemas/plugins/ProgressPlugin.check.js"),
() => require("../schemas/plugins/ProgressPlugin.json"),
{
name: "Progress Plugin",
baseDataPath: "options"
}
);
const median3 = (a, b, c) => {
return a + b + c - Math.max(a, b, c) - Math.min(a, b, c);
};
@ -115,10 +122,7 @@ class ProgressPlugin {
};
}
validate(schema, options, {
name: "Progress Plugin",
baseDataPath: "options"
});
validate(options);
options = { ...ProgressPlugin.defaultOptions, ...options };
this.profile = options.profile;
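
ProgressPlugin (and, further below, SourceMapDevToolPlugin, WatchIgnorePlugin and the asset plugins) switches from validating options against the JSON schema on every construction to a precompiled .check.js fast path, with schema-utils and the full schema loaded lazily only when the fast check fails. The helper itself is not part of this diff; the following is a hypothetical sketch of the shape such a createSchemaValidation utility could take:

// Hypothetical sketch (assumption: the real lib/util/create-schema-validation.js may differ).
const createSchemaValidation = (check, getSchema, validationOptions) => {
	return value => {
		if (check && check(value)) return; // fast path: precompiled validator accepted the value
		// slow path: only now load schema-utils and the full JSON schema
		const { validate } = require("schema-utils");
		validate(getSchema(), value, validationOptions);
	};
};

// Example with an inline check standing in for a generated .check.js file:
const validateOptions = createSchemaValidation(
	value => typeof value === "object" && value !== null,
	() => ({ type: "object" }),
	{ name: "Example Plugin", baseDataPath: "options" }
);
validateOptions({ profile: true }); // passes on the fast path, schema-utils never loaded
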

View File

@ -48,10 +48,11 @@ class RecordIdsPlugin {
apply(compiler) {
const portableIds = this.options.portableIds;
const makePathsRelative = identifierUtils.makePathsRelative.bindContextCache(
compiler.context,
compiler.root
);
const makePathsRelative =
identifierUtils.makePathsRelative.bindContextCache(
compiler.context,
compiler.root
);
/**
* @param {Module} module the module

View File

@ -42,9 +42,9 @@ const convertToResolveOptions = resolveOptionsWithDepType => {
...remaining,
plugins:
plugins &&
/** @type {ResolvePluginInstance[]} */ (plugins.filter(
item => item !== "..."
))
/** @type {ResolvePluginInstance[]} */ (
plugins.filter(item => item !== "...")
)
};
if (!partialOptions.fileSystem) {
@ -53,7 +53,10 @@ const convertToResolveOptions = resolveOptionsWithDepType => {
);
}
// These weird types validate that we checked all non-optional properties
const options = /** @type {Partial<ResolveOptions> & Pick<ResolveOptions, "fileSystem">} */ (partialOptions);
const options =
/** @type {Partial<ResolveOptions> & Pick<ResolveOptions, "fileSystem">} */ (
partialOptions
);
return removeOperations(
resolveByProperty(options, "byDependency", dependencyType)
@ -124,9 +127,9 @@ module.exports = class ResolverFactory {
const resolveOptions = convertToResolveOptions(
this.hooks.resolveOptions.for(type).call(resolveOptionsWithDepType)
);
const resolver = /** @type {ResolverWithOptions} */ (Factory.createResolver(
resolveOptions
));
const resolver = /** @type {ResolverWithOptions} */ (
Factory.createResolver(resolveOptions)
);
if (!resolver) {
throw new Error("No resolver created");
}

View File

@ -168,6 +168,13 @@ exports.scriptNonce = "__webpack_require__.nc";
*/
exports.loadScript = "__webpack_require__.l";
/**
* function to promote a string to a TrustedScriptURL using webpack's Trusted
* Types policy
* Arguments: (url: string) => TrustedScriptURL
*/
exports.createScriptUrl = "__webpack_require__.tu";
/**
* the chunk name of the chunk with the runtime
*/

View File

@ -41,6 +41,8 @@ class RuntimeModule extends Module {
this.compilation = undefined;
/** @type {Chunk} */
this.chunk = undefined;
/** @type {ChunkGraph} */
this.chunkGraph = undefined;
this.fullHash = false;
/** @type {string} */
this._cachedGeneratedCode = undefined;
@ -49,11 +51,13 @@ class RuntimeModule extends Module {
/**
* @param {Compilation} compilation the compilation
* @param {Chunk} chunk the chunk
* @param {ChunkGraph} chunkGraph the chunk graph
* @returns {void}
*/
attach(compilation, chunk) {
attach(compilation, chunk, chunkGraph = compilation.chunkGraph) {
this.compilation = compilation;
this.chunk = chunk;
this.chunkGraph = chunkGraph;
}
/**

View File

@ -13,6 +13,7 @@ const AutoPublicPathRuntimeModule = require("./runtime/AutoPublicPathRuntimeModu
const CompatGetDefaultExportRuntimeModule = require("./runtime/CompatGetDefaultExportRuntimeModule");
const CompatRuntimeModule = require("./runtime/CompatRuntimeModule");
const CreateFakeNamespaceObjectRuntimeModule = require("./runtime/CreateFakeNamespaceObjectRuntimeModule");
const CreateScriptUrlRuntimeModule = require("./runtime/CreateScriptUrlRuntimeModule");
const DefinePropertyGettersRuntimeModule = require("./runtime/DefinePropertyGettersRuntimeModule");
const EnsureChunkRuntimeModule = require("./runtime/EnsureChunkRuntimeModule");
const GetChunkFilenameRuntimeModule = require("./runtime/GetChunkFilenameRuntimeModule");
@ -38,6 +39,7 @@ const GLOBALS_ON_REQUIRE = [
RuntimeGlobals.runtimeId,
RuntimeGlobals.compatGetDefaultExport,
RuntimeGlobals.createFakeNamespaceObject,
RuntimeGlobals.createScriptUrl,
RuntimeGlobals.definePropertyGetters,
RuntimeGlobals.ensureChunk,
RuntimeGlobals.entryModuleId,
@ -176,14 +178,19 @@ class RuntimePlugin {
.for(RuntimeGlobals.publicPath)
.tap("RuntimePlugin", (chunk, set) => {
const { outputOptions } = compilation;
const { publicPath, scriptType } = outputOptions;
const { publicPath: globalPublicPath, scriptType } = outputOptions;
const entryOptions = chunk.getEntryOptions();
const publicPath =
entryOptions && entryOptions.publicPath !== undefined
? entryOptions.publicPath
: globalPublicPath;
if (publicPath === "auto") {
const module = new AutoPublicPathRuntimeModule();
if (scriptType !== "module") set.add(RuntimeGlobals.global);
compilation.addRuntimeModule(chunk, module);
} else {
const module = new PublicPathRuntimeModule();
const module = new PublicPathRuntimeModule(publicPath);
if (
typeof publicPath !== "string" ||
@ -314,7 +321,23 @@ class RuntimePlugin {
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.loadScript)
.tap("RuntimePlugin", (chunk, set) => {
compilation.addRuntimeModule(chunk, new LoadScriptRuntimeModule());
const withCreateScriptUrl = !!compilation.outputOptions.trustedTypes;
if (withCreateScriptUrl) {
set.add(RuntimeGlobals.createScriptUrl);
}
compilation.addRuntimeModule(
chunk,
new LoadScriptRuntimeModule(withCreateScriptUrl)
);
return true;
});
compilation.hooks.runtimeRequirementInTree
.for(RuntimeGlobals.createScriptUrl)
.tap("RuntimePlugin", (chunk, set) => {
compilation.addRuntimeModule(
chunk,
new CreateScriptUrlRuntimeModule()
);
return true;
});
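
When output.trustedTypes is configured, script loading routes chunk URLs through the new __webpack_require__.tu helper so they can be promoted to TrustedScriptURL objects under a Trusted Types policy. The generated code of CreateScriptUrlRuntimeModule is not shown in this diff; the snippet below is only a hypothetical sketch of what such a runtime function might do, using the standard trustedTypes.createPolicy browser API and "webpack" as a placeholder policy name:

// Hypothetical sketch only - not the actual CreateScriptUrlRuntimeModule output.
const __webpack_require__ = {}; // stand-in for the real runtime object

let policy;
__webpack_require__.tu = url => {
	if (policy === undefined) {
		// trustedTypes is a browser global; fall back to an identity policy elsewhere.
		policy =
			typeof trustedTypes !== "undefined" && trustedTypes.createPolicy
				? trustedTypes.createPolicy("webpack", {
						createScriptURL: input => input
				  })
				: { createScriptURL: input => input };
	}
	return policy.createScriptURL(url);
};

// A script-loading runtime (__webpack_require__.l) could then assign:
// script.src = __webpack_require__.tu(chunkUrl);
console.log(__webpack_require__.tu("https://example.com/chunk.js").toString());
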
compilation.hooks.runtimeRequirementInTree

View File

@ -6,19 +6,16 @@
"use strict";
const asyncLib = require("neo-async");
const { validate } = require("schema-utils");
const { ConcatSource, RawSource } = require("webpack-sources");
const Compilation = require("./Compilation");
const ModuleFilenameHelpers = require("./ModuleFilenameHelpers");
const ProgressPlugin = require("./ProgressPlugin");
const SourceMapDevToolModuleOptionsPlugin = require("./SourceMapDevToolModuleOptionsPlugin");
const createSchemaValidation = require("./util/create-schema-validation");
const createHash = require("./util/createHash");
const { relative, dirname } = require("./util/fs");
const { absolutify } = require("./util/identifier");
const schema = require("../schemas/plugins/SourceMapDevToolPlugin.json");
/** @typedef {import("source-map").RawSourceMap} SourceMap */
/** @typedef {import("webpack-sources").MapOptions} MapOptions */
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../declarations/plugins/SourceMapDevToolPlugin").SourceMapDevToolPluginOptions} SourceMapDevToolPluginOptions */
@ -28,8 +25,17 @@ const schema = require("../schemas/plugins/SourceMapDevToolPlugin.json");
/** @typedef {import("./Compilation").AssetInfo} AssetInfo */
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("./Module")} Module */
/** @typedef {import("./NormalModule").SourceMap} SourceMap */
/** @typedef {import("./util/Hash")} Hash */
const validate = createSchemaValidation(
require("../schemas/plugins/SourceMapDevToolPlugin.check.js"),
() => require("../schemas/plugins/SourceMapDevToolPlugin.json"),
{
name: "SourceMap DevTool Plugin",
baseDataPath: "options"
}
);
/**
* @typedef {object} SourceMapTask
* @property {Source} asset
@ -110,10 +116,7 @@ class SourceMapDevToolPlugin {
* @throws {Error} throws error, if got more than 1 arguments
*/
constructor(options = {}) {
validate(schema, options, {
name: "SourceMap DevTool Plugin",
baseDataPath: "options"
});
validate(options);
/** @type {string | false} */
this.sourceMapFilename = options.filename;
@ -427,19 +430,20 @@ class SourceMapDevToolPlugin {
currentSourceMappingURLComment !== false &&
/\.css($|\?)/i.test(file)
) {
currentSourceMappingURLComment = currentSourceMappingURLComment.replace(
/^\n\/\/(.*)$/,
"\n/*$1*/"
);
currentSourceMappingURLComment =
currentSourceMappingURLComment.replace(
/^\n\/\/(.*)$/,
"\n/*$1*/"
);
}
const sourceMapString = JSON.stringify(sourceMap);
if (sourceMapFilename) {
let filename = file;
const sourceMapContentHash =
usesContentHash &&
/** @type {string} */ (createHash("md4")
.update(sourceMapString)
.digest("hex"));
/** @type {string} */ (
createHash("md4").update(sourceMapString).digest("hex")
);
const pathParams = {
chunk,
filename: options.fileContext
@ -451,13 +455,11 @@ class SourceMapDevToolPlugin {
: filename,
contentHash: sourceMapContentHash
};
const {
path: sourceMapFile,
info: sourceMapInfo
} = compilation.getPathWithInfo(
sourceMapFilename,
pathParams
);
const { path: sourceMapFile, info: sourceMapInfo } =
compilation.getPathWithInfo(
sourceMapFilename,
pathParams
);
const sourceMapUrl = options.publicPath
? options.publicPath + sourceMapFile
: relative(

View File

@ -413,5 +413,7 @@ class Template {
}
module.exports = Template;
module.exports.NUMBER_OF_IDENTIFIER_START_CHARS = NUMBER_OF_IDENTIFIER_START_CHARS;
module.exports.NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS = NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS;
module.exports.NUMBER_OF_IDENTIFIER_START_CHARS =
NUMBER_OF_IDENTIFIER_START_CHARS;
module.exports.NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS =
NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS;

View File

@ -21,8 +21,6 @@ class UnsupportedFeatureWarning extends WebpackError {
this.name = "UnsupportedFeatureWarning";
this.loc = loc;
this.hideStack = true;
Error.captureStackTrace(this, this.constructor);
}
}

View File

@ -8,6 +8,7 @@
const CaseSensitiveModulesWarning = require("./CaseSensitiveModulesWarning");
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("./Module")} Module */
class WarnCaseSensitiveModulesPlugin {
/**
@ -20,21 +21,26 @@ class WarnCaseSensitiveModulesPlugin {
"WarnCaseSensitiveModulesPlugin",
compilation => {
compilation.hooks.seal.tap("WarnCaseSensitiveModulesPlugin", () => {
/** @type {Map<string, Map<string, Module>>} */
const moduleWithoutCase = new Map();
for (const module of compilation.modules) {
const identifier = module.identifier().toLowerCase();
const array = moduleWithoutCase.get(identifier);
if (array) {
array.push(module);
} else {
moduleWithoutCase.set(identifier, [module]);
const identifier = module.identifier();
const lowerIdentifier = identifier.toLowerCase();
let map = moduleWithoutCase.get(lowerIdentifier);
if (map === undefined) {
map = new Map();
moduleWithoutCase.set(lowerIdentifier, map);
}
map.set(identifier, module);
}
for (const pair of moduleWithoutCase) {
const array = pair[1];
if (array.length > 1) {
const map = pair[1];
if (map.size > 1) {
compilation.warnings.push(
new CaseSensitiveModulesWarning(array, compilation.moduleGraph)
new CaseSensitiveModulesWarning(
map.values(),
compilation.moduleGraph
)
);
}
}
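
The plugin now groups modules in a Map keyed by the lower-cased identifier, with an inner Map keyed by the exact identifier, so duplicate module instances collapse and a warning is emitted once per group of case-variant spellings. A small standalone illustration of the grouping; the paths are hypothetical:

// Illustration of the case-insensitive grouping above (paths are made up).
const groupByLowerCase = identifiers => {
	/** @type {Map<string, Set<string>>} */
	const byLower = new Map();
	for (const identifier of identifiers) {
		const lower = identifier.toLowerCase();
		let set = byLower.get(lower);
		if (set === undefined) {
			set = new Set();
			byLower.set(lower, set);
		}
		set.add(identifier);
	}
	// Only groups with more than one distinct spelling are worth a warning.
	return [...byLower.values()].filter(set => set.size > 1);
};

console.log(groupByLowerCase(["./src/File.js", "./src/file.js", "./src/other.js"]));
// -> one group: Set { './src/File.js', './src/file.js' }
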

View File

@ -48,8 +48,6 @@ class DeprecatedOptionWarning extends WebpackError {
"configuration\n" +
`The value '${value}' for option '${option}' is deprecated. ` +
`Use '${suggestion}' instead.`;
Error.captureStackTrace(this, this.constructor);
}
}

View File

@ -5,13 +5,21 @@
"use strict";
const { validate } = require("schema-utils");
const schema = require("../schemas/plugins/WatchIgnorePlugin.json");
const createSchemaValidation = require("./util/create-schema-validation");
/** @typedef {import("../declarations/plugins/WatchIgnorePlugin").WatchIgnorePluginOptions} WatchIgnorePluginOptions */
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("./util/fs").WatchFileSystem} WatchFileSystem */
const validate = createSchemaValidation(
require("../schemas/plugins/WatchIgnorePlugin.check.js"),
() => require("../schemas/plugins/WatchIgnorePlugin.json"),
{
name: "Watch Ignore Plugin",
baseDataPath: "options"
}
);
const IGNORE_TIME_ENTRY = "ignore";
class IgnoringWatchFileSystem {
@ -90,10 +98,7 @@ class WatchIgnorePlugin {
* @param {WatchIgnorePluginOptions} options options
*/
constructor(options) {
validate(schema, options, {
name: "Watch Ignore Plugin",
baseDataPath: "options"
});
validate(options);
this.paths = options.paths;
}

View File

@ -53,19 +53,74 @@ class Watching {
this.compiler = compiler;
this.running = false;
this._initial = true;
this._invalidReported = true;
this._needRecords = true;
this.watcher = undefined;
this.pausedWatcher = undefined;
/** @type {Set<string>} */
this._collectedChangedFiles = undefined;
/** @type {Set<string>} */
this._collectedRemovedFiles = undefined;
this._done = this._done.bind(this);
process.nextTick(() => {
if (this._initial) this._invalidate();
});
}
_go() {
_mergeWithCollected(changedFiles, removedFiles) {
if (!changedFiles) return;
if (!this._collectedChangedFiles) {
this._collectedChangedFiles = new Set(changedFiles);
this._collectedRemovedFiles = new Set(removedFiles);
} else {
for (const file of changedFiles) {
this._collectedChangedFiles.add(file);
this._collectedRemovedFiles.delete(file);
}
for (const file of removedFiles) {
this._collectedChangedFiles.delete(file);
this._collectedRemovedFiles.add(file);
}
}
}
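
_mergeWithCollected accumulates change and removal notifications that arrive while a build is running, blocked or suspended, so nothing is lost between invalidations; for a given file the most recent event wins. A standalone version of the same merge semantics:

// Standalone version of the merge semantics above.
const mergeWithCollected = (collected, changedFiles, removedFiles) => {
	if (!changedFiles) return collected;
	if (!collected) {
		return {
			changed: new Set(changedFiles),
			removed: new Set(removedFiles)
		};
	}
	for (const file of changedFiles) {
		collected.changed.add(file);
		collected.removed.delete(file); // a later change overrides an earlier removal
	}
	for (const file of removedFiles) {
		collected.changed.delete(file); // a later removal overrides an earlier change
		collected.removed.add(file);
	}
	return collected;
};

// Example: a.js is changed, then removed in a later aggregation.
let state = mergeWithCollected(undefined, ["a.js"], []);
state = mergeWithCollected(state, [], ["a.js"]);
console.log([...state.changed], [...state.removed]); // [] [ 'a.js' ]
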
_go(fileTimeInfoEntries, contextTimeInfoEntries, changedFiles, removedFiles) {
this._initial = false;
this.startTime = Date.now();
this.running = true;
if (this.watcher) {
this.pausedWatcher = this.watcher;
this.lastWatcherStartTime = Date.now();
this.watcher.pause();
this.watcher = null;
} else if (!this.lastWatcherStartTime) {
this.lastWatcherStartTime = Date.now();
}
this.compiler.fsStartTime = Date.now();
this._mergeWithCollected(
changedFiles ||
(this.pausedWatcher &&
this.pausedWatcher.getAggregatedChanges &&
this.pausedWatcher.getAggregatedChanges()),
(this.compiler.removedFiles =
removedFiles ||
(this.pausedWatcher &&
this.pausedWatcher.getAggregatedRemovals &&
this.pausedWatcher.getAggregatedRemovals()))
);
this.compiler.modifiedFiles = this._collectedChangedFiles;
this._collectedChangedFiles = undefined;
this.compiler.removedFiles = this._collectedRemovedFiles;
this._collectedRemovedFiles = undefined;
this.compiler.fileTimestamps =
fileTimeInfoEntries ||
(this.pausedWatcher && this.pausedWatcher.getFileTimeInfoEntries());
this.compiler.contextTimestamps =
contextTimeInfoEntries ||
(this.pausedWatcher && this.pausedWatcher.getContextTimeInfoEntries());
const run = () => {
if (this.compiler.idle) {
return this.compiler.cache.endIdle(err => {
@ -83,6 +138,7 @@ class Watching {
});
}
this.invalid = false;
this._invalidReported = false;
this.compiler.hooks.watchRun.callAsync(this.compiler, err => {
if (err) return this._done(err);
const onCompiled = (err, compilation) => {
@ -157,13 +213,16 @@ class Watching {
let stats = null;
const handleError = err => {
const handleError = (err, cbs) => {
this.compiler.hooks.failed.call(err);
this.compiler.cache.beginIdle();
this.compiler.idle = true;
this.handler(err, stats);
for (const cb of this.callbacks) cb();
this.callbacks.length = 0;
if (!cbs) {
cbs = this.callbacks;
this.callbacks = [];
}
for (const cb of cbs) cb(err);
};
if (
@ -195,17 +254,19 @@ class Watching {
}
if (err) return handleError(err);
const cbs = this.callbacks;
this.callbacks = [];
logger.time("done hook");
this.compiler.hooks.done.callAsync(stats, err => {
logger.timeEnd("done hook");
if (err) return handleError(err);
if (err) return handleError(err, cbs);
this.handler(null, stats);
logger.time("storeBuildDependencies");
this.compiler.cache.storeBuildDependencies(
compilation.buildDependencies,
err => {
logger.timeEnd("storeBuildDependencies");
if (err) return handleError(err);
if (err) return handleError(err, cbs);
logger.time("beginIdle");
this.compiler.cache.beginIdle();
this.compiler.idle = true;
@ -219,8 +280,7 @@ class Watching {
);
}
});
for (const cb of this.callbacks) cb();
this.callbacks.length = 0;
for (const cb of cbs) cb(null);
this.compiler.hooks.afterDone.call(stats);
}
);
@ -239,7 +299,7 @@ class Watching {
files,
dirs,
missing,
this.startTime,
this.lastWatcherStartTime,
this.watchOptions,
(
err,
@ -248,29 +308,27 @@ class Watching {
changedFiles,
removedFiles
) => {
this.pausedWatcher = this.watcher;
this.watcher = null;
if (err) {
this.compiler.modifiedFiles = undefined;
this.compiler.removedFiles = undefined;
this.compiler.fileTimestamps = undefined;
this.compiler.contextTimestamps = undefined;
this.compiler.fsStartTime = undefined;
return this.handler(err);
}
this.compiler.fileTimestamps = fileTimeInfoEntries;
this.compiler.contextTimestamps = contextTimeInfoEntries;
this.compiler.removedFiles = removedFiles;
this.compiler.modifiedFiles = changedFiles;
if (this.watcher) {
this.pausedWatcher = this.watcher;
this.watcher.pause();
this.watcher = null;
}
this._invalidate();
this._invalidate(
fileTimeInfoEntries,
contextTimeInfoEntries,
changedFiles,
removedFiles
);
this._onChange();
},
(fileName, changeTime) => {
this.compiler.hooks.invalid.call(fileName, changeTime);
if (!this._invalidReported) {
this._invalidReported = true;
this.compiler.hooks.invalid.call(fileName, changeTime);
}
this._onInvalid();
}
);
@ -284,36 +342,35 @@ class Watching {
if (callback) {
this.callbacks.push(callback);
}
if (!this._initial) {
if (!this._invalidReported) {
this._invalidReported = true;
this.compiler.hooks.invalid.call(null, Date.now());
}
this._onChange();
this._invalidate();
}
_invalidate() {
if (this.suspended) return;
if (this._isBlocked()) {
this.blocked = true;
_invalidate(
fileTimeInfoEntries,
contextTimeInfoEntries,
changedFiles,
removedFiles
) {
if (this.suspended || (this._isBlocked() && (this.blocked = true))) {
this._mergeWithCollected(changedFiles, removedFiles);
return;
}
if (this.watcher) {
this.compiler.modifiedFiles =
this.watcher.getAggregatedChanges &&
this.watcher.getAggregatedChanges();
this.compiler.removedFiles =
this.watcher.getAggregatedRemovals &&
this.watcher.getAggregatedRemovals();
this.compiler.fileTimestamps = this.watcher.getFileTimeInfoEntries();
this.compiler.contextTimestamps = this.watcher.getContextTimeInfoEntries();
this.pausedWatcher = this.watcher;
this.watcher.pause();
this.watcher = null;
}
if (this.running) {
this._mergeWithCollected(changedFiles, removedFiles);
this.invalid = true;
} else {
this._go();
this._go(
fileTimeInfoEntries,
contextTimeInfoEntries,
changedFiles,
removedFiles
);
}
}
@ -328,14 +385,6 @@ class Watching {
}
}
_checkUnblocked() {
if (this.blocked && !this._isBlocked()) {
this.blocked = false;
this._needWatcherInfo = true;
this._invalidate();
}
}
/**
* @param {Callback<void>} callback signals when the watcher is closed
* @returns {void}
@ -356,6 +405,7 @@ class Watching {
this.compiler.removedFiles = undefined;
this.compiler.fileTimestamps = undefined;
this.compiler.contextTimestamps = undefined;
this.compiler.fsStartTime = undefined;
const shutdown = () => {
this.compiler.cache.shutdown(err => {
this.compiler.hooks.watchClose.call();

View File

@ -31,8 +31,6 @@ class WebpackError extends Error {
this.chunk = undefined;
/** @type {string} */
this.file = undefined;
Error.captureStackTrace(this, this.constructor);
}
[inspect]() {

View File

@ -296,7 +296,9 @@ class WebpackOptionsApply extends OptionsApply {
new RequireJsStuffPlugin().apply(compiler);
}
new CommonJsPlugin().apply(compiler);
new LoaderPlugin().apply(compiler);
new LoaderPlugin({
enableExecuteModule: options.experiments.executeModule
}).apply(compiler);
if (options.node !== false) {
const NodeStuffPlugin = require("./NodeStuffPlugin");
new NodeStuffPlugin(options.node).apply(compiler);
@ -566,7 +568,9 @@ class WebpackOptionsApply extends OptionsApply {
"webpack.cache.PackFileCacheStrategy"
),
snapshot: options.snapshot,
maxAge: cacheOptions.maxAge
maxAge: cacheOptions.maxAge,
profile: cacheOptions.profile,
allowCollectingMemory: cacheOptions.allowCollectingMemory
}),
cacheOptions.idleTimeout,
cacheOptions.idleTimeoutForInitialStore

View File

@ -120,12 +120,33 @@ class AssetGenerator extends Generator {
}
);
} else {
const encoding = this.dataUrlOptions.encoding;
const ext = path.extname(module.nameForCondition());
const mimeType =
this.dataUrlOptions.mimetype || mimeTypes.lookup(ext);
if (!mimeType) {
/** @type {string | false | undefined} */
let encoding = this.dataUrlOptions.encoding;
if (encoding === undefined) {
if (
module.resourceResolveData &&
module.resourceResolveData.encoding !== undefined
) {
encoding = module.resourceResolveData.encoding;
}
}
if (encoding === undefined) {
encoding = "base64";
}
let ext;
let mimeType = this.dataUrlOptions.mimetype;
if (mimeType === undefined) {
ext = path.extname(module.nameForCondition());
if (
module.resourceResolveData &&
module.resourceResolveData.mimetype !== undefined
) {
mimeType = module.resourceResolveData.mimetype;
} else if (ext) {
mimeType = mimeTypes.lookup(ext);
}
}
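
The data-URL generator now prefers encoding and mimetype supplied by the resolver via module.resourceResolveData (for example from data: URIs) before falling back to "base64" and an extension lookup in the mime-types database. A sketch of that fallback chain; resolveData here is an illustrative plain object, not the full resolver result:

// Sketch of the fallback chain above (resolveData fields are illustrative).
const mimeTypes = require("mime-types");
const path = require("path");

const resolveDataUrlMeta = (dataUrlOptions, resource, resolveData = {}) => {
	let encoding = dataUrlOptions.encoding;
	if (encoding === undefined) encoding = resolveData.encoding;
	if (encoding === undefined) encoding = "base64";

	let mimeType = dataUrlOptions.mimetype;
	if (mimeType === undefined) mimeType = resolveData.mimetype;
	if (mimeType === undefined) {
		const ext = path.extname(resource);
		if (ext) mimeType = mimeTypes.lookup(ext) || undefined;
	}
	return { encoding, mimeType };
};

console.log(resolveDataUrlMeta({}, "icon.svg"));
// -> { encoding: 'base64', mimeType: 'image/svg+xml' }
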
if (typeof mimeType !== "string") {
throw new Error(
"DataUrl can't be generated automatically, " +
`because there is no mimetype for "${ext}" in mimetype database. ` +
@ -170,9 +191,9 @@ class AssetGenerator extends Generator {
hash.update(runtimeTemplate.outputOptions.hashSalt);
}
hash.update(originalSource.buffer());
const fullHash = /** @type {string} */ (hash.digest(
runtimeTemplate.outputOptions.hashDigest
));
const fullHash = /** @type {string} */ (
hash.digest(runtimeTemplate.outputOptions.hashDigest)
);
const contentHash = fullHash.slice(
0,
runtimeTemplate.outputOptions.hashDigestLength
@ -183,26 +204,9 @@ class AssetGenerator extends Generator {
module.matchResource || module.resource,
runtimeTemplate.compilation.compiler.root
).replace(/^\.\//, "");
let {
path: filename,
info: assetInfo
} = runtimeTemplate.compilation.getAssetPathWithInfo(
assetModuleFilename,
{
module,
runtime,
filename: sourceFilename,
chunkGraph,
contentHash
}
);
let publicPath;
if (this.publicPath) {
const {
path,
info
} = runtimeTemplate.compilation.getAssetPathWithInfo(
this.publicPath,
let { path: filename, info: assetInfo } =
runtimeTemplate.compilation.getAssetPathWithInfo(
assetModuleFilename,
{
module,
runtime,
@ -211,6 +215,19 @@ class AssetGenerator extends Generator {
contentHash
}
);
let publicPath;
if (this.publicPath) {
const { path, info } =
runtimeTemplate.compilation.getAssetPathWithInfo(
this.publicPath,
{
module,
runtime,
filename: sourceFilename,
chunkGraph,
contentHash
}
);
publicPath = JSON.stringify(path);
assetInfo = mergeAssetInfo(assetInfo, info);
} else {

View File

@ -5,9 +5,9 @@
"use strict";
const { validate } = require("schema-utils");
const { cleverMerge } = require("../util/cleverMerge");
const { compareModulesByIdentifier } = require("../util/comparators");
const createSchemaValidation = require("../util/create-schema-validation");
const memoize = require("../util/memoize");
/** @typedef {import("webpack-sources").Source} Source */
@ -22,13 +22,38 @@ const getSchema = name => {
oneOf: [{ $ref: `#/definitions/${name}` }]
};
};
const getGeneratorSchemaMap = {
asset: memoize(() => getSchema("AssetGeneratorOptions")),
"asset/resource": memoize(() => getSchema("AssetResourceGeneratorOptions")),
"asset/inline": memoize(() => getSchema("AssetInlineGeneratorOptions"))
const generatorValidationOptions = {
name: "Asset Modules Plugin",
baseDataPath: "generator"
};
const validateGeneratorOptions = {
asset: createSchemaValidation(
require("../../schemas/plugins/asset/AssetGeneratorOptions.check.js"),
() => getSchema("AssetGeneratorOptions"),
generatorValidationOptions
),
"asset/resource": createSchemaValidation(
require("../../schemas/plugins/asset/AssetResourceGeneratorOptions.check.js"),
() => getSchema("AssetResourceGeneratorOptions"),
generatorValidationOptions
),
"asset/inline": createSchemaValidation(
require("../../schemas/plugins/asset/AssetInlineGeneratorOptions.check.js"),
() => getSchema("AssetInlineGeneratorOptions"),
generatorValidationOptions
)
};
const getParserSchema = memoize(() => getSchema("AssetParserOptions"));
const validateParserOptions = createSchemaValidation(
require("../../schemas/plugins/asset/AssetParserOptions.check.js"),
() => getSchema("AssetParserOptions"),
{
name: "Asset Modules Plugin",
baseDataPath: "parser"
}
);
const getAssetGenerator = memoize(() => require("./AssetGenerator"));
const getAssetParser = memoize(() => require("./AssetParser"));
const getAssetSourceParser = memoize(() => require("./AssetSourceParser"));
@ -52,10 +77,7 @@ class AssetModulesPlugin {
normalModuleFactory.hooks.createParser
.for("asset")
.tap(plugin, parserOptions => {
validate(getParserSchema(), parserOptions, {
name: "Asset Modules Plugin",
baseDataPath: "parser"
});
validateParserOptions(parserOptions);
parserOptions = cleverMerge(
compiler.options.module.parser.asset,
parserOptions
@ -100,17 +122,14 @@ class AssetModulesPlugin {
.for(type)
// eslint-disable-next-line no-loop-func
.tap(plugin, generatorOptions => {
validate(getGeneratorSchemaMap[type](), generatorOptions, {
name: "Asset Modules Plugin",
baseDataPath: "generator"
});
validateGeneratorOptions[type](generatorOptions);
let dataUrl = undefined;
if (type !== "asset/resource") {
dataUrl = generatorOptions.dataUrl;
if (!dataUrl || typeof dataUrl === "object") {
dataUrl = {
encoding: "base64",
encoding: undefined,
mimetype: undefined,
...dataUrl
};
@ -176,6 +195,19 @@ class AssetModulesPlugin {
return result;
});
compilation.hooks.prepareModuleExecution.tap(
"AssetModulesPlugin",
(options, context) => {
const { codeGenerationResult } = options;
const source = codeGenerationResult.sources.get("asset");
if (source === undefined) return;
context.assets.set(codeGenerationResult.data.get("filename"), {
source,
info: codeGenerationResult.data.get("assetInfo")
});
}
);
}
);
}

View File

@ -519,7 +519,8 @@ const visitModules = (
if (skipConnectionBuffer.length > 0) {
let { skippedModuleConnections } = chunkGroupInfo;
if (skippedModuleConnections === undefined) {
chunkGroupInfo.skippedModuleConnections = skippedModuleConnections = new Set();
chunkGroupInfo.skippedModuleConnections = skippedModuleConnections =
new Set();
}
for (let i = skipConnectionBuffer.length - 1; i >= 0; i--) {
skippedModuleConnections.add(skipConnectionBuffer[i]);
@ -695,7 +696,8 @@ const visitModules = (
let resultingAvailableModules;
if (minAvailableModules.size > minAvailableModules.plus.size) {
// resultingAvailableModules = (modules of chunk) + (minAvailableModules + minAvailableModules.plus)
resultingAvailableModules = /** @type {Set<Module> & {plus: Set<Module>}} */ (new Set());
resultingAvailableModules =
/** @type {Set<Module> & {plus: Set<Module>}} */ (new Set());
for (const module of minAvailableModules.plus)
minAvailableModules.add(module);
minAvailableModules.plus = EMPTY_SET;
@ -703,9 +705,10 @@ const visitModules = (
chunkGroupInfo.minAvailableModulesOwned = false;
} else {
// resultingAvailableModules = (minAvailableModules + modules of chunk) + (minAvailableModules.plus)
resultingAvailableModules = /** @type {Set<Module> & {plus: Set<Module>}} */ (new Set(
minAvailableModules
));
resultingAvailableModules =
/** @type {Set<Module> & {plus: Set<Module>}} */ (
new Set(minAvailableModules)
);
resultingAvailableModules.plus = minAvailableModules.plus;
}
@ -715,7 +718,8 @@ const visitModules = (
resultingAvailableModules.add(m);
}
}
return (chunkGroupInfo.resultingAvailableModules = resultingAvailableModules);
return (chunkGroupInfo.resultingAvailableModules =
resultingAvailableModules);
};
const processConnectQueue = () => {
@ -732,9 +736,8 @@ const visitModules = (
}
// 2. Calculate resulting available modules
const resultingAvailableModules = calculateResultingAvailableModules(
chunkGroupInfo
);
const resultingAvailableModules =
calculateResultingAvailableModules(chunkGroupInfo);
const runtime = chunkGroupInfo.runtime;
@ -800,9 +803,8 @@ const visitModules = (
if (!availableModules.has(m) && !availableModules.plus.has(m)) {
// We can't remove modules from the plus part
// so we need to merge plus into the normal part to allow modifying it
const iterator = cachedMinAvailableModules.plus[
Symbol.iterator
]();
const iterator =
cachedMinAvailableModules.plus[Symbol.iterator]();
// fast forward add all modules until m
/** @type {IteratorResult<Module>} */
let it;
@ -951,13 +953,12 @@ const visitModules = (
statForkedMergedModulesCountPlus += availableModules.plus.size;
// construct a new Set as intersection of cachedMinAvailableModules and availableModules
// we already know that all modules directly from cachedMinAvailableModules are in availableModules too
const newSet = /** @type {ModuleSetPlus} */ (new Set(
cachedMinAvailableModules
));
const newSet = /** @type {ModuleSetPlus} */ (
new Set(cachedMinAvailableModules)
);
newSet.plus = EMPTY_SET;
const iterator = cachedMinAvailableModules.plus[
Symbol.iterator
]();
const iterator =
cachedMinAvailableModules.plus[Symbol.iterator]();
// fast forward add all modules until m
/** @type {IteratorResult<Module>} */
let it;
@ -997,10 +998,15 @@ const visitModules = (
};
const processChunkGroupsForCombining = () => {
loop: for (const info of chunkGroupsForCombining) {
for (const info of chunkGroupsForCombining) {
for (const source of info.availableSources) {
if (!source.minAvailableModules) continue loop;
if (!source.minAvailableModules) {
chunkGroupsForCombining.delete(info);
break;
}
}
}
for (const info of chunkGroupsForCombining) {
const availableModules = /** @type {ModuleSetPlus} */ (new Set());
availableModules.plus = EMPTY_SET;
const mergeSet = set => {
@ -1013,9 +1019,8 @@ const visitModules = (
};
// combine minAvailableModules from all resultingAvailableModules
for (const source of info.availableSources) {
const resultingAvailableModules = calculateResultingAvailableModules(
source
);
const resultingAvailableModules =
calculateResultingAvailableModules(source);
mergeSet(resultingAvailableModules);
mergeSet(resultingAvailableModules.plus);
}

View File

@ -8,6 +8,7 @@
const FileSystemInfo = require("../FileSystemInfo");
const ProgressPlugin = require("../ProgressPlugin");
const { formatSize } = require("../SizeFormatHelpers");
const SerializerMiddleware = require("../serialization/SerializerMiddleware");
const LazySet = require("../util/LazySet");
const makeSerializable = require("../util/makeSerializable");
const memoize = require("../util/memoize");
@ -29,7 +30,7 @@ class PackContainer {
* @param {string} version version identifier
* @param {Snapshot} buildSnapshot snapshot of all build dependencies
* @param {Set<string>} buildDependencies list of all unresolved build dependencies captured
* @param {Map<string, string>} resolveResults result of the resolved build dependencies
* @param {Map<string, string | false>} resolveResults result of the resolved build dependencies
* @param {Snapshot} resolveBuildDependenciesSnapshot snapshot of the dependencies of the build dependencies resolving
*/
constructor(
@ -561,7 +562,40 @@ class PackContentItems {
this.map = map;
}
serialize({ write, snapshot, rollback, logger }) {
serialize({ write, snapshot, rollback, logger, profile }) {
if (profile) {
write(false);
for (const [key, value] of this.map) {
const s = snapshot();
try {
write(key);
const start = process.hrtime();
write(value);
const durationHr = process.hrtime(start);
const duration = durationHr[0] * 1000 + durationHr[1] / 1e6;
if (duration > 1) {
if (duration > 500)
logger.error(`Serialization of '${key}': ${duration} ms`);
else if (duration > 50)
logger.warn(`Serialization of '${key}': ${duration} ms`);
else if (duration > 10)
logger.info(`Serialization of '${key}': ${duration} ms`);
else if (duration > 5)
logger.log(`Serialization of '${key}': ${duration} ms`);
else logger.debug(`Serialization of '${key}': ${duration} ms`);
}
} catch (e) {
rollback(s);
if (e === NOT_SERIALIZABLE) continue;
logger.warn(
`Skipped not serializable cache item '${key}': ${e.message}`
);
logger.debug(e.stack);
}
}
write(null);
return;
}
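
With cache.profile enabled, items are written one by one and each write is timed with process.hrtime; the measured duration selects the log level. A standalone sketch of that threshold-based timing (the thresholds follow the serialization path above):

// Standalone sketch of the threshold-based timing used above.
const timed = (logger, label, fn) => {
	const start = process.hrtime();
	const result = fn();
	const [seconds, nanoseconds] = process.hrtime(start);
	const duration = seconds * 1000 + nanoseconds / 1e6; // milliseconds
	if (duration > 1) {
		if (duration > 500) logger.error(`${label}: ${duration} ms`);
		else if (duration > 50) logger.warn(`${label}: ${duration} ms`);
		else if (duration > 10) logger.info(`${label}: ${duration} ms`);
		else if (duration > 5) logger.log(`${label}: ${duration} ms`);
		else logger.debug(`${label}: ${duration} ms`);
	}
	return result;
};

// Example: fast operations (under 1 ms) stay silent, slower ones escalate the log level.
timed(console, "Serialization of 'example-item'", () => JSON.stringify({ a: 1 }));
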
// Try to serialize all at once
const s = snapshot();
try {
@ -590,9 +624,32 @@ class PackContentItems {
}
}
deserialize({ read }) {
deserialize({ read, logger, profile }) {
if (read()) {
this.map = read();
} else if (profile) {
const map = new Map();
let key = read();
while (key !== null) {
const start = process.hrtime();
const value = read();
const durationHr = process.hrtime(start);
const duration = durationHr[0] * 1000 + durationHr[1] / 1e6;
if (duration > 1) {
if (duration > 100)
logger.error(`Deserialization of '${key}': ${duration} ms`);
else if (duration > 20)
logger.warn(`Deserialization of '${key}': ${duration} ms`);
else if (duration > 5)
logger.info(`Deserialization of '${key}': ${duration} ms`);
else if (duration > 2)
logger.log(`Deserialization of '${key}': ${duration} ms`);
else logger.debug(`Deserialization of '${key}': ${duration} ms`);
}
map.set(key, value);
key = read();
}
this.map = map;
} else {
const map = new Map();
let key = read();
@ -621,7 +678,7 @@ class PackContent {
*/
constructor(items, usedItems, dataOrFn, logger, lazyName) {
this.items = items;
/** @type {function(): PackContentItems | Promise<PackContentItems>} */
/** @type {function(): Promise<PackContentItems> | PackContentItems } */
this.lazy = typeof dataOrFn === "function" ? dataOrFn : undefined;
/** @type {Map<string, any>} */
this.content = typeof dataOrFn === "function" ? undefined : dataOrFn.map;
@ -659,6 +716,7 @@ class PackContent {
this.logger.timeEnd(timeMessage);
}
this.content = map;
this.lazy = SerializerMiddleware.unMemoizeLazy(this.lazy);
return map.get(identifier);
});
} else {
@ -667,6 +725,7 @@ class PackContent {
this.logger.timeEnd(timeMessage);
}
this.content = map;
this.lazy = SerializerMiddleware.unMemoizeLazy(this.lazy);
return map.get(identifier);
}
}
@ -768,6 +827,14 @@ class PackContent {
}
}
const allowCollectingMemory = buf => {
const wasted = buf.buffer.byteLength - buf.byteLength;
if (wasted > 8192 && (wasted > 1048576 || wasted > buf.byteLength)) {
return Buffer.from(buf);
}
return buf;
};
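
allowCollectingMemory trades a copy for reclaimable memory: when a deserialized Buffer is a small view into a much larger pooled allocation, it is copied so the big backing buffer can be garbage collected. The heuristic above only copies when at least 8 KiB is wasted and the waste is either over 1 MiB or larger than the used part. A quick numeric illustration:

// Numeric illustration of the heuristic above.
const shouldCopy = (usedBytes, backingBytes) => {
	const wasted = backingBytes - usedBytes;
	return wasted > 8192 && (wasted > 1048576 || wasted > usedBytes);
};

console.log(shouldCopy(4096, 8192)); // false - only 4 KiB wasted
console.log(shouldCopy(4096, 65536)); // true  - the 60 KiB waste exceeds the 4 KiB in use
console.log(shouldCopy(10 * 1048576, 12 * 1048576)); // true  - more than 1 MiB wasted
console.log(shouldCopy(10 * 1048576, 10 * 1048576 + 16384)); // false - 16 KiB wasted, under both limits
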
class PackFileCacheStrategy {
/**
* @param {Object} options options
@ -779,6 +846,8 @@ class PackFileCacheStrategy {
* @param {Logger} options.logger a logger
* @param {SnapshotOptions} options.snapshot options regarding snapshotting
* @param {number} options.maxAge max age of cache items
* @param {boolean} options.profile track and log detailed timing information for individual cache items
* @param {boolean} options.allowCollectingMemory allow collecting unused memory created during deserialization
*/
constructor({
compiler,
@ -788,7 +857,9 @@ class PackFileCacheStrategy {
version,
logger,
snapshot,
maxAge
maxAge,
profile,
allowCollectingMemory
}) {
this.fileSerializer = createFileSerializer(fs);
this.fileSystemInfo = new FileSystemInfo(fs, {
@ -802,6 +873,8 @@ class PackFileCacheStrategy {
this.version = version;
this.logger = logger;
this.maxAge = maxAge;
this.profile = profile;
this.allowCollectingMemory = allowCollectingMemory;
this.snapshot = snapshot;
/** @type {Set<string>} */
this.buildDependencies = new Set();
@ -809,7 +882,7 @@ class PackFileCacheStrategy {
this.newBuildDependencies = new LazySet();
/** @type {Snapshot} */
this.resolveBuildDependenciesSnapshot = undefined;
/** @type {Map<string, string>} */
/** @type {Map<string, string | false>} */
this.resolveResults = undefined;
/** @type {Snapshot} */
this.buildSnapshot = undefined;
@ -829,7 +902,7 @@ class PackFileCacheStrategy {
* @returns {Promise<Pack>} the pack
*/
_openPack() {
const { logger, cacheLocation, version } = this;
const { logger, profile, cacheLocation, version } = this;
/** @type {Snapshot} */
let buildSnapshot;
/** @type {Set<string>} */
@ -838,14 +911,18 @@ class PackFileCacheStrategy {
let newBuildDependencies;
/** @type {Snapshot} */
let resolveBuildDependenciesSnapshot;
/** @type {Map<string, string>} */
/** @type {Map<string, string | false>} */
let resolveResults;
logger.time("restore cache container");
return this.fileSerializer
.deserialize(null, {
filename: `${cacheLocation}/index.pack`,
extension: ".pack",
logger
logger,
profile,
retainedBuffer: this.allowCollectingMemory
? allowCollectingMemory
: undefined
})
.catch(err => {
if (err.code !== "ENOENT") {
@ -967,7 +1044,8 @@ class PackFileCacheStrategy {
if (newBuildDependencies)
this.newBuildDependencies.addAll(newBuildDependencies);
this.resolveResults = resolveResults;
this.resolveBuildDependenciesSnapshot = resolveBuildDependenciesSnapshot;
this.resolveBuildDependenciesSnapshot =
resolveBuildDependenciesSnapshot;
return pack;
}
return new Pack(logger, this.maxAge);
@ -1089,10 +1167,11 @@ class PackFileCacheStrategy {
);
}
if (this.resolveBuildDependenciesSnapshot) {
this.resolveBuildDependenciesSnapshot = this.fileSystemInfo.mergeSnapshots(
this.resolveBuildDependenciesSnapshot,
snapshot
);
this.resolveBuildDependenciesSnapshot =
this.fileSystemInfo.mergeSnapshots(
this.resolveBuildDependenciesSnapshot,
snapshot
);
} else {
this.resolveBuildDependenciesSnapshot = snapshot;
}
@ -1120,10 +1199,11 @@ class PackFileCacheStrategy {
this.logger.debug("Captured build dependencies");
if (this.buildSnapshot) {
this.buildSnapshot = this.fileSystemInfo.mergeSnapshots(
this.buildSnapshot,
snapshot
);
this.buildSnapshot =
this.fileSystemInfo.mergeSnapshots(
this.buildSnapshot,
snapshot
);
} else {
this.buildSnapshot = snapshot;
}
@ -1158,7 +1238,8 @@ class PackFileCacheStrategy {
.serialize(content, {
filename: `${this.cacheLocation}/index.pack`,
extension: ".pack",
logger: this.logger
logger: this.logger,
profile: this.profile
})
.then(() => {
for (const dep of newBuildDependencies) {

View File

@ -148,7 +148,7 @@ const getArguments = (schema = webpackSchema) => {
{
type: "reset",
multiple: false,
description: `Clear all items provided in configuration. ${description}`,
description: `Clear all items provided in '${schemaPath}' configuration. ${description}`,
path: schemaPath
}
],

View File

@ -117,6 +117,7 @@ const A = (obj, prop, factory) => {
*/
const applyWebpackOptionsBaseDefaults = options => {
F(options, "context", () => process.cwd());
applyInfrastructureLoggingDefaults(options.infrastructureLogging);
};
/**
@ -235,8 +236,6 @@ const applyWebpackOptionsDefaults = options => {
getResolveLoaderDefaults({ cache }),
options.resolveLoader
);
applyInfrastructureLoggingDefaults(options.infrastructureLogging);
};
/**
@ -294,10 +293,12 @@ const applyCacheDefaults = (cache, { name, mode, development }) => {
);
D(cache, "hashAlgorithm", "md4");
D(cache, "store", "pack");
D(cache, "profile", false);
D(cache, "idleTimeout", 60000);
D(cache, "idleTimeoutForInitialStore", 0);
D(cache, "maxMemoryGenerations", development ? 10 : Infinity);
D(cache, "maxMemoryGenerations", development ? 5 : Infinity);
D(cache, "maxAge", 1000 * 60 * 60 * 24 * 60); // 1 month
D(cache, "allowCollectingMemory", development);
D(cache.buildDependencies, "defaultWebpack", [
path.resolve(__dirname, "..") + path.sep
]);
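
The cache defaults gain two new knobs, profile (off by default) and allowCollectingMemory (on in development), and maxMemoryGenerations drops to 5 in development. A hedged example of opting in explicitly from a webpack config; the values are illustrative:

// webpack.config.js - example of the filesystem cache options touched above (values are illustrative).
module.exports = {
	mode: "development",
	cache: {
		type: "filesystem",
		profile: true, // log per-item (de)serialization timings
		allowCollectingMemory: true, // copy small buffer views so large backing buffers can be freed
		maxMemoryGenerations: 5 // matches the new development default set above
	}
};
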
@ -317,9 +318,10 @@ const applyCacheDefaults = (cache, { name, mode, development }) => {
const applySnapshotDefaults = (snapshot, { production }) => {
A(snapshot, "managedPaths", () => {
if (process.versions.pnp === "3") {
const match = /^(.+?)[\\/]cache[\\/]watchpack-npm-[^\\/]+\.zip[\\/]node_modules[\\/]/.exec(
require.resolve("watchpack")
);
const match =
/^(.+?)[\\/]cache[\\/]watchpack-npm-[^\\/]+\.zip[\\/]node_modules[\\/]/.exec(
require.resolve("watchpack")
);
if (match) {
return [path.resolve(match[1], "unplugged")];
}
@ -336,16 +338,18 @@ const applySnapshotDefaults = (snapshot, { production }) => {
});
A(snapshot, "immutablePaths", () => {
if (process.versions.pnp === "1") {
const match = /^(.+?[\\/]v4)[\\/]npm-watchpack-[^\\/]+-[\da-f]{40}[\\/]node_modules[\\/]/.exec(
require.resolve("watchpack")
);
const match =
/^(.+?[\\/]v4)[\\/]npm-watchpack-[^\\/]+-[\da-f]{40}[\\/]node_modules[\\/]/.exec(
require.resolve("watchpack")
);
if (match) {
return [match[1]];
}
} else if (process.versions.pnp === "3") {
const match = /^(.+?)[\\/]watchpack-npm-[^\\/]+\.zip[\\/]node_modules[\\/]/.exec(
require.resolve("watchpack")
);
const match =
/^(.+?)[\\/]watchpack-npm-[^\\/]+\.zip[\\/]node_modules[\\/]/.exec(
require.resolve("watchpack")
);
if (match) {
return [match[1]];
}
@ -432,9 +436,6 @@ const applyModuleDefaults = (
};
/** @type {RuleSetRules} */
const rules = [
{
type: "javascript/auto"
},
{
mimetype: "application/node",
type: "javascript/auto"
@ -477,7 +478,15 @@ const applyModuleDefaults = (
},
{
dependency: "url",
type: "asset/resource"
oneOf: [
{
scheme: /^data$/,
type: "asset/inline"
},
{
type: "asset/resource"
}
]
}
];
if (asyncWebAssembly) {
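With the reworked dependency: "url" rule above, URL dependencies are split by URI scheme; an illustrative module-side sketch (the file name and data URI are hypothetical):

// handled as asset/inline (data: scheme)
const inlined = new URL("data:text/plain;base64,aGVsbG8=", import.meta.url);
// falls through to asset/resource and is emitted as a file
const emitted = new URL("./logo.png", import.meta.url);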
@ -734,6 +743,16 @@ const applyOutputDefaults = (
F(output.environment, "dynamicImport", () => tp && tp.dynamicImport);
F(output.environment, "module", () => tp && tp.module);
const { trustedTypes } = output;
if (trustedTypes) {
F(
trustedTypes,
"policyName",
() =>
output.uniqueName.replace(/[^a-zA-Z0-9\-#=_/@.%]+/g, "_") || "webpack"
);
}
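A user-facing sketch of the corresponding output option; the policy name is an example, and omitting it should fall back to the uniqueName-derived default computed above:

// webpack.config.js (sketch)
module.exports = {
  output: {
    trustedTypes: { policyName: "my-app#webpack" }
    // or: trustedTypes: true, in which case policyName defaults to a sanitized
    // output.uniqueName (falling back to "webpack")
  }
};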
/**
* @param {function(EntryDescription): void} fn iterator
* @returns {void}
@ -951,7 +970,7 @@ const applyOptimizationDefaults = (
A(splitChunks, "defaultSizeTypes", () => ["javascript", "unknown"]);
D(splitChunks, "hidePathInfo", production);
D(splitChunks, "chunks", "async");
D(splitChunks, "usedExports", true);
D(splitChunks, "usedExports", optimization.usedExports === true);
D(splitChunks, "minChunks", 1);
F(splitChunks, "minSize", () => (production ? 20000 : 10000));
F(splitChunks, "minRemainingSize", () => (development ? 0 : undefined));
@ -1032,6 +1051,7 @@ const getResolveDefaults = ({ cache, context, targetProperties, mode }) => {
byDependency: {
wasm: esmDeps(),
esm: esmDeps(),
loaderImport: esmDeps(),
url: {
preferRelative: true
},
@ -1077,8 +1097,14 @@ const getResolveLoaderDefaults = ({ cache }) => {
* @returns {void}
*/
const applyInfrastructureLoggingDefaults = infrastructureLogging => {
F(infrastructureLogging, "stream", () => process.stderr);
const tty =
/** @type {any} */ (infrastructureLogging.stream).isTTY &&
process.env.TERM !== "dumb";
D(infrastructureLogging, "level", "info");
D(infrastructureLogging, "debug", false);
D(infrastructureLogging, "colors", tty);
D(infrastructureLogging, "appendOnly", !tty);
};
exports.applyWebpackOptionsBaseDefaults = applyWebpackOptionsBaseDefaults;
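The new defaults above derive stream, colors and appendOnly from whether stderr is a TTY; a sketch of setting them explicitly (values are examples, not the computed defaults):

// webpack.config.js (sketch)
module.exports = {
  infrastructureLogging: {
    level: "info",
    stream: process.stderr,
    colors: false, // force plain output even on a TTY
    appendOnly: true // append log lines instead of updating status in place
  }
};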

View File

@ -92,9 +92,9 @@ const keyedNestedConfig = (value, fn, customKeys) => {
? {}
: Object.keys(value).reduce(
(obj, key) => (
(obj[key] = (customKeys && key in customKeys
? customKeys[key]
: fn)(value[key])),
(obj[key] = (
customKeys && key in customKeys ? customKeys[key] : fn
)(value[key])),
obj
),
/** @type {Record<string, R>} */ ({})
@ -129,8 +129,10 @@ const getNormalizedWebpackOptions = config => {
case "filesystem":
return {
type: "filesystem",
allowCollectingMemory: cache.allowCollectingMemory,
maxMemoryGenerations: cache.maxMemoryGenerations,
maxAge: cache.maxAge,
profile: cache.profile,
buildDependencies: cloneObject(cache.buildDependencies),
cacheDirectory: cache.cacheDirectory,
cacheLocation: cache.cacheLocation,
@ -162,10 +164,10 @@ const getNormalizedWebpackOptions = config => {
config.entry === undefined
? { main: {} }
: typeof config.entry === "function"
? (fn => () =>
Promise.resolve().then(fn).then(getNormalizedEntryStatic))(
config.entry
)
? (
fn => () =>
Promise.resolve().then(fn).then(getNormalizedEntryStatic)
)(config.entry)
: getNormalizedEntryStatic(config.entry),
experiments: cloneObject(config.experiments),
externals: config.externals,
@ -337,6 +339,15 @@ const getNormalizedWebpackOptions = config => {
sourceMapFilename: output.sourceMapFilename,
sourcePrefix: output.sourcePrefix,
strictModuleExceptionHandling: output.strictModuleExceptionHandling,
trustedTypes: optionalNestedConfig(
output.trustedTypes,
trustedTypes => {
if (trustedTypes === true) return {};
if (typeof trustedTypes === "string")
return { policyName: trustedTypes };
return { ...trustedTypes };
}
),
uniqueName: output.uniqueName,
wasmLoading: output.wasmLoading,
webassemblyModuleFilename: output.webassemblyModuleFilename,
@ -458,6 +469,7 @@ const getNormalizedEntryStatic = entry => {
filename: value.filename,
layer: value.layer,
runtime: value.runtime,
publicPath: value.publicPath,
chunkLoading: value.chunkLoading,
wasmLoading: value.wasmLoading,
dependOn:

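The entry normalization above now passes publicPath through per entry descriptor; a configuration sketch with illustrative paths:

// webpack.config.js (sketch; import and publicPath values are examples)
module.exports = {
  entry: {
    main: {
      import: "./src/index.js",
      publicPath: "/assets/" // per-entry publicPath is preserved by normalization
    }
  }
};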
View File

@ -5,14 +5,18 @@
"use strict";
const browserslistTargetHandler = require("./browserslistTargetHandler");
const memoize = require("../util/memoize");
const getBrowserslistTargetHandler = memoize(() =>
require("./browserslistTargetHandler")
);
/**
* @param {string} context the context directory
* @returns {string} default target
*/
const getDefaultTarget = context => {
const browsers = browserslistTargetHandler.load(null, context);
const browsers = getBrowserslistTargetHandler().load(null, context);
return browsers ? "browserslist" : "web";
};
@ -78,6 +82,7 @@ const TARGETS = [
"Resolve features from browserslist. Will resolve browserslist config automatically. Only browser or node queries are supported (electron is not supported). Examples: 'browserslist:modern' to use 'modern' environment from browserslist config",
/^browserslist(?::(.+))?$/,
(rest, context) => {
const browserslistTargetHandler = getBrowserslistTargetHandler();
const browsers = browserslistTargetHandler.load(
rest ? rest.trim() : null,
context

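A rough sketch of the lazy-require pattern introduced above, assuming a memoize helper that caches the first call's result; webpack's own helper in lib/util/memoize may differ in detail:

const memoize = fn => {
  let cached = false;
  let result;
  return () => {
    if (!cached) {
      result = fn();
      cached = true;
    }
    return result;
  };
};

// the browserslist handler is then only loaded when a browserslist target is used
const getBrowserslistTargetHandler = memoize(() =>
  require("./browserslistTargetHandler")
);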
View File

@ -5,8 +5,7 @@
"use strict";
const { validate } = require("schema-utils");
const schema = require("../../schemas/plugins/container/ContainerPlugin.json");
const createSchemaValidation = require("../util/create-schema-validation");
const ContainerEntryDependency = require("./ContainerEntryDependency");
const ContainerEntryModuleFactory = require("./ContainerEntryModuleFactory");
const ContainerExposedDependency = require("./ContainerExposedDependency");
@ -15,6 +14,15 @@ const { parseOptions } = require("./options");
/** @typedef {import("../../declarations/plugins/container/ContainerPlugin").ContainerPluginOptions} ContainerPluginOptions */
/** @typedef {import("../Compiler")} Compiler */
const validate = createSchemaValidation(
require("../../schemas/plugins/container/ContainerPlugin.check.js"),
() => require("../../schemas/plugins/container/ContainerPlugin.json"),
{
name: "Container Plugin",
baseDataPath: "options"
}
);
const PLUGIN_NAME = "ContainerPlugin";
class ContainerPlugin {
@ -22,7 +30,7 @@ class ContainerPlugin {
* @param {ContainerPluginOptions} options options
*/
constructor(options) {
validate(schema, options, { name: "Container Plugin" });
validate(options);
this._options = {
name: options.name,

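createSchemaValidation only appears at its call sites in this diff; a rough sketch of how such a helper could be shaped, inferred from those call sites rather than taken from the actual implementation:

const { validate } = require("schema-utils");

// run the precompiled check first; only load the full JSON schema to produce
// a detailed error when the fast check fails (or when no check is provided)
const createSchemaValidation = (check, getSchema, options) => value => {
  if (check && check(value)) return;
  validate(getSchema(), value, options);
};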
View File

@ -5,10 +5,9 @@
"use strict";
const { validate } = require("schema-utils");
const schema = require("../../schemas/plugins/container/ContainerReferencePlugin.json");
const ExternalsPlugin = require("../ExternalsPlugin");
const RuntimeGlobals = require("../RuntimeGlobals");
const createSchemaValidation = require("../util/create-schema-validation");
const FallbackDependency = require("./FallbackDependency");
const FallbackItemDependency = require("./FallbackItemDependency");
const FallbackModuleFactory = require("./FallbackModuleFactory");
@ -21,6 +20,16 @@ const { parseOptions } = require("./options");
/** @typedef {import("../../declarations/plugins/container/ContainerReferencePlugin").RemotesConfig} RemotesConfig */
/** @typedef {import("../Compiler")} Compiler */
const validate = createSchemaValidation(
require("../../schemas/plugins/container/ContainerReferencePlugin.check.js"),
() =>
require("../../schemas/plugins/container/ContainerReferencePlugin.json"),
{
name: "Container Reference Plugin",
baseDataPath: "options"
}
);
const slashCode = "/".charCodeAt(0);
class ContainerReferencePlugin {
@ -28,7 +37,7 @@ class ContainerReferencePlugin {
* @param {ContainerReferencePluginOptions} options options
*/
constructor(options) {
validate(schema, options, { name: "Container Reference Plugin" });
validate(options);
this._remoteType = options.remoteType;
this._remotes = parseOptions(

View File

@ -5,9 +5,9 @@
"use strict";
const { validate } = require("schema-utils");
const schema = require("../../schemas/plugins/container/ModuleFederationPlugin.json");
const isValidExternalsType = require("../../schemas/plugins/container/ExternalsType.check.js");
const SharePlugin = require("../sharing/SharePlugin");
const createSchemaValidation = require("../util/create-schema-validation");
const ContainerPlugin = require("./ContainerPlugin");
const ContainerReferencePlugin = require("./ContainerReferencePlugin");
@ -16,12 +16,20 @@ const ContainerReferencePlugin = require("./ContainerReferencePlugin");
/** @typedef {import("../../declarations/plugins/container/ModuleFederationPlugin").Shared} Shared */
/** @typedef {import("../Compiler")} Compiler */
const validate = createSchemaValidation(
require("../../schemas/plugins/container/ModuleFederationPlugin.check.js"),
() => require("../../schemas/plugins/container/ModuleFederationPlugin.json"),
{
name: "Module Federation Plugin",
baseDataPath: "options"
}
);
class ModuleFederationPlugin {
/**
* @param {ModuleFederationPluginOptions} options options
*/
constructor(options) {
validate(schema, options, { name: "Module Federation Plugin" });
validate(options);
this._options = options;
}
@ -36,8 +44,7 @@ class ModuleFederationPlugin {
const library = options.library || { type: "var", name: options.name };
const remoteType =
options.remoteType ||
(options.library &&
schema.definitions.ExternalsType.enum.includes(options.library.type)
(options.library && isValidExternalsType(options.library.type)
? /** @type {ExternalsType} */ (options.library.type)
: "script");
if (

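For context, an illustrative ModuleFederationPlugin setup; all option values are examples, and only the remoteType/library fallback relates to the isValidExternalsType check above:

// webpack.config.js (sketch)
const { ModuleFederationPlugin } = require("webpack").container;

module.exports = {
  plugins: [
    new ModuleFederationPlugin({
      name: "app1",
      filename: "remoteEntry.js",
      exposes: { "./Button": "./src/Button" },
      remotes: { app2: "app2@http://localhost:3002/remoteEntry.js" },
      // when library.type is a valid externals type, remoteType defaults to it;
      // otherwise it falls back to "script"
      library: { type: "var", name: "app1" }
    })
  ]
};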
View File

@ -20,7 +20,8 @@ class RemoteRuntimeModule extends RuntimeModule {
* @returns {string} runtime code
*/
generate() {
const { runtimeTemplate, chunkGraph, moduleGraph } = this.compilation;
const { compilation, chunkGraph } = this;
const { runtimeTemplate, moduleGraph } = compilation;
const chunkToRemotesMapping = {};
const idToExternalAndNameMapping = {};
for (const chunk of this.chunk.getAllAsyncChunks()) {

View File

@ -5,13 +5,20 @@
"use strict";
const { Tracer } = require("chrome-trace-event");
const { validate } = require("schema-utils");
const schema = require("../../schemas/plugins/debug/ProfilingPlugin.json");
const createSchemaValidation = require("../util/create-schema-validation");
const { dirname, mkdirpSync } = require("../util/fs");
/** @typedef {import("../../declarations/plugins/debug/ProfilingPlugin").ProfilingPluginOptions} ProfilingPluginOptions */
/** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */
const validate = createSchemaValidation(
require("../../schemas/plugins/debug/ProfilingPlugin.check.js"),
() => require("../../schemas/plugins/debug/ProfilingPlugin.json"),
{
name: "Profiling Plugin",
baseDataPath: "options"
}
);
let inspector = undefined;
try {
@ -183,10 +190,7 @@ class ProfilingPlugin {
* @param {ProfilingPluginOptions=} options options object
*/
constructor(options = {}) {
validate(schema, options, {
name: "Profiling Plugin",
baseDataPath: "options"
});
validate(options);
this.outputPath = options.outputPath || "events.json";
}
@ -328,9 +332,10 @@ const interceptAllParserHooks = (moduleFactory, tracer) => {
const interceptAllJavascriptModulesPluginHooks = (compilation, tracer) => {
interceptAllHooksFor(
{
hooks: require("../javascript/JavascriptModulesPlugin").getCompilationHooks(
compilation
)
hooks:
require("../javascript/JavascriptModulesPlugin").getCompilationHooks(
compilation
)
},
tracer,
"JavascriptModulesPlugin"

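For context, an illustrative usage of the plugin whose validation is switched to the precompiled check above; the output path is an example:

// webpack.config.js (sketch)
const webpack = require("webpack");

module.exports = {
  plugins: [
    new webpack.debug.ProfilingPlugin({
      outputPath: "profiling/events.json"
    })
  ]
};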
View File

@ -77,9 +77,9 @@ AMDRequireDependency.Template = class AMDRequireDependencyTemplate extends (
{ runtimeTemplate, moduleGraph, chunkGraph, runtimeRequirements }
) {
const dep = /** @type {AMDRequireDependency} */ (dependency);
const depBlock = /** @type {AsyncDependenciesBlock} */ (moduleGraph.getParentBlock(
dep
));
const depBlock = /** @type {AsyncDependenciesBlock} */ (
moduleGraph.getParentBlock(dep)
);
const promise = runtimeTemplate.blockPromise({
chunkGraph,
block: depBlock,

View File

@ -230,9 +230,8 @@ class CommonJsExportRequireDependency extends ModuleDependency {
if (name === "__esModule" && isNamespaceImport) {
exports.add(name);
} else if (importedExportsInfo) {
const importedExportInfo = importedExportsInfo.getReadOnlyExportInfo(
name
);
const importedExportInfo =
importedExportsInfo.getReadOnlyExportInfo(name);
if (importedExportInfo.provided === false) continue;
exports.add(name);
if (importedExportInfo.provided === true) continue;

Some files were not shown because too many files have changed in this diff Show More