mirror of https://github.com/webpack/webpack.git
Add maxAsyncSize and maxInitialSize options
parent 5f26040786
commit 2a10b12e09
@@ -910,6 +910,14 @@ export interface OptimizationSplitChunksOptions {
    * Sets the name delimiter for created chunks
    */
   automaticNameDelimiter?: string;
+  /**
+   * Maximal size hint for the on-demand chunks
+   */
+  maxAsyncSize?: OptimizationSplitChunksSizes;
+  /**
+   * Maximal size hint for the initial chunks
+   */
+  maxInitialSize?: OptimizationSplitChunksSizes;
   /**
    * Maximal size hint for the created chunks
    */
@@ -989,10 +997,18 @@ export interface OptimizationSplitChunksCacheGroup {
    * Maximum number of requests which are accepted for on-demand loading
    */
   maxAsyncRequests?: number;
+  /**
+   * Maximal size hint for the on-demand chunks
+   */
+  maxAsyncSize?: OptimizationSplitChunksSizes;
   /**
    * Maximum number of initial chunks which are accepted for an entry point
    */
   maxInitialRequests?: number;
+  /**
+   * Maximal size hint for the initial chunks
+   */
+  maxInitialSize?: OptimizationSplitChunksSizes;
   /**
    * Maximal size hint for the created chunks
    */
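For orientation, a minimal configuration exercising the two new options might look like the sketch below; it is not part of this commit, and the byte values are placeholders.

// webpack.config.js — illustrative sketch only, values are made up
module.exports = {
  optimization: {
    splitChunks: {
      chunks: "all",
      maxInitialSize: 150000, // size hint for chunks needed at page load
      maxAsyncSize: 300000 // size hint for on-demand (lazy-loaded) chunks
    }
  }
};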
@@ -263,7 +263,6 @@ class WebpackOptionsDefaulter extends OptionsDefaulter {
     this.set("optimization.splitChunks.maxAsyncRequests", "make", options => {
       return isProductionLikeMode(options) ? 6 : Infinity;
     });
-    this.set("optimization.splitChunks.maxAsyncSize", 100000);
     this.set("optimization.splitChunks.automaticNameDelimiter", "-");
     this.set("optimization.splitChunks.maxInitialRequests", "make", options => {
       return isProductionLikeMode(options) ? 4 : Infinity;
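The hard-coded maxAsyncSize default of 100000 is dropped here, while the request limits in the surrounding context stay mode-dependent. Roughly, as a sketch of the visible defaults (not the real OptionsDefaulter plumbing):

// Sketch: production-like builds get finite request limits, others Infinity
const requestDefaults = options => ({
  maxAsyncRequests: isProductionLikeMode(options) ? 6 : Infinity,
  maxInitialRequests: isProductionLikeMode(options) ? 4 : Infinity
});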
@@ -44,6 +44,13 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @returns {boolean}
  */
 
+/**
+ * @callback CombineSizeFunction
+ * @param {number} a
+ * @param {number} b
+ * @returns {number}
+ */
+
 /**
  * @typedef {Object} CacheGroupSource
  * @property {string=} key
@@ -52,7 +59,8 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {ChunkFilterFunction=} chunksFilter
  * @property {boolean=} enforce
  * @property {SplitChunksSizes} minSize
- * @property {SplitChunksSizes} maxSize
+ * @property {SplitChunksSizes} maxAsyncSize
+ * @property {SplitChunksSizes} maxInitialSize
  * @property {number=} minChunks
  * @property {number=} maxAsyncRequests
  * @property {number=} maxInitialRequests
@@ -71,7 +79,6 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {boolean=} enforce
  * @property {SplitChunksSizes} minSize
  * @property {SplitChunksSizes} minSizeForMaxSize
- * @property {SplitChunksSizes} maxSize
  * @property {SplitChunksSizes} maxAsyncSize
  * @property {SplitChunksSizes} maxInitialSize
  * @property {number=} minChunks
@@ -86,7 +93,8 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 /**
  * @typedef {Object} FallbackCacheGroup
  * @property {SplitChunksSizes} minSize
- * @property {SplitChunksSizes} maxSize
+ * @property {SplitChunksSizes} maxAsyncSize
+ * @property {SplitChunksSizes} maxInitialSize
  * @property {string} automaticNameDelimiter
  */
 
@@ -115,7 +123,6 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @typedef {Object} SplitChunksOptions
  * @property {ChunkFilterFunction} chunksFilter
  * @property {SplitChunksSizes} minSize
- * @property {SplitChunksSizes} maxSize
  * @property {SplitChunksSizes} maxInitialSize
  * @property {SplitChunksSizes} maxAsyncSize
  * @property {number} minChunks
@@ -230,32 +237,35 @@ const ALL_CHUNK_FILTER = chunk => true;
  */
 const normalizeSizes = value => {
   if (typeof value === "number") {
-    const obj = Object.create(null);
-    obj.javascript = value;
-    return obj;
+    return {
+      javascript: value
+    };
   } else if (typeof value === "object" && value !== null) {
-    return Object.assign(Object.create(null), value);
+    return Object.assign({}, value);
   } else {
-    return Object.create(null);
+    return {};
   }
 };
 
 /**
- * @param {OptimizationSplitChunksSizes} value the sizes
- * @param {OptimizationSplitChunksSizes} defaultValue the default sizes
+ * @param {...OptimizationSplitChunksSizes} sizes the sizes
  * @returns {SplitChunksSizes} the merged sizes
  */
-const mergeSizes = (value, defaultValue) => {
-  if (value === undefined) return normalizeSizes(defaultValue);
-  const sizes = normalizeSizes(value);
-  const defaultSizes = normalizeSizes(defaultValue);
-  return Object.assign(Object.create(null), defaultSizes, sizes);
+const mergeSizes = (...sizes) => {
+  return Object.assign({}, ...sizes.map(normalizeSizes).reverse());
 };
 
+/**
+ * @param {SplitChunksSizes} a first sizes
+ * @param {SplitChunksSizes} b second sizes
+ * @param {CombineSizeFunction} combine a function to combine sizes
+ * @returns {SplitChunksSizes} the combine sizes
+ */
 const combineSizes = (a, b, combine) => {
   const aKeys = new Set(Object.keys(a));
   const bKeys = new Set(Object.keys(b));
-  const result = Object.create(null);
+  /** @type {SplitChunksSizes} */
+  const result = {};
   for (const key of aKeys) {
     if (bKeys.has(key)) {
       result[key] = combine(a[key], b[key]);
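These helpers carry most of the change: normalizeSizes turns a bare number into a per-source-type object, and the new variadic mergeSizes lets earlier arguments win (later ones are applied first, then overridden). A rough illustration with made-up values, assuming the implementations above:

normalizeSizes(100000); // => { javascript: 100000 }
normalizeSizes(undefined); // => {}

// First argument takes precedence, so an explicit maxAsyncSize
// overrides a maxSize fallback:
mergeSizes(undefined, 100000); // => { javascript: 100000 }
mergeSizes(50000, 100000); // => { javascript: 50000 }
mergeSizes({ javascript: 30000 }, undefined, 100000); // => { javascript: 30000 }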
@@ -385,6 +395,8 @@ const normalizeCacheGroups = (cacheGroups, name) => {
         enforce: option.enforce,
         minSize: option.minSize,
         maxSize: option.maxSize,
+        maxAsyncSize: option.maxAsyncSize,
+        maxInitialSize: option.maxInitialSize,
         minChunks: option.minChunks,
         maxAsyncRequests: option.maxAsyncRequests,
         maxInitialRequests: option.maxInitialRequests,
@@ -465,7 +477,8 @@ const createCacheGroupSource = options => {
     chunksFilter: normalizeChunksFilter(options.chunks),
     enforce: options.enforce,
     minSize: normalizeSizes(options.minSize),
-    maxSize: normalizeSizes(options.maxSize),
+    maxAsyncSize: mergeSizes(options.maxAsyncSize, options.maxSize),
+    maxInitialSize: mergeSizes(options.maxInitialSize, options.maxSize),
     minChunks: options.minChunks,
     maxAsyncRequests: options.maxAsyncRequests,
     maxInitialRequests: options.maxInitialRequests,
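At the cache-group level the new sizes fall back to maxSize; because mergeSizes favors earlier arguments, an explicit maxAsyncSize beats the maxSize fallback. A tiny sketch with invented option values:

mergeSizes(undefined, 244000); // only maxSize set => { javascript: 244000 }
mergeSizes(500000, 244000); // maxAsyncSize set too => { javascript: 500000 }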
@@ -487,9 +500,8 @@ module.exports = class SplitChunksPlugin {
     this.options = {
       chunksFilter: normalizeChunksFilter(options.chunks || "all"),
       minSize: normalizeSizes(options.minSize),
-      maxSize: normalizeSizes(options.maxSize),
-      maxInitialSize: normalizeSizes(options.maxInitialSize || options.maxSize),
       maxAsyncSize: normalizeSizes(options.maxAsyncSize || options.maxSize),
+      maxInitialSize: normalizeSizes(options.maxInitialSize || options.maxSize),
       minChunks: options.minChunks || 1,
       maxAsyncRequests: options.maxAsyncRequests || 1,
       maxInitialRequests: options.maxInitialRequests || 1,
@@ -500,7 +512,18 @@ module.exports = class SplitChunksPlugin {
       automaticNameDelimiter: options.automaticNameDelimiter,
       fallbackCacheGroup: {
         minSize: mergeSizes(fallbackCacheGroup.minSize, options.minSize),
-        maxSize: mergeSizes(fallbackCacheGroup.maxSize, options.maxSize),
+        maxAsyncSize: mergeSizes(
+          fallbackCacheGroup.maxAsyncSize,
+          fallbackCacheGroup.maxSize,
+          options.maxAsyncSize,
+          options.maxSize
+        ),
+        maxInitialSize: mergeSizes(
+          fallbackCacheGroup.maxInitialSize,
+          fallbackCacheGroup.maxSize,
+          options.maxInitialSize,
+          options.maxSize
+        ),
         automaticNameDelimiter:
           fallbackCacheGroup.automaticNameDelimiter ||
           options.automaticNameDelimiter ||
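For the fallback cache group each limit is merged from four sources, so the effective precedence is fallbackCacheGroup.maxAsyncSize, then fallbackCacheGroup.maxSize, then the top-level maxAsyncSize, then the top-level maxSize. A worked sketch with invented numbers:

mergeSizes(
  undefined, // fallbackCacheGroup.maxAsyncSize (not set)
  200000,    // fallbackCacheGroup.maxSize
  300000,    // options.maxAsyncSize
  100000     // options.maxSize
);
// => { javascript: 200000 } — the first defined source wins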
@@ -754,30 +777,22 @@ module.exports = class SplitChunksPlugin {
           cacheGroupSource.chunksFilter || this.options.chunksFilter,
         minSize: mergeSizes(
           cacheGroupSource.minSize,
-          cacheGroupSource.enforce
-            ? Object.create(null)
-            : this.options.minSize
+          cacheGroupSource.enforce ? undefined : this.options.minSize
         ),
         minSizeForMaxSize: mergeSizes(
           cacheGroupSource.minSize,
           this.options.minSize
         ),
-        maxSize: mergeSizes(
-          cacheGroupSource.maxSize,
-          cacheGroupSource.enforce
-            ? Object.create(null)
-            : this.options.maxSize
-        ),
         maxAsyncSize: mergeSizes(
-          cacheGroupSource.maxSize,
+          cacheGroupSource.maxAsyncSize,
           cacheGroupSource.enforce
-            ? Object.create(null)
+            ? undefined
             : this.options.maxAsyncSize
         ),
         maxInitialSize: mergeSizes(
-          cacheGroupSource.maxSize,
+          cacheGroupSource.maxInitialSize,
           cacheGroupSource.enforce
-            ? Object.create(null)
+            ? undefined
             : this.options.maxInitialSize
         ),
         minChunks:
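Note the enforce handling: an enforced cache group passes undefined instead of the global size, so the global minSize/maxAsyncSize/maxInitialSize are simply not merged in. A small sketch with an invented global value:

const globalMaxAsyncSize = { javascript: 100000 };
// Cache group without its own maxAsyncSize:
mergeSizes(undefined, globalMaxAsyncSize); // enforce: false => { javascript: 100000 }
mergeSizes(undefined, undefined); // enforce: true => {} (no hint applied)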
@@ -853,7 +868,8 @@ module.exports = class SplitChunksPlugin {
 /**
  * @typedef {Object} MaxSizeQueueItem
  * @property {SplitChunksSizes} minSize
- * @property {SplitChunksSizes} maxSize
+ * @property {SplitChunksSizes} maxAsyncSize
+ * @property {SplitChunksSizes} maxInitialSize
  * @property {string} automaticNameDelimiter
  * @property {string[]} keys
  */
@@ -891,12 +907,16 @@ module.exports = class SplitChunksPlugin {
           if (
             chunkGraph.getNumberOfChunkModules(chunk) !==
             item.modules.size
-          )
+          ) {
             continue;
-          if (chunkGraph.getNumberOfEntryModules(chunk) > 0) continue;
+          }
+          if (chunkGraph.getNumberOfEntryModules(chunk) > 0) {
+            continue;
+          }
           for (const module of item.modules) {
-            if (!chunkGraph.isModuleInChunk(module, chunk))
+            if (!chunkGraph.isModuleInChunk(module, chunk)) {
               continue outer;
+            }
           }
           if (!newChunk || !newChunk.name) {
             newChunk = chunk;
@@ -1025,7 +1045,10 @@ module.exports = class SplitChunksPlugin {
           }
         }
 
-        if (Object.keys(item.cacheGroup.maxSize).length > 0) {
+        if (
+          Object.keys(item.cacheGroup.maxAsyncSize).length > 0 ||
+          Object.keys(item.cacheGroup.maxInitialSize).length > 0
+        ) {
           const oldMaxSizeSettings = maxSizeQueueMap.get(newChunk);
           maxSizeQueueMap.set(newChunk, {
             minSize: oldMaxSizeSettings
@@ -1035,13 +1058,20 @@ module.exports = class SplitChunksPlugin {
                 Math.max
               )
               : item.cacheGroup.minSize,
-            maxSize: oldMaxSizeSettings
+            maxAsyncSize: oldMaxSizeSettings
               ? combineSizes(
-                oldMaxSizeSettings.maxSize,
-                item.cacheGroup.maxSize,
+                oldMaxSizeSettings.maxAsyncSize,
+                item.cacheGroup.maxAsyncSize,
                 Math.min
               )
-              : item.cacheGroup.maxSize,
+              : item.cacheGroup.maxAsyncSize,
+            maxInitialSize: oldMaxSizeSettings
+              ? combineSizes(
+                oldMaxSizeSettings.maxInitialSize,
+                item.cacheGroup.maxInitialSize,
+                Math.min
+              )
+              : item.cacheGroup.maxInitialSize,
             automaticNameDelimiter: item.cacheGroup.automaticNameDelimiter,
             keys: oldMaxSizeSettings
               ? oldMaxSizeSettings.keys.concat(item.cacheGroup.key)
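When a chunk already has queued max-size settings (for example it was picked up by several cache groups), the per-type limits are combined with Math.min so the strictest hint wins, mirroring the Math.max used for minSize above. For keys present in both objects:

combineSizes({ javascript: 100000 }, { javascript: 50000 }, Math.min);
// => { javascript: 50000 }
combineSizes({ javascript: 30000 }, { javascript: 50000 }, Math.max);
// => { javascript: 50000 }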
@@ -1095,9 +1125,24 @@ module.exports = class SplitChunksPlugin {
       // Make sure that maxSize is fulfilled
       for (const chunk of Array.from(compilation.chunks)) {
         const chunkConfig = maxSizeQueueMap.get(chunk);
-        const { minSize, maxSize, automaticNameDelimiter } =
-          chunkConfig || this.options.fallbackCacheGroup;
-        if (!maxSize || Object.keys(maxSize).length === 0) continue;
+        const {
+          minSize,
+          maxAsyncSize,
+          maxInitialSize,
+          automaticNameDelimiter
+        } = chunkConfig || this.options.fallbackCacheGroup;
+        /** @type {SplitChunksSizes} */
+        let maxSize;
+        if (chunk.isOnlyInitial()) {
+          maxSize = maxInitialSize;
+        } else if (chunk.canBeInitial()) {
+          maxSize = combineSizes(maxAsyncSize, maxInitialSize, Math.min);
+        } else {
+          maxSize = maxAsyncSize;
+        }
+        if (Object.keys(maxSize).length === 0) {
+          continue;
+        }
         for (const key of Object.keys(maxSize)) {
           const maxSizeValue = maxSize[key];
           const minSizeValue = minSize[key];
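This is where the two hints finally diverge per chunk: initial-only chunks use maxInitialSize, chunks that can be loaded both initially and on demand take the stricter of the two per source type, and pure on-demand chunks use maxAsyncSize. Restated as a standalone sketch:

// Sketch of the selection above (chunk is a webpack Chunk)
const effectiveMaxSize = (chunk, maxAsyncSize, maxInitialSize) => {
  if (chunk.isOnlyInitial()) return maxInitialSize;
  if (chunk.canBeInitial()) {
    return combineSizes(maxAsyncSize, maxInitialSize, Math.min);
  }
  return maxAsyncSize;
};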
@@ -605,11 +605,27 @@
       "type": "number",
       "minimum": 1
     },
+    "maxAsyncSize": {
+      "description": "Maximal size hint for the on-demand chunks",
+      "oneOf": [
+        {
+          "$ref": "#/definitions/OptimizationSplitChunksSizes"
+        }
+      ]
+    },
     "maxInitialRequests": {
       "description": "Maximum number of initial chunks which are accepted for an entry point",
       "type": "number",
       "minimum": 1
     },
+    "maxInitialSize": {
+      "description": "Maximal size hint for the initial chunks",
+      "oneOf": [
+        {
+          "$ref": "#/definitions/OptimizationSplitChunksSizes"
+        }
+      ]
+    },
     "maxSize": {
       "description": "Maximal size hint for the created chunks",
       "oneOf": [
@@ -745,6 +761,22 @@
       "type": "string",
       "minLength": 1
     },
+    "maxAsyncSize": {
+      "description": "Maximal size hint for the on-demand chunks",
+      "oneOf": [
+        {
+          "$ref": "#/definitions/OptimizationSplitChunksSizes"
+        }
+      ]
+    },
+    "maxInitialSize": {
+      "description": "Maximal size hint for the initial chunks",
+      "oneOf": [
+        {
+          "$ref": "#/definitions/OptimizationSplitChunksSizes"
+        }
+      ]
+    },
     "maxSize": {
       "description": "Maximal size hint for the created chunks",
       "oneOf": [
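Both schema entries reference OptimizationSplitChunksSizes, so, matching normalizeSizes above, the options should accept either a bare number or an object keyed by source type. Illustrative values:

const splitChunks = {
  maxInitialSize: 150000, // shorthand number form
  maxAsyncSize: { javascript: 300000 } // per-source-type object form
};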