Merge pull request #15373 from webpack/fix/issue-14907

If the cache pack is too big, we should batch writing
commit ba4e83c3a9
Tobias Koppers, 2022-02-15 13:33:08 +01:00, committed by GitHub
2 changed files with 44 additions and 13 deletions

lib/serialization/FileMiddleware.js

@@ -40,6 +40,8 @@ Section -> Buffer
 // "wpc" + 1 in little-endian
 const VERSION = 0x01637077;
+const WRITE_LIMIT_TOTAL = 0x7fff0000;
+const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;
 
 /**
  * @param {Buffer[]} buffers buffers
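The two new limits are worth unpacking: WRITE_LIMIT_TOTAL is 0x7fff0000 bytes, 64 KiB short of 2^31, presumably to stay clear of the ~2 GiB ceiling most platforms impose on a single write, and WRITE_LIMIT_CHUNK is 511 MiB, the largest slice handed to a single stream.write call. A quick arithmetic check of what the values work out to (plain Node.js, not part of the diff):

// Sanity-check of the limits defined above.
const WRITE_LIMIT_TOTAL = 0x7fff0000; // 2147418112 bytes
const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024; // 535822336 bytes

console.log(2 ** 31 - WRITE_LIMIT_TOTAL); // 65536 — headroom below the 2^31 boundary
console.log(Math.floor(WRITE_LIMIT_TOTAL / WRITE_LIMIT_CHUNK)); // 4 — chunks per full batch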
@@ -87,7 +89,7 @@ const readUInt64LE = Buffer.prototype.readBigUInt64LE
  * @param {FileMiddleware} middleware this
  * @param {BufferSerializableType[] | Promise<BufferSerializableType[]>} data data to be serialized
  * @param {string | boolean} name file base name
- * @param {function(string | false, Buffer[]): Promise<void>} writeFile writes a file
+ * @param {function(string | false, Buffer[], number): Promise<void>} writeFile writes a file
  * @param {string | Hash} hashFunction hash function to use
  * @returns {Promise<SerializeResult>} resulting file pointer and promise
  */
@@ -212,9 +214,9 @@ const serialize = async (
 			if (name === true) {
 				name = hashForName(buf, hashFunction);
 			}
-			backgroundJobs.push(writeFile(name, buf));
 			let size = 0;
 			for (const b of buf) size += b.length;
+			backgroundJobs.push(writeFile(name, buf, size));
 			return {
 				size,
 				name,
@@ -422,7 +424,7 @@ class FileMiddleware extends SerializerMiddleware {
 		// It's important that we don't touch existing files during serialization
 		// because serialize may read existing files (when deserializing)
 		const allWrittenFiles = new Set();
-		const writeFile = async (name, content) => {
+		const writeFile = async (name, content, size) => {
 			const file = name
 				? join(this.fs, filename, `../${name}${extension}`)
 				: filename;
@@ -441,10 +443,7 @@ class FileMiddleware extends SerializerMiddleware {
 						[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
 						[zConstants.BROTLI_PARAM_QUALITY]: 2,
 						[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
-						[zConstants.BROTLI_PARAM_SIZE_HINT]: content.reduce(
-							(size, b) => size + b.length,
-							0
-						)
+						[zConstants.BROTLI_PARAM_SIZE_HINT]: size
 					}
 				});
 			}
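With the total already computed in serialize and passed through writeFile, the reduce over content becomes redundant; the value feeds straight into Node's Brotli options. A minimal sketch of the same compressor setup (totalSize is an illustrative stand-in for the size argument):

const zlib = require("zlib");

// Sketch only: mirrors the option shape used in the diff above.
const totalSize = 1024 * 1024;
const compress = zlib.createBrotliCompress({
	params: {
		[zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT,
		[zlib.constants.BROTLI_PARAM_QUALITY]: 2,
		[zlib.constants.BROTLI_PARAM_SIZE_HINT]: totalSize
	}
});

The size hint lets the encoder tune itself (for example, pick a window size) for the expected input instead of adapting as data streams in.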
@@ -456,8 +455,44 @@ class FileMiddleware extends SerializerMiddleware {
 					stream.on("error", err => reject(err));
 					stream.on("finish", () => resolve());
 				}
-				for (const b of content) stream.write(b);
-				stream.end();
+				// split into chunks for WRITE_LIMIT_CHUNK size
+				const chunks = [];
+				for (const b of content) {
+					if (b.length < WRITE_LIMIT_CHUNK) {
+						chunks.push(b);
+					} else {
+						for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
+							chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
+						}
+					}
+				}
+				const len = chunks.length;
+				let i = 0;
+				const batchWrite = err => {
+					// will be handled in "on" error handler
+					if (err) return;
+					if (i === len) {
+						stream.end();
+						return;
+					}
+					// queue up a batch of chunks up to the write limit
+					// end is exclusive
+					let end = i;
+					let sum = chunks[end++].length;
+					while (end < len) {
+						sum += chunks[end].length;
+						if (sum > WRITE_LIMIT_TOTAL) break;
+						end++;
+					}
+					while (i < end - 1) {
+						stream.write(chunks[i++]);
+					}
+					stream.write(chunks[i++], batchWrite);
+				};
+				batchWrite();
 			});
 			if (name) allWrittenFiles.add(file);
 		};
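The batching logic above is self-contained enough to read in isolation. A standalone sketch of the same pattern, with illustrative names (writeBatched, CHUNK, and TOTAL are this sketch's, not webpack's):

const fs = require("fs");

// Illustrative constants mirroring the diff.
const CHUNK = 511 * 1024 * 1024; // largest single write
const TOTAL = 0x7fff0000; // most bytes queued per batch

function writeBatched(stream, buffers, done) {
	// Split oversized buffers so no single write exceeds CHUNK.
	const chunks = [];
	for (const b of buffers) {
		if (b.length < CHUNK) {
			chunks.push(b);
		} else {
			for (let i = 0; i < b.length; i += CHUNK) {
				chunks.push(b.subarray(i, i + CHUNK));
			}
		}
	}
	let i = 0;
	const next = err => {
		if (err) return done(err);
		if (i === chunks.length) return stream.end(() => done());
		// Take chunks until the batch would exceed TOTAL (always at least one).
		let end = i;
		let sum = chunks[end++].length;
		while (end < chunks.length && sum + chunks[end].length <= TOTAL) {
			sum += chunks[end++].length;
		}
		// Queue the batch; only the last write carries the callback,
		// so the next batch starts after this one is flushed.
		while (i < end - 1) stream.write(chunks[i++]);
		stream.write(chunks[i++], next);
	};
	next();
}

// Usage: three small buffers, written through the same batching path.
writeBatched(
	fs.createWriteStream("/tmp/pack.bin"),
	[Buffer.alloc(1024), Buffer.alloc(2048), Buffer.alloc(512)],
	err => console.log(err ? `failed: ${err.message}` : "written")
);

Because only the final write of each batch carries the callback, at most roughly WRITE_LIMIT_TOTAL bytes are ever queued on the stream at once, which is what lets multi-gigabyte cache packs be written without hitting the oversized-write failure the commit title refers to.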

test/TestCasesCachePack.longtest.js

@@ -25,10 +25,6 @@ describe("TestCases", () => {
 		["no-string"]:
 			/^Pack got invalid because of write to: Compilation\/modules.+no-string[/\\]loader\.js!.+no-string[/\\]file\.js$/
 	},
-	large: {
-		["big-assets"]:
-			/^Pack got invalid because of write to: ResolverCachePlugin|normal|dependencyType=|esm|path=|.+|request=|\.\/large\/big-assets\/$/
-	},
 	parsing: {
 		// Module parse failed
 		context: