mirror of https://github.com/webpack/webpack.git
feat: use ES modules for universal target for chunks and worker chunks
commit 1a5e531ccb
@@ -1138,10 +1138,12 @@ const applyOutputDefaults = (
 				break;
 			}
 			if (
-				tp.require === null ||
-				tp.nodeBuiltins === null ||
-				tp.document === null ||
-				tp.importScripts === null
+				(tp.require === null ||
+					tp.nodeBuiltins === null ||
+					tp.document === null ||
+					tp.importScripts === null) &&
+				output.module &&
+				environment.dynamicImport
 			) {
 				return "universal";
 			}
@@ -1163,9 +1165,11 @@ const applyOutputDefaults = (
 				break;
 			}
 			if (
-				tp.require === null ||
-				tp.nodeBuiltins === null ||
-				tp.importScriptsInWorker === null
+				(tp.require === null ||
+					tp.nodeBuiltins === null ||
+					tp.importScriptsInWorker === null) &&
+				output.module &&
+				environment.dynamicImport
 			) {
 				return "universal";
 			}
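
Both hunks tighten the same fallback in applyOutputDefaults: unknown target capabilities alone no longer select the "universal" chunk format; the build must also be an ES-module build whose environment supports dynamic import. A simplified paraphrase of that condition for illustration (not webpack's actual source; tp, output and environment are the names used in the hunks above):

const universalFallback = (tp, output, environment) => {
	// A capability is "unknown" when the target leaves it as null.
	const capabilitiesUnknown =
		tp.require === null ||
		tp.nodeBuiltins === null ||
		tp.document === null ||
		tp.importScripts === null;
	// New in this commit: also require an ESM build with dynamic-import support.
	return capabilitiesUnknown && output.module && environment.dynamicImport
		? "universal"
		: undefined; // fall through to the remaining cases
};
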
@@ -101,14 +101,12 @@ class EnableChunkLoadingPlugin {
 				}).apply(compiler);
 				break;
 			}
-			case "import": {
+			case "import":
+			case "universal": {
 				const ModuleChunkLoadingPlugin = require("../esm/ModuleChunkLoadingPlugin");
 				new ModuleChunkLoadingPlugin().apply(compiler);
 				break;
 			}
-			case "universal":
-				// TODO implement universal chunk loading
-				throw new Error("Universal Chunk Loading is not implemented yet");
 			default:
 				throw new Error(`Unsupported chunk loading type ${type}.
 Plugins which provide custom chunk loading types must call EnableChunkLoadingPlugin.setEnabled(compiler, type) to disable this error.`);
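
The default branch keeps the escape hatch named in its error message: a plugin that brings its own chunk loading type must call EnableChunkLoadingPlugin.setEnabled(compiler, type) so this error is skipped. A hedged sketch of such a plugin (the plugin name, the custom type string and the require path are illustrative assumptions):

class MyChunkLoadingPlugin {
	apply(compiler) {
		// Require path assumed for illustration; adjust to how you reach webpack internals.
		const EnableChunkLoadingPlugin = require("webpack/lib/javascript/EnableChunkLoadingPlugin");
		// Register the custom type so EnableChunkLoadingPlugin does not throw
		// the "Unsupported chunk loading type" error above.
		EnableChunkLoadingPlugin.setEnabled(compiler, "my-loading");
		// ...then add hooks that actually emit the loading runtime for "my-loading"...
	}
}
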
Binary file not shown (new image asset, 15 KiB — the file.png referenced by the fixture below).
@@ -0,0 +1,22 @@
+import value from "./separate";
+import { test as t } from "external-self";
+
+it("should compile", () => {
+	expect(value).toBe(42);
+});
+
+it("should circular depend on itself external", () => {
+	expect(test()).toBe(42);
+	expect(t()).toBe(42);
+});
+
+it("work with URL", () => {
+	const url = new URL("./file.png", import.meta.url);
+	expect(/[a-f0-9]{20}\.png/.test(url)).toBe(true);
+});
+
+function test() {
+	return 42;
+}
+
+export { test };
@@ -0,0 +1 @@
+export default 42;
@@ -0,0 +1,5 @@
+module.exports = {
+	findBundle: function () {
+		return ["./runtime.mjs", "./separate.mjs", "./main.mjs"];
+	}
+};
@@ -0,0 +1,30 @@
+/** @type {import("../../../../").Configuration} */
+module.exports = {
+	output: {
+		filename: "[name].mjs",
+		library: {
+			type: "module"
+		}
+	},
+	target: ["web", "node"],
+	experiments: {
+		outputModule: true
+	},
+	optimization: {
+		minimize: true,
+		runtimeChunk: "single",
+		splitChunks: {
+			cacheGroups: {
+				separate: {
+					test: /separate/,
+					chunks: "all",
+					filename: "separate.mjs",
+					enforce: true
+				}
+			}
+		}
+	},
+	externals: {
+		"external-self": "./main.mjs"
+	}
+};
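
Because the fixture builds with library.type "module", experiments.outputModule and the universal target ["web", "node"], the emitted .mjs files are plain ES modules that either environment can load. A hedged consumer sketch (the ./dist/ path is assumed; the test export comes from the fixture's export { test }):

// Node.js, in an ESM context (top-level await):
const { test } = await import("./dist/main.mjs"); // path assumed
console.log(test()); // 42 in the fixture

// In a browser the same file can be loaded with
//   <script type="module" src="./main.mjs"></script>
// or imported from another module.
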
@@ -11,3 +11,17 @@ it("should allow to run a WebAssembly module (direct)", function() {
 		expect(result).toEqual(42);
 	});
 });
+
+it("should allow to run a WebAssembly module (in Worker)", async function() {
+	const worker = new Worker(new URL("./worker.js", import.meta.url), {
+		type: "module"
+	});
+	worker.postMessage("ok");
+	const result = await new Promise(resolve => {
+		worker.onmessage = event => {
+			resolve(event.data);
+		};
+	});
+	expect(result).toBe("data: 42, thanks");
+	await worker.terminate();
+});
@@ -0,0 +1,4 @@
+self.onmessage = async event => {
+	const { run } = await import("./module");
+	postMessage(`data: ${run()}, thanks`);
+};
@@ -0,0 +1,18 @@
+it("should allow to create a WebWorker", async () => {
+	const worker = new Worker(new URL("./worker.js", import.meta.url), {
+		type: "module"
+	});
+	worker.postMessage("ok");
+	const result = await new Promise(resolve => {
+		worker.onmessage = event => {
+			resolve(event.data);
+		};
+	});
+	expect(result).toBe("data: OK, thanks");
+	await worker.terminate();
+});
+
+it("should allow to share chunks", async () => {
+	const { upper } = await import("./module");
+	expect(upper("ok")).toBe("OK");
+});
@@ -0,0 +1,3 @@
+export function upper(str) {
+	return str.toUpperCase();
+}
@@ -0,0 +1,10 @@
+module.exports = {
+	moduleScope(scope, options) {
+		if (options.name.includes("node")) {
+			delete scope.Worker;
+		}
+	},
+	findBundle: function (i, options) {
+		return ["web-main.mjs"];
+	}
+};
@@ -0,0 +1,5 @@
+const supportsWorker = require("../../../helpers/supportsWorker");
+
+module.exports = function (config) {
+	return supportsWorker();
+};
@@ -0,0 +1,13 @@
+/** @type {import("../../../../").Configuration} */
+module.exports = [
+	{
+		name: "web",
+		target: ["web", "node"],
+		output: {
+			filename: "web-[name].mjs"
+		},
+		experiments: {
+			outputModule: true
+		}
+	}
+];
@@ -0,0 +1,4 @@
+self.onmessage = async event => {
+	const { upper } = await import("./module");
+	postMessage(`data: ${upper(event.data)}, thanks`);
+};
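
The worker fixture relies on webpack's static detection of the new Worker(new URL(...), { type: "module" }) pattern: ./worker.js becomes its own chunk, and with experiments.outputModule plus the universal target this commit emits that worker chunk as an ES module. A minimal application-side sketch of the same pattern (file names assumed):

// main.js — webpack treats the URL argument as a worker entry and bundles
// ./worker.js (and whatever it imports) into a separate chunk.
const worker = new Worker(new URL("./worker.js", import.meta.url), {
	type: "module" // matches the ESM worker chunk produced by outputModule builds
});
worker.postMessage("ok");
worker.onmessage = event => console.log(event.data); // "data: OK, thanks" in the fixture
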
@@ -2,22 +2,32 @@ const path = require("path");
 
 module.exports = ({ outputDirectory }) =>
 	class Worker {
-		constructor(url, options = {}) {
-			expect(url).toBeInstanceOf(URL);
-			expect(url.origin).toBe("https://test.cases");
-			expect(url.pathname.startsWith("/path/")).toBe(true);
-			this.url = url;
-			const file = url.pathname.slice(6);
+		constructor(resource, options = {}) {
+			expect(resource).toBeInstanceOf(URL);
+
+			const isFileURL = /^file:/i.test(resource);
+
+			if (!isFileURL) {
+				expect(resource.origin).toBe("https://test.cases");
+				expect(resource.pathname.startsWith("/path/")).toBe(true);
+			}
+
+			this.url = resource;
+			const file = isFileURL
+				? resource
+				: path.resolve(outputDirectory, resource.pathname.slice(6));
+
 			const workerBootstrap = `
 const { parentPort } = require("worker_threads");
-const { URL } = require("url");
+const { URL, fileURLToPath } = require("url");
 const path = require("path");
 const fs = require("fs");
 global.self = global;
 self.URL = URL;
-self.location = new URL(${JSON.stringify(url.toString())});
+self.location = new URL(${JSON.stringify(resource.toString())});
 const urlToPath = url => {
-	if(url.startsWith("https://test.cases/path/")) url = url.slice(24);
+	if (/^file:/i.test(url)) return fileURLToPath(url);
+	if (url.startsWith("https://test.cases/path/")) url = url.slice(24);
 	return path.resolve(${JSON.stringify(outputDirectory)}, \`./\${url}\`);
 };
 self.importScripts = url => {
@@ -35,8 +45,10 @@ self.fetch = async url => {
 			)
 		);
 		return {
+			headers: { get(name) { } },
 			status: 200,
 			ok: true,
+			arrayBuffer() { return buffer; },
 			json: async () => JSON.parse(buffer.toString("utf-8"))
 		};
 	} catch(err) {
@@ -49,15 +61,26 @@ self.fetch = async url => {
 		throw err;
 	}
 };
-parentPort.on("message", data => {
-	if(self.onmessage) self.onmessage({
-		data
-	});
-});
 self.postMessage = data => {
 	parentPort.postMessage(data);
 };
-require(${JSON.stringify(path.resolve(outputDirectory, file))});
+if (${options.type === "module"}) {
+	import(${JSON.stringify(file)}).then(() => {
+		parentPort.on("message", data => {
+			if(self.onmessage) self.onmessage({
+				data
+			});
+		});
+	});
+} else {
+	parentPort.on("message", data => {
+		if(self.onmessage) self.onmessage({
+			data
+		});
+	});
+	require(${JSON.stringify(file)});
+}
 `;
 			this.worker = new (require("worker_threads").Worker)(workerBootstrap, {
 				eval: true
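
In the updated bootstrap the message wiring depends on the worker type: for type "module" the worker file is loaded with import() and parentPort only starts forwarding messages once that import resolves, so the worker module's top-level self.onmessage assignment is in place before the first message arrives; classic workers keep the synchronous require path. Together with the file: URL handling added above, this lets the fake Worker helper run both classic and the new ES-module worker chunks.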