build: use rolldown

This commit is contained in:
Evan You 2024-11-14 20:57:59 +08:00
parent 8bff142f99
commit 79eddcc7f6
No known key found for this signature in database
GPG Key ID: 00E9AB7A6704CE0A
14 changed files with 1299 additions and 457 deletions

View File

@ -148,6 +148,7 @@ export default tseslint.config(
files: [ files: [
'eslint.config.js', 'eslint.config.js',
'rollup*.config.js', 'rollup*.config.js',
'rolldown*.config.js',
'scripts/**', 'scripts/**',
'./*.{js,ts}', './*.{js,ts}',
'packages/*/*.js', 'packages/*/*.js',

View File

@ -6,7 +6,9 @@
"scripts": { "scripts": {
"dev": "node scripts/dev.js", "dev": "node scripts/dev.js",
"build": "node scripts/build.js", "build": "node scripts/build.js",
"build-dts": "tsc -p tsconfig.build.json --noCheck && rollup -c rollup.dts.config.js", "build-rollup": "node scripts/build-with-rollup.js",
"build-dts": "node scripts/build-types.js",
"build-dts-tsc": "tsc -p tsconfig.build.json --noCheck && rollup -c rollup.dts.config.js",
"clean": "rimraf --glob packages/*/dist temp .eslintcache", "clean": "rimraf --glob packages/*/dist temp .eslintcache",
"size": "run-s \"size-*\" && node scripts/usage-size.js", "size": "run-s \"size-*\" && node scripts/usage-size.js",
"size-global": "node scripts/build.js vue runtime-dom -f global -p --size", "size-global": "node scripts/build.js vue runtime-dom -f global -p --size",
@ -62,8 +64,8 @@
"node": ">=18.12.0" "node": ">=18.12.0"
}, },
"devDependencies": { "devDependencies": {
"@babel/parser": "catalog:",
"@babel/types": "catalog:", "@babel/types": "catalog:",
"@rolldown/plugin-node-polyfills": "^1.0.0",
"@rollup/plugin-alias": "^5.1.1", "@rollup/plugin-alias": "^5.1.1",
"@rollup/plugin-commonjs": "^28.0.1", "@rollup/plugin-commonjs": "^28.0.1",
"@rollup/plugin-json": "^6.1.0", "@rollup/plugin-json": "^6.1.0",
@ -75,6 +77,7 @@
"@types/semver": "^7.5.8", "@types/semver": "^7.5.8",
"@types/serve-handler": "^6.1.4", "@types/serve-handler": "^6.1.4",
"@vitest/coverage-v8": "^2.1.1", "@vitest/coverage-v8": "^2.1.1",
"@vitest/eslint-plugin": "^1.0.1",
"@vue/consolidate": "1.0.0", "@vue/consolidate": "1.0.0",
"conventional-changelog-cli": "^5.0.0", "conventional-changelog-cli": "^5.0.0",
"enquirer": "^2.4.1", "enquirer": "^2.4.1",
@ -82,8 +85,8 @@
"esbuild-plugin-polyfill-node": "^0.3.0", "esbuild-plugin-polyfill-node": "^0.3.0",
"eslint": "^9.14.0", "eslint": "^9.14.0",
"eslint-plugin-import-x": "^4.4.0", "eslint-plugin-import-x": "^4.4.0",
"@vitest/eslint-plugin": "^1.0.1",
"estree-walker": "catalog:", "estree-walker": "catalog:",
"fast-glob": "^3.3.2",
"jsdom": "^25.0.0", "jsdom": "^25.0.0",
"lint-staged": "^15.2.10", "lint-staged": "^15.2.10",
"lodash": "^4.17.21", "lodash": "^4.17.21",
@ -91,12 +94,15 @@
"markdown-table": "^3.0.4", "markdown-table": "^3.0.4",
"marked": "13.0.3", "marked": "13.0.3",
"npm-run-all2": "^7.0.1", "npm-run-all2": "^7.0.1",
"oxc-parser": "^0.35.0",
"oxc-transform": "^0.35.0",
"picocolors": "^1.1.1", "picocolors": "^1.1.1",
"prettier": "^3.3.3", "prettier": "^3.3.3",
"pretty-bytes": "^6.1.1", "pretty-bytes": "^6.1.1",
"pug": "^3.0.3", "pug": "^3.0.3",
"puppeteer": "~23.3.0", "puppeteer": "~23.3.0",
"rimraf": "^6.0.1", "rimraf": "^6.0.1",
"rolldown": "0.14.0-snapshot-d5e797b-20241114003621",
"rollup": "^4.25.0", "rollup": "^4.25.0",
"rollup-plugin-dts": "^6.1.1", "rollup-plugin-dts": "^6.1.1",
"rollup-plugin-esbuild": "^6.1.1", "rollup-plugin-esbuild": "^6.1.1",

View File

@ -81,7 +81,7 @@ font-weight: bold;
const consumer = new SourceMapConsumer(script!.map!) const consumer = new SourceMapConsumer(script!.map!)
consumer.eachMapping(mapping => { consumer.eachMapping(mapping => {
expect(mapping.originalLine - mapping.generatedLine).toBe(padding) expect(mapping.originalLine! - mapping.generatedLine).toBe(padding)
}) })
}) })
@ -100,8 +100,8 @@ font-weight: bold;
const consumer = new SourceMapConsumer(template.map!) const consumer = new SourceMapConsumer(template.map!)
consumer.eachMapping(mapping => { consumer.eachMapping(mapping => {
expect(mapping.originalLine - mapping.generatedLine).toBe(padding) expect(mapping.originalLine! - mapping.generatedLine).toBe(padding)
expect(mapping.originalColumn - mapping.generatedColumn).toBe(2) expect(mapping.originalColumn! - mapping.generatedColumn).toBe(2)
}) })
}) })
@ -115,7 +115,7 @@ font-weight: bold;
const consumer = new SourceMapConsumer(custom!.map!) const consumer = new SourceMapConsumer(custom!.map!)
consumer.eachMapping(mapping => { consumer.eachMapping(mapping => {
expect(mapping.originalLine - mapping.generatedLine).toBe(padding) expect(mapping.originalLine! - mapping.generatedLine).toBe(padding)
}) })
}) })
}) })

View File

@ -289,7 +289,7 @@ function mapLines(oldMap: RawSourceMap, newMap: RawSourceMap): RawSourceMap {
const origPosInOldMap = oldMapConsumer.originalPositionFor({ const origPosInOldMap = oldMapConsumer.originalPositionFor({
line: m.originalLine, line: m.originalLine,
column: m.originalColumn, column: m.originalColumn!,
}) })
if (origPosInOldMap.source == null) { if (origPosInOldMap.source == null) {
@ -305,7 +305,7 @@ function mapLines(oldMap: RawSourceMap, newMap: RawSourceMap): RawSourceMap {
line: origPosInOldMap.line, // map line line: origPosInOldMap.line, // map line
// use current column, since the oldMap produced by @vue/compiler-sfc // use current column, since the oldMap produced by @vue/compiler-sfc
// does not // does not
column: m.originalColumn, column: m.originalColumn!,
}, },
source: origPosInOldMap.source, source: origPosInOldMap.source,
name: origPosInOldMap.name, name: origPosInOldMap.name,

View File

@ -18,12 +18,11 @@ export class ScriptCompileContext {
scriptAst: Program | null scriptAst: Program | null
scriptSetupAst: Program | null scriptSetupAst: Program | null
source: string = this.descriptor.source source: string
filename: string = this.descriptor.filename filename: string
s: MagicString = new MagicString(this.source) s: MagicString
startOffset: number | undefined = startOffset: number | undefined
this.descriptor.scriptSetup?.loc.start.offset endOffset: number | undefined
endOffset: number | undefined = this.descriptor.scriptSetup?.loc.end.offset
// import / type analysis // import / type analysis
scope?: TypeScope scope?: TypeScope
@ -87,6 +86,12 @@ export class ScriptCompileContext {
const scriptLang = script && script.lang const scriptLang = script && script.lang
const scriptSetupLang = scriptSetup && scriptSetup.lang const scriptSetupLang = scriptSetup && scriptSetup.lang
this.source = descriptor.source
this.filename = descriptor.filename
this.s = new MagicString(descriptor.source)
this.startOffset = descriptor.scriptSetup?.loc.start.offset
this.endOffset = descriptor.scriptSetup?.loc.end.offset
this.isJS = this.isJS =
scriptLang === 'js' || scriptLang === 'js' ||
scriptLang === 'jsx' || scriptLang === 'jsx' ||
@ -99,7 +104,7 @@ export class ScriptCompileContext {
scriptSetupLang === 'tsx' scriptSetupLang === 'tsx'
const customElement = options.customElement const customElement = options.customElement
const filename = this.descriptor.filename const filename = descriptor.filename
if (customElement) { if (customElement) {
this.isCE = this.isCE =
typeof customElement === 'boolean' typeof customElement === 'boolean'

View File

@ -219,7 +219,7 @@ export class VueElement
/** /**
* @internal * @internal
*/ */
_nonce: string | undefined = this._def.nonce _nonce: string | undefined
/** /**
* @internal * @internal
@ -253,6 +253,7 @@ export class VueElement
private _createApp: CreateAppFunction<Element> = createApp, private _createApp: CreateAppFunction<Element> = createApp,
) { ) {
super() super()
this._nonce = _def.nonce
if (this.shadowRoot && _createApp !== createApp) { if (this.shadowRoot && _createApp !== createApp) {
this._root = this.shadowRoot this._root = this.shadowRoot
} else { } else {
@ -313,7 +314,7 @@ export class VueElement
} }
} }
private _setParent(parent = this._parent) { private _setParent(parent: VueElement | undefined = this._parent): void {
if (parent) { if (parent) {
this._instance!.parent = parent._instance this._instance!.parent = parent._instance
this._instance!.provides = parent._instance!.provides this._instance!.provides = parent._instance!.provides

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,6 @@
// @ts-check // @ts-check
import assert from 'node:assert/strict' import assert from 'node:assert/strict'
import { parse } from '@babel/parser' import { parseSync } from 'oxc-parser'
import { existsSync, readFileSync, readdirSync, writeFileSync } from 'node:fs' import { existsSync, readFileSync, readdirSync, writeFileSync } from 'node:fs'
import MagicString from 'magic-string' import MagicString from 'magic-string'
import dts from 'rollup-plugin-dts' import dts from 'rollup-plugin-dts'
@ -58,11 +58,15 @@ function patchTypes(pkg) {
name: 'patch-types', name: 'patch-types',
renderChunk(code, chunk) { renderChunk(code, chunk) {
const s = new MagicString(code) const s = new MagicString(code)
const ast = parse(code, { const { program: ast, errors } = parseSync(code, {
plugins: ['typescript'], sourceFilename: 'x.d.ts',
sourceType: 'module', sourceType: 'module',
}) })
if (errors.length) {
throw new Error(errors.join('\n'))
}
/** /**
* @param {import('@babel/types').VariableDeclarator | import('@babel/types').TSTypeAliasDeclaration | import('@babel/types').TSInterfaceDeclaration | import('@babel/types').TSDeclareFunction | import('@babel/types').TSInterfaceDeclaration | import('@babel/types').TSEnumDeclaration | import('@babel/types').ClassDeclaration} node * @param {import('@babel/types').VariableDeclarator | import('@babel/types').TSTypeAliasDeclaration | import('@babel/types').TSInterfaceDeclaration | import('@babel/types').TSDeclareFunction | import('@babel/types').TSInterfaceDeclaration | import('@babel/types').TSEnumDeclaration | import('@babel/types').ClassDeclaration} node
* @param {import('@babel/types').VariableDeclaration} [parentDecl] * @param {import('@babel/types').VariableDeclaration} [parentDecl]
@ -88,20 +92,23 @@ function patchTypes(pkg) {
const shouldRemoveExport = new Set() const shouldRemoveExport = new Set()
// pass 0: check all exported types // pass 0: check all exported types
for (const node of ast.program.body) { for (const node of ast.body) {
if (node.type === 'ExportNamedDeclaration' && !node.source) { if (node.type === 'ExportNamedDeclaration' && !node.source) {
for (let i = 0; i < node.specifiers.length; i++) { for (let i = 0; i < node.specifiers.length; i++) {
const spec = node.specifiers[i] const spec = node.specifiers[i]
if (spec.type === 'ExportSpecifier') { if (spec.type === 'ExportSpecifier') {
isExported.add(spec.local.name) isExported.add(
'name' in spec.local ? spec.local.name : spec.local.value,
)
} }
} }
} }
} }
// pass 1: add exports // pass 1: add exports
for (const node of ast.program.body) { for (const node of ast.body) {
if (node.type === 'VariableDeclaration') { if (node.type === 'VariableDeclaration') {
// @ts-expect-error waiting for oxc-parser to expose types
processDeclaration(node.declarations[0], node) processDeclaration(node.declarations[0], node)
if (node.declarations.length > 1) { if (node.declarations.length > 1) {
assert(typeof node.start === 'number') assert(typeof node.start === 'number')
@ -120,23 +127,26 @@ function patchTypes(pkg) {
node.type === 'TSEnumDeclaration' || node.type === 'TSEnumDeclaration' ||
node.type === 'ClassDeclaration' node.type === 'ClassDeclaration'
) { ) {
// @ts-expect-error waiting for oxc-parser to expose types
processDeclaration(node) processDeclaration(node)
} }
} }
// pass 2: remove exports // pass 2: remove exports
for (const node of ast.program.body) { for (const node of ast.body) {
if (node.type === 'ExportNamedDeclaration' && !node.source) { if (node.type === 'ExportNamedDeclaration' && !node.source) {
let removed = 0 let removed = 0
for (let i = 0; i < node.specifiers.length; i++) { for (let i = 0; i < node.specifiers.length; i++) {
const spec = node.specifiers[i] const spec = node.specifiers[i]
const localName =
'name' in spec.local ? spec.local.name : spec.local.value
if ( if (
spec.type === 'ExportSpecifier' && spec.type === 'ExportSpecifier' &&
shouldRemoveExport.has(spec.local.name) shouldRemoveExport.has(localName)
) { ) {
assert(spec.exported.type === 'Identifier') assert(spec.exported.type === 'Identifier')
const exported = spec.exported.name const exported = spec.exported.name
if (exported !== spec.local.name) { if (exported !== localName) {
// this only happens if we have something like // this only happens if we have something like
// type Foo // type Foo
// export { Foo as Bar } // export { Foo as Bar }

71
scripts/build-types.js Normal file
View File

@ -0,0 +1,71 @@
import fs from 'node:fs'
import path from 'node:path'
import glob from 'fast-glob'
import { isolatedDeclaration } from 'oxc-transform'
import { rollup } from 'rollup'
import picocolors from 'picocolors'
// Step 1: wipe stale output, then emit one .d.ts per source file using
// oxc-transform's isolatedDeclaration (fast, no full type checking).
if (fs.existsSync('temp/packages')) {
  fs.rmSync('temp/packages', { recursive: true })
}
let errs = ''
let start = performance.now()
let count = 0
for (const file of await glob('packages/*/src/**/*.ts')) {
  // runtime-test is test-only and ships no public types
  if (file.includes('runtime-test')) continue
  const ts = fs.readFileSync(file, 'utf-8')
  const dts = isolatedDeclaration(file, ts, {
    sourcemap: false,
    stripInternal: true,
  })
  if (dts.errors.length) {
    dts.errors.forEach(err => {
      // temporary workaround for https://github.com/oxc-project/oxc/issues/5668
      if (!err.includes('set value(_: S)')) {
        console.error(err)
      }
      // every error (including suppressed ones) is still collected and
      // written to temp/oxc-iso-decl-errors.txt below
      errs += err + '\n'
    })
  }
  // mirror the source tree under temp/, swapping .ts -> .d.ts
  write(path.join('temp', file.replace(/\.ts$/, '.d.ts')), dts.code)
  count++
}
console.log(
  `\n${count} isolated dts files generated in ${(performance.now() - start).toFixed(2)}ms.`,
)
if (errs) {
  write(path.join('temp', 'oxc-iso-decl-errors.txt'), errs)
}
console.log('bundling dts with rollup-plugin-dts...')
// Step 2: bundle the per-file declarations into one .d.ts per package,
// reusing the existing rollup dts config.
const rollupConfigs = (await import('../rollup.dts.config.js')).default
start = performance.now()
await Promise.all(
  rollupConfigs.map(c =>
    rollup(c).then(bundle => {
      return bundle.write(c.output).then(() => {
        console.log(picocolors.gray('built: ') + picocolors.blue(c.output.file))
      })
    }),
  ),
)
console.log(
  `bundled dts generated in ${(performance.now() - start).toFixed(2)}ms.`,
)
/**
 * Writes `content` to `file`, creating any missing parent directories first.
 * @param {string} file - destination path
 * @param {string} content - text to write
 */
function write(file, content) {
  const parent = path.dirname(file)
  if (!fs.existsSync(parent)) {
    fs.mkdirSync(parent, { recursive: true })
  }
  fs.writeFileSync(file, content)
}

View File

@ -0,0 +1,259 @@
// @ts-check
/*
Produces production builds and stitches together d.ts files.
To specify the package to build, simply pass its name and the desired build
formats to output (defaults to `buildOptions.formats` specified in that package,
or "esm,cjs"):
```
# name supports fuzzy match. will build all packages with name containing "dom":
nr build dom
# specify the format to output
nr build core --formats cjs
```
*/
import fs from 'node:fs'
import { parseArgs } from 'node:util'
import { existsSync, readFileSync } from 'node:fs'
import path from 'node:path'
import { brotliCompressSync, gzipSync } from 'node:zlib'
import pico from 'picocolors'
import { cpus } from 'node:os'
import { targets as allTargets, exec, fuzzyMatchTarget } from './utils.js'
import { scanEnums } from './inline-enums.js'
import prettyBytes from 'pretty-bytes'
import { spawnSync } from 'node:child_process'
// Short (7-char) hash of the current git HEAD; passed to the rollup build
// as the COMMIT env var so bundles can embed it.
const commit = spawnSync('git', ['rev-parse', '--short=7', 'HEAD'])
  .stdout.toString()
  .trim()
// CLI flags; positional arguments are (fuzzy-matched) package names to build.
const { values, positionals: targets } = parseArgs({
  allowPositionals: true,
  options: {
    // comma-separated output formats, e.g. "-f global,cjs"
    formats: {
      type: 'string',
      short: 'f',
    },
    // build development bundles only
    devOnly: {
      type: 'boolean',
      short: 'd',
    },
    // build production bundles only
    prodOnly: {
      type: 'boolean',
      short: 'p',
    },
    // also build type declarations after the JS builds
    withTypes: {
      type: 'boolean',
      short: 't',
    },
    // emit source maps
    sourceMap: {
      type: 'boolean',
      short: 's',
    },
    // release build: private packages are skipped
    release: {
      type: 'boolean',
    },
    // build all packages matching a fuzzy target, not just exact matches
    all: {
      type: 'boolean',
      short: 'a',
    },
    // record min/gzip/brotli sizes as JSON under temp/size
    size: {
      type: 'boolean',
    },
  },
})
const {
  formats,
  all: buildAllMatching,
  devOnly,
  prodOnly,
  withTypes: buildTypes,
  sourceMap,
  release: isRelease,
  size: writeSize,
} = values
// directory where per-bundle size reports are written when --size is passed
const sizeDir = path.resolve('temp/size')
// top-level entry point (defined below)
run()
/**
 * Entry point: resolves the requested targets, builds them, reports bundle
 * sizes, and optionally generates type declarations via `pnpm run build-dts`.
 * The enum-scan cache is always removed, even when a build step throws.
 */
async function run() {
  if (writeSize) {
    fs.mkdirSync(sizeDir, { recursive: true })
  }
  const removeCache = scanEnums()
  try {
    let resolvedTargets
    if (targets.length) {
      resolvedTargets = fuzzyMatchTarget(targets, buildAllMatching)
    } else {
      resolvedTargets = allTargets
    }
    await buildAll(resolvedTargets)
    await checkAllSizes(resolvedTargets)
    if (buildTypes) {
      const dtsArgs = ['run', 'build-dts']
      // scope the dts build to the requested targets only
      if (targets.length) {
        dtsArgs.push('--environment', `TARGETS:${resolvedTargets.join(',')}`)
      }
      await exec('pnpm', dtsArgs, {
        stdio: 'inherit',
      })
    }
  } finally {
    removeCache()
  }
}
/**
 * Builds all the targets in parallel, one worker per CPU core.
 * @param {Array<string>} targets - An array of targets to build.
 * @returns {Promise<void>} - A promise representing the build process.
 */
async function buildAll(targets) {
  const concurrency = cpus().length
  await runParallel(concurrency, targets, build)
}
/**
* Runs iterator function in parallel.
* @template T - The type of items in the data source
* @param {number} maxConcurrency - The maximum concurrency.
* @param {Array<T>} source - The data source
* @param {(item: T) => Promise<void>} iteratorFn - The iteratorFn
* @returns {Promise<void[]>} - A Promise array containing all iteration results.
*/
async function runParallel(maxConcurrency, source, iteratorFn) {
/**@type {Promise<void>[]} */
const ret = []
/**@type {Promise<void>[]} */
const executing = []
for (const item of source) {
const p = Promise.resolve().then(() => iteratorFn(item))
ret.push(p)
if (maxConcurrency <= source.length) {
const e = p.then(() => {
executing.splice(executing.indexOf(e), 1)
})
executing.push(e)
if (executing.length >= maxConcurrency) {
await Promise.race(executing)
}
}
}
return Promise.all(ret)
}
// directory names under packages-private/ (excluded from full/release builds)
const privatePackages = fs.readdirSync('packages-private')
/**
 * Builds the target.
 * @param {string} target - The target to build.
 * @returns {Promise<void>} - A promise representing the build process.
 */
async function build(target) {
  const isPrivate = privatePackages.includes(target)
  const pkgDir = path.resolve(
    `${isPrivate ? 'packages-private' : 'packages'}/${target}`,
  )
  const pkg = JSON.parse(readFileSync(`${pkgDir}/package.json`, 'utf-8'))
  // if this is a full build (no specific targets), ignore private packages
  if (pkg.private && (isRelease || !targets.length)) {
    return
  }
  // if building a specific format, do not remove dist.
  if (!formats && existsSync(`${pkgDir}/dist`)) {
    fs.rmSync(`${pkgDir}/dist`, { recursive: true })
  }
  // package-level buildOptions.env wins over the dev/prod CLI flag
  const nodeEnv =
    (pkg.buildOptions && pkg.buildOptions.env) ||
    (devOnly ? 'development' : 'production')
  const env = {
    ...process.env,
    TARGET: target,
    COMMIT: commit,
    NODE_ENV: nodeEnv,
    ...(formats ? { FORMATS: formats } : null),
    ...(prodOnly ? { PROD_ONLY: true } : null),
    ...(sourceMap ? { SOURCE_MAP: true } : null),
  }
  await exec('rollup', ['-c'], {
    stdio: 'inherit',
    env,
  })
}
/**
 * Checks the sizes of all targets.
 * @param {string[]} targets - The targets to check sizes for.
 * @returns {Promise<void>}
 */
async function checkAllSizes(targets) {
  // size reporting only applies to minified global builds
  const skip = devOnly || (formats && !formats.includes('global'))
  if (skip) return
  console.log()
  for (const t of targets) {
    await checkSize(t)
  }
  console.log()
}
/**
 * Checks the size of a target.
 * @param {string} target - The target to check the size for.
 * @returns {Promise<void>}
 */
async function checkSize(target) {
  const dist = `${path.resolve(`packages/${target}`)}/dist`
  await checkFileSize(`${dist}/${target}.global.prod.js`)
  const includeRuntime = !formats || formats.includes('global-runtime')
  if (includeRuntime) {
    await checkFileSize(`${dist}/${target}.runtime.global.prod.js`)
  }
}
/**
 * Checks the file size.
 * Logs min/gzip/brotli sizes; when --size was passed, also records them
 * as JSON under the size report directory. Missing files are skipped.
 * @param {string} filePath - The path of the file to check the size for.
 * @returns {Promise<void>}
 */
async function checkFileSize(filePath) {
  if (!existsSync(filePath)) return
  const contents = fs.readFileSync(filePath)
  const fileName = path.basename(filePath)
  const sizes = {
    min: contents.length,
    gzip: gzipSync(contents).length,
    brotli: brotliCompressSync(contents).length,
  }
  const label = pico.gray(pico.bold(fileName))
  console.log(
    `${label} min:${prettyBytes(sizes.min)} / gzip:${prettyBytes(
      sizes.gzip,
    )} / brotli:${prettyBytes(sizes.brotli)}`,
  )
  if (writeSize) {
    fs.writeFileSync(
      path.resolve(sizeDir, `${fileName}.json`),
      JSON.stringify({
        file: fileName,
        size: sizes.min,
        gzip: sizes.gzip,
        brotli: sizes.brotli,
      }),
      'utf-8',
    )
  }
}

View File

@ -22,12 +22,16 @@ import { existsSync, readFileSync } from 'node:fs'
import path from 'node:path' import path from 'node:path'
import { brotliCompressSync, gzipSync } from 'node:zlib' import { brotliCompressSync, gzipSync } from 'node:zlib'
import pico from 'picocolors' import pico from 'picocolors'
import { cpus } from 'node:os' import { targets as allTargets, fuzzyMatchTarget } from './utils.js'
import { targets as allTargets, exec, fuzzyMatchTarget } from './utils.js'
import { scanEnums } from './inline-enums.js'
import prettyBytes from 'pretty-bytes' import prettyBytes from 'pretty-bytes'
import { spawnSync } from 'node:child_process' import { spawnSync } from 'node:child_process'
import { createConfigsForPackage } from './create-rolldown-config.js'
import { rolldown } from 'rolldown'
import { scanEnums } from './inline-enums.js'
import { fileURLToPath } from 'node:url'
const __dirname = fileURLToPath(new URL('.', import.meta.url))
const privatePackages = fs.readdirSync('packages-private')
const commit = spawnSync('git', ['rev-parse', '--short=7', 'HEAD']) const commit = spawnSync('git', ['rev-parse', '--short=7', 'HEAD'])
.stdout.toString() .stdout.toString()
.trim() .trim()
@ -69,43 +73,32 @@ const { values, positionals: targets } = parseArgs({
}) })
const { const {
formats, formats: rawFormats,
all: buildAllMatching, all: buildAllMatching,
devOnly, devOnly,
prodOnly, prodOnly,
withTypes: buildTypes, withTypes: buildTypes,
sourceMap, sourceMap,
release: isRelease, release: isRelease,
size: writeSize, size,
} = values } = values
const formats = rawFormats?.split(',')
const sizeDir = path.resolve('temp/size') const sizeDir = path.resolve('temp/size')
run() run()
async function run() { async function run() {
if (writeSize) fs.mkdirSync(sizeDir, { recursive: true }) if (size) fs.mkdirSync(sizeDir, { recursive: true })
const removeCache = scanEnums() const removeCache = scanEnums()
try { try {
const resolvedTargets = targets.length const resolvedTargets = targets.length
? fuzzyMatchTarget(targets, buildAllMatching) ? fuzzyMatchTarget(targets, buildAllMatching)
: allTargets : allTargets
await buildAll(resolvedTargets) await buildAll(resolvedTargets)
await checkAllSizes(resolvedTargets) if (size) await checkAllSizes(resolvedTargets)
if (buildTypes) { if (buildTypes) {
await exec( await import('./build-types.js')
'pnpm',
[
'run',
'build-dts',
...(targets.length
? ['--environment', `TARGETS:${resolvedTargets.join(',')}`]
: []),
],
{
stdio: 'inherit',
},
)
} }
} finally { } finally {
removeCache() removeCache()
@ -118,51 +111,47 @@ async function run() {
* @returns {Promise<void>} - A promise representing the build process. * @returns {Promise<void>} - A promise representing the build process.
*/ */
async function buildAll(targets) { async function buildAll(targets) {
await runParallel(cpus().length, targets, build) const start = performance.now()
} const all = []
let count = 0
/** for (const t of targets) {
* Runs iterator function in parallel. const configs = createConfigsForTarget(t)
* @template T - The type of items in the data source if (configs) {
* @param {number} maxConcurrency - The maximum concurrency. all.push(
* @param {Array<T>} source - The data source Promise.all(
* @param {(item: T) => Promise<void>} iteratorFn - The iteratorFn configs.map(c =>
* @returns {Promise<void[]>} - A Promise array containing all iteration results. rolldown(c).then(bundle => {
*/ return bundle.write(c.output).then(() => {
async function runParallel(maxConcurrency, source, iteratorFn) { // @ts-expect-error
/**@type {Promise<void>[]} */ return path.join('packages', t, 'dist', c.output.entryFileNames)
const ret = [] })
/**@type {Promise<void>[]} */ }),
const executing = [] ),
for (const item of source) { ).then(files => {
const p = Promise.resolve().then(() => iteratorFn(item)) files.forEach(f => {
ret.push(p) count++
console.log(pico.gray('built: ') + pico.green(f))
if (maxConcurrency <= source.length) { })
const e = p.then(() => { }),
executing.splice(executing.indexOf(e), 1) )
})
executing.push(e)
if (executing.length >= maxConcurrency) {
await Promise.race(executing)
}
} }
} }
return Promise.all(ret) await Promise.all(all)
console.log(
`\n${count} files built in ${(performance.now() - start).toFixed(2)}ms.`,
)
} }
const privatePackages = fs.readdirSync('packages-private')
/** /**
* Builds the target. * Builds the target.
* @param {string} target - The target to build. * @param {string} target - The target to build.
* @returns {Promise<void>} - A promise representing the build process. * @returns {import('rolldown').RolldownOptions[] | void} - A promise representing the build process.
*/ */
async function build(target) { function createConfigsForTarget(target) {
const pkgBase = privatePackages.includes(target) const pkgBase = privatePackages.includes(target)
? `packages-private` ? `packages-private`
: `packages` : `packages`
const pkgDir = path.resolve(`${pkgBase}/${target}`) const pkgDir = path.resolve(__dirname, `../${pkgBase}/${target}`)
const pkg = JSON.parse(readFileSync(`${pkgDir}/package.json`, 'utf-8')) const pkg = JSON.parse(readFileSync(`${pkgDir}/package.json`, 'utf-8'))
// if this is a full build (no specific targets), ignore private packages // if this is a full build (no specific targets), ignore private packages
@ -175,28 +164,16 @@ async function build(target) {
fs.rmSync(`${pkgDir}/dist`, { recursive: true }) fs.rmSync(`${pkgDir}/dist`, { recursive: true })
} }
const env = return createConfigsForPackage({
(pkg.buildOptions && pkg.buildOptions.env) || target,
(devOnly ? 'development' : 'production') commit,
// @ts-expect-error
await exec( formats,
'rollup', prodOnly,
[ devOnly:
'-c', (pkg.buildOptions && pkg.buildOptions.env === 'development') || devOnly,
'--environment', sourceMap,
[ })
`COMMIT:${commit}`,
`NODE_ENV:${env}`,
`TARGET:${target}`,
formats ? `FORMATS:${formats}` : ``,
prodOnly ? `PROD_ONLY:true` : ``,
sourceMap ? `SOURCE_MAP:true` : ``,
]
.filter(Boolean)
.join(','),
],
{ stdio: 'inherit' },
)
} }
/** /**
@ -221,7 +198,7 @@ async function checkAllSizes(targets) {
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
async function checkSize(target) { async function checkSize(target) {
const pkgDir = path.resolve(`packages/${target}`) const pkgDir = path.resolve(__dirname, `../packages/${target}`)
await checkFileSize(`${pkgDir}/dist/${target}.global.prod.js`) await checkFileSize(`${pkgDir}/dist/${target}.global.prod.js`)
if (!formats || formats.includes('global-runtime')) { if (!formats || formats.includes('global-runtime')) {
await checkFileSize(`${pkgDir}/dist/${target}.runtime.global.prod.js`) await checkFileSize(`${pkgDir}/dist/${target}.runtime.global.prod.js`)
@ -251,7 +228,7 @@ async function checkFileSize(filePath) {
)}`, )}`,
) )
if (writeSize) if (size)
fs.writeFileSync( fs.writeFileSync(
path.resolve(sizeDir, `${fileName}.json`), path.resolve(sizeDir, `${fileName}.json`),
JSON.stringify({ JSON.stringify({

View File

@ -0,0 +1,387 @@
// @ts-check
import assert from 'node:assert/strict'
import { createRequire } from 'node:module'
import { fileURLToPath } from 'node:url'
import path from 'node:path'
import { replacePlugin } from 'rolldown/experimental'
import pico from 'picocolors'
import polyfillNode from '@rolldown/plugin-node-polyfills'
import { entries } from './aliases.js'
import { inlineEnums } from './inline-enums.js'
import { minify as minifySwc } from '@swc/core'
// CJS require shim for loading JSON (package.json) from an ESM module
const require = createRequire(import.meta.url)
const __dirname = fileURLToPath(new URL('.', import.meta.url))
// version embedded into bundle banners and __VERSION__ defines
const masterVersion = require('../package.json').version
const consolidatePkg = require('@vue/consolidate/package.json')
// absolute path to the monorepo's packages/ directory
const packagesDir = path.resolve(__dirname, '../packages')
/** @typedef {'cjs' | 'esm-bundler' | 'global' | 'global-runtime' | 'esm-browser' | 'esm-bundler-runtime' | 'esm-browser-runtime'} PackageFormat */
/**
* @param {{
* target: string
* commit: string
* formats?: PackageFormat[]
* devOnly?: boolean
* prodOnly?: boolean
* sourceMap?: boolean
* }} options
*/
export function createConfigsForPackage({
target,
commit,
formats,
devOnly = false,
prodOnly = false,
sourceMap = false,
}) {
const [enumPlugin, enumDefines] = inlineEnums()
const packageDir = path.resolve(packagesDir, target)
const resolve = (/** @type {string} */ p) => path.resolve(packageDir, p)
const pkg = require(resolve(`package.json`))
const packageOptions = pkg.buildOptions || {}
const name = packageOptions.filename || path.basename(packageDir)
/** @type {Record<PackageFormat, import('rolldown').OutputOptions>} */
const outputConfigs = {
'esm-bundler': {
entryFileNames: `${name}.esm-bundler.js`,
format: 'es',
},
'esm-browser': {
entryFileNames: `${name}.esm-browser.js`,
format: 'es',
},
cjs: {
entryFileNames: `${name}.cjs.js`,
format: 'cjs',
},
global: {
entryFileNames: `${name}.global.js`,
format: 'iife',
},
// runtime-only builds, for main "vue" package only
'esm-bundler-runtime': {
entryFileNames: `${name}.runtime.esm-bundler.js`,
format: 'es',
},
'esm-browser-runtime': {
entryFileNames: `${name}.runtime.esm-browser.js`,
format: 'es',
},
'global-runtime': {
entryFileNames: `${name}.runtime.global.js`,
format: 'iife',
},
}
const resolvedFormats = (
formats ||
packageOptions.formats || ['esm-bundler', 'cjs']
).filter(format => outputConfigs[format])
const packageConfigs = prodOnly
? []
: resolvedFormats.map(format => createConfig(format, outputConfigs[format]))
if (!devOnly) {
resolvedFormats.forEach(format => {
if (packageOptions.prod === false) {
return
}
if (format === 'cjs') {
packageConfigs.push(createProductionConfig(format))
}
if (/^(global|esm-browser)(-runtime)?/.test(format)) {
packageConfigs.push(createMinifiedConfig(format))
}
})
}
/**
*
* @param {PackageFormat} format
* @param {import('rolldown').OutputOptions} output
* @param {import('rolldown').Plugin[]} plugins
* @returns {import('rolldown').RolldownOptions}
*/
function createConfig(format, output, plugins = []) {
  // `output` is looked up from `outputConfigs[format]` by the caller, so a
  // falsy value means the requested format key does not exist.
  if (!output) {
    console.error(pico.yellow(`invalid format: "${format}"`))
    process.exit(1)
  }
  output.dir = resolve('dist')
  // A `.prod.js` entry file name marks a hard-coded production build
  // (set by createProductionConfig / createMinifiedConfig below).
  const isProductionBuild = /\.prod\.js$/.test(
    String(output.entryFileNames) || '',
  )
  // Build-flavor flags derived from the format key and the package metadata.
  const isBundlerESMBuild = /esm-bundler/.test(format)
  const isBrowserESMBuild = /esm-browser/.test(format)
  const isServerRenderer = name === 'server-renderer'
  const isCJSBuild = format === 'cjs'
  const isGlobalBuild = /global/.test(format)
  const isCompatPackage =
    pkg.name === '@vue/compat' || pkg.name === '@vue/compat-canary'
  const isCompatBuild = !!packageOptions.compat
  // "Browser build" = any build intended to run in a browser, unless the
  // package explicitly keeps its non-browser code branches alive.
  const isBrowserBuild =
    (isGlobalBuild || isBrowserESMBuild || isBundlerESMBuild) &&
    !packageOptions.enableNonBrowserBranches
  output.banner = `/**
* ${pkg.name} v${masterVersion}
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/`
  // the compat package is the only one exposing both a default and named
  // exports; everything else uses named exports only
  output.exports = isCompatPackage ? 'auto' : 'named'
  if (isCJSBuild) {
    output.esModule = true
  }
  output.sourcemap = sourceMap
  output.externalLiveBindings = false
  // https://github.com/rollup/rollup/pull/5380
  // @ts-expect-error Not supported yet
  output.reexportProtoFromExternal = false
  if (isGlobalBuild) {
    // global (IIFE) builds need a global variable name to attach to
    output.name = packageOptions.name
  }
  // runtime-only formats get a dedicated entry without the compiler
  let entryFile = /runtime$/.test(format) ? `src/runtime.ts` : `src/index.ts`
  // the compat build needs both default AND named exports. This will cause
  // Rollup to complain for non-ESM targets, so we use separate entries for
  // esm vs. non-esm builds.
  if (isCompatPackage && (isBrowserESMBuild || isBundlerESMBuild)) {
    entryFile = /runtime$/.test(format)
      ? `src/esm-runtime.ts`
      : `src/esm-index.ts`
  }

  // Compile-time constants injected through rolldown's `define` option.
  function resolveDefine() {
    /** @type {Record<string, string>} */
    const defines = {
      __COMMIT__: `"${commit}"`,
      __VERSION__: `"${masterVersion}"`,
      // this is only used during Vue's internal tests
      __TEST__: `false`,
      // If the build is expected to run directly in the browser (global / esm builds)
      __BROWSER__: String(isBrowserBuild),
      __GLOBAL__: String(isGlobalBuild),
      __ESM_BUNDLER__: String(isBundlerESMBuild),
      __ESM_BROWSER__: String(isBrowserESMBuild),
      // is targeting Node (SSR)?
      __CJS__: String(isCJSBuild),
      // need SSR-specific branches?
      __SSR__: String(isCJSBuild || isBundlerESMBuild || isServerRenderer),
      // 2.x compat build
      __COMPAT__: String(isCompatBuild),
      // feature flags: bundler ESM builds keep these as global identifiers so
      // end users' bundlers can statically replace them; other formats get
      // hard-coded values
      __FEATURE_SUSPENSE__: `true`,
      __FEATURE_OPTIONS_API__: isBundlerESMBuild
        ? `__VUE_OPTIONS_API__`
        : `true`,
      __FEATURE_PROD_DEVTOOLS__: isBundlerESMBuild
        ? `__VUE_PROD_DEVTOOLS__`
        : `false`,
      __FEATURE_PROD_HYDRATION_MISMATCH_DETAILS__: isBundlerESMBuild
        ? `__VUE_PROD_HYDRATION_MISMATCH_DETAILS__`
        : `false`,
    }
    if (!isBundlerESMBuild) {
      // hard coded dev/prod builds
      // (bundler ESM builds get __DEV__ as a runtime expression instead —
      // see resolveReplace below)
      defines.__DEV__ = String(!isProductionBuild)
    }
    // allow inline overrides like
    //__RUNTIME_COMPILE__=true pnpm build runtime-core
    Object.keys(defines).forEach(key => {
      if (key in process.env) {
        const value = process.env[key]
        assert(typeof value === 'string')
        defines[key] = value
      }
    })
    return defines
  }

  // esbuild define is a bit strict and only allows literal json or identifiers
  // so we still need replace plugin in some cases
  function resolveReplace() {
    /** @type {Record<string, string>} */
    // enumDefines: const-enum member replacements collected by the enum
    // pre-scan (defined elsewhere in this file; presumably scripts/inline-enums)
    const replacements = { ...enumDefines }
    if (isBundlerESMBuild) {
      Object.assign(replacements, {
        // preserve to be handled by bundlers
        __DEV__: `!!(process.env.NODE_ENV !== 'production')`,
      })
    }
    // for compiler-sfc browser build inlined deps
    if (isBrowserESMBuild && name === 'compiler-sfc') {
      Object.assign(replacements, {
        'process.env': '({})',
        'process.platform': '""',
        'process.stdout': 'null',
      })
    }
    if (Object.keys(replacements).length) {
      return [replacePlugin(replacements)]
    } else {
      return []
    }
  }

  // Decide which module ids are left external (not bundled) for this format.
  function resolveExternal() {
    // deps that are fully tree-shaken out of browser builds; they are listed
    // as external only to suppress bundler warnings
    const treeShakenDeps = [
      'source-map-js',
      '@babel/parser',
      'estree-walker',
      'entities/lib/decode.js',
    ]
    // we are bundling forked consolidate.js in compiler-sfc which dynamically
    // requires a ton of template engines which should be ignored.
    let cjsIgnores = []
    if (
      pkg.name === '@vue/compiler-sfc' ||
      pkg.name === '@vue/compiler-sfc-canary'
    ) {
      cjsIgnores = [
        ...Object.keys(consolidatePkg.devDependencies),
        'vm',
        'crypto',
        'react-dom/server',
        'teacup/lib/express',
        'arc-templates/dist/es5',
        'then-pug',
        'then-jade',
      ]
    }
    if (isGlobalBuild || isBrowserESMBuild || isCompatPackage) {
      if (!packageOptions.enableNonBrowserBranches) {
        // normal browser builds - non-browser only imports are tree-shaken,
        // they are only listed here to suppress warnings.
        return treeShakenDeps
      } else {
        return cjsIgnores
      }
    } else {
      // Node / esm-bundler builds.
      // externalize all direct deps unless it's the compat build.
      return [
        ...Object.keys(pkg.dependencies || {}),
        ...Object.keys(pkg.peerDependencies || {}),
        // for @vue/compiler-sfc / server-renderer
        ...['path', 'url', 'stream'],
        // somehow these throw warnings for runtime-* package builds
        ...treeShakenDeps,
        ...cjsIgnores,
      ]
    }
  }

  function resolveNodePlugins() {
    // Note: when the outer condition is satisfied via the cjs branch, the
    // inner ternary still yields [], so polyfillNode() is only ever added
    // for non-cjs builds of packages with enableNonBrowserBranches.
    const nodePlugins =
      (format === 'cjs' && Object.keys(pkg.devDependencies || {}).length) ||
      packageOptions.enableNonBrowserBranches
        ? [...(format === 'cjs' ? [] : [polyfillNode()])]
        : []
    return nodePlugins
  }

  return {
    input: resolve(entryFile),
    // Global and Browser ESM builds inlines everything so that they can be
    // used alone.
    external: resolveExternal(),
    define: resolveDefine(),
    platform: format === 'cjs' ? 'node' : 'browser',
    resolve: {
      // `entries` maps package names to source paths — defined at file scope
      alias: entries,
    },
    plugins: [
      // @ts-expect-error rollup's Plugin type incompatible w/ rolldown's vendored Plugin type
      enumPlugin,
      ...resolveReplace(),
      ...resolveNodePlugins(),
      ...plugins,
    ],
    output,
    // suppress circular-dependency warnings; forward everything else
    onwarn: (msg, warn) => {
      if (msg.code !== 'CIRCULAR_DEPENDENCY') {
        warn(msg)
      }
    },
    treeshake: {
      // https://github.com/rolldown/rolldown/issues/1917
      moduleSideEffects: false,
    },
  }
}
/**
 * Build a production (non-minified) config for the given format by
 * delegating to createConfig with a hard-coded `.prod.js` entry file name.
 * @param {PackageFormat} format
 */
function createProductionConfig(format) {
  /** @type {import('rolldown').OutputOptions} */
  const output = {
    entryFileNames: `${name}.${format}.prod.js`,
    format: outputConfigs[format].format,
  }
  return createConfig(format, output)
}
/**
 * Build a minified production config for the given format. Minification is
 * performed by swc in a custom renderChunk plugin rather than rolldown's
 * built-in `minify` option.
 * @param {PackageFormat} format
 */
function createMinifiedConfig(format) {
  /** @type {import('rolldown').Plugin} */
  const swcMinifyPlugin = {
    name: 'swc-minify',
    async renderChunk(contents, _, outputOptions) {
      const {
        format,
        sourcemap,
        // @ts-expect-error not supported yet
        sourcemapExcludeSources,
      } = outputOptions
      const { code, map } = await minifySwc(contents, {
        module: format === 'es',
        compress: {
          ecma: 2016,
          pure_getters: true,
        },
        safari10: true,
        mangle: true,
        sourceMap: !!sourcemap,
        inlineSourcesContent: !sourcemapExcludeSources,
      })
      return { code, map: map || null }
    },
  }

  // reuse the normal entry file name for this format, with `.prod.js` suffix
  const prodEntryFileNames = String(
    outputConfigs[format].entryFileNames,
  ).replace(/\.js$/, '.prod.js')

  return createConfig(
    format,
    {
      entryFileNames: prodEntryFileNames,
      format: outputConfigs[format].format,
      // minify: true,
    },
    [swcMinifyPlugin],
  )
}
return packageConfigs
}

View File

@ -20,7 +20,7 @@ import {
writeFileSync, writeFileSync,
} from 'node:fs' } from 'node:fs'
import * as path from 'node:path' import * as path from 'node:path'
import { parse } from '@babel/parser' import { parseSync } from 'oxc-parser'
import { spawnSync } from 'node:child_process' import { spawnSync } from 'node:child_process'
import MagicString from 'magic-string' import MagicString from 'magic-string'
@ -61,17 +61,19 @@ export function scanEnums() {
] ]
// 2. parse matched files to collect enum info // 2. parse matched files to collect enum info
let i = 0
for (const relativeFile of files) { for (const relativeFile of files) {
const file = path.resolve(process.cwd(), relativeFile) const file = path.resolve(process.cwd(), relativeFile)
const content = readFileSync(file, 'utf-8') const content = readFileSync(file, 'utf-8')
const ast = parse(content, { const res = parseSync(content, {
plugins: ['typescript'], // plugins: ['typescript'],
sourceFilename: file,
sourceType: 'module', sourceType: 'module',
}) })
/** @type {Set<string>} */ /** @type {Set<string>} */
const enumIds = new Set() const enumIds = new Set()
for (const node of ast.program.body) { for (const node of res.program.body) {
if ( if (
node.type === 'ExportNamedDeclaration' && node.type === 'ExportNamedDeclaration' &&
node.declaration && node.declaration &&
@ -129,7 +131,11 @@ export function scanEnums() {
node.type === 'StringLiteral' node.type === 'StringLiteral'
) { ) {
return node.value return node.value
} else if (node.type === 'MemberExpression') { } else if (
node.type === 'MemberExpression' ||
// @ts-expect-error oxc only type
node.type === 'StaticMemberExpression'
) {
const exp = /** @type {`${string}.${string}`} */ ( const exp = /** @type {`${string}.${string}`} */ (
content.slice(node.start, node.end) content.slice(node.start, node.end)
) )

0
scripts/test.js Normal file
View File