mirror of https://github.com/vuejs/core.git
build: use rolldown
parent 8bff142f99
commit 79eddcc7f6
@@ -148,6 +148,7 @@ export default tseslint.config(
    files: [
      'eslint.config.js',
      'rollup*.config.js',
      'rolldown*.config.js',
      'scripts/**',
      './*.{js,ts}',
      'packages/*/*.js',
package.json (12 lines changed)
@@ -6,7 +6,9 @@
  "scripts": {
    "dev": "node scripts/dev.js",
    "build": "node scripts/build.js",
    "build-dts": "tsc -p tsconfig.build.json --noCheck && rollup -c rollup.dts.config.js",
    "build-rollup": "node scripts/build-with-rollup.js",
    "build-dts": "node scripts/build-types.js",
    "build-dts-tsc": "tsc -p tsconfig.build.json --noCheck && rollup -c rollup.dts.config.js",
    "clean": "rimraf --glob packages/*/dist temp .eslintcache",
    "size": "run-s \"size-*\" && node scripts/usage-size.js",
    "size-global": "node scripts/build.js vue runtime-dom -f global -p --size",
@@ -62,8 +64,8 @@
    "node": ">=18.12.0"
  },
  "devDependencies": {
    "@babel/parser": "catalog:",
    "@babel/types": "catalog:",
    "@rolldown/plugin-node-polyfills": "^1.0.0",
    "@rollup/plugin-alias": "^5.1.1",
    "@rollup/plugin-commonjs": "^28.0.1",
    "@rollup/plugin-json": "^6.1.0",
@@ -75,6 +77,7 @@
    "@types/semver": "^7.5.8",
    "@types/serve-handler": "^6.1.4",
    "@vitest/coverage-v8": "^2.1.1",
    "@vitest/eslint-plugin": "^1.0.1",
    "@vue/consolidate": "1.0.0",
    "conventional-changelog-cli": "^5.0.0",
    "enquirer": "^2.4.1",
@@ -82,8 +85,8 @@
    "esbuild-plugin-polyfill-node": "^0.3.0",
    "eslint": "^9.14.0",
    "eslint-plugin-import-x": "^4.4.0",
    "@vitest/eslint-plugin": "^1.0.1",
    "estree-walker": "catalog:",
    "fast-glob": "^3.3.2",
    "jsdom": "^25.0.0",
    "lint-staged": "^15.2.10",
    "lodash": "^4.17.21",
@@ -91,12 +94,15 @@
    "markdown-table": "^3.0.4",
    "marked": "13.0.3",
    "npm-run-all2": "^7.0.1",
    "oxc-parser": "^0.35.0",
    "oxc-transform": "^0.35.0",
    "picocolors": "^1.1.1",
    "prettier": "^3.3.3",
    "pretty-bytes": "^6.1.1",
    "pug": "^3.0.3",
    "puppeteer": "~23.3.0",
    "rimraf": "^6.0.1",
    "rolldown": "0.14.0-snapshot-d5e797b-20241114003621",
    "rollup": "^4.25.0",
    "rollup-plugin-dts": "^6.1.1",
    "rollup-plugin-esbuild": "^6.1.1",
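The script changes above switch the default `build` and `build-dts` entries to the new rolldown / oxc based scripts while keeping the previous pipeline reachable. A sketch of how the new entries would be invoked (script names and paths come from the diff above; the target argument is illustrative):

  pnpm run build runtime-dom         # rolldown-based build via scripts/build.js
  pnpm run build-rollup runtime-dom  # rollup-based build kept at scripts/build-with-rollup.js
  pnpm run build-dts                 # isolated-declarations d.ts build via scripts/build-types.js
  pnpm run build-dts-tsc             # previous tsc --noCheck + rollup.dts.config.js path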
@@ -81,7 +81,7 @@ font-weight: bold;
    const consumer = new SourceMapConsumer(script!.map!)
    consumer.eachMapping(mapping => {
      expect(mapping.originalLine - mapping.generatedLine).toBe(padding)
      expect(mapping.originalLine! - mapping.generatedLine).toBe(padding)
    })
  })
@@ -100,8 +100,8 @@ font-weight: bold;
    const consumer = new SourceMapConsumer(template.map!)
    consumer.eachMapping(mapping => {
      expect(mapping.originalLine - mapping.generatedLine).toBe(padding)
      expect(mapping.originalColumn - mapping.generatedColumn).toBe(2)
      expect(mapping.originalLine! - mapping.generatedLine).toBe(padding)
      expect(mapping.originalColumn! - mapping.generatedColumn).toBe(2)
    })
  })
@@ -115,7 +115,7 @@ font-weight: bold;
    const consumer = new SourceMapConsumer(custom!.map!)
    consumer.eachMapping(mapping => {
      expect(mapping.originalLine - mapping.generatedLine).toBe(padding)
      expect(mapping.originalLine! - mapping.generatedLine).toBe(padding)
    })
  })
})
@@ -289,7 +289,7 @@ function mapLines(oldMap: RawSourceMap, newMap: RawSourceMap): RawSourceMap {
    const origPosInOldMap = oldMapConsumer.originalPositionFor({
      line: m.originalLine,
      column: m.originalColumn,
      column: m.originalColumn!,
    })

    if (origPosInOldMap.source == null) {
@@ -305,7 +305,7 @@ function mapLines(oldMap: RawSourceMap, newMap: RawSourceMap): RawSourceMap {
        line: origPosInOldMap.line, // map line
        // use current column, since the oldMap produced by @vue/compiler-sfc
        // does not
        column: m.originalColumn,
        column: m.originalColumn!,
      },
      source: origPosInOldMap.source,
      name: origPosInOldMap.name,
@@ -18,12 +18,11 @@ export class ScriptCompileContext {
  scriptAst: Program | null
  scriptSetupAst: Program | null

  source: string = this.descriptor.source
  filename: string = this.descriptor.filename
  s: MagicString = new MagicString(this.source)
  startOffset: number | undefined =
    this.descriptor.scriptSetup?.loc.start.offset
  endOffset: number | undefined = this.descriptor.scriptSetup?.loc.end.offset
  source: string
  filename: string
  s: MagicString
  startOffset: number | undefined
  endOffset: number | undefined

  // import / type analysis
  scope?: TypeScope
@@ -87,6 +86,12 @@ export class ScriptCompileContext {
    const scriptLang = script && script.lang
    const scriptSetupLang = scriptSetup && scriptSetup.lang

    this.source = descriptor.source
    this.filename = descriptor.filename
    this.s = new MagicString(descriptor.source)
    this.startOffset = descriptor.scriptSetup?.loc.start.offset
    this.endOffset = descriptor.scriptSetup?.loc.end.offset

    this.isJS =
      scriptLang === 'js' ||
      scriptLang === 'jsx' ||
@@ -99,7 +104,7 @@ export class ScriptCompileContext {
      scriptSetupLang === 'tsx'

    const customElement = options.customElement
    const filename = this.descriptor.filename
    const filename = descriptor.filename
    if (customElement) {
      this.isCE =
        typeof customElement === 'boolean'
@@ -219,7 +219,7 @@ export class VueElement
  /**
   * @internal
   */
  _nonce: string | undefined = this._def.nonce
  _nonce: string | undefined

  /**
   * @internal
@@ -253,6 +253,7 @@ export class VueElement
    private _createApp: CreateAppFunction<Element> = createApp,
  ) {
    super()
    this._nonce = _def.nonce
    if (this.shadowRoot && _createApp !== createApp) {
      this._root = this.shadowRoot
    } else {
@@ -313,7 +314,7 @@ export class VueElement
    }
  }

  private _setParent(parent = this._parent) {
  private _setParent(parent: VueElement | undefined = this._parent): void {
    if (parent) {
      this._instance!.parent = parent._instance
      this._instance!.provides = parent._instance!.provides
pnpm-lock.yaml (813 lines changed): file diff suppressed because it is too large
@@ -1,6 +1,6 @@
// @ts-check
import assert from 'node:assert/strict'
import { parse } from '@babel/parser'
import { parseSync } from 'oxc-parser'
import { existsSync, readFileSync, readdirSync, writeFileSync } from 'node:fs'
import MagicString from 'magic-string'
import dts from 'rollup-plugin-dts'
@@ -58,11 +58,15 @@ function patchTypes(pkg) {
    name: 'patch-types',
    renderChunk(code, chunk) {
      const s = new MagicString(code)
      const ast = parse(code, {
        plugins: ['typescript'],
      const { program: ast, errors } = parseSync(code, {
        sourceFilename: 'x.d.ts',
        sourceType: 'module',
      })

      if (errors.length) {
        throw new Error(errors.join('\n'))
      }

      /**
       * @param {import('@babel/types').VariableDeclarator | import('@babel/types').TSTypeAliasDeclaration | import('@babel/types').TSInterfaceDeclaration | import('@babel/types').TSDeclareFunction | import('@babel/types').TSInterfaceDeclaration | import('@babel/types').TSEnumDeclaration | import('@babel/types').ClassDeclaration} node
       * @param {import('@babel/types').VariableDeclaration} [parentDecl]
@@ -88,20 +92,23 @@ function patchTypes(pkg) {
      const shouldRemoveExport = new Set()

      // pass 0: check all exported types
      for (const node of ast.program.body) {
      for (const node of ast.body) {
        if (node.type === 'ExportNamedDeclaration' && !node.source) {
          for (let i = 0; i < node.specifiers.length; i++) {
            const spec = node.specifiers[i]
            if (spec.type === 'ExportSpecifier') {
              isExported.add(spec.local.name)
              isExported.add(
                'name' in spec.local ? spec.local.name : spec.local.value,
              )
            }
          }
        }
      }

      // pass 1: add exports
      for (const node of ast.program.body) {
      for (const node of ast.body) {
        if (node.type === 'VariableDeclaration') {
          // @ts-expect-error waiting for oxc-parser to expose types
          processDeclaration(node.declarations[0], node)
          if (node.declarations.length > 1) {
            assert(typeof node.start === 'number')
@@ -120,23 +127,26 @@ function patchTypes(pkg) {
          node.type === 'TSEnumDeclaration' ||
          node.type === 'ClassDeclaration'
        ) {
          // @ts-expect-error waiting for oxc-parser to expose types
          processDeclaration(node)
        }
      }

      // pass 2: remove exports
      for (const node of ast.program.body) {
      for (const node of ast.body) {
        if (node.type === 'ExportNamedDeclaration' && !node.source) {
          let removed = 0
          for (let i = 0; i < node.specifiers.length; i++) {
            const spec = node.specifiers[i]
            const localName =
              'name' in spec.local ? spec.local.name : spec.local.value
            if (
              spec.type === 'ExportSpecifier' &&
              shouldRemoveExport.has(spec.local.name)
              shouldRemoveExport.has(localName)
            ) {
              assert(spec.exported.type === 'Identifier')
              const exported = spec.exported.name
              if (exported !== spec.local.name) {
              if (exported !== localName) {
                // this only happens if we have something like
                // type Foo
                // export { Foo as Bar }
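The repeated `'name' in spec.local ? spec.local.name : spec.local.value` expression above accounts for export specifiers whose local binding is either an identifier (exposing `name`) or a string literal (exposing `value`). A minimal sketch of that pattern as a standalone helper (the helper name is hypothetical, not part of the diff):

  /** @param {{ local: { name?: string, value?: string } }} spec */
  function getLocalName(spec) {
    // identifiers expose `name`; string-literal export names expose `value`
    return 'name' in spec.local ? spec.local.name : spec.local.value
  }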
@@ -0,0 +1,71 @@
import fs from 'node:fs'
import path from 'node:path'
import glob from 'fast-glob'
import { isolatedDeclaration } from 'oxc-transform'
import { rollup } from 'rollup'
import picocolors from 'picocolors'

if (fs.existsSync('temp/packages')) {
  fs.rmSync('temp/packages', { recursive: true })
}

let errs = ''
let start = performance.now()
let count = 0

for (const file of await glob('packages/*/src/**/*.ts')) {
  if (file.includes('runtime-test')) continue

  const ts = fs.readFileSync(file, 'utf-8')
  const dts = isolatedDeclaration(file, ts, {
    sourcemap: false,
    stripInternal: true,
  })
  if (dts.errors.length) {
    dts.errors.forEach(err => {
      // temporary workaround for https://github.com/oxc-project/oxc/issues/5668
      if (!err.includes('set value(_: S)')) {
        console.error(err)
      }
      errs += err + '\n'
    })
  }

  write(path.join('temp', file.replace(/\.ts$/, '.d.ts')), dts.code)
  count++
}

console.log(
  `\n${count} isolated dts files generated in ${(performance.now() - start).toFixed(2)}ms.`,
)

if (errs) {
  write(path.join('temp', 'oxc-iso-decl-errors.txt'), errs)
}

console.log('bundling dts with rollup-plugin-dts...')

// bundle with rollup-plugin-dts
const rollupConfigs = (await import('../rollup.dts.config.js')).default

start = performance.now()

await Promise.all(
  rollupConfigs.map(c =>
    rollup(c).then(bundle => {
      return bundle.write(c.output).then(() => {
        console.log(picocolors.gray('built: ') + picocolors.blue(c.output.file))
      })
    }),
  ),
)

console.log(
  `bundled dts generated in ${(performance.now() - start).toFixed(2)}ms.`,
)

function write(file, content) {
  const dir = path.dirname(file)
  if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true })
  fs.writeFileSync(file, content)
}
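The new script above emits one .d.ts per source file with oxc-transform's isolated-declarations API and only then bundles the results with rollup-plugin-dts. A minimal sketch of the API as the script uses it (the input string and the commented output are illustrative assumptions):

  import { isolatedDeclaration } from 'oxc-transform'

  const result = isolatedDeclaration(
    'example.ts',
    'export const answer: number = 42',
    { stripInternal: true },
  )
  // result.errors lists constructs that cannot be emitted without type inference;
  // result.code holds the generated declaration text,
  // roughly `export declare const answer: number;`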
@@ -0,0 +1,259 @@
// @ts-check

/*
Produces production builds and stitches together d.ts files.

To specify the package to build, simply pass its name and the desired build
formats to output (defaults to `buildOptions.formats` specified in that package,
or "esm,cjs"):

```
# name supports fuzzy match. will build all packages with name containing "dom":
nr build dom

# specify the format to output
nr build core --formats cjs
```
*/

import fs from 'node:fs'
import { parseArgs } from 'node:util'
import { existsSync, readFileSync } from 'node:fs'
import path from 'node:path'
import { brotliCompressSync, gzipSync } from 'node:zlib'
import pico from 'picocolors'
import { cpus } from 'node:os'
import { targets as allTargets, exec, fuzzyMatchTarget } from './utils.js'
import { scanEnums } from './inline-enums.js'
import prettyBytes from 'pretty-bytes'
import { spawnSync } from 'node:child_process'

const commit = spawnSync('git', ['rev-parse', '--short=7', 'HEAD'])
  .stdout.toString()
  .trim()

const { values, positionals: targets } = parseArgs({
  allowPositionals: true,
  options: {
    formats: {
      type: 'string',
      short: 'f',
    },
    devOnly: {
      type: 'boolean',
      short: 'd',
    },
    prodOnly: {
      type: 'boolean',
      short: 'p',
    },
    withTypes: {
      type: 'boolean',
      short: 't',
    },
    sourceMap: {
      type: 'boolean',
      short: 's',
    },
    release: {
      type: 'boolean',
    },
    all: {
      type: 'boolean',
      short: 'a',
    },
    size: {
      type: 'boolean',
    },
  },
})

const {
  formats,
  all: buildAllMatching,
  devOnly,
  prodOnly,
  withTypes: buildTypes,
  sourceMap,
  release: isRelease,
  size: writeSize,
} = values

const sizeDir = path.resolve('temp/size')

run()

async function run() {
  if (writeSize) fs.mkdirSync(sizeDir, { recursive: true })
  const removeCache = scanEnums()
  try {
    const resolvedTargets = targets.length
      ? fuzzyMatchTarget(targets, buildAllMatching)
      : allTargets
    await buildAll(resolvedTargets)
    await checkAllSizes(resolvedTargets)
    if (buildTypes) {
      await exec(
        'pnpm',
        [
          'run',
          'build-dts',
          ...(targets.length
            ? ['--environment', `TARGETS:${resolvedTargets.join(',')}`]
            : []),
        ],
        {
          stdio: 'inherit',
        },
      )
    }
  } finally {
    removeCache()
  }
}

/**
 * Builds all the targets in parallel.
 * @param {Array<string>} targets - An array of targets to build.
 * @returns {Promise<void>} - A promise representing the build process.
 */
async function buildAll(targets) {
  await runParallel(cpus().length, targets, build)
}

/**
 * Runs iterator function in parallel.
 * @template T - The type of items in the data source
 * @param {number} maxConcurrency - The maximum concurrency.
 * @param {Array<T>} source - The data source
 * @param {(item: T) => Promise<void>} iteratorFn - The iteratorFn
 * @returns {Promise<void[]>} - A Promise array containing all iteration results.
 */
async function runParallel(maxConcurrency, source, iteratorFn) {
  /**@type {Promise<void>[]} */
  const ret = []
  /**@type {Promise<void>[]} */
  const executing = []
  for (const item of source) {
    const p = Promise.resolve().then(() => iteratorFn(item))
    ret.push(p)

    if (maxConcurrency <= source.length) {
      const e = p.then(() => {
        executing.splice(executing.indexOf(e), 1)
      })
      executing.push(e)
      if (executing.length >= maxConcurrency) {
        await Promise.race(executing)
      }
    }
  }
  return Promise.all(ret)
}

const privatePackages = fs.readdirSync('packages-private')

/**
 * Builds the target.
 * @param {string} target - The target to build.
 * @returns {Promise<void>} - A promise representing the build process.
 */
async function build(target) {
  const pkgBase = privatePackages.includes(target)
    ? `packages-private`
    : `packages`
  const pkgDir = path.resolve(`${pkgBase}/${target}`)
  const pkg = JSON.parse(readFileSync(`${pkgDir}/package.json`, 'utf-8'))

  // if this is a full build (no specific targets), ignore private packages
  if ((isRelease || !targets.length) && pkg.private) {
    return
  }

  // if building a specific format, do not remove dist.
  if (!formats && existsSync(`${pkgDir}/dist`)) {
    fs.rmSync(`${pkgDir}/dist`, { recursive: true })
  }

  const env = {
    ...process.env,
    TARGET: target,
    COMMIT: commit,
    NODE_ENV:
      (pkg.buildOptions && pkg.buildOptions.env) ||
      (devOnly ? 'development' : 'production'),
    ...(formats ? { FORMATS: formats } : null),
    ...(prodOnly ? { PROD_ONLY: true } : null),
    ...(sourceMap ? { SOURCE_MAP: true } : null),
  }

  await exec('rollup', ['-c'], {
    stdio: 'inherit',
    env,
  })
}

/**
 * Checks the sizes of all targets.
 * @param {string[]} targets - The targets to check sizes for.
 * @returns {Promise<void>}
 */
async function checkAllSizes(targets) {
  if (devOnly || (formats && !formats.includes('global'))) {
    return
  }
  console.log()
  for (const target of targets) {
    await checkSize(target)
  }
  console.log()
}

/**
 * Checks the size of a target.
 * @param {string} target - The target to check the size for.
 * @returns {Promise<void>}
 */
async function checkSize(target) {
  const pkgDir = path.resolve(`packages/${target}`)
  await checkFileSize(`${pkgDir}/dist/${target}.global.prod.js`)
  if (!formats || formats.includes('global-runtime')) {
    await checkFileSize(`${pkgDir}/dist/${target}.runtime.global.prod.js`)
  }
}

/**
 * Checks the file size.
 * @param {string} filePath - The path of the file to check the size for.
 * @returns {Promise<void>}
 */
async function checkFileSize(filePath) {
  if (!existsSync(filePath)) {
    return
  }
  const file = fs.readFileSync(filePath)
  const fileName = path.basename(filePath)

  const gzipped = gzipSync(file)
  const brotli = brotliCompressSync(file)

  console.log(
    `${pico.gray(pico.bold(fileName))} min:${prettyBytes(
      file.length,
    )} / gzip:${prettyBytes(gzipped.length)} / brotli:${prettyBytes(
      brotli.length,
    )}`,
  )

  if (writeSize)
    fs.writeFileSync(
      path.resolve(sizeDir, `${fileName}.json`),
      JSON.stringify({
        file: fileName,
        size: file.length,
        gzip: gzipped.length,
        brotli: brotli.length,
      }),
      'utf-8',
    )
}
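The rollup-based script above retains the `runParallel` helper to cap build concurrency at the CPU count. A hypothetical standalone use of that helper, independent of the build targets (the task list and limit are made up for illustration):

  // run at most two tasks at a time; results resolve in source order
  await runParallel(2, ['a', 'b', 'c', 'd'], async name => {
    console.log('processing', name)
  })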
scripts/build.js (131 lines changed)
@@ -22,12 +22,16 @@ import { existsSync, readFileSync } from 'node:fs'
import path from 'node:path'
import { brotliCompressSync, gzipSync } from 'node:zlib'
import pico from 'picocolors'
import { cpus } from 'node:os'
import { targets as allTargets, exec, fuzzyMatchTarget } from './utils.js'
import { scanEnums } from './inline-enums.js'
import { targets as allTargets, fuzzyMatchTarget } from './utils.js'
import prettyBytes from 'pretty-bytes'
import { spawnSync } from 'node:child_process'
import { createConfigsForPackage } from './create-rolldown-config.js'
import { rolldown } from 'rolldown'
import { scanEnums } from './inline-enums.js'
import { fileURLToPath } from 'node:url'

const __dirname = fileURLToPath(new URL('.', import.meta.url))
const privatePackages = fs.readdirSync('packages-private')
const commit = spawnSync('git', ['rev-parse', '--short=7', 'HEAD'])
  .stdout.toString()
  .trim()
@@ -69,43 +73,32 @@ const { values, positionals: targets } = parseArgs({
})

const {
  formats,
  formats: rawFormats,
  all: buildAllMatching,
  devOnly,
  prodOnly,
  withTypes: buildTypes,
  sourceMap,
  release: isRelease,
  size: writeSize,
  size,
} = values

const formats = rawFormats?.split(',')
const sizeDir = path.resolve('temp/size')

run()

async function run() {
  if (writeSize) fs.mkdirSync(sizeDir, { recursive: true })
  if (size) fs.mkdirSync(sizeDir, { recursive: true })
  const removeCache = scanEnums()
  try {
    const resolvedTargets = targets.length
      ? fuzzyMatchTarget(targets, buildAllMatching)
      : allTargets
    await buildAll(resolvedTargets)
    await checkAllSizes(resolvedTargets)
    if (size) await checkAllSizes(resolvedTargets)
    if (buildTypes) {
      await exec(
        'pnpm',
        [
          'run',
          'build-dts',
          ...(targets.length
            ? ['--environment', `TARGETS:${resolvedTargets.join(',')}`]
            : []),
        ],
        {
          stdio: 'inherit',
        },
      )
      await import('./build-types.js')
    }
  } finally {
    removeCache()
@@ -118,51 +111,47 @@ async function run() {
 * @returns {Promise<void>} - A promise representing the build process.
 */
async function buildAll(targets) {
  await runParallel(cpus().length, targets, build)
}

/**
 * Runs iterator function in parallel.
 * @template T - The type of items in the data source
 * @param {number} maxConcurrency - The maximum concurrency.
 * @param {Array<T>} source - The data source
 * @param {(item: T) => Promise<void>} iteratorFn - The iteratorFn
 * @returns {Promise<void[]>} - A Promise array containing all iteration results.
 */
async function runParallel(maxConcurrency, source, iteratorFn) {
  /**@type {Promise<void>[]} */
  const ret = []
  /**@type {Promise<void>[]} */
  const executing = []
  for (const item of source) {
    const p = Promise.resolve().then(() => iteratorFn(item))
    ret.push(p)

    if (maxConcurrency <= source.length) {
      const e = p.then(() => {
        executing.splice(executing.indexOf(e), 1)
  const start = performance.now()
  const all = []
  let count = 0
  for (const t of targets) {
    const configs = createConfigsForTarget(t)
    if (configs) {
      all.push(
        Promise.all(
          configs.map(c =>
            rolldown(c).then(bundle => {
              return bundle.write(c.output).then(() => {
                // @ts-expect-error
                return path.join('packages', t, 'dist', c.output.entryFileNames)
              })
      executing.push(e)
      if (executing.length >= maxConcurrency) {
        await Promise.race(executing)
      }
            }),
          ),
        ).then(files => {
          files.forEach(f => {
            count++
            console.log(pico.gray('built: ') + pico.green(f))
          })
        }),
      )
    }
  }
  await Promise.all(all)
  console.log(
    `\n${count} files built in ${(performance.now() - start).toFixed(2)}ms.`,
  )
}
  return Promise.all(ret)
}

const privatePackages = fs.readdirSync('packages-private')

/**
 * Builds the target.
 * @param {string} target - The target to build.
 * @returns {Promise<void>} - A promise representing the build process.
 * @returns {import('rolldown').RolldownOptions[] | void} - A promise representing the build process.
 */
async function build(target) {
function createConfigsForTarget(target) {
  const pkgBase = privatePackages.includes(target)
    ? `packages-private`
    : `packages`
  const pkgDir = path.resolve(`${pkgBase}/${target}`)
  const pkgDir = path.resolve(__dirname, `../${pkgBase}/${target}`)
  const pkg = JSON.parse(readFileSync(`${pkgDir}/package.json`, 'utf-8'))

  // if this is a full build (no specific targets), ignore private packages
@@ -175,28 +164,16 @@ async function build(target) {
    fs.rmSync(`${pkgDir}/dist`, { recursive: true })
  }

  const env =
    (pkg.buildOptions && pkg.buildOptions.env) ||
    (devOnly ? 'development' : 'production')

  await exec(
    'rollup',
    [
      '-c',
      '--environment',
      [
        `COMMIT:${commit}`,
        `NODE_ENV:${env}`,
        `TARGET:${target}`,
        formats ? `FORMATS:${formats}` : ``,
        prodOnly ? `PROD_ONLY:true` : ``,
        sourceMap ? `SOURCE_MAP:true` : ``,
      ]
        .filter(Boolean)
        .join(','),
    ],
    { stdio: 'inherit' },
  )
  return createConfigsForPackage({
    target,
    commit,
    // @ts-expect-error
    formats,
    prodOnly,
    devOnly:
      (pkg.buildOptions && pkg.buildOptions.env === 'development') || devOnly,
    sourceMap,
  })
}

/**
@@ -221,7 +198,7 @@ async function checkAllSizes(targets) {
 * @returns {Promise<void>}
 */
async function checkSize(target) {
  const pkgDir = path.resolve(`packages/${target}`)
  const pkgDir = path.resolve(__dirname, `../packages/${target}`)
  await checkFileSize(`${pkgDir}/dist/${target}.global.prod.js`)
  if (!formats || formats.includes('global-runtime')) {
    await checkFileSize(`${pkgDir}/dist/${target}.runtime.global.prod.js`)
@@ -251,7 +228,7 @@ async function checkFileSize(filePath) {
    )}`,
  )

  if (writeSize)
  if (size)
    fs.writeFileSync(
      path.resolve(sizeDir, `${fileName}.json`),
      JSON.stringify({
@@ -0,0 +1,387 @@
// @ts-check
import assert from 'node:assert/strict'
import { createRequire } from 'node:module'
import { fileURLToPath } from 'node:url'
import path from 'node:path'
import { replacePlugin } from 'rolldown/experimental'
import pico from 'picocolors'
import polyfillNode from '@rolldown/plugin-node-polyfills'
import { entries } from './aliases.js'
import { inlineEnums } from './inline-enums.js'
import { minify as minifySwc } from '@swc/core'

const require = createRequire(import.meta.url)
const __dirname = fileURLToPath(new URL('.', import.meta.url))

const masterVersion = require('../package.json').version
const consolidatePkg = require('@vue/consolidate/package.json')

const packagesDir = path.resolve(__dirname, '../packages')

/** @typedef {'cjs' | 'esm-bundler' | 'global' | 'global-runtime' | 'esm-browser' | 'esm-bundler-runtime' | 'esm-browser-runtime'} PackageFormat */

/**
 * @param {{
 *   target: string
 *   commit: string
 *   formats?: PackageFormat[]
 *   devOnly?: boolean
 *   prodOnly?: boolean
 *   sourceMap?: boolean
 * }} options
 */
export function createConfigsForPackage({
  target,
  commit,
  formats,
  devOnly = false,
  prodOnly = false,
  sourceMap = false,
}) {
  const [enumPlugin, enumDefines] = inlineEnums()

  const packageDir = path.resolve(packagesDir, target)
  const resolve = (/** @type {string} */ p) => path.resolve(packageDir, p)
  const pkg = require(resolve(`package.json`))
  const packageOptions = pkg.buildOptions || {}
  const name = packageOptions.filename || path.basename(packageDir)

  /** @type {Record<PackageFormat, import('rolldown').OutputOptions>} */
  const outputConfigs = {
    'esm-bundler': {
      entryFileNames: `${name}.esm-bundler.js`,
      format: 'es',
    },
    'esm-browser': {
      entryFileNames: `${name}.esm-browser.js`,
      format: 'es',
    },
    cjs: {
      entryFileNames: `${name}.cjs.js`,
      format: 'cjs',
    },
    global: {
      entryFileNames: `${name}.global.js`,
      format: 'iife',
    },
    // runtime-only builds, for main "vue" package only
    'esm-bundler-runtime': {
      entryFileNames: `${name}.runtime.esm-bundler.js`,
      format: 'es',
    },
    'esm-browser-runtime': {
      entryFileNames: `${name}.runtime.esm-browser.js`,
      format: 'es',
    },
    'global-runtime': {
      entryFileNames: `${name}.runtime.global.js`,
      format: 'iife',
    },
  }

  const resolvedFormats = (
    formats ||
    packageOptions.formats || ['esm-bundler', 'cjs']
  ).filter(format => outputConfigs[format])

  const packageConfigs = prodOnly
    ? []
    : resolvedFormats.map(format => createConfig(format, outputConfigs[format]))

  if (!devOnly) {
    resolvedFormats.forEach(format => {
      if (packageOptions.prod === false) {
        return
      }
      if (format === 'cjs') {
        packageConfigs.push(createProductionConfig(format))
      }
      if (/^(global|esm-browser)(-runtime)?/.test(format)) {
        packageConfigs.push(createMinifiedConfig(format))
      }
    })
  }

  /**
   *
   * @param {PackageFormat} format
   * @param {import('rolldown').OutputOptions} output
   * @param {import('rolldown').Plugin[]} plugins
   * @returns {import('rolldown').RolldownOptions}
   */
  function createConfig(format, output, plugins = []) {
    if (!output) {
      console.error(pico.yellow(`invalid format: "${format}"`))
      process.exit(1)
    }

    output.dir = resolve('dist')

    const isProductionBuild = /\.prod\.js$/.test(
      String(output.entryFileNames) || '',
    )
    const isBundlerESMBuild = /esm-bundler/.test(format)
    const isBrowserESMBuild = /esm-browser/.test(format)
    const isServerRenderer = name === 'server-renderer'
    const isCJSBuild = format === 'cjs'
    const isGlobalBuild = /global/.test(format)
    const isCompatPackage =
      pkg.name === '@vue/compat' || pkg.name === '@vue/compat-canary'
    const isCompatBuild = !!packageOptions.compat
    const isBrowserBuild =
      (isGlobalBuild || isBrowserESMBuild || isBundlerESMBuild) &&
      !packageOptions.enableNonBrowserBranches

    output.banner = `/**
 * ${pkg.name} v${masterVersion}
 * (c) 2018-present Yuxi (Evan) You and Vue contributors
 * @license MIT
 **/`

    output.exports = isCompatPackage ? 'auto' : 'named'
    if (isCJSBuild) {
      output.esModule = true
    }
    output.sourcemap = sourceMap

    output.externalLiveBindings = false

    // https://github.com/rollup/rollup/pull/5380
    // @ts-expect-error Not supported yet
    output.reexportProtoFromExternal = false

    if (isGlobalBuild) {
      output.name = packageOptions.name
    }

    let entryFile = /runtime$/.test(format) ? `src/runtime.ts` : `src/index.ts`

    // the compat build needs both default AND named exports. This will cause
    // Rollup to complain for non-ESM targets, so we use separate entries for
    // esm vs. non-esm builds.
    if (isCompatPackage && (isBrowserESMBuild || isBundlerESMBuild)) {
      entryFile = /runtime$/.test(format)
        ? `src/esm-runtime.ts`
        : `src/esm-index.ts`
    }

    function resolveDefine() {
      /** @type {Record<string, string>} */
      const defines = {
        __COMMIT__: `"${commit}"`,
        __VERSION__: `"${masterVersion}"`,
        // this is only used during Vue's internal tests
        __TEST__: `false`,
        // If the build is expected to run directly in the browser (global / esm builds)
        __BROWSER__: String(isBrowserBuild),
        __GLOBAL__: String(isGlobalBuild),
        __ESM_BUNDLER__: String(isBundlerESMBuild),
        __ESM_BROWSER__: String(isBrowserESMBuild),
        // is targeting Node (SSR)?
        __CJS__: String(isCJSBuild),
        // need SSR-specific branches?
        __SSR__: String(isCJSBuild || isBundlerESMBuild || isServerRenderer),

        // 2.x compat build
        __COMPAT__: String(isCompatBuild),

        // feature flags
        __FEATURE_SUSPENSE__: `true`,
        __FEATURE_OPTIONS_API__: isBundlerESMBuild
          ? `__VUE_OPTIONS_API__`
          : `true`,
        __FEATURE_PROD_DEVTOOLS__: isBundlerESMBuild
          ? `__VUE_PROD_DEVTOOLS__`
          : `false`,
        __FEATURE_PROD_HYDRATION_MISMATCH_DETAILS__: isBundlerESMBuild
          ? `__VUE_PROD_HYDRATION_MISMATCH_DETAILS__`
          : `false`,
      }

      if (!isBundlerESMBuild) {
        // hard coded dev/prod builds
        defines.__DEV__ = String(!isProductionBuild)
      }

      // allow inline overrides like
      //__RUNTIME_COMPILE__=true pnpm build runtime-core
      Object.keys(defines).forEach(key => {
        if (key in process.env) {
          const value = process.env[key]
          assert(typeof value === 'string')
          defines[key] = value
        }
      })

      return defines
    }

    // esbuild define is a bit strict and only allows literal json or identifiers
    // so we still need replace plugin in some cases
    function resolveReplace() {
      /** @type {Record<string, string>} */
      const replacements = { ...enumDefines }

      if (isBundlerESMBuild) {
        Object.assign(replacements, {
          // preserve to be handled by bundlers
          __DEV__: `!!(process.env.NODE_ENV !== 'production')`,
        })
      }

      // for compiler-sfc browser build inlined deps
      if (isBrowserESMBuild && name === 'compiler-sfc') {
        Object.assign(replacements, {
          'process.env': '({})',
          'process.platform': '""',
          'process.stdout': 'null',
        })
      }

      if (Object.keys(replacements).length) {
        return [replacePlugin(replacements)]
      } else {
        return []
      }
    }

    function resolveExternal() {
      const treeShakenDeps = [
        'source-map-js',
        '@babel/parser',
        'estree-walker',
        'entities/lib/decode.js',
      ]

      // we are bundling forked consolidate.js in compiler-sfc which dynamically
      // requires a ton of template engines which should be ignored.
      let cjsIgnores = []
      if (
        pkg.name === '@vue/compiler-sfc' ||
        pkg.name === '@vue/compiler-sfc-canary'
      ) {
        cjsIgnores = [
          ...Object.keys(consolidatePkg.devDependencies),
          'vm',
          'crypto',
          'react-dom/server',
          'teacup/lib/express',
          'arc-templates/dist/es5',
          'then-pug',
          'then-jade',
        ]
      }

      if (isGlobalBuild || isBrowserESMBuild || isCompatPackage) {
        if (!packageOptions.enableNonBrowserBranches) {
          // normal browser builds - non-browser only imports are tree-shaken,
          // they are only listed here to suppress warnings.
          return treeShakenDeps
        } else {
          return cjsIgnores
        }
      } else {
        // Node / esm-bundler builds.
        // externalize all direct deps unless it's the compat build.
        return [
          ...Object.keys(pkg.dependencies || {}),
          ...Object.keys(pkg.peerDependencies || {}),
          // for @vue/compiler-sfc / server-renderer
          ...['path', 'url', 'stream'],
          // somehow these throw warnings for runtime-* package builds
          ...treeShakenDeps,
          ...cjsIgnores,
        ]
      }
    }

    function resolveNodePlugins() {
      const nodePlugins =
        (format === 'cjs' && Object.keys(pkg.devDependencies || {}).length) ||
        packageOptions.enableNonBrowserBranches
          ? [...(format === 'cjs' ? [] : [polyfillNode()])]
          : []
      return nodePlugins
    }

    return {
      input: resolve(entryFile),
      // Global and Browser ESM builds inlines everything so that they can be
      // used alone.
      external: resolveExternal(),
      define: resolveDefine(),
      platform: format === 'cjs' ? 'node' : 'browser',
      resolve: {
        alias: entries,
      },
      plugins: [
        // @ts-expect-error rollup's Plugin type incompatible w/ rolldown's vendored Plugin type
        enumPlugin,
        ...resolveReplace(),
        ...resolveNodePlugins(),
        ...plugins,
      ],
      output,
      onwarn: (msg, warn) => {
        if (msg.code !== 'CIRCULAR_DEPENDENCY') {
          warn(msg)
        }
      },
      treeshake: {
        // https://github.com/rolldown/rolldown/issues/1917
        moduleSideEffects: false,
      },
    }
  }

  function createProductionConfig(/** @type {PackageFormat} */ format) {
    return createConfig(format, {
      entryFileNames: `${name}.${format}.prod.js`,
      format: outputConfigs[format].format,
    })
  }

  function createMinifiedConfig(/** @type {PackageFormat} */ format) {
    return createConfig(
      format,
      {
        entryFileNames: String(outputConfigs[format].entryFileNames).replace(
          /\.js$/,
          '.prod.js',
        ),
        format: outputConfigs[format].format,
        // minify: true,
      },
      [
        {
          name: 'swc-minify',
          async renderChunk(
            contents,
            _,
            {
              format,
              sourcemap,
              // @ts-expect-error not supported yet
              sourcemapExcludeSources,
            },
          ) {
            const { code, map } = await minifySwc(contents, {
              module: format === 'es',
              compress: {
                ecma: 2016,
                pure_getters: true,
              },
              safari10: true,
              mangle: true,
              sourceMap: !!sourcemap,
              inlineSourcesContent: !sourcemapExcludeSources,
            })
            return { code, map: map || null }
          },
        },
      ],
    )
  }

  return packageConfigs
}
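For esm-bundler formats the define map above deliberately leaves some feature flags as bare identifiers (for example `__FEATURE_OPTIONS_API__` becomes `__VUE_OPTIONS_API__`) so the application's own bundler can resolve them. A hedged illustration of the effect (the consumer-side define shown is an example, not part of this diff):

  // inside Vue's source:           if (__FEATURE_OPTIONS_API__) { ... }
  // in the esm-bundler artifact:   if (__VUE_OPTIONS_API__) { ... }
  // a consuming app's bundler can then hard-code the flag, e.g.
  //   define: { __VUE_OPTIONS_API__: 'true' }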
@@ -20,7 +20,7 @@ import {
  writeFileSync,
} from 'node:fs'
import * as path from 'node:path'
import { parse } from '@babel/parser'
import { parseSync } from 'oxc-parser'
import { spawnSync } from 'node:child_process'
import MagicString from 'magic-string'

@@ -61,17 +61,19 @@ export function scanEnums() {
  ]

  // 2. parse matched files to collect enum info
  let i = 0
  for (const relativeFile of files) {
    const file = path.resolve(process.cwd(), relativeFile)
    const content = readFileSync(file, 'utf-8')
    const ast = parse(content, {
      plugins: ['typescript'],
    const res = parseSync(content, {
      // plugins: ['typescript'],
      sourceFilename: file,
      sourceType: 'module',
    })

    /** @type {Set<string>} */
    const enumIds = new Set()
    for (const node of ast.program.body) {
    for (const node of res.program.body) {
      if (
        node.type === 'ExportNamedDeclaration' &&
        node.declaration &&
@@ -129,7 +131,11 @@
        node.type === 'StringLiteral'
      ) {
        return node.value
      } else if (node.type === 'MemberExpression') {
      } else if (
        node.type === 'MemberExpression' ||
        // @ts-expect-error oxc only type
        node.type === 'StaticMemberExpression'
      ) {
        const exp = /** @type {`${string}.${string}`} */ (
          content.slice(node.start, node.end)
        )