import { ErrorCodes, callWithErrorHandling, handleError } from './errorHandling'
import { NOOP, isArray } from '@vue/shared'
import { type ComponentInternalInstance, getComponentName } from './component'

export enum SchedulerJobFlags {
  QUEUED = 1 << 0,
  PRE = 1 << 1,
  /**
   * Indicates whether the effect is allowed to recursively trigger itself
   * when managed by the scheduler.
   *
   * By default, a job cannot trigger itself because some built-in method
   * calls, e.g. Array.prototype.push, actually perform reads as well (#1740),
   * which can lead to confusing infinite loops.
   * The allowed cases are component update functions and watch callbacks.
   * Component update functions may update child component props, which in turn
   * trigger flush: "pre" watch callbacks that mutate state that the parent
   * relies on (#1801). Watch callbacks don't track their dependencies, so if a
   * callback triggers itself again, it's likely intentional and it is the
   * user's responsibility to perform recursive state mutation that eventually
   * stabilizes (#1727).
   */
  ALLOW_RECURSE = 1 << 2,
  DISPOSED = 1 << 3,
}

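// Illustrative sketch (assumed consumer code, not part of this module) of the
// default non-recursive behaviour described above:
//
//   const arr = reactive<number[]>([])
//   watchEffect(() => arr.push(arr.length))
//
// `push` both reads (`length`) and writes the array (#1740), so without the
// QUEUED guard the effect would re-trigger itself forever. Only jobs flagged
// ALLOW_RECURSE (component update functions and watch callbacks) may re-queue
// themselves while running.
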
export interface SchedulerJob extends Function {
  id?: number
  /**
   * flags can technically be undefined, but it can still be used in bitwise
   * operations just like 0.
   */
  flags?: SchedulerJobFlags
  /**
   * Attached by renderer.ts when setting up a component's render effect.
   * Used to obtain component information when reporting max recursive updates.
   */
  i?: ComponentInternalInstance
}

export type SchedulerJobs = SchedulerJob | SchedulerJob[]

const queue: SchedulerJob[] = []
let flushIndex = -1

const pendingPostFlushCbs: SchedulerJob[] = []
let activePostFlushCbs: SchedulerJob[] | null = null
let postFlushIndex = 0

const resolvedPromise = /*@__PURE__*/ Promise.resolve() as Promise<any>
let currentFlushPromise: Promise<void> | null = null

const RECURSION_LIMIT = 100
type CountMap = Map<SchedulerJob, number>

export function nextTick(): Promise<void>
export function nextTick<T, R>(
  this: T,
  fn: (this: T) => R | Promise<R>,
): Promise<R>
export function nextTick<T, R>(
  this: T,
  fn?: (this: T) => R | Promise<R>,
): Promise<void | R> {
  const p = currentFlushPromise || resolvedPromise
  return fn ? p.then(this ? fn.bind(this) : fn) : p
}

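// Usage sketch (assumed consumer code, not part of this module):
//
//   state.count++        // queues the component's update job
//   await nextTick()     // resolves once the current flush has completed
//   // the DOM now reflects the updated state
//
// When no flush is pending, nextTick() falls back to the already-resolved
// promise, so the callback still runs on the microtask queue.
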
// Use binary-search to find a suitable position in the queue. The queue needs
// to be sorted in increasing order of the job ids. This ensures that:
// 1. Components are updated from parent to child. As the parent is always
//    created before the child, it will always have a smaller id.
// 2. If a component is unmounted during a parent component's update, its
//    update can be skipped.
// A pre watcher will have the same id as its component's update job. The
// watcher should be inserted immediately before the update job. This allows
// watchers to be skipped if the component is unmounted by the parent update.
function findInsertionIndex(id: number) {
  let start = flushIndex + 1
  let end = queue.length

  while (start < end) {
    const middle = (start + end) >>> 1
    const middleJob = queue[middle]
    const middleJobId = getId(middleJob)
    if (
      middleJobId < id ||
      (middleJobId === id && middleJob.flags! & SchedulerJobFlags.PRE)
    ) {
      start = middle + 1
    } else {
      end = middle
    }
  }

  return start
}

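// Worked example: with flushIndex = -1 and queued job ids [1, 2 (PRE), 2, 3],
// findInsertionIndex(2) returns 2, so a new PRE job with id 2 is inserted
// after the existing PRE job but before its component's update job (also
// id 2); pre watchers therefore run before the update job they belong to.
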
export function queueJob(job: SchedulerJob): void {
  if (!(job.flags! & SchedulerJobFlags.QUEUED)) {
    const jobId = getId(job)
    const lastJob = queue[queue.length - 1]
    if (
      !lastJob ||
      // fast path when the job id is larger than the tail
      (!(job.flags! & SchedulerJobFlags.PRE) && jobId >= getId(lastJob))
    ) {
      queue.push(job)
    } else {
      queue.splice(findInsertionIndex(jobId), 0, job)
    }

    job.flags! |= SchedulerJobFlags.QUEUED

    queueFlush()
  }
}

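// Dedup sketch (illustrative): queueing the same job twice in one tick runs it
// only once, because the QUEUED flag is set on first insertion and is not
// cleared until the flush processes the job.
//
//   const job: SchedulerJob = () => console.log('run')
//   queueJob(job)
//   queueJob(job)    // no-op: job.flags already has SchedulerJobFlags.QUEUED
//   await nextTick() // logs 'run' exactly once
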
function queueFlush() {
  if (!currentFlushPromise) {
    currentFlushPromise = resolvedPromise.then(flushJobs)
  }
}

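// Note: only the first queueFlush() per tick schedules flushJobs on the
// microtask queue; until flushJobs resets currentFlushPromise to null, further
// calls are no-ops, so any number of synchronous queueJob / queuePostFlushCb
// calls coalesce into a single flush.
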
export function queuePostFlushCb(cb: SchedulerJobs): void {
  if (!isArray(cb)) {
    if (activePostFlushCbs && cb.id === -1) {
      activePostFlushCbs.splice(postFlushIndex + 1, 0, cb)
    } else if (!(cb.flags! & SchedulerJobFlags.QUEUED)) {
      pendingPostFlushCbs.push(cb)
      cb.flags! |= SchedulerJobFlags.QUEUED
    }
  } else {
    // if cb is an array, it is a component lifecycle hook which can only be
    // triggered by a job, which is already deduped in the main queue, so
    // we can skip duplicate check here to improve perf
    pendingPostFlushCbs.push(...cb)
  }
  queueFlush()
}

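// Usage sketch (illustrative): post-flush callbacks run after all component
// updates in the same flush, which is how effects that need the patched DOM
// (e.g. mounted/updated hooks and flush: 'post' watchers) are scheduled.
//
//   queuePostFlushCb(() => {
//     // the DOM for this tick's updates has already been patched here
//   })
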
export function flushPreFlushCbs(
  instance?: ComponentInternalInstance,
  seen?: CountMap,
  // skip the current job
  i: number = flushIndex + 1,
): void {
  if (__DEV__) {
    seen = seen || new Map()
  }
  for (; i < queue.length; i++) {
    const cb = queue[i]
    if (cb && cb.flags! & SchedulerJobFlags.PRE) {
      if (instance && cb.id !== instance.uid) {
        continue
      }
      if (__DEV__ && checkRecursiveUpdates(seen!, cb)) {
        continue
      }
      queue.splice(i, 1)
      i--
      if (cb.flags! & SchedulerJobFlags.ALLOW_RECURSE) {
        cb.flags! &= ~SchedulerJobFlags.QUEUED
      }
      cb()
      if (!(cb.flags! & SchedulerJobFlags.ALLOW_RECURSE)) {
        cb.flags! &= ~SchedulerJobFlags.QUEUED
      }
    }
  }
}

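// Behaviour note (derived from the loop above): when called with an instance,
// only pending PRE jobs whose id equals instance.uid are flushed and removed
// from the queue; all other queued jobs stay in place. Without an instance,
// every pending PRE job still in the queue is flushed in order.
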
export function flushPostFlushCbs(seen?: CountMap): void {
  if (pendingPostFlushCbs.length) {
    const deduped = [...new Set(pendingPostFlushCbs)].sort(
      (a, b) => getId(a) - getId(b),
    )
    pendingPostFlushCbs.length = 0

    // #1947 already has active queue, nested flushPostFlushCbs call
    if (activePostFlushCbs) {
      activePostFlushCbs.push(...deduped)
      return
    }

    activePostFlushCbs = deduped
    if (__DEV__) {
      seen = seen || new Map()
    }

    for (
      postFlushIndex = 0;
      postFlushIndex < activePostFlushCbs.length;
      postFlushIndex++
    ) {
      const cb = activePostFlushCbs[postFlushIndex]
      if (__DEV__ && checkRecursiveUpdates(seen!, cb)) {
        continue
      }
      if (cb.flags! & SchedulerJobFlags.ALLOW_RECURSE) {
        cb.flags! &= ~SchedulerJobFlags.QUEUED
      }
      if (!(cb.flags! & SchedulerJobFlags.DISPOSED)) cb()
      cb.flags! &= ~SchedulerJobFlags.QUEUED
    }
    activePostFlushCbs = null
    postFlushIndex = 0
  }
}

const getId = (job: SchedulerJob): number =>
  job.id == null ? (job.flags! & SchedulerJobFlags.PRE ? -1 : Infinity) : job.id

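// Ordering sketch: jobs without an explicit id sort to the extremes. A PRE job
// with no id gets -1 and is flushed before every id'd job, while any other
// id-less job gets Infinity and is flushed after all id'd jobs.
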
function flushJobs(seen?: CountMap) {
  if (__DEV__) {
    seen = seen || new Map()
  }

  // conditional usage of checkRecursiveUpdates must be determined outside the
  // try...catch block since Rollup by default de-optimizes treeshaking inside
  // try-catch. This can leave all warning code unshaken. Although it would
  // eventually get shaken by a minifier like terser, some minifiers would fail
  // to do that (e.g. https://github.com/evanw/esbuild/issues/1610)
  const check = __DEV__
    ? (job: SchedulerJob) => checkRecursiveUpdates(seen!, job)
    : NOOP

  try {
    for (flushIndex = 0; flushIndex < queue.length; flushIndex++) {
      const job = queue[flushIndex]
      if (job && !(job.flags! & SchedulerJobFlags.DISPOSED)) {
        if (__DEV__ && check(job)) {
          continue
        }
        if (job.flags! & SchedulerJobFlags.ALLOW_RECURSE) {
          job.flags! &= ~SchedulerJobFlags.QUEUED
        }
        callWithErrorHandling(
          job,
          job.i,
          job.i ? ErrorCodes.COMPONENT_UPDATE : ErrorCodes.SCHEDULER,
        )
        if (!(job.flags! & SchedulerJobFlags.ALLOW_RECURSE)) {
          job.flags! &= ~SchedulerJobFlags.QUEUED
        }
      }
    }
  } finally {
    // If there was an error we still need to clear the QUEUED flags
    for (; flushIndex < queue.length; flushIndex++) {
      const job = queue[flushIndex]
      if (job) {
        job.flags! &= ~SchedulerJobFlags.QUEUED
      }
    }

    flushIndex = -1
    queue.length = 0

    flushPostFlushCbs(seen)

    currentFlushPromise = null
    // If new jobs have been added to either queue, keep flushing
    if (queue.length || pendingPostFlushCbs.length) {
      flushJobs(seen)
    }
  }
}

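// Flush order sketch (derived from the code above): the main queue (pre
// watchers and component update jobs, sorted by id) is drained first, then
// flushPostFlushCbs runs the post queue, and if either queue picked up new
// jobs along the way, flushJobs recurses until both are empty.
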
function checkRecursiveUpdates(seen: CountMap, fn: SchedulerJob) {
  const count = seen.get(fn) || 0
  if (count > RECURSION_LIMIT) {
    const instance = fn.i
    const componentName = instance && getComponentName(instance.type)
    handleError(
      `Maximum recursive updates exceeded${
        componentName ? ` in component <${componentName}>` : ``
      }. ` +
        `This means you have a reactive effect that is mutating its own ` +
        `dependencies and thus recursively triggering itself. Possible sources ` +
        `include component template, render function, updated hook or ` +
        `watcher source function.`,
      null,
      ErrorCodes.APP_ERROR_HANDLER,
    )
    return true
  }
  seen.set(fn, count + 1)
  return false
}

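// Example of the pattern this guards against (assumed consumer code):
//
//   const n = ref(0)
//   watch(n, () => n.value++) // the callback re-triggers its own source
//
// Each run re-queues the callback; once it has executed more than
// RECURSION_LIMIT (100) times within a single flush cycle, the error above is
// reported instead of looping forever.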