fix(watch): flush:pre watchers should not fire if state change causes
owner component to unmount (fix #2291)
parent
a95554d35c
commit
78c199d6db
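
For context, the behavior change can be sketched with a minimal repro of #2291 (illustrative only; the component and variable names below are not taken from the commit): a child component owns a default flush: 'pre' watcher, and the very state change being watched also unmounts the child. Before this commit the watcher callback still ran once; with this commit it is skipped.

// Illustrative repro of #2291 (assumed setup, not part of this commit).
import { createApp, defineComponent, h, nextTick, ref, watch } from 'vue'

const toggle = ref(true)

const Child = defineComponent({
  setup() {
    // default flush: 'pre' watcher owned by Child
    watch(toggle, value => {
      console.log('child watcher fired with', value)
    })
    return () => h('div', 'child')
  }
})

const App = defineComponent({
  // flipping `toggle` unmounts Child in the same update
  setup: () => () => (toggle.value ? h(Child) : h('div', 'empty'))
})

createApp(App).mount(document.createElement('div'))

toggle.value = false
nextTick(() => {
  // before this commit: "child watcher fired with false" is logged once
  // after this commit: the child watcher does not fire at all
})
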
@@ -509,7 +509,8 @@ describe('api: watch', () => {
     expect(cb).not.toHaveBeenCalled()
   })

-  it('should fire on component unmount w/ flush: pre', async () => {
+  // #2291
+  it('should not fire on component unmount w/ flush: pre', async () => {
     const toggle = ref(true)
     const cb = jest.fn()
     const Comp = {
@@ -527,7 +528,7 @@ describe('api: watch', () => {
     expect(cb).not.toHaveBeenCalled()
     toggle.value = false
     await nextTick()
-    expect(cb).toHaveBeenCalledTimes(1)
+    expect(cb).not.toHaveBeenCalled()
   })

   // #1763
@@ -3,9 +3,8 @@ import {
   nextTick,
   queuePostFlushCb,
   invalidateJob,
-  queuePreFlushCb,
-  flushPreFlushCbs,
-  flushPostFlushCbs
+  flushPostFlushCbs,
+  flushPreFlushCbs
 } from '../src/scheduler'

 describe('scheduler', () => {
@@ -114,65 +113,7 @@ describe('scheduler', () => {
     })
   })

-  describe('queuePreFlushCb', () => {
-    it('basic usage', async () => {
-      const calls: string[] = []
-      const cb1 = () => {
-        calls.push('cb1')
-      }
-      const cb2 = () => {
-        calls.push('cb2')
-      }
-
-      queuePreFlushCb(cb1)
-      queuePreFlushCb(cb2)
-
-      expect(calls).toEqual([])
-      await nextTick()
-      expect(calls).toEqual(['cb1', 'cb2'])
-    })
-
-    it('should dedupe queued preFlushCb', async () => {
-      const calls: string[] = []
-      const cb1 = () => {
-        calls.push('cb1')
-      }
-      const cb2 = () => {
-        calls.push('cb2')
-      }
-      const cb3 = () => {
-        calls.push('cb3')
-      }
-
-      queuePreFlushCb(cb1)
-      queuePreFlushCb(cb2)
-      queuePreFlushCb(cb1)
-      queuePreFlushCb(cb2)
-      queuePreFlushCb(cb3)
-
-      expect(calls).toEqual([])
-      await nextTick()
-      expect(calls).toEqual(['cb1', 'cb2', 'cb3'])
-    })
-
-    it('chained queuePreFlushCb', async () => {
-      const calls: string[] = []
-      const cb1 = () => {
-        calls.push('cb1')
-        // cb2 will be executed after cb1 at the same tick
-        queuePreFlushCb(cb2)
-      }
-      const cb2 = () => {
-        calls.push('cb2')
-      }
-      queuePreFlushCb(cb1)
-
-      await nextTick()
-      expect(calls).toEqual(['cb1', 'cb2'])
-    })
-  })
-
-  describe('queueJob w/ queuePreFlushCb', () => {
+  describe('pre flush jobs', () => {
     it('queueJob inside preFlushCb', async () => {
       const calls: string[] = []
       const job1 = () => {
@@ -183,8 +124,9 @@ describe('scheduler', () => {
         calls.push('cb1')
         queueJob(job1)
       }
+      cb1.pre = true

-      queuePreFlushCb(cb1)
+      queueJob(cb1)
       await nextTick()
       expect(calls).toEqual(['cb1', 'job1'])
     })
@@ -194,17 +136,23 @@ describe('scheduler', () => {
       const job1 = () => {
         calls.push('job1')
       }
+      job1.id = 1
+
       const cb1 = () => {
         calls.push('cb1')
         queueJob(job1)
         // cb2 should execute before the job
-        queuePreFlushCb(cb2)
+        queueJob(cb2)
       }
+      cb1.pre = true
+
       const cb2 = () => {
         calls.push('cb2')
       }
+      cb2.pre = true
+      cb2.id = 1

-      queuePreFlushCb(cb1)
+      queueJob(cb1)
       await nextTick()
       expect(calls).toEqual(['cb1', 'cb2', 'job1'])
     })
@@ -216,9 +164,9 @@ describe('scheduler', () => {
         // when updating the props of a child component. This is handled
         // directly inside `updateComponentPreRender` to avoid non atomic
         // cb triggers (#1763)
-        queuePreFlushCb(cb1)
-        queuePreFlushCb(cb2)
-        flushPreFlushCbs(undefined, job1)
+        queueJob(cb1)
+        queueJob(cb2)
+        flushPreFlushCbs()
         calls.push('job1')
       }
       const cb1 = () => {
@@ -226,9 +174,11 @@ describe('scheduler', () => {
         // a cb triggers its parent job, which should be skipped
         queueJob(job1)
       }
+      cb1.pre = true
       const cb2 = () => {
         calls.push('cb2')
       }
+      cb2.pre = true

       queueJob(job1)
       await nextTick()
@@ -237,12 +187,14 @@ describe('scheduler', () => {

     // #3806
     it('queue preFlushCb inside postFlushCb', async () => {
-      const cb = jest.fn()
+      const spy = jest.fn()
+      const cb = () => spy()
+      cb.pre = true
       queuePostFlushCb(() => {
-        queuePreFlushCb(cb)
+        queueJob(cb)
       })
       await nextTick()
-      expect(cb).toHaveBeenCalled()
+      expect(spy).toHaveBeenCalled()
     })
   })

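
Since queuePreFlushCb and its dedicated queue are gone, the deleted cases above (basic usage, dedupe, chaining) are subsumed by queueJob itself: a pre-flush callback is now an ordinary SchedulerJob flagged pre: true, and duplicate queueing is rejected by queueJob's own includes() check (see the scheduler diff further down). A rough sketch of that idiom, assuming the same test-style imports from '../src/scheduler':

// Sketch only (assumptions noted above): a "pre flush cb" is just a queue job
// with `pre: true`; queueing it twice still runs it once per tick.
import { nextTick, queueJob } from '../src/scheduler'
import type { SchedulerJob } from '../src/scheduler'

async function demo() {
  const calls: string[] = []

  const cb: SchedulerJob = () => {
    calls.push('cb')
  }
  cb.pre = true

  queueJob(cb)
  queueJob(cb) // deduped by queueJob's queue.includes() check

  await nextTick()
  // calls === ['cb']
}
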
@@ -9,7 +9,7 @@ import {
   EffectScheduler,
   DebuggerOptions
 } from '@vue/reactivity'
-import { SchedulerJob, queuePreFlushCb } from './scheduler'
+import { SchedulerJob, queueJob } from './scheduler'
 import {
   EMPTY_OBJ,
   isObject,
@@ -345,7 +345,9 @@ function doWatch(
     scheduler = () => queuePostRenderEffect(job, instance && instance.suspense)
   } else {
     // default: 'pre'
-    scheduler = () => queuePreFlushCb(job)
+    job.pre = true
+    if (instance) job.id = instance.uid
+    scheduler = () => queueJob(job)
   }

   const effect = new ReactiveEffect(getter, scheduler)
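
The doWatch change above is the core of the fix: a default ('pre') watcher no longer goes through a separate pre-flush queue that was drained before component updates; it is queued as a regular job whose id is its owner's uid, and the comparator added in the scheduler diff below orders it with, and just ahead of, that component's own update job. A toy model of the resulting ordering (plain objects, not the real scheduler):

// Toy model of the new ordering (assumed uids: 1 = parent, 2 = child).
interface ToyJob {
  (): void
  id?: number
  pre?: boolean
}

const getId = (job: ToyJob) => (job.id == null ? Infinity : job.id)

// same shape as the comparator added in the scheduler diff below
const comparator = (a: ToyJob, b: ToyJob) => {
  const diff = getId(a) - getId(b)
  if (diff === 0) {
    if (a.pre && !b.pre) return -1
    if (b.pre && !a.pre) return 1
  }
  return diff
}

const order: string[] = []
const parentUpdate: ToyJob = () => { order.push('parent update') }
parentUpdate.id = 1
const childPreWatcher: ToyJob = () => { order.push('child pre watcher') }
childPreWatcher.id = 2
childPreWatcher.pre = true
const childUpdate: ToyJob = () => { order.push('child update') }
childUpdate.id = 2

const queue = [childUpdate, childPreWatcher, parentUpdate]
queue.sort(comparator)
queue.forEach(job => job())
// order === ['parent update', 'child pre watcher', 'child update']

Previously, flushJobs drained every pending pre-flush callback before sorting and running the component update queue, so a child's pre watcher could run even though the queued parent update was about to unmount it. With the ordering above, the parent's update (smaller uid) runs first; if it unmounts the child, the child's watcher job no longer executes (it bails out once its effect has been stopped, which is existing doWatch behavior not shown in this diff). That is exactly what the rewritten test at the top now asserts.
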
@@ -1590,7 +1590,7 @@ function baseCreateRenderer(
     pauseTracking()
     // props update may have triggered pre-flush watchers.
     // flush them before the render update.
-    flushPreFlushCbs(undefined, instance.update)
+    flushPreFlushCbs()
     resetTracking()
   }

@@ -2331,6 +2331,7 @@ function baseCreateRenderer(
     } else {
       patch(container._vnode || null, vnode, container, null, null, null, isSVG)
     }
+    flushPreFlushCbs()
     flushPostFlushCbs()
     container._vnode = vnode
   }
@@ -5,6 +5,7 @@ import { warn } from './warning'

 export interface SchedulerJob extends Function {
   id?: number
+  pre?: boolean
   active?: boolean
   computed?: boolean
   /**
@@ -39,10 +40,6 @@ let isFlushPending = false
 const queue: SchedulerJob[] = []
 let flushIndex = 0

-const pendingPreFlushCbs: SchedulerJob[] = []
-let activePreFlushCbs: SchedulerJob[] | null = null
-let preFlushIndex = 0
-
 const pendingPostFlushCbs: SchedulerJob[] = []
 let activePostFlushCbs: SchedulerJob[] | null = null
 let postFlushIndex = 0
@@ -50,8 +47,6 @@ let postFlushIndex = 0
 const resolvedPromise = /*#__PURE__*/ Promise.resolve() as Promise<any>
 let currentFlushPromise: Promise<void> | null = null

-let currentPreFlushParentJob: SchedulerJob | null = null
-
 const RECURSION_LIMIT = 100
 type CountMap = Map<SchedulerJob, number>

@@ -89,12 +84,11 @@ export function queueJob(job: SchedulerJob) {
   // allow it recursively trigger itself - it is the user's responsibility to
   // ensure it doesn't end up in an infinite loop.
   if (
-    (!queue.length ||
-      !queue.includes(
-        job,
-        isFlushing && job.allowRecurse ? flushIndex + 1 : flushIndex
-      )) &&
-    job !== currentPreFlushParentJob
+    !queue.length ||
+    !queue.includes(
+      job,
+      isFlushing && job.allowRecurse ? flushIndex + 1 : flushIndex
+    )
   ) {
     if (job.id == null) {
       queue.push(job)
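
The dropped `job !== currentPreFlushParentJob` condition relied on the removed pre-flush parent-job bookkeeping. The remaining includes() check already guards against a job re-queueing itself while it is being flushed, since the running job is still sitting at queue[flushIndex]; presumably this is part of why the explicit parent-job argument also disappears from the flushPreFlushCbs call sites. A simplified sketch of that invariant (not the real scheduler):

// Simplified sketch: while a job runs it is still present at queue[flushIndex],
// so includes() searched from flushIndex rejects re-adding it.
type Job = () => void

const queue: Job[] = []
const flushIndex = 0

function queueJob(job: Job) {
  if (!queue.length || !queue.includes(job, flushIndex)) {
    queue.push(job)
  }
}

const parentJob: Job = () => {
  // a pre-flush watcher flushed from inside this job tries to re-queue it
  queueJob(parentJob)
}

queueJob(parentJob)
queue[flushIndex]() // runs parentJob once; queue.length is still 1
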
@@ -119,71 +113,44 @@ export function invalidateJob(job: SchedulerJob) {
   }
 }

-function queueCb(
-  cb: SchedulerJobs,
-  activeQueue: SchedulerJob[] | null,
-  pendingQueue: SchedulerJob[],
-  index: number
-) {
+export function queuePostFlushCb(cb: SchedulerJobs) {
   if (!isArray(cb)) {
     if (
-      !activeQueue ||
-      !activeQueue.includes(cb, cb.allowRecurse ? index + 1 : index)
+      !activePostFlushCbs ||
+      !activePostFlushCbs.includes(
+        cb,
+        cb.allowRecurse ? postFlushIndex + 1 : postFlushIndex
+      )
     ) {
-      pendingQueue.push(cb)
+      pendingPostFlushCbs.push(cb)
     }
   } else {
     // if cb is an array, it is a component lifecycle hook which can only be
     // triggered by a job, which is already deduped in the main queue, so
     // we can skip duplicate check here to improve perf
-    pendingQueue.push(...cb)
+    pendingPostFlushCbs.push(...cb)
   }
   queueFlush()
 }

-export function queuePreFlushCb(cb: SchedulerJob) {
-  queueCb(cb, activePreFlushCbs, pendingPreFlushCbs, preFlushIndex)
-}
-
-export function queuePostFlushCb(cb: SchedulerJobs) {
-  queueCb(cb, activePostFlushCbs, pendingPostFlushCbs, postFlushIndex)
-}
-
-export function flushPreFlushCbs(
-  seen?: CountMap,
-  parentJob: SchedulerJob | null = null
-) {
-  if (pendingPreFlushCbs.length) {
-    currentPreFlushParentJob = parentJob
-    activePreFlushCbs = [...new Set(pendingPreFlushCbs)]
-    pendingPreFlushCbs.length = 0
-    if (__DEV__) {
-      seen = seen || new Map()
-    }
-    for (
-      preFlushIndex = 0;
-      preFlushIndex < activePreFlushCbs.length;
-      preFlushIndex++
-    ) {
-      if (
-        __DEV__ &&
-        checkRecursiveUpdates(seen!, activePreFlushCbs[preFlushIndex])
-      ) {
+export function flushPreFlushCbs(seen?: CountMap, i = flushIndex) {
+  if (__DEV__) {
+    seen = seen || new Map()
+  }
+  for (; i < queue.length; i++) {
+    const cb = queue[i]
+    if (cb && cb.pre) {
+      if (__DEV__ && checkRecursiveUpdates(seen!, cb)) {
        continue
      }
-      activePreFlushCbs[preFlushIndex]()
+      queue.splice(i, 1)
+      i--
+      cb()
     }
-    activePreFlushCbs = null
-    preFlushIndex = 0
-    currentPreFlushParentJob = null
-    // recursively flush until it drains
-    flushPreFlushCbs(seen, parentJob)
   }
 }

 export function flushPostFlushCbs(seen?: CountMap) {
-  // flush any pre cbs queued during the flush (e.g. pre watchers)
-  flushPreFlushCbs()
   if (pendingPostFlushCbs.length) {
     const deduped = [...new Set(pendingPostFlushCbs)]
     pendingPostFlushCbs.length = 0
@@ -222,6 +189,15 @@ export function flushPostFlushCbs(seen?: CountMap) {
 const getId = (job: SchedulerJob): number =>
   job.id == null ? Infinity : job.id

+const comparator = (a: SchedulerJob, b: SchedulerJob): number => {
+  const diff = getId(a) - getId(b)
+  if (diff === 0) {
+    if (a.pre && !b.pre) return -1
+    if (b.pre && !a.pre) return 1
+  }
+  return diff
+}
+
 function flushJobs(seen?: CountMap) {
   isFlushPending = false
   isFlushing = true
@@ -229,8 +205,6 @@ function flushJobs(seen?: CountMap) {
     seen = seen || new Map()
   }

-  flushPreFlushCbs(seen)
-
   // Sort queue before flush.
   // This ensures that:
   // 1. Components are updated from parent to child. (because parent is always
@@ -238,7 +212,7 @@ function flushJobs(seen?: CountMap) {
   // priority number)
   // 2. If a component is unmounted during a parent component's update,
   // its update can be skipped.
-  queue.sort((a, b) => getId(a) - getId(b))
+  queue.sort(comparator)

   // conditional usage of checkRecursiveUpdate must be determined out of
   // try ... catch block since Rollup by default de-optimizes treeshaking
@@ -270,11 +244,7 @@ function flushJobs(seen?: CountMap) {
     currentFlushPromise = null
     // some postFlushCb queued jobs!
     // keep flushing until it drains.
-    if (
-      queue.length ||
-      pendingPreFlushCbs.length ||
-      pendingPostFlushCbs.length
-    ) {
+    if (queue.length || pendingPostFlushCbs.length) {
       flushJobs(seen)
     }
   }
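
Taken together, flushPreFlushCbs is now just a filter over the one real queue: called synchronously it runs only the jobs flagged pre and leaves everything else for the scheduled flush. A rough usage sketch under that assumption, reusing the test-style import path:

// Sketch (assumed usage): flushPreFlushCbs() synchronously drains only the
// jobs flagged `pre`, leaving ordinary jobs for the normal flush.
import { flushPreFlushCbs, nextTick, queueJob } from '../src/scheduler'
import type { SchedulerJob } from '../src/scheduler'

async function demo() {
  const calls: string[] = []

  const preJob: SchedulerJob = () => calls.push('pre')
  preJob.pre = true
  const plainJob: SchedulerJob = () => calls.push('plain')

  queueJob(preJob)
  queueJob(plainJob)

  flushPreFlushCbs() // runs preJob now; plainJob stays queued
  // calls === ['pre']

  await nextTick()
  // calls === ['pre', 'plain']
}
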