Commit 2826615

feat(descriptors): minimize blocking the UI
This adds a "pauser" to the descriptor converter so it avoids blocking the UI by scheduling idle callbacks. See the comment in the code for justification and potential follow-up work.
1 parent 6647a27 commit 2826615

File tree

3 files changed: +180 -12 lines

packages/@sanity/schema/src/descriptors/convert.ts
packages/@sanity/schema/src/descriptors/scheduler.ts
packages/sanity/src/core/config/uploadSchema.ts

packages/@sanity/schema/src/descriptors/convert.ts

Lines changed: 54 additions & 9 deletions

@@ -18,6 +18,7 @@ import {isEqual, isObject} from 'lodash'
 
 import {Rule} from '../legacy/Rule'
 import {OWN_PROPS_NAME} from '../legacy/types/constants'
+import {IdleScheduler, type Scheduler, SYNC_SCHEDULER} from './scheduler'
 import {
   type ArrayElement,
   type ArrayTypeDef,
@@ -59,18 +60,59 @@ export class DescriptorConverter {
    *
    * This is automatically cached in a weak map.
    */
-  async get(schema: Schema): Promise<SetSynchronization<RegistryType>> {
+  async get(
+    schema: Schema,
+    opts?: {
+      /**
+       * If present, this will use an idle scheduler which records durations into this array.
+       * This option will be ignored if the `scheduler` option is passed in.
+       **/
+      pauseDurations?: number[]
+
+      /** An explicit scheduler to do the work. */
+      scheduler?: Scheduler
+    },
+  ): Promise<SetSynchronization<RegistryType>> {
+    /*
+    Converting the schema into a descriptor consists of two parts:
+
+    1. Traversing the type into a descriptor.
+    2. Serializing the descriptor, including SHA256 hashing.
+
+    Note that only (2) can be done in a background worker since the type
+    itself isn't serializable (which is a requirement for a background
+    worker). In addition, we expect (2) to scale in the same way as (1): if it
+    takes X milliseconds to traverse the type into a descriptor it will
+    probably take c*X milliseconds to serialize it.
+
+    This means that a background worker actually doesn't give us that much
+    value. A huge type will be expensive to convert from a type to a
+    descriptor either way. Therefore this function currently only avoids
+    blocking by processing each type separately.
+
+    If we want to minimize the blocking further we would have to restructure
+    this converter to be able to convert the types asynchronously, and _then_
+    it might make sense to run the serialization step itself in a background
+    worker.
+    */
     let value = this.cache.get(schema)
     if (value) return value
 
+    let idleScheduler: IdleScheduler | undefined
+    const scheduler =
+      opts?.scheduler ||
+      (opts?.pauseDurations
+        ? (idleScheduler = new IdleScheduler(opts.pauseDurations))
+        : SYNC_SCHEDULER)
+
     const options: Options = {
       fields: new Map(),
       duplicateFields: new Map(),
       arrayElements: new Map(),
       duplicateArrayElements: new Map(),
     }
 
-    const namedTypes = schema.getLocalTypeNames().map((name) => {
+    const namedTypes = await scheduler.map(schema.getLocalTypeNames(), (name) => {
       const typeDef = convertTypeDef(schema.get(name)!, name, options)
       return {name, typeDef}
     })
@@ -89,24 +131,27 @@ export class DescriptorConverter {
     const builder = new SetBuilder({rewriteMap})
 
     // Now we can build the de-duplicated objects:
-    for (const [fieldDef, key] of options.duplicateFields.entries()) {
+    await scheduler.forEachIter(options.duplicateFields.entries(), ([fieldDef, key]) => {
       builder.addObject('sanity.schema.hoisted', {key, value: {...fieldDef}})
-    }
+    })
 
-    for (const [arrayElem, key] of options.duplicateArrayElements.entries()) {
+    await scheduler.forEachIter(options.duplicateArrayElements.entries(), ([arrayElem, key]) => {
       builder.addObject('sanity.schema.hoisted', {key, value: {...arrayElem}})
-    }
+    })
 
-    for (const namedType of namedTypes) {
+    await scheduler.forEach(namedTypes, (namedType) => {
       builder.addObject('sanity.schema.namedType', namedType)
-    }
+    })
 
     if (schema.parent) {
-      builder.addSet(await this.get(schema.parent))
+      builder.addSet(await this.get(schema.parent, {scheduler}))
     }
 
     value = builder.build('sanity.schema.registry')
     this.cache.set(schema, value)
+
+    // If we created the scheduler we also need to end it.
+    if (idleScheduler) idleScheduler.end()
     return value
   }
 }
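
For illustration, here is a minimal TypeScript sketch of how the new get() options select a scheduler; the helper function, the converter value, and the logging are assumptions for the example (imports omitted) and are not part of the diff:

// Illustrative sketch only: assumes an existing DescriptorConverter and a compiled schema.
async function convertInThreeWays(converter: DescriptorConverter, schema: Schema) {
  // Default: SYNC_SCHEDULER does all the work in one go and may block the UI.
  await converter.get(schema)

  // pauseDurations: an IdleScheduler is created, work yields to the browser,
  // and the duration of each blocking chunk (in ms) is pushed into the array.
  const pauseDurations: number[] = []
  await converter.get(schema, {pauseDurations})
  console.log('blocking chunks (ms):', pauseDurations)

  // Explicit scheduler: used as-is; this is how the recursive call for
  // schema.parent reuses the same scheduler instance.
  await converter.get(schema, {scheduler: SYNC_SCHEDULER})
}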

packages/@sanity/schema/src/descriptors/scheduler.ts

Lines changed: 101 additions & 0 deletions

@@ -0,0 +1,101 @@
+/** The scheduler is capable of executing work in different ways. */
+export type Scheduler = {
+  map<T, U>(arr: T[], fn: (val: T) => U): Promise<U[]>
+  forEach<T>(arr: T[], fn: (val: T) => void): Promise<void>
+  forEachIter<T>(iter: Iterable<T>, fn: (val: T) => void): Promise<void>
+}
+
+/**
+ * How long we're willing to do work before invoking the idle callback.
+ * This is set to 50% of the budget for maintaining 60 FPS.
+ */
+const MAX_IDLE_WORK = 0.5 * (1000 / 60)
+
+/** A scheduler which uses an idle callback to process work. */
+export class IdleScheduler implements Scheduler {
+  #durations: number[] = []
+  #lastAwake: number
+
+  constructor(durations: number[]) {
+    this.#lastAwake = performance.now()
+    this.#durations = durations
+  }
+
+  async map<T, U>(arr: T[], fn: (val: T) => U): Promise<U[]> {
+    const result: U[] = []
+    for (const val of arr) {
+      const pause = this._tryPause()
+      if (pause) await pause
+      result.push(fn(val))
+    }
+    return result
+  }
+
+  async forEach<T>(arr: T[], fn: (val: T) => void): Promise<void> {
+    for (const val of arr) {
+      const pause = this._tryPause()
+      if (pause) await pause
+      fn(val)
+    }
+  }
+
+  async forEachIter<T>(iter: Iterable<T>, fn: (val: T) => void): Promise<void> {
+    for (const val of iter) {
+      const pause = this._tryPause()
+      if (pause) await pause
+      fn(val)
+    }
+  }
+
+  /** Should be invoked at the end to also measure the last pause. */
+  end() {
+    this.#durations.push(performance.now() - this.#lastAwake)
+  }
+
+  /**
+   * Yields control back to the UI.
+   */
+  private _tryPause(): Promise<void> | undefined {
+    // Record how much time we've used so far:
+    const now = performance.now()
+    const elapsed = now - this.#lastAwake
+    if (elapsed < MAX_IDLE_WORK) {
+      // We're willing to do more work!
+      return undefined
+    }
+
+    this.#durations.push(elapsed)
+
+    return new Promise((resolve) => {
+      const done = () => {
+        this.#lastAwake = performance.now()
+        resolve()
+      }
+
+      if (typeof requestIdleCallback === 'function') {
+        requestIdleCallback(done, {timeout: 1})
+      } else if (typeof requestAnimationFrame === 'function') {
+        requestAnimationFrame(done)
+      } else {
+        setTimeout(done, 0)
+      }
+    })
+  }
+}
+
+/** A scheduler which does the work as synchronously as possible. */
+export const SYNC_SCHEDULER: Scheduler = {
+  async map<T, U>(arr: T[], fn: (val: T) => U): Promise<U[]> {
+    return arr.map(fn)
+  },
+
+  async forEach<T>(arr: T[], fn: (val: T) => void): Promise<void> {
+    return arr.forEach(fn)
+  },
+
+  async forEachIter<T>(iter: Iterable<T>, fn: (val: T) => void): Promise<void> {
+    for (const val of iter) {
+      fn(val)
+    }
+  },
+}
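
As a rough standalone usage sketch (not part of the commit; the item list, the helper name, and the logging are assumptions), the IdleScheduler runs callbacks synchronously but yields back to the browser once roughly MAX_IDLE_WORK = 0.5 * (1000 / 60), about 8.3 ms, of work has accumulated since it last woke up, and end() records the final work chunk:

// Illustrative only: drive the IdleScheduler directly and inspect the recorded chunks.
async function processWithIdleYields(items: string[]): Promise<string[]> {
  const durations: number[] = []
  const scheduler = new IdleScheduler(durations)

  // Each callback runs synchronously; the scheduler awaits an idle callback
  // (or a rAF/setTimeout fallback) whenever the ~8.3ms budget is exceeded.
  const results = await scheduler.map(items, (item) => item.toUpperCase())

  // Record the duration of the last (still unreported) work chunk as well.
  scheduler.end()

  console.log(`${durations.length} chunks, longest: ${Math.max(...durations)}ms`)
  return results
}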

packages/sanity/src/core/config/uploadSchema.ts

Lines changed: 25 additions & 3 deletions

@@ -5,6 +5,7 @@ import {
 } from '@sanity/schema/_internal'
 import {type Schema} from '@sanity/types'
 import debugit from 'debug'
+import {max, sum} from 'lodash'
 import {firstValueFrom} from 'rxjs'
 
 import {isDev} from '../environment'
@@ -13,7 +14,7 @@ import {DESCRIPTOR_CONVERTER} from '../schema'
 
 const debug = debugit('sanity:config')
 
-const TOGGLE = 'toggle.schema.upload'
+const TOGGLE = 'toggle.schema.upload-pause'
 
 async function isEnabled(client: SanityClient): Promise<boolean> {
   if (typeof process !== 'undefined' && process?.env?.SANITY_STUDIO_SCHEMA_DESCRIPTOR) {
@@ -72,28 +73,49 @@ export async function uploadSchema(
   // The second step is then to actually synchronize it. This is a multi-step
   // process where it tries to synchronize as much as possible in each step.
 
+  const pauseDurations: number[] = []
   const before = performance.now()
-  const sync = await DESCRIPTOR_CONVERTER.get(schema)
+  const sync = await DESCRIPTOR_CONVERTER.get(schema, {pauseDurations})
   const after = performance.now()
+
+  const totalPause = sum(pauseDurations) || 0
+  const maxPause = max(pauseDurations) || 0
+  const avgPause = pauseDurations.length === 0 ? 0 : totalPause / pauseDurations.length
   const duration = after - before
+
   if (duration > 1000) {
     console.warn(`Building schema for synchronization took more than 1 second (${duration}ms)`)
   }
 
+  if (maxPause > 100) {
+    console.warn(
+      `Building schema for synchronization blocked UI for more than 100ms (${maxPause}ms)`,
+    )
+  }
+
   const descriptorId = sync.set.id
   const {projectId = '?', dataset = '?'} = client.config()
   let contextKey = `dataset:${projectId}:${dataset}`
   if (isDev) contextKey += '#dev'
 
   const claimRequest: ClaimRequest = {descriptorId, contextKey}
 
+  const clientTimings = {
+    convertSchema: duration,
+    convertSchemaPauseTotal: totalPause,
+    convertSchemaPauseMax: maxPause,
+    convertSchemaPauseAvg: avgPause,
+  }
+
   const claimResponse = await client.request<ClaimResponse>({
     uri: '/descriptors/claim',
     method: 'POST',
     body: claimRequest,
     headers: {
       // We mirror the format of Server-Timing: https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Server-Timing
-      'Client-Timing': `convertSchema;dur=${duration}`,
+      'Client-Timing': Object.entries(clientTimings)
+        .map(([name, dur]) => `${name};dur=${dur}`)
+        .join(','),
     },
   })
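
For illustration (not part of the diff), the Client-Timing header assembled above mirrors the Server-Timing name;dur=value format, with entries joined by commas; the numbers below are hypothetical and only show the resulting string:

// Hypothetical timing values, used only to show the constructed header.
const clientTimings = {
  convertSchema: 420.5,
  convertSchemaPauseTotal: 31.2,
  convertSchemaPauseMax: 9.8,
  convertSchemaPauseAvg: 7.8,
}

const header = Object.entries(clientTimings)
  .map(([name, dur]) => `${name};dur=${dur}`)
  .join(',')

// header === 'convertSchema;dur=420.5,convertSchemaPauseTotal;dur=31.2,convertSchemaPauseMax;dur=9.8,convertSchemaPauseAvg;dur=7.8'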
