Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
72 changes: 72 additions & 0 deletions packages/sanity/src/_internal/cli/actions/schema/metafile.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import {type SeralizedSchemaDebug, type SerializedTypeDebug} from '../../threads/validateSchema'

// This implements the metafile format of ESBuild.
// This implements the metafile format of esbuild
// (https://esbuild.github.io/api/#metafile) so the generated file can be fed
// into size-analysis tools such as https://esbuild.github.io/analyze/.
type Metafile = {
  inputs: Record<string, MetafileInput>
  outputs: Record<string, MetafileOutput>
}

type MetafileOutput = {
  imports: []
  exports: []
  // Per-input byte contribution to this output.
  inputs: Record<string, {bytesInOutput: number}>
  bytes: number
}

type MetafileInput = {
  bytes: number
  imports: []
  // esbuild spells the CommonJS format 'cjs' ('csj' was a typo).
  format: 'esm' | 'cjs'
}

/**
 * Converts the serialized schema debug information into a metafile in the
 * format produced by esbuild (https://esbuild.github.io/api/#metafile), so the
 * relative size of each schema type can be inspected with tools such as
 * https://esbuild.github.io/analyze/.
 *
 * Each schema type — and every nested field/array member — becomes a metafile
 * "input" whose byte count is its own serialized size minus the size of its
 * children; a single "root" output aggregates the total.
 */
export function generateMetafile(schema: SeralizedSchemaDebug): Metafile {
  const output: MetafileOutput = {
    imports: [],
    exports: [],
    inputs: {},
    bytes: 0,
  }

  const inputs: Record<string, MetafileInput> = {}

  function processType(path: string, entry: SerializedTypeDebug) {
    // Recurse into nested members first so their sizes can be subtracted,
    // leaving only the bytes attributable to this entry itself.
    let childSize = 0
    for (const children of [entry.fields, entry.of]) {
      if (!children) continue
      for (const [name, child] of Object.entries(children)) {
        processType(`${path}/${name}`, child)
        childSize += child.size
      }
    }

    const selfSize = entry.size - childSize

    inputs[path] = {
      bytes: selfSize,
      imports: [],
      format: 'esm',
    }

    output.inputs[path] = {bytesInOutput: selfSize}
    output.bytes += selfSize
  }

  for (const [name, entry] of Object.entries(schema.types)) {
    // Fake module paths group types by what they extend, e.g. `schema/document/post`.
    processType(`schema/${entry.extends}/${name}`, entry)
  }

  return {outputs: {root: output}, inputs}
}
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import {writeFileSync} from 'node:fs'
import path from 'node:path'
import {Worker} from 'node:worker_threads'

Expand All @@ -10,13 +11,15 @@ import {
type ValidateSchemaWorkerResult,
} from '../../threads/validateSchema'
import {formatSchemaValidation, getAggregatedSeverity} from './formatSchemaValidation'
import {generateMetafile} from './metafile'

const __dirname = path.dirname(new URL(import.meta.url).pathname)

interface ValidateFlags {
workspace?: string
format?: string
level?: 'error' | 'warning'
'workspace'?: string
'format'?: string
'level'?: 'error' | 'warning'
'debug-metafile-path'?: string
}

export type SchemaValidationFormatter = (result: ValidateSchemaWorkerResult) => string
Expand Down Expand Up @@ -72,20 +75,30 @@ export default async function validateAction(
workDir,
level,
workspace: flags.workspace,
debugSerialize: Boolean(flags['debug-metafile-path']),
} satisfies ValidateSchemaWorkerData,
env: process.env,
})

const {validation} = await new Promise<ValidateSchemaWorkerResult>((resolve, reject) => {
worker.addListener('message', resolve)
worker.addListener('error', reject)
})
const {validation, serializedDebug} = await new Promise<ValidateSchemaWorkerResult>(
(resolve, reject) => {
worker.addListener('message', resolve)
worker.addListener('error', reject)
},
)

const problems = validation.flatMap((group) => group.problems)
const errorCount = problems.filter((problem) => problem.severity === 'error').length
const warningCount = problems.filter((problem) => problem.severity === 'warning').length

const overallSeverity = getAggregatedSeverity(validation)
const didFail = overallSeverity === 'error'

if (flags['debug-metafile-path'] && !didFail) {
if (!serializedDebug) throw new Error('serializedDebug should always be produced')
const metafile = generateMetafile(serializedDebug)
writeFileSync(flags['debug-metafile-path'], JSON.stringify(metafile), 'utf8')
}

switch (format) {
case 'ndjson': {
Expand Down Expand Up @@ -116,8 +129,18 @@ export default async function validateAction(
output.print()

output.print(formatSchemaValidation(validation))

if (flags['debug-metafile-path']) {
output.print()
if (didFail) {
output.print(`${logSymbols.info} Metafile not written due to validation errors`)
} else {
output.print(`${logSymbols.info} Metafile written to: ${flags['debug-metafile-path']}`)
output.print(` This can be analyzed at https://esbuild.github.io/analyze/`)
}
}
}
}

process.exitCode = overallSeverity === 'error' ? 1 : 0
process.exitCode = didFail ? 1 : 0
}
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ Options
--workspace <name> The name of the workspace to use when validating all schema types.
--format <pretty|ndjson|json> The output format used to print schema errors and warnings.
--level <error|warning> The minimum level reported out. Defaults to warning.
--debug-metafile-path <path> Optional path where a metafile describing the serialized schema sizes will be written

Examples
# Validates all schema types in a Sanity project with more than one workspace
Expand All @@ -17,6 +18,9 @@ Examples

# Report out only errors
sanity schema validate --level error

# Generate a report which can be analyzed with https://esbuild.github.io/analyze/
sanity schema validate --debug-metafile-path metafile.json
`

const validateDocumentsCommand: CliCommandDefinition = {
Expand Down
107 changes: 107 additions & 0 deletions packages/sanity/src/_internal/cli/threads/validateSchema.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,11 @@
import {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'

import {
type EncodableObject,
type EncodableValue,
type SetSynchronization,
} from '@sanity/descriptors'
import {DescriptorConverter} from '@sanity/schema/_internal'
import {type SchemaValidationProblem, type SchemaValidationProblemGroup} from '@sanity/types'

import {getStudioWorkspaces} from '../util/getStudioWorkspaces'
Expand All @@ -10,17 +16,43 @@ export interface ValidateSchemaWorkerData {
workDir: string
workspace?: string
level?: SchemaValidationProblem['severity']
debugSerialize?: boolean
}

/** @internal */
export interface ValidateSchemaWorkerResult {
validation: SchemaValidationProblemGroup[]
serializedDebug?: SeralizedSchemaDebug
}

/**
 * Contains debug information about the serialized schema: the total size of
 * all serialized types, keyed by type name.
 *
 * NOTE(review): the identifier keeps the existing "Seralized" spelling since
 * it is part of the exported (internal) API surface consumed elsewhere.
 *
 * @internal
 **/
export type SeralizedSchemaDebug = {
  // Sum of the serialized sizes of all entries in `types`.
  size: number
  // NOTE(review): never populated by getSeralizedSchemaDebug — confirm intended use.
  parent?: SeralizedSchemaDebug
  // Debug info per schema type, keyed by type name.
  types: Record<string, SerializedTypeDebug>
}

/**
 * Contains debug information about a serialized type.
 *
 * @internal
 **/
export type SerializedTypeDebug = {
  // Length of JSON.stringify of the type definition, including all children.
  size: number
  // Name of the type this one extends; '<unknown>' when not a string.
  extends: string
  // Debug info for object fields, keyed by field name (present only when
  // the type definition had a `fields` array).
  fields?: Record<string, SerializedTypeDebug>
  // Debug info for array member types, keyed by member name (present only
  // when the type definition had an `of` array).
  of?: Record<string, SerializedTypeDebug>
}

const {
workDir,
workspace: workspaceName,
level = 'warning',
debugSerialize,
} = _workerData as ValidateSchemaWorkerData

async function main() {
Expand Down Expand Up @@ -55,6 +87,14 @@ async function main() {
const schema = workspace.schema
const validation = schema._validation!

let serializedDebug: ValidateSchemaWorkerResult['serializedDebug']

if (debugSerialize) {
const conv = new DescriptorConverter({})
const set = conv.get(schema)
serializedDebug = getSeralizedSchemaDebug(set)
}

const result: ValidateSchemaWorkerResult = {
validation: validation
.map((group) => ({
Expand All @@ -64,12 +104,79 @@ async function main() {
),
}))
.filter((group) => group.problems.length),
serializedDebug,
}

parentPort?.postMessage(result)
} catch (err) {
console.error(err)
console.error(err.stack)
throw err
} finally {
cleanup()
}
}

/**
 * Builds per-type size debug information for every object value in the
 * synchronization set, summing the individual sizes into a schema total.
 */
function getSeralizedSchemaDebug(set: SetSynchronization<string>): SeralizedSchemaDebug {
  const types: Record<string, SerializedTypeDebug> = {}
  let totalSize = 0

  for (const [id, objectValue] of Object.entries(set.objectValues)) {
    if (!isEncodableObject(objectValue.typeDef)) continue
    // Fall back to the descriptor id when the value carries no string name.
    const key = typeof objectValue.name === 'string' ? objectValue.name : id
    const typeDebug = getSerializedTypeDebug(objectValue.typeDef)
    types[key] = typeDebug
    totalSize += typeDebug.size
  }

  return {size: totalSize, types}
}

/**
 * Narrows an encodable value to a plain object — i.e. a non-null,
 * non-array value of type 'object'.
 */
function isEncodableObject(val: EncodableValue | undefined): val is EncodableObject {
  if (val == null) return false
  return typeof val === 'object' && !Array.isArray(val)
}

/**
 * Recursively builds debug information for a single serialized type
 * definition. The reported size is the length of the JSON serialization of
 * the whole definition, children included.
 */
function getSerializedTypeDebug(typeDef: EncodableObject): SerializedTypeDebug {
  // Converts an array of `{name, typeDef}` entries into a record of child
  // debug info, skipping malformed entries. Returns undefined when the
  // input is not an array at all.
  const collectChildren = (entries: unknown): Record<string, SerializedTypeDebug> | undefined => {
    if (!Array.isArray(entries)) return undefined
    const children: Record<string, SerializedTypeDebug> = {}
    for (const entry of entries) {
      if (!isEncodableObject(entry)) continue
      const childName = entry.name
      const childDef = entry.typeDef
      if (typeof childName !== 'string' || !isEncodableObject(childDef)) continue
      children[childName] = getSerializedTypeDebug(childDef)
    }
    return children
  }

  return {
    size: JSON.stringify(typeDef).length,
    extends: typeof typeDef.extends === 'string' ? typeDef.extends : '<unknown>',
    fields: collectChildren(typeDef.fields),
    of: collectChildren(typeDef.of),
  }
}

void main().then(() => process.exit())
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,12 @@ export function mockBrowserEnvironment(basePath: string): () => void {
}
}

const btoa = global.btoa
const domCleanup = jsdomGlobal(jsdomDefaultHtml, {url: 'http://localhost:3333/'})

// Don't use jsdom's btoa as it's using the deprecated `abab` package.
if (typeof btoa === 'function') global.btoa = btoa

const windowCleanup = () => global.window.close()
const globalCleanup = provideFakeGlobals(basePath)
const cleanupFileLoader = addHook(
Expand Down
Loading