diff --git a/benches/bench.ts b/benches/bench.ts new file mode 100644 index 00000000..ea156023 --- /dev/null +++ b/benches/bench.ts @@ -0,0 +1,141 @@ +const MAX_ITER = 10; +const MAX_DURATION = 60_000; + +type BenchFn = (cb: () => Promise<void>) => void; + +type TimeitFn = (name: string, cb: () => Promise<void>, opt?: BenchOptions) => void; + +interface BenchOptions { + maxIter?: number; + maxDuration?: number; +} + +export const bench = async ( + cb: (params: { beforeAll: BenchFn; afterAll: BenchFn; time: TimeitFn }) => Promise<void>, + opt?: BenchOptions, +) => { + const { maxIter = MAX_ITER, maxDuration = MAX_DURATION } = opt ?? {}; + const beforePromises: (() => Promise<void>)[] = []; + const afterPromises: (() => Promise<void>)[] = []; + const beforeAll = (cb: () => Promise<void>) => { + beforePromises.push(cb); + }; + const afterAll = (cb: () => Promise<void>) => { + afterPromises.push(cb); + }; + const variants: { + name: string; + cb: () => Promise<void>; + durations: number[]; + totalDuration: number; + maxIter: number; + maxDuration: number; + }[] = []; + const time = (name: string, cb: () => Promise<void>, opt?: BenchOptions) => { + variants.push({ + name, + cb, + durations: [], + totalDuration: 0, + maxIter: opt?.maxIter ?? maxIter, + maxDuration: opt?.maxDuration ??
maxDuration, + }); + }; + + await cb({ beforeAll, afterAll, time }); + + await Promise.all(beforePromises.map(async (cb) => cb())); + + for (const variant of variants) { + console.log(`Running "${variant.name}"...`); + while (variant.durations.length < variant.maxIter && variant.totalDuration < variant.maxDuration) { + try { + const start = performance.now(); + await variant.cb(); + const duration = performance.now() - start; + variant.durations.push(duration); + variant.totalDuration += duration; + } catch (error) { + console.error(`Error running "${variant.name}":`, error); + break; + } + } + } + + await Promise.all(afterPromises.map((cb) => cb())); + + const summary = summarize(variants); + console.log(format(summary)); +}; + +interface Summary { + name: string; + iter: number; + first: number; + min: number; + max: number; + mean: number; + median: number; + p90: number; + p95: number; +} + +const summarize = (variants: { name: string; durations: number[] }[]): Summary[] => { + return variants.map((variant) => { + const sorted = [...variant.durations].sort((a, b) => a - b); + const total = sorted.reduce((a, b) => a + b, 0); + const count = sorted.length; + + const min = sorted[0] || 0; + const max = sorted[count - 1] || 0; + const mean = count > 0 ? total / count : 0; + const median = + count === 0 + ? 0 + : count % 2 === 0 + ? (sorted[count / 2 - 1] + sorted[count / 2]) / 2 + : sorted[Math.floor(count / 2)]; + + const p90 = count === 0 ? 0 : sorted[Math.floor(count * 0.9)]; + const p95 = count === 0 ? 0 : sorted[Math.floor(count * 0.95)]; + + return { + name: variant.name, + iter: variant.durations.length, + first: variant.durations[0] ?? 0, + min, + max, + mean, + median, + p90, + p95, + }; + }); +}; + +const format = (summary: Summary[]): string => { + const headers = ['name', 'iter', 'first', 'min', 'max', 'mean', 'median'] as const; + + const rows = summary.map((s) => ({ + name: s.name.length > 48 ? 
`${s.name.slice(0, 40)}...${s.name.slice(-5)}` : s.name, + iter: s.iter.toString(), + first: s.first.toFixed(4), + min: s.min.toFixed(4), + max: s.max.toFixed(4), + mean: s.mean.toFixed(4), + median: s.median.toFixed(4), + })); + + const allRows = [ + { name: 'name', iter: 'iter', first: 'first', min: 'min', max: 'max', mean: 'mean', median: 'median' }, + ...rows, + ]; + + const widths = headers.map((h) => Math.max(...allRows.map((r) => r[h].length))); + + return allRows + .map((row) => + headers.map((h, i) => (h === 'name' ? row[h].padEnd(widths[i]) : row[h].padStart(widths[i]))).join(' | '), + ) + .join('\n'); +}; diff --git a/benches/bench.v2.sh b/benches/bench.v2.sh new file mode 100755 index 00000000..dac64a66 --- /dev/null +++ b/benches/bench.v2.sh @@ -0,0 +1,90 @@ +#!/usr/bin/env bash +set -euo pipefail + +CONTAINER_NAME=borm_bench_v2 +USER=borm_bench +PASSWORD=borm_bench +NAMESPACE=borm_bench +DATABASE=borm_bench +SCHEMA_FILE="./benches/schema.v2.surql" + +# Function to clean up the container +cleanup() { + echo "Stopping and removing container..." + docker stop ${CONTAINER_NAME} >/dev/null 2>&1 + docker rm ${CONTAINER_NAME} >/dev/null 2>&1 + exit ${EXIT_CODE:-1} # Default to 1 if EXIT_CODE is unset (e.g. early crash) +} + +# Set up trap to call cleanup function on script exit +trap cleanup EXIT INT TERM + +# Function to parse command line arguments +parse_args() { + VITEST_ARGS=() + for arg in "$@" + do + case $arg in + -link=*) + # We'll ignore this parameter now + ;; + *) + VITEST_ARGS+=("$arg") + ;; + esac + done +} + +# Parse the command line arguments +parse_args "$@" + +# Start the container +if ! 
docker run \ + --rm \ + --detach \ + --name $CONTAINER_NAME \ + --user root \ + -p 8002:8002 \ + --pull always \ + surrealdb/surrealdb:v2.3.7 \ + start \ + -u $USER \ + -p $PASSWORD \ + --bind 0.0.0.0:8002 \ + rocksdb:///data/blitz.db; then + echo "Failed to start SurrealDB container" + exit 1 +fi + +until [ "`docker inspect -f {{.State.Running}} $CONTAINER_NAME`" == "true" ]; do + sleep 0.1; +done; + +# Wait for SurrealDB to be ready +echo "Waiting for SurrealDB to be ready..." +until docker exec $CONTAINER_NAME ./surreal is-ready --endpoint http://localhost:8002 2>/dev/null; do + sleep 0.5; +done; +echo "SurrealDB is ready!" + +# Setup surrealdb database: create the namespace, database, and user dynamically +docker exec -i $CONTAINER_NAME ./surreal sql -u $USER -p $PASSWORD --endpoint http://localhost:8002 < { + const a: A[] = []; + const b: B[] = []; + + const randomInt = (min: number, max: number) => Math.floor(Math.random() * (max - min + 1)) + min; + const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + const randomString = (min: number, max: number) => { + const length = randomInt(min, max); + let result = ''; + for (let i = 0; i < length; i++) { + result += chars.charAt(Math.floor(Math.random() * chars.length)); + } + return result; + }; + const randomBoolean = () => Math.random() < 0.5; + const randomDate = () => { + const start = new Date('2020-01-01').getTime(); + const end = new Date('2026-01-01').getTime(); + return new Date(start + Math.random() * (end - start)); + }; + + const generateBase = (): Base => ({ + id: genAlphaId(16), + string_1: randomString(10, 20), + number_1: Math.floor(Math.random() * Number.MAX_SAFE_INTEGER), + boolean_1: randomBoolean(), + datetime_1: randomDate(), + }); + + for (let i = 0; i < params.records; i++) { + b.push(generateBase()); + } + + for (let i = 0; i < params.records; i++) { + const fewLength = randomInt(params.few.min, params.few.max); + const manyLength = randomInt(params.many.min, 
params.many.max); + const fewSet = new Set(); + const manySet = new Set(); + + while (fewSet.size < fewLength && fewSet.size < b.length) { + fewSet.add(b[randomInt(0, b.length - 1)].id); + } + + while (manySet.size < manyLength && manySet.size < b.length) { + manySet.add(b[randomInt(0, b.length - 1)].id); + } + + a.push({ + ...generateBase(), + one: b[i].id, + few: Array.from(fewSet), + many: Array.from(manySet), + }); + } + + return { a, b }; +}; diff --git a/benches/insertData.v2.ts b/benches/insertData.v2.ts new file mode 100644 index 00000000..7a1f17a6 --- /dev/null +++ b/benches/insertData.v2.ts @@ -0,0 +1,102 @@ +import Surreal from 'surrealdb'; +import { type A, type B, type Base, generateData } from './generateData'; + +const URL = 'ws://127.0.0.1:8002'; +const NAMESPACE = 'borm_bench'; +const DATABASE = 'borm_bench'; +const USERNAME = 'borm_bench'; +const PASSWORD = 'borm_bench'; + +const insertData = async () => { + const db = await connect(); + console.log('generating data'); + const data = generateData({ + records: 10, + few: { min: 2, max: 2 }, + many: { min: 3, max: 3 }, + }); + const surql = createSurql(data); + console.log('\n> surql\n', surql); + console.log('inserting data'); + const start = performance.now(); + const result = await db.query(surql); + const end = performance.now(); + console.log(`Time taken: ${end - start} milliseconds`); + return result; +}; + +const connect = async () => { + const db = new Surreal(); + await db.connect(URL, { + namespace: NAMESPACE, + database: DATABASE, + auth: { + username: USERNAME, + password: PASSWORD, + }, + versionCheck: false, + }); + return db; +}; + +const createSurql = (data: { a: A[]; b: B[] }): string => { + const lines = ['BEGIN TRANSACTION;']; + + for (const b of data.b) { + lines.push(`CREATE t_b:${b.id} SET ${createSurqlBaseSet(b)};`); + } + + for (const a of data.a) { + const refFew = `[${a.few.map((i) => `t_b:${i}`).join(', ')}]`; + const refMany = `[${a.many.map((i) => `t_b:${i}`).join(', 
')}]`; + + const tunnelOneId = `${a.id}_${a.one}`; + const tunnelFewIds = a.few.map((i) => `${a.id}_${i}`); + const tunnelManyIds = a.many.map((i) => `${a.id}_${i}`); + const tunnelOne = `tunnel_one:${tunnelOneId}`; + const tunnelFew = `[${tunnelFewIds.map((i) => `tunnel_few:${i}`).join(', ')}]`; + const tunnelMany = `[${tunnelManyIds.map((i) => `tunnel_many:${i}`).join(', ')}]`; + + lines.push( + `CREATE t_a:${a.id} SET ${createSurqlBaseSet(a)}, ref_one = t_b:${a.one}, ref_few = ${refFew}, ref_many = ${refMany};`, + ); + + lines.push(`CREATE ${tunnelOne} SET a = t_a:${a.id}, b = t_b:${a.one};`); + lines.push(`UPDATE t_b:${a.one} SET ref_one = t_a:${a.id}, tunnel_one = tunnel_one:${tunnelOneId};`); + lines.push(`RELATE t_a:${a.id}->edge_one->t_b:${a.one};`); + + for (const b of a.few) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_few:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_few += t_a:${a.id}, tunnel_few += tunnel_few:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_few->t_b:${b};`); + } + + for (const b of a.many) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_many:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_many += t_a:${a.id}, tunnel_many += tunnel_many:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_many->t_b:${b};`); + } + + lines.push( + `UPDATE t_a:${a.id} SET tunnel_one = ${tunnelOne}, tunnel_few = ${tunnelFew}, tunnel_many = ${tunnelMany};`, + ); + } + + lines.push('COMMIT TRANSACTION;'); + + return lines.join('\n'); +}; + +const createSurqlBaseSet = (data: Base): string => { + return `string_1 = "${data.string_1}", number_1 = ${data.number_1}, boolean_1 = ${data.boolean_1}, datetime_1 = type::datetime("${data.datetime_1.toISOString()}")`; +}; + +insertData() + .then(() => { + console.log('Data inserted successfully'); + }) + .catch((error) => { + console.error('Error inserting data:', error); + }); diff --git a/benches/insertData.v3.ts 
b/benches/insertData.v3.ts new file mode 100644 index 00000000..66e667bc --- /dev/null +++ b/benches/insertData.v3.ts @@ -0,0 +1,78 @@ +import Surreal from 'surrealdb'; +import { type A, type B, type Base, generateData } from './generateData'; + +const URL = 'ws://127.0.0.1:8001'; +const NAMESPACE = 'borm_bench'; +const DATABASE = 'borm_bench'; +const USERNAME = 'borm_bench'; +const PASSWORD = 'borm_bench'; + +const insertData = async () => { + const db = await connect(); + console.log('generating data'); + const data = generateData({ + records: 10, + few: { min: 2, max: 2 }, + many: { min: 2, max: 2 }, + }); + const surql = createSurql(data); + console.log('inserting data'); + const start = performance.now(); + const result = await db.query(surql); + const end = performance.now(); + console.log(`Time taken: ${end - start} milliseconds`); + return result; +}; + +const connect = async () => { + const db = new Surreal(); + await db.connect(URL, { + namespace: NAMESPACE, + database: DATABASE, + auth: { + username: USERNAME, + password: PASSWORD, + }, + versionCheck: false, + }); + return db; +}; + +const createSurql = (data: { a: A[]; b: B[] }): string => { + const lines = ['BEGIN TRANSACTION;']; + + for (const b of data.b) { + lines.push(`CREATE t_b:${b.id} SET ${createSurqlBaseSet(b)};`); + } + + for (const a of data.a) { + const refFew = `[${a.few.map((i) => `t_b:${i}`).join(', ')}]`; + const refMany = `[${a.many.map((i) => `t_b:${i}`).join(', ')}]`; + lines.push( + `CREATE t_a:${a.id} SET ${createSurqlBaseSet(a)}, ref_one = t_b:${a.one}, ref_few = ${refFew}, ref_many = ${refMany};`, + ); + lines.push(`RELATE t_a:${a.id}->t_a_b_one->t_b:${a.one};`); + for (const i of a.few) { + lines.push(`RELATE t_a:${a.id}->t_a_b_few->t_b:${i};`); + } + for (const i of a.many) { + lines.push(`RELATE t_a:${a.id}->t_a_b_many->t_b:${i};`); + } + } + + lines.push('COMMIT TRANSACTION;'); + + return lines.join('\n'); +}; + +const createSurqlBaseSet = (data: Base): string => { + return 
`string_1 = "${data.string_1}", number_1 = ${data.number_1}, boolean_1 = ${data.boolean_1}, datetime_1 = type::datetime("${data.datetime_1.toISOString()}")`; +}; + +insertData() + .then(() => { + console.log('Data inserted successfully'); + }) + .catch((error) => { + console.error('Error inserting data:', error); + }); diff --git a/benches/query.v3.ts b/benches/query.v3.ts new file mode 100644 index 00000000..587efdde --- /dev/null +++ b/benches/query.v3.ts @@ -0,0 +1,42 @@ +import Surreal from 'surrealdb'; + +const URL = 'ws://127.0.0.1:8001'; +const NAMESPACE = 'borm_bench'; +const DATABASE = 'borm_bench'; +const USERNAME = 'borm_bench'; +const PASSWORD = 'borm_bench'; + +const query = async () => { + const db = await connect(); + const result = await db.query('SELECT id FROM type::table($table) WHERE id = type::record($id) LIMIT 2', { + table: 't_a', + id: 't_a:A0HE7yuafcaZYxFd', + // alias: 'tableId', + b: [true], + }); + console.log(JSON.stringify(result, null, 2)); + return result; +}; + +const connect = async () => { + const db = new Surreal(); + await db.connect(URL, { + namespace: NAMESPACE, + database: DATABASE, + auth: { + username: USERNAME, + password: PASSWORD, + }, + versionCheck: false, + }); + return db; +}; + +query() + .then(() => { + process.exit(0); + }) + .catch((error) => { + console.error(error); + process.exit(1); + }); diff --git a/benches/rundb.v2.sh b/benches/rundb.v2.sh new file mode 100755 index 00000000..49d6716a --- /dev/null +++ b/benches/rundb.v2.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash +set -euo pipefail + +CONTAINER_NAME=borm_bench_v2 +USER=borm_bench +PASSWORD=borm_bench +NAMESPACE=borm_bench +DATABASE=borm_bench +SCHEMA_FILE="./benches/schema.v2.surql" + +# Start the container +docker run \ + --rm \ + --detach \ + --name $CONTAINER_NAME \ + -v borm_bench_data_v2:/data \ + -e SURREAL_CAPS_ALLOW_EXPERIMENTAL=graphql \ + --user root \ + -p 8002:8002 \ + --pull always \ + surrealdb/surrealdb:v2 \ + start \ + -u $USER \ + -p 
$PASSWORD \ + --bind 0.0.0.0:8002 \ + rocksdb:///data/blitz.db + # surrealkv:///data/blitz.db + +until [ "`docker inspect -f {{.State.Running}} $CONTAINER_NAME`" == "true" ]; do + sleep 0.1; +done; + +# Wait for SurrealDB to be ready +echo "Waiting for SurrealDB to be ready..." +until docker exec $CONTAINER_NAME ./surreal is-ready --endpoint http://localhost:8002 2>/dev/null; do + sleep 0.5; +done; +echo "SurrealDB is ready!" + +# Setup surrealdb database: create the namespace, database, and user dynamically +docker exec -i $CONTAINER_NAME ./surreal sql -u $USER -p $PASSWORD --endpoint http://localhost:8002 </dev/null; do + sleep 0.5; +done; +echo "SurrealDB is ready!" + +# Setup surrealdb database: create the namespace, database, and user dynamically +docker exec -i $CONTAINER_NAME ./surreal sql -u $USER -p $PASSWORD --endpoint http://localhost:8001 <; +DEFINE FIELD OVERWRITE number_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE boolean_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE datetime_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE ref_one ON TABLE t_a TYPE option>; +DEFINE FIELD OVERWRITE ref_few ON TABLE t_a TYPE option>>; +DEFINE FIELD OVERWRITE ref_many ON TABLE t_a TYPE option>>; +DEFINE FIELD OVERWRITE fut_one ON TABLE t_a VALUE { RETURN array::first(SELECT VALUE b FROM $parent.tunnel_one) || NONE }; +DEFINE FIELD OVERWRITE fut_few ON TABLE t_a VALUE { RETURN SELECT VALUE b FROM $this.tunnel_few }; +DEFINE FIELD OVERWRITE fut_many ON TABLE t_a VALUE { RETURN SELECT VALUE b FROM $this.tunnel_many }; +DEFINE FIELD OVERWRITE tunnel_one ON TABLE t_a TYPE option>; +DEFINE FIELD OVERWRITE tunnel_few ON TABLE t_a TYPE option>>; +DEFINE FIELD OVERWRITE tunnel_many ON TABLE t_a TYPE option>>; +DEFINE INDEX IF NOT EXISTS idx_a_string_1 ON TABLE t_a COLUMNS string_1; +DEFINE INDEX IF NOT EXISTS idx_a_ref_one ON TABLE t_a COLUMNS ref_one; +DEFINE INDEX IF NOT EXISTS idx_a_ref_few ON TABLE t_a COLUMNS ref_few; +DEFINE INDEX IF NOT EXISTS 
idx_a_ref_many ON TABLE t_a COLUMNS ref_many; +DEFINE INDEX IF NOT EXISTS idx_a_tunnel_one ON TABLE t_a COLUMNS tunnel_one; +DEFINE INDEX IF NOT EXISTS idx_a_tunnel_few ON TABLE t_a COLUMNS tunnel_few; +DEFINE INDEX IF NOT EXISTS idx_a_tunnel_many ON TABLE t_a COLUMNS tunnel_many; + +DEFINE TABLE IF NOT EXISTS t_b SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE string_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE number_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE boolean_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE datetime_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE ref_one ON TABLE t_b TYPE option>; +DEFINE FIELD OVERWRITE ref_few ON TABLE t_b TYPE option>>; +DEFINE FIELD OVERWRITE ref_many ON TABLE t_b TYPE option>>; +DEFINE FIELD OVERWRITE fut_one ON TABLE t_b VALUE { RETURN array::first(SELECT VALUE a FROM $parent.tunnel_one) || NONE }; +DEFINE FIELD OVERWRITE fut_few ON TABLE t_b VALUE { RETURN SELECT VALUE a FROM $this.tunnel_few }; +DEFINE FIELD OVERWRITE fut_many ON TABLE t_b VALUE { RETURN SELECT VALUE a FROM $this.tunnel_many }; +DEFINE FIELD OVERWRITE tunnel_one ON TABLE t_b TYPE option>; +DEFINE FIELD OVERWRITE tunnel_few ON TABLE t_b TYPE option>>; +DEFINE FIELD OVERWRITE tunnel_many ON TABLE t_b TYPE option>>; +DEFINE INDEX IF NOT EXISTS idx_b_string_1 ON TABLE t_b COLUMNS string_1; +DEFINE INDEX IF NOT EXISTS idx_b_ref_one ON TABLE t_b COLUMNS ref_one; +DEFINE INDEX IF NOT EXISTS idx_b_ref_few ON TABLE t_b COLUMNS ref_few; +DEFINE INDEX IF NOT EXISTS idx_b_ref_many ON TABLE t_b COLUMNS ref_many; +DEFINE INDEX IF NOT EXISTS idx_b_tunnel_one ON TABLE t_b COLUMNS tunnel_one; +DEFINE INDEX IF NOT EXISTS idx_b_tunnel_few ON TABLE t_b COLUMNS tunnel_few; +DEFINE INDEX IF NOT EXISTS idx_b_tunnel_many ON TABLE t_b COLUMNS tunnel_many; + +DEFINE TABLE IF NOT EXISTS tunnel_one SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE a ON TABLE tunnel_one TYPE option>; +DEFINE FIELD OVERWRITE b ON TABLE tunnel_one TYPE option>; 
+DEFINE INDEX IF NOT EXISTS unique_tunnel_one_a ON TABLE tunnel_one COLUMNS a UNIQUE; +DEFINE INDEX IF NOT EXISTS unique_tunnel_one_b ON TABLE tunnel_one COLUMNS b UNIQUE; + +DEFINE TABLE IF NOT EXISTS tunnel_few SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE a ON TABLE tunnel_few TYPE option>; +DEFINE FIELD OVERWRITE b ON TABLE tunnel_few TYPE option>; +DEFINE INDEX IF NOT EXISTS unique_tunnel_few_a_b ON TABLE tunnel_few COLUMNS a, b UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_tunnel_few_b ON TABLE tunnel_few COLUMNS b; + +DEFINE TABLE IF NOT EXISTS tunnel_many SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE a ON TABLE tunnel_many TYPE option>; +DEFINE FIELD OVERWRITE b ON TABLE tunnel_many TYPE option>; +DEFINE INDEX IF NOT EXISTS unique_tunnel_many_a_b ON TABLE tunnel_many COLUMNS a, b UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_tunnel_many_b ON TABLE tunnel_many COLUMNS b; + +-- These tables are not needed for the benchmark, but they are here for reference + +DEFINE TABLE IF NOT EXISTS edge_one SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_edge_one_in ON TABLE edge_one COLUMNS in UNIQUE; +DEFINE INDEX IF NOT EXISTS unique_edge_one_out ON TABLE edge_one COLUMNS out UNIQUE; + +DEFINE TABLE IF NOT EXISTS edge_few SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_edge_few_in_out ON TABLE edge_few COLUMNS in, out UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_edge_few_out ON TABLE edge_few COLUMNS out; + +DEFINE TABLE IF NOT EXISTS edge_many SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_edge_many_in_out ON TABLE edge_many COLUMNS in, out UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_edge_many_out ON TABLE edge_many COLUMNS out; + +DEFINE FUNCTION fn::as_array($var: option|record>) { + RETURN (type::is::array($var) AND $var) OR [$var] +}; + +COMMIT TRANSACTION; diff --git a/benches/schema.v2.ts b/benches/schema.v2.ts new file mode 100644 
index 00000000..eaec2fd7 --- /dev/null +++ b/benches/schema.v2.ts @@ -0,0 +1,207 @@ +import type { BormSchema, DataField } from '../src'; +import { genId } from '../src/helpers'; + +const id: DataField = { + path: 'id', + default: { type: 'fn', fn: () => genId() }, + validations: { required: true, unique: true }, + contentType: 'ID', + rights: ['CREATE'], +}; + +export const schema: BormSchema = { + entities: {}, + relations: { + t_a: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 't_a' }, + dataFields: [ + id, + { contentType: 'BOOLEAN', path: 'boolean_1' }, + { contentType: 'NUMBER', path: 'number_1' }, + { contentType: 'TEXT', path: 'string_1' }, + { contentType: 'DATE', path: 'datetime_1' }, + ], + roles: {}, + linkFields: [ + { + path: 'ref_one', + cardinality: 'ONE', + relation: 't_b', + plays: 'ref_one', + target: 'relation', + }, + { + path: 'ref_few', + cardinality: 'MANY', + relation: 't_b', + plays: 'ref_few', + target: 'relation', + }, + { + path: 'ref_many', + cardinality: 'MANY', + relation: 't_b', + plays: 'ref_many', + target: 'relation', + }, + { + path: 'fut_one', + relation: 'tunnel_one', + plays: 'a', + target: 'role', + targetRole: 'b', + cardinality: 'ONE', + }, + { + path: 'fut_few', + relation: 'tunnel_few', + plays: 'a', + target: 'role', + targetRole: 'b', + cardinality: 'MANY', + }, + { + path: 'fut_many', + relation: 'tunnel_many', + plays: 'a', + target: 'role', + targetRole: 'b', + cardinality: 'MANY', + }, + { + path: 'tunnel_one', + relation: 'tunnel_one', + plays: 'a', + target: 'relation', + cardinality: 'ONE', + }, + { + path: 'tunnel_few', + relation: 'tunnel_few', + plays: 'a', + target: 'relation', + cardinality: 'MANY', + }, + { + path: 'tunnel_many', + relation: 'tunnel_many', + plays: 'a', + target: 'relation', + cardinality: 'MANY', + }, + ], + }, + t_b: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 't_b' }, + dataFields: [ + id, + { contentType: 'BOOLEAN', path: 'boolean_1' }, + { 
contentType: 'NUMBER', path: 'number_1' }, + { contentType: 'TEXT', path: 'string_1' }, + { contentType: 'DATE', path: 'datetime_1' }, + ], + roles: { + ref_one: { + cardinality: 'ONE', + }, + ref_few: { + cardinality: 'MANY', + }, + ref_many: { + cardinality: 'MANY', + }, + }, + linkFields: [ + { + path: 'fut_one', + relation: 'tunnel_one', + plays: 'b', + target: 'role', + targetRole: 'a', + cardinality: 'ONE', + }, + { + path: 'fut_few', + relation: 'tunnel_few', + plays: 'b', + target: 'role', + targetRole: 'a', + cardinality: 'MANY', + }, + { + path: 'fut_many', + relation: 'tunnel_many', + plays: 'b', + target: 'role', + targetRole: 'a', + cardinality: 'MANY', + }, + { + path: 'tunnel_one', + relation: 'tunnel_one', + plays: 'b', + target: 'relation', + cardinality: 'ONE', + }, + { + path: 'tunnel_few', + relation: 'tunnel_few', + plays: 'b', + target: 'relation', + cardinality: 'MANY', + }, + { + path: 'tunnel_many', + relation: 'tunnel_many', + plays: 'b', + target: 'relation', + cardinality: 'MANY', + }, + ], + }, + tunnel_one: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 'tunnel_one' }, + dataFields: [id], + roles: { + a: { + cardinality: 'ONE', + }, + b: { + cardinality: 'ONE', + }, + }, + linkFields: [], + }, + tunnel_few: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 'tunnel_few' }, + dataFields: [id], + roles: { + a: { + cardinality: 'ONE', + }, + b: { + cardinality: 'ONE', + }, + }, + linkFields: [], + }, + tunnel_many: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 'tunnel_many' }, + dataFields: [id], + roles: { + a: { + cardinality: 'ONE', + }, + b: { + cardinality: 'ONE', + }, + }, + linkFields: [], + }, + }, +}; diff --git a/benches/schema.v3.surql b/benches/schema.v3.surql new file mode 100644 index 00000000..80c2b60e --- /dev/null +++ b/benches/schema.v3.surql @@ -0,0 +1,41 @@ +USE NS borm_bench; +USE DB borm_bench; + +BEGIN TRANSACTION; + +DEFINE TABLE IF NOT EXISTS t_a SCHEMAFULL 
PERMISSIONS FULL; +DEFINE FIELD OVERWRITE string_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE number_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE boolean_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE datetime_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE ref_one ON TABLE t_a TYPE option> REFERENCE; +DEFINE FIELD OVERWRITE ref_few ON TABLE t_a TYPE option>> REFERENCE; +DEFINE FIELD OVERWRITE ref_many ON TABLE t_a TYPE option>> REFERENCE; +DEFINE INDEX IF NOT EXISTS idx_a_string_1 ON TABLE t_a COLUMNS string_1; +DEFINE INDEX IF NOT EXISTS idx_a_ref_one ON TABLE t_a COLUMNS ref_one; +DEFINE INDEX IF NOT EXISTS idx_a_ref_few ON TABLE t_a COLUMNS ref_few; +DEFINE INDEX IF NOT EXISTS idx_a_ref_many ON TABLE t_a COLUMNS ref_many; + +DEFINE TABLE IF NOT EXISTS t_b SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE string_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE number_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE boolean_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE datetime_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE computed_one ON TABLE t_b COMPUTED <~(t_a FIELD ref_one); +DEFINE FIELD OVERWRITE computed_few ON TABLE t_b COMPUTED <~(t_a FIELD ref_few); +DEFINE FIELD OVERWRITE computed_many ON TABLE t_b COMPUTED <~(t_a FIELD ref_many); +DEFINE INDEX IF NOT EXISTS idx_b_string_1 ON TABLE t_b COLUMNS string_1; + +DEFINE TABLE IF NOT EXISTS t_a_b_one SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_a_b_one_in ON TABLE t_a_b_one COLUMNS in UNIQUE; +DEFINE INDEX IF NOT EXISTS unique_a_b_one_out ON TABLE t_a_b_one COLUMNS out UNIQUE; + +DEFINE TABLE IF NOT EXISTS t_a_b_few SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_a_b_few_in_out ON TABLE t_a_b_few COLUMNS in, out UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_a_b_few_out ON TABLE t_a_b_few COLUMNS out; + +DEFINE TABLE IF NOT EXISTS t_a_b_many SCHEMAFULL TYPE RELATION IN t_a OUT t_b 
PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_a_b_many_in_out ON TABLE t_a_b_many COLUMNS in, out UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_a_b_many_out ON TABLE t_a_b_many COLUMNS out; + +COMMIT TRANSACTION; diff --git a/benches/v2.bench.ts b/benches/v2.bench.ts new file mode 100644 index 00000000..924b177d --- /dev/null +++ b/benches/v2.bench.ts @@ -0,0 +1,281 @@ +import Surreal from 'surrealdb'; +import type BormClient from '../src'; +import { setup } from '../tests/helpers/setup'; +import { bench } from './bench'; +import { type A, type B, type Base, generateData } from './generateData'; +import { schema } from './schema.v2'; + +const URL = 'ws://127.0.0.1:8002'; +const NAMESPACE = 'borm_bench'; +const DATABASE = 'borm_bench'; +const USERNAME = 'borm_bench'; +const PASSWORD = 'borm_bench'; + +let client: BormClient; +let cleanup: () => Promise<void>; +let data: { a: A[]; b: B[] }; + +bench(async ({ beforeAll, afterAll, time }) => { + beforeAll(async () => { + const result = await setup({ + config: { + server: { + provider: 'blitz-orm-js', + }, + dbConnectors: [ + { + id: 'default', + provider: 'surrealDB', + providerConfig: { linkMode: 'refs' }, + url: URL, + namespace: NAMESPACE, + dbName: DATABASE, + username: USERNAME, + password: PASSWORD, + }, + ], + query: { + legacySurrealDBAdapter: process.env.BORM_TEST_LEGACY_SURREALDB_ADAPTER?.toLowerCase() === 'true', + }, + }, + schema, + }); + client = result.client; + cleanup = result.cleanup; + console.log('Generating data'); + data = generateData({ + records: 1000, + few: { min: 5, max: 5 }, + many: { min: 20, max: 20 }, + }); + console.log('Connecting to database'); + const surrealDB = await connect(); + console.log('Creating surql'); + const surql = createSurql(data); + console.log('Inserting data'); + await surrealDB.query(surql); + }); + + afterAll(async () => { + console.log('Cleaning up'); + await cleanup(); + }); + + time('Select all, sort by string_1, and limit 100', async () => { + await client.query({
$relation: 't_a', $limit: 100, $sort: [{ field: 'string_1', desc: true }] }); + }); + + time('Filter by id', async () => { + const a = data.a[randIndex(data.a.length)]; + await client.query({ $relation: 't_a', $id: a.id }); + }); + + time('Filter by multiple ids', async () => { + const a1 = data.a[randIndex(data.a.length)]; + const a2 = data.a[randIndex(data.a.length)]; + const a3 = data.a[randIndex(data.a.length)]; + await client.query({ $relation: 't_a', $id: [a1.id, a2.id, a3.id] }); + }); + + time('Filter by indexed field', async () => { + const a = data.a[randIndex(data.a.length)]; + await client.query({ $relation: 't_a', $filter: { string_1: a.string_1 } }); + }); + + time('Filter by indexed field and non-indexed field', async () => { + const a = data.a[randIndex(data.a.length)]; + // string_1 is indexed, number_1 is not. + // Put number_1 first. Optimized surql should put string_1 first. + await client.query({ $relation: 't_a', $filter: { number_1: a.number_1, string_1: a.string_1 } }); + }); + + time('Filter by ref_one', async () => { + const b = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { ref_one: b.id } }); + }); + + time('Filter by ref_many', async () => { + const b = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { ref_many: b.id } }); + }); + + time('Filter by ref_one string_1', async () => { + const b = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { ref_one: { string_1: b.string_1 } } }); + }); + + time('Filter by ref_many string_1', async () => { + const b = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { ref_many: { string_1: b.string_1 } } }); + }); + + time('Filter by fut_one', async () => { + const b = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { fut_one: b.id } }); + }); + + time('Filter by fut_many', async () => { + const b = 
data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { fut_many: b.id } }); + }); + + time('Filter by fut_one string_1', async () => { + const b = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { fut_one: { string_1: b.string_1 } } }); + }); + + time('Filter by fut_many string_1', async () => { + const b = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { fut_many: { string_1: b.string_1 } } }); + }); + + time('Filter by multiple ref_one', async () => { + const b1 = data.b[randIndex(data.b.length)]; + const b2 = data.b[randIndex(data.b.length)]; + const b3 = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { ref_one: [b1.id, b2.id, b3.id] } }); + }); + + time('Filter by multiple ref_many', async () => { + const b1 = data.b[randIndex(data.b.length)]; + const b2 = data.b[randIndex(data.b.length)]; + const b3 = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { ref_many: [b1.id, b2.id, b3.id] } }); + }); + + time('Filter by multiple fut_one', async () => { + const b1 = data.b[randIndex(data.b.length)]; + const b2 = data.b[randIndex(data.b.length)]; + const b3 = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { fut_one: [b1.id, b2.id, b3.id] } }); + }); + + time('Filter by multiple fut_many', async () => { + const b1 = data.b[randIndex(data.b.length)]; + const b2 = data.b[randIndex(data.b.length)]; + const b3 = data.b[randIndex(data.b.length)]; + await client.query({ $relation: 't_a', $filter: { fut_many: [b1.id, b2.id, b3.id] } }); + }); + + time('Filter by multiple values of an indexed field', async () => { + const a1 = data.a[randIndex(data.a.length)]; + const a2 = data.a[randIndex(data.a.length)]; + const a3 = data.a[randIndex(data.a.length)]; + await client.query({ $relation: 't_a', $filter: { string_1: [a1.string_1, a2.string_1, a3.string_1] } }); + }); + + 
time('Filter by multiple values of an non-indexed field', async () => { + const a1 = data.a[randIndex(data.a.length)]; + const a2 = data.a[randIndex(data.a.length)]; + const a3 = data.a[randIndex(data.a.length)]; + await client.query({ $relation: 't_a', $filter: { number_1: [a1.number_1, a2.number_1, a3.number_1] } }); + }); + + time('Filter by a single value of an indexed field and multiple ref_one', async () => { + const a1 = data.a[randIndex(data.a.length)]; + const a2 = data.a[randIndex(data.a.length)]; + const a3 = data.a[randIndex(data.a.length)]; + // Optimized surql should convert ref_one into relationship traversal. + await client.query({ $relation: 't_a', $filter: { ref_one: [a1.one, a2.one, a3.one], string_1: a1.string_1 } }); + }); + + time('Filter by a single value of an indexed field and multiple fut_one', async () => { + const a1 = data.a[randIndex(data.a.length)]; + const a2 = data.a[randIndex(data.a.length)]; + const a3 = data.a[randIndex(data.a.length)]; + // Optimized surql should convert fut_one into relationship traversal. 
+ await client.query({ $relation: 't_a', $filter: { fut_one: [a1.one, a2.one, a3.one], string_1: a1.string_1 } }); + }); + + time('Nested ref_one', async () => { + const a = data.a[randIndex(data.a.length)]; + await client.query({ $relation: 't_a', $id: a.id, $fields: [{ $path: 'ref_one' }] }); + }); + + time('Nested ref_many', async () => { + const a = data.a[randIndex(data.a.length)]; + await client.query({ $relation: 't_a', $id: a.id, $fields: [{ $path: 'ref_many' }] }); + }); + + time('Nested fut_one', async () => { + const a = data.a[randIndex(data.a.length)]; + await client.query({ $relation: 't_a', $id: a.id, $fields: [{ $path: 'fut_one' }] }); + }); + + time('Nested fut_many', async () => { + const a = data.a[randIndex(data.a.length)]; + await client.query({ $relation: 't_a', $id: a.id, $fields: [{ $path: 'fut_many' }] }); + }); +}); + +const connect = async () => { + const db = new Surreal(); + await db.connect(URL, { + namespace: NAMESPACE, + database: DATABASE, + auth: { + username: USERNAME, + password: PASSWORD, + }, + versionCheck: false, + }); + return db; +}; + +const createSurql = (data: { a: A[]; b: B[] }): string => { + const lines = ['BEGIN TRANSACTION;']; + + for (const b of data.b) { + lines.push(`CREATE t_b:${b.id} SET ${createSurqlBaseSet(b)};`); + } + + for (const a of data.a) { + const refFew = `[${a.few.map((i) => `t_b:${i}`).join(', ')}]`; + const refMany = `[${a.many.map((i) => `t_b:${i}`).join(', ')}]`; + + const tunnelOneId = `${a.id}_${a.one}`; + const tunnelFewIds = a.few.map((i) => `${a.id}_${i}`); + const tunnelManyIds = a.many.map((i) => `${a.id}_${i}`); + const tunnelOne = `tunnel_one:${tunnelOneId}`; + const tunnelFew = `[${tunnelFewIds.map((i) => `tunnel_few:${i}`).join(', ')}]`; + const tunnelMany = `[${tunnelManyIds.map((i) => `tunnel_many:${i}`).join(', ')}]`; + + lines.push( + `CREATE t_a:${a.id} SET ${createSurqlBaseSet(a)}, ref_one = t_b:${a.one}, ref_few = ${refFew}, ref_many = ${refMany};`, + ); + + lines.push(`CREATE 
${tunnelOne} SET a = t_a:${a.id}, b = t_b:${a.one};`); + lines.push(`UPDATE t_b:${a.one} SET ref_one = t_a:${a.id}, tunnel_one = tunnel_one:${tunnelOneId};`); + lines.push(`RELATE t_a:${a.id}->edge_one->t_b:${a.one};`); + + for (const b of a.few) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_few:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_few += t_a:${a.id}, tunnel_few += tunnel_few:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_few->t_b:${b};`); + } + + for (const b of a.many) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_many:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_many += t_a:${a.id}, tunnel_many += tunnel_many:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_many->t_b:${b};`); + } + + lines.push( + `UPDATE t_a:${a.id} SET tunnel_one = ${tunnelOne}, tunnel_few = ${tunnelFew}, tunnel_many = ${tunnelMany};`, + ); + } + + lines.push('COMMIT TRANSACTION;'); + + return lines.join('\n'); +}; + +const createSurqlBaseSet = (data: Base): string => { + return `string_1 = "${data.string_1}", number_1 = ${data.number_1}, boolean_1 = ${data.boolean_1}, datetime_1 = type::datetime("${data.datetime_1.toISOString()}")`; +}; + +const randIndex = (len: number) => { + return Math.floor(Math.random() * len); +}; diff --git a/package.json b/package.json index 2248d65e..70eb74b3 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,9 @@ "prepare": "husky", "pub": "pnpm build && pnpm publish", "knip": "knip", + "bench:surrealdb": "./benches/bench.v2.sh", + "bench:tests:surrealdb": "cross-env BORM_TEST_ADAPTER=surrealDB BORM_TEST_SURREALDB_LINK_MODE=refs ./tests/benchTests.sh", + "bench:tests:surrealdb:legacy": "cross-env BORM_TEST_ADAPTER=surrealDB BORM_TEST_SURREALDB_LINK_MODE=refs LEGACY_SURREALDB_ADAPTER=true ./tests/benchTests.sh", "bench:surrealdb:edges": "cross-env BORM_TEST_ADAPTER=surrealDB BORM_TEST_SURREALDB_LINK_MODE=edges ./tests/bench.sh tests/unit/bench", 
"bench:surrealdb:refs": "cross-env BORM_TEST_ADAPTER=surrealDB BORM_TEST_SURREALDB_LINK_MODE=refs ./tests/bench.sh tests/unit/bench", "bench:typedb": "vitest bench typedb/bench", @@ -71,7 +74,8 @@ "robot3": "^1.1.1", "surrealdb": "^1.3.2", "typedb-driver": "^2.29.2", - "uuid": "^11.1.0" + "uuid": "^11.1.0", + "zod": "^4.2.1" }, "devDependencies": { "@biomejs/biome": "^2.1.3", @@ -85,6 +89,7 @@ "lint-staged": "^16.1.2", "only-allow": "^1.2.1", "prettier": "^3.6.2", + "tinybench": "^6.0.0", "tsup": "^8.5.0", "typescript": "^5.8.3", "vitest": "^3.2.4" @@ -100,5 +105,6 @@ "homepage": "https://github.com/Blitzapps/blitz-orm#readme", "directories": { "test": "tests" - } + }, + "packageManager": "pnpm@8.10.2+sha512.0782093d5ba6c7ad9462081bc1ef0775016a4b4109eca1e1fedcea6f110143af5f50993db36c427d4fa8c62be3920a3224db12da719d246ca19dd9f18048c33c" } diff --git a/src/adapters/typeDB/schema/define.ts b/src/adapters/typeDB/schema/define.ts index 914a220a..ed0a7af1 100644 --- a/src/adapters/typeDB/schema/define.ts +++ b/src/adapters/typeDB/schema/define.ts @@ -44,6 +44,7 @@ export const convertTQLSchema = (connectorId: any, schema: EnrichedBormSchema) = // Adding data fields if (dataFields && dataFields.length > 0) { for (const field of dataFields) { + // @ts-expect-error TODO: fix type error if (field.contentType === 'REF') { continue; } //ignore ref types diff --git a/src/enrichSchema.draft.ts b/src/enrichSchema.draft.ts new file mode 100644 index 00000000..4b49680a --- /dev/null +++ b/src/enrichSchema.draft.ts @@ -0,0 +1,472 @@ +import { isEqual } from 'radash'; +import type { BormEntity, BormRelation, BormSchema, DataField, LinkField, RefField, RoleField } from './types'; +import type { + DRAFT_EnrichedBormComputedField, + DRAFT_EnrichedBormConstantField, + DRAFT_EnrichedBormDataField, + DRAFT_EnrichedBormEntity, + DRAFT_EnrichedBormField, + DRAFT_EnrichedBormLinkField, + DRAFT_EnrichedBormRefField, + DRAFT_EnrichedBormRelation, + DRAFT_EnrichedBormRoleField, + 
DRAFT_EnrichedBormSchema, +} from './types/schema/enriched.draft'; + +export const enrichSchemaDraft = (schema: BormSchema): DRAFT_EnrichedBormSchema => { + const extendedSchema = extendSchema(schema); + const enrichedSchema: DRAFT_EnrichedBormSchema = {}; + const rolePlayerMap: RolePlayerMap = buildRolePlayerMap(extendedSchema); + + for (const entity in extendedSchema.entities) { + enrichThing('entity', entity, enrichedSchema, extendedSchema, rolePlayerMap); + } + + for (const relation in extendedSchema.relations) { + enrichThing('relation', relation, enrichedSchema, extendedSchema, rolePlayerMap); + } + + return enrichedSchema; +}; + +/** + * Mutate the enriched schema in place. + */ +const enrichThing = ( + type: 'entity' | 'relation', + thingName: string, + mutEnrichedSchema: DRAFT_EnrichedBormSchema, + schema: BormSchema, + rolePlayerMap: RolePlayerMap, +): DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation => { + const enrichedEntity = mutEnrichedSchema[thingName]; + if (enrichedEntity) { + if (enrichedEntity.type === type) { + return enrichedEntity; + } + throw new Error(`Found entity and relation with the same name: ${thingName}`); + } + const thing = + type === 'entity' ? schema.entities[thingName] : (schema.relations[thingName] as BormEntity | BormRelation); + if (!thing) { + throw new Error(`${type === 'entity' ? 'Entity' : 'Relation'} "${thingName}" not found`); + } + + const extended = + 'extends' in thing && thing.extends + ? enrichThing(type, thing.extends, mutEnrichedSchema, schema, rolePlayerMap) + : undefined; + + if (extended) { + addSubType(extended.name, thingName, mutEnrichedSchema); + } + + const fields: Record = {}; + const idFields = extended ? extended.idFields : getIdFields(thingName, thing); + + enrichDataFields(fields, thing.dataFields ?? [], thingName); + enrichRefFields(fields, thing.refFields ?? {}, thingName); + enrichLinkFields(fields, thing.linkFields ?? 
[], thingName, schema, rolePlayerMap); + + if (type === 'entity') { + const enriched: DRAFT_EnrichedBormEntity = { + type: 'entity', + name: thingName, + idFields, + extends: extended ? extended.name : undefined, + subTypes: [], + indexes: thing.indexes ?? [], + fields: fields as DRAFT_EnrichedBormEntity['fields'], + }; + mutEnrichedSchema[thingName] = enriched; + return enriched; + } + + if ('roles' in thing && thing.roles) { + enrichRoleFields( + fields as Record, + (thing.roles as Record) ?? {}, + thingName, + rolePlayerMap, + ); + } + + const enriched: DRAFT_EnrichedBormRelation = { + type: 'relation', + name: thingName, + idFields, + extends: extended ? extended.name : undefined, + subTypes: [], + indexes: thing.indexes ?? [], + fields, + }; + mutEnrichedSchema[thingName] = enriched; + return enriched; +}; + +const addSubType = (thing: string, subThing: string, mutSchema: DRAFT_EnrichedBormSchema) => { + let currentThing: string | undefined = thing; + while (currentThing) { + const enrichedThing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation | undefined = mutSchema[currentThing]; + if (!enrichedThing) { + throw new Error(`Thing "${currentThing}" not found`); + } + enrichedThing.subTypes.push(subThing); + currentThing = enrichedThing.extends; + } +}; + +/** + * Mutate the enriched fields in place. + */ +const enrichDataFields = ( + mutEnrichedFields: Record, + dataFields: readonly DataField[], + thingName: string, +) => { + for (const df of dataFields ?? []) { + const existing = mutEnrichedFields[df.path]; + if (df.isVirtual) { + if (df.default?.type === 'fn' && typeof df.default.fn === 'function') { + const enriched: DRAFT_EnrichedBormComputedField = { + type: 'computed', + name: df.path, + contentType: df.contentType, + cardinality: df.cardinality ?? 
'ONE', + fn: df.default.fn, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[df.path] = enriched; + continue; + } + + if (df.default?.type === 'value') { + const enriched: DRAFT_EnrichedBormConstantField = { + type: 'constant', + name: df.path, + contentType: df.contentType, + cardinality: df.cardinality ?? 'ONE', + value: df.default.value, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[df.path] = enriched; + continue; + } + } + + const enriched: DRAFT_EnrichedBormDataField = { + type: 'data', + name: df.path, + contentType: df.contentType, + cardinality: df.cardinality ?? 'ONE', + unique: df.validations?.unique ?? false, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[df.path] = enriched; + } +}; + +/** + * Mutate the enriched fields in place. + */ +const enrichRefFields = ( + mutEnrichedFields: Record, + refFields: Record, + thingName: string, +) => { + for (const [refName, ref] of Object.entries(refFields ?? {})) { + const existing = mutEnrichedFields[refName]; + const enriched: DRAFT_EnrichedBormRefField = { + type: 'ref', + name: refName, + contentType: ref.contentType, + cardinality: ref.cardinality ?? 'ONE', + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[refName] = enriched; + } +}; + +/** + * Mutate the enriched fields in place. + */ +const enrichLinkFields = ( + mutEnrichedFields: Record, + linkFields: readonly LinkField[], + thingName: string, + schema: BormSchema, + rolePlayerMap: RolePlayerMap, +) => { + for (const lf of linkFields ?? 
[]) { + const targetRel = schema.relations[lf.relation]; + if (!targetRel) { + throw new Error(`Relation ${lf.relation} not found`); + } + const targetRole = targetRel.roles?.[lf.plays]; + if (!targetRole) { + throw new Error(`Role ${lf.plays} not found in relation ${lf.relation}`); + } + const existing = mutEnrichedFields[lf.path]; + + if (lf.target === 'relation') { + const enriched: DRAFT_EnrichedBormLinkField = { + type: 'link', + name: lf.path, + cardinality: lf.cardinality, + target: 'relation', + opposite: { + thing: lf.relation, + path: lf.plays, + cardinality: targetRole.cardinality, + }, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[lf.path] = enriched; + continue; + } + + const oppositeRole = rolePlayerMap[lf.relation]?.[lf.targetRole]; + if (!oppositeRole) { + throw new Error(`Role ${lf.targetRole} in relation ${lf.relation} does not exist`); + } + const rolePlayer = oppositeRole.targetingRole; + if (!rolePlayer) { + throw new Error( + `Role "${lf.targetRole}" in relation "${lf.relation}" is not played by any other thing that targets role "${lf.plays}"`, + ); + } + const enriched: DRAFT_EnrichedBormLinkField = { + type: 'link', + name: lf.path, + cardinality: lf.cardinality, + target: 'role', + opposite: rolePlayer, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[lf.path] = enriched; + } +}; + +/** + * Mutate the enriched fields in place. + */ +const enrichRoleFields = ( + mutEnrichedFields: Record, + roles: Record, + thingName: string, + rolePlayerMap: RolePlayerMap, +) => { + for (const [roleName, role] of Object.entries(roles)) { + // NOTE: It should not fallback to a player with target "role" if a player with target "relation" is not found + // because in the SurrealDB schema for a player with target "role", the value of the role.opposite.thing[targetingRelation.path] is not thingName. 
+ // This becomes problematic when we transform filter into sub-query: + // SELECT * FROM WHERE = xyz + // Is not the same as: + // SELECT * FROM (SELECT VALUE FROM WHERE id = xyz) + const opposite = + rolePlayerMap[thingName]?.[roleName]?.targetingRelation ?? rolePlayerMap[thingName]?.[roleName]?.targetingRole; + if (!opposite) { + throw new Error(`Role ${roleName} in relation ${thingName} is not played by any other thing`); + } + const existing = mutEnrichedFields[roleName]; + const enriched: DRAFT_EnrichedBormRoleField = { + type: 'role', + name: roleName, + cardinality: role.cardinality ?? 'ONE', + opposite: opposite, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[roleName] = enriched; + } +}; + +const assertNoDuplicateField = ( + thing: string, + newField: DRAFT_EnrichedBormField, + existing?: DRAFT_EnrichedBormField, +) => { + if (!existing) { + return; + } + if (isEqual(newField, existing)) { + return; + } + throw new Error(`Duplicate field name "${newField.name}" in "${thing}"`); +}; + +type RolePlayerMap = Record< + DRAFT_EnrichedBormRelation['name'], + Record< + DRAFT_EnrichedBormRoleField['name'], + { + targetingRole?: DRAFT_EnrichedBormRoleField['opposite']; + targetingRelation?: DRAFT_EnrichedBormRoleField['opposite']; + } + > +>; + +const buildRolePlayerMap = (schema: BormSchema): RolePlayerMap => { + const rolePlayerMap: RolePlayerMap = {}; + for (const [relName, rel] of [...Object.entries(schema.relations), ...Object.entries(schema.entities)]) { + for (const lf of rel.linkFields ?? []) { + const roleMap = rolePlayerMap[lf.relation] ?? {}; + rolePlayerMap[lf.relation] = roleMap; + const rolePlayer = roleMap[lf.plays] ?? {}; + roleMap[lf.plays] = rolePlayer; + const existingOpposite = lf.target === 'relation' ? rolePlayer.targetingRelation : rolePlayer.targetingRole; + if (existingOpposite) { + if (existingOpposite.thing === relName) { + // Multiple link fields of the same thing may play the same role. And it's fine. 
+ continue; + } + if (isExtend(relName, existingOpposite.thing, schema)) { + // The current relation extends the role's opposite relation. Keep it. + continue; + } + if (!isExtend(existingOpposite.thing, relName, schema)) { + throw new Error(`Found multiple players for role ${lf.plays} in relation ${lf.relation}`); + } + } + if (lf.target === 'relation') { + rolePlayer.targetingRelation = { + thing: relName, + path: lf.path, + cardinality: lf.cardinality, + }; + } else { + rolePlayer.targetingRole = { + thing: relName, + path: lf.path, + cardinality: lf.cardinality, + }; + } + } + } + return rolePlayerMap; +}; + +/** + * Return true if thingA extends thingB directly or indirectly. + */ +const isExtend = (thingA: string, thingB: string, schema: BormSchema): boolean => { + const ancestorsA = getAncestors(thingA, schema); + return ancestorsA.includes(thingB); +}; + +const getAncestors = (thing: string, schema: BormSchema): string[] => { + const ancestors: string[] = []; + let current = thing; + while (current) { + const _thing = schema.entities[current] ?? 
schema.relations[current]; + if (!_thing) { + throw new Error(`Thing "${current}" not found`); + } + if (!('extends' in _thing) || !_thing.extends) { + break; + } + ancestors.push(_thing.extends); + current = _thing.extends; + } + return ancestors.reverse(); +}; + +const getIdFields = (name: string, entity: BormEntity | BormRelation): [string, ...string[]] => { + if (entity.idFields && entity.idFields.length > 0) { + return [entity.idFields[0], ...entity.idFields.slice(1)]; + } + const f = entity.dataFields?.find((f) => f.contentType === 'ID'); + if (f) { + return [f.path]; + } + throw new Error(`No id field found for entity "${name}"`); +}; + +const extendSchema = (schema: BormSchema): BormSchema => { + const extendedSchema: BormSchema = { + entities: {}, + relations: {}, + }; + for (const name in schema.entities) { + extendEntity(name, schema, extendedSchema); + } + for (const name in schema.relations) { + extendRelation(name, schema, extendedSchema); + } + return extendedSchema; +}; + +/** + * NOTE: Mutate the extended schema in place. + */ +const extendEntity = (name: string, schema: BormSchema, mutExtendedSchema: BormSchema): BormEntity => { + const entity = schema.entities[name]; + if (!entity) { + throw new Error(`Entity "${name}" not found`); + } + if ('extends' in entity && entity.extends) { + const ancestor = extendEntity(entity.extends, schema, mutExtendedSchema); + const extended = { + ...entity, + idFields: entity.idFields ?? ancestor.idFields, + dataFields: extendDataFields(ancestor, entity), + linkFields: extendLinkFields(ancestor, entity), + refFields: extendRefFields(ancestor, entity), + }; + mutExtendedSchema.entities[name] = extended; + return extended; + } + mutExtendedSchema.entities[name] = entity; + return entity; +}; + +/** + * NOTE: Mutate the extended schema in place. 
+ */ +const extendRelation = (name: string, schema: BormSchema, mutExtendedSchema: BormSchema): BormRelation => { + const relation = schema.relations[name]; + if (!relation) { + throw new Error(`Relation "${name}" not found`); + } + if ('extends' in relation && relation.extends) { + const ancestor = extendRelation(relation.extends, schema, mutExtendedSchema); + const extended = { + ...relation, + idFields: relation.idFields ?? ancestor.idFields, + dataFields: extendDataFields(ancestor, relation), + linkFields: extendLinkFields(ancestor, relation), + refFields: extendRefFields(ancestor, relation), + roles: extendRoles(ancestor, relation), + }; + mutExtendedSchema.relations[name] = extended; + return extended; + } + mutExtendedSchema.relations[name] = relation; + return relation; +}; + +const extendDataFields = (ancestor: BormEntity | BormRelation, entity: BormEntity | BormRelation): DataField[] => { + const explicitDataFieldSet = new Set(entity.dataFields?.map((df) => df.path) ?? []); + const inheritedDataFields = ancestor.dataFields?.filter((df) => !explicitDataFieldSet.has(df.path)) ?? []; + return [...inheritedDataFields, ...(entity.dataFields ?? [])]; +}; + +const extendLinkFields = (ancestor: BormEntity | BormRelation, entity: BormEntity | BormRelation): LinkField[] => { + const explicitLinkFieldSet = new Set(entity.linkFields?.map((lf) => lf.path) ?? []); + const inheritedLinkFields = ancestor.linkFields?.filter((lf) => !explicitLinkFieldSet.has(lf.path)) ?? []; + return [...inheritedLinkFields, ...(entity.linkFields ?? [])]; +}; + +const extendRefFields = ( + ancestor: BormEntity | BormRelation, + entity: BormEntity | BormRelation, +): Record => { + const inheritedRefFields = Object.fromEntries( + Object.entries(ancestor.refFields ?? {}).filter(([k]) => !entity.refFields?.[k]), + ); + return { ...inheritedRefFields, ...(entity.refFields ?? 
{}) }; +}; + +const extendRoles = (ancestor: BormRelation, entity: BormRelation): Record => { + const inheritedRoles = Object.fromEntries(Object.entries(ancestor.roles ?? {}).filter(([k]) => !entity.roles?.[k])); + return { ...inheritedRoles, ...(entity.roles ?? {}) }; +}; diff --git a/src/enrichSchema.ts b/src/enrichSchema.ts index 577aa5a7..4c78b9a8 100644 --- a/src/enrichSchema.ts +++ b/src/enrichSchema.ts @@ -120,7 +120,9 @@ const orderExtended = (thingMap: Record) => { const ordered: string[] = []; const seen = new Set(); const inProcess = new Set(); - Object.keys(thingMap).forEach((name) => pushExtended({ thingMap, name, inProcess, seen, ordered })); + for (const name of Object.keys(thingMap)) { + pushExtended({ thingMap, name, inProcess, seen, ordered }); + } return ordered; }; diff --git a/src/helpers.ts b/src/helpers.ts index 3fa3b04b..abfd7772 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -1,9 +1,9 @@ /* eslint-disable no-param-reassign */ import type { Draft } from 'immer'; -import { current, isDraft } from 'immer'; +import { current, isDraft, produce } from 'immer'; import { customAlphabet } from 'nanoid'; -import type { TraversalMeta } from 'object-traversal'; -import { getNodeByPath } from 'object-traversal'; +import type { TraversalCallbackContext, TraversalMeta } from 'object-traversal'; +import { getNodeByPath, traverse } from 'object-traversal'; import { isArray, isObject, listify, tryit } from 'radash'; // todo: split helpers between common helpers, typeDBhelpers, dgraphelpers... @@ -380,11 +380,8 @@ export const isBQLBlock = (block: unknown): block is FilledBQLMutationBlock => { type Drafted = T | Draft; // Recursively define the type to handle nested structures -type DeepCurrent = T extends Array - ? Array> - : T extends object - ? { [K in keyof T]: DeepCurrent } - : T; +type DeepCurrent = + T extends Array ? Array> : T extends object ? 
{ [K in keyof T]: DeepCurrent } : T; export const deepCurrent = (obj: Drafted): any => { if (Array.isArray(obj)) { @@ -430,3 +427,22 @@ export const genId = (n?: number) => { const nanoid = customAlphabet(alphabet, idLength); return nanoid(); }; + +export const genAlphaId = (length = 5): string => { + const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'; + const nanoid = customAlphabet(alphabet, length); + return nanoid(); +}; + +export const deepRemoveMetaData = (obj: object) => { + const removeMeta = ({ value }: TraversalCallbackContext) => { + if (value && typeof value === 'object' && '$id' in value) { + const metas = Object.keys(value).filter((k) => k.startsWith('$')); + for (const k of metas) { + delete value[k]; + } + } + return value; + }; + return produce(obj, (draft) => traverse(draft, removeMeta)); +}; diff --git a/src/index.ts b/src/index.ts index 0585f1f7..3101eab1 100644 --- a/src/index.ts +++ b/src/index.ts @@ -5,6 +5,7 @@ import { SimpleSurrealClient } from './adapters/surrealDB/client'; import { defaultConfig } from './default.config'; import { bormDefine } from './define'; import { enrichSchema } from './enrichSchema'; +import { enrichSchemaDraft } from './enrichSchema.draft'; import { runMutationMachine } from './stateMachine/mutation/mutationMachine'; import { runQueryMachine } from './stateMachine/query/queryMachine'; import type { @@ -20,6 +21,7 @@ import type { QueryConfig, RawBQLQuery, } from './types'; +import type { DRAFT_EnrichedBormSchema } from './types/schema/enriched.draft'; export * from './types'; @@ -37,7 +39,11 @@ class BormClient { private config: BormConfig; private initializing = false; private subscribers: ((err?: unknown) => void)[] = []; - private initialized: { enrichedSchema: EnrichedBormSchema; dbHandles: DBHandles } | null = null; + private initialized: { + enrichedSchema: EnrichedBormSchema; + draftSchema: DRAFT_EnrichedBormSchema; + dbHandles: DBHandles; + } | null = null; constructor({ schema, 
config }: BormProps) { this.schema = schema; @@ -140,6 +146,7 @@ class BormClient { ); this.initialized = { + draftSchema: enrichSchemaDraft(this.schema), enrichedSchema: enrichSchema(this.schema, dbHandles), dbHandles, }; @@ -204,6 +211,7 @@ class BormClient { const [errorRes, res] = await tryit(runQueryMachine)( queries, initialized.enrichedSchema, + initialized.draftSchema, qConfig, initialized.dbHandles, ); @@ -234,6 +242,7 @@ class BormClient { const [errorRes, res] = await tryit(runMutationMachine)( mutation, initialized.enrichedSchema, + initialized.draftSchema, mConfig, initialized.dbHandles, ); diff --git a/src/pipeline/postprocess/query/parseTQLQuery.ts b/src/pipeline/postprocess/query/parseTQLQuery.ts index 0d08cb81..c2cea3e3 100644 --- a/src/pipeline/postprocess/query/parseTQLQuery.ts +++ b/src/pipeline/postprocess/query/parseTQLQuery.ts @@ -161,7 +161,9 @@ export const parseTQLQuery: PipelineOperation = async (req, res) const parsedRoleFields = parseRoleFields(roleFields); const resDataFields = { ...parsedDataFields }; if (idNotIncluded === 'true') { - currentSchema?.idFields?.forEach((field) => delete resDataFields[field]); + for (const field of currentSchema.idFields ?? []) { + delete resDataFields[field]; + } } return { ...resDataFields, @@ -206,7 +208,9 @@ export const parseTQLQuery: PipelineOperation = async (req, res) const resDataFields = { ...parsedDataFields }; if (idNotIncluded === 'true') { - currentSchema.idFields?.forEach((field) => delete resDataFields[field]); + for (const field of currentSchema.idFields ?? 
[]) { + delete resDataFields[field]; + } } return { diff --git a/src/stateMachine/mutation/bql/enrich.ts b/src/stateMachine/mutation/bql/enrich.ts index 48d30a4a..e5d16449 100644 --- a/src/stateMachine/mutation/bql/enrich.ts +++ b/src/stateMachine/mutation/bql/enrich.ts @@ -99,15 +99,15 @@ export const enrichBQLMutation = ( node.$filter = enrichFilter(node.$filter, node.$thing, schema); } - Object.keys(node).forEach((field) => { + for (const field of Object.keys(node)) { ///1. Clean step cleanStep(node, field); if (field !== '$root' && isFilter) { - return; + continue; } if (field !== '$root' && (field.startsWith('$') || field.startsWith('%'))) { - return; + continue; } const fieldSchema = @@ -128,7 +128,8 @@ export const enrichBQLMutation = ( //console.log('field2', field, fieldType); ///2.DATAFIELD STEP if (fieldType === 'dataField') { - return dataFieldStep(node, field); + dataFieldStep(node, field); + continue; } ///3.NESTED OBJECTS: RoleFields and linkFields @@ -185,7 +186,7 @@ export const enrichBQLMutation = ( //Ideally, in updates we could not demand the $thing, but then we need to check that the field belongs to all the potential $things const toValidate = isArray(node[field]) ? 
node[field] : [node[field]]; - toValidate.forEach((subNode: BQLMutationBlock) => { + for (const subNode of toValidate as BQLMutationBlock[]) { const subNodeSchema = getCurrentSchema(schema, subNode); const { unidentifiedFields, usedLinkFields, usedFields, fields } = getCurrentFields( subNodeSchema, @@ -193,11 +194,11 @@ export const enrichBQLMutation = ( ); //Check that every used field is in the fields array - usedFields.forEach((uf) => { + for (const uf of usedFields) { if (!fields.includes(uf)) { throw new Error(`[Schema] Field ${uf} not found in the schema`); } - }); + } if (unidentifiedFields.length > 0) { throw new Error(`Unknown fields: [${unidentifiedFields.join(',')}] in ${JSON.stringify(value)}`); @@ -208,17 +209,21 @@ export const enrichBQLMutation = ( usedLinkFields.includes(lf.path), ); /// Check if at least two of the usedLinkFields schemas, share same relation and have different targets - usedLinkFieldsSchemas?.some((lf1, i) => { - return usedLinkFieldsSchemas.some((lf2, j) => { - if (i !== j && lf1.target !== lf2.target && lf1.relation === lf2.relation) { - throw new Error( - "[Wrong format]: Can't use a link field with target === 'role' and another with target === 'relation' in the same mutation.", - ); + if (usedLinkFieldsSchemas) { + for (let i = 0; i < usedLinkFieldsSchemas.length; i++) { + const lf1 = usedLinkFieldsSchemas[i]; + for (let j = 0; j < usedLinkFieldsSchemas.length; j++) { + const lf2 = usedLinkFieldsSchemas[j]; + if (i !== j && lf1.target !== lf2.target && lf1.relation === lf2.relation) { + throw new Error( + "[Wrong format]: Can't use a link field with target === 'role' and another with target === 'relation' in the same mutation.", + ); + } } - }); - }); + } + } } - }); + } if (!has$Fields) { //if it has $field, it has dependencies so its still not ready for transformation @@ -235,7 +240,7 @@ export const enrichBQLMutation = ( //#endregion pre-hook validations } } - }); + } } }), ); diff --git 
a/src/stateMachine/mutation/bql/enrichSteps/enrichChildren.ts b/src/stateMachine/mutation/bql/enrichSteps/enrichChildren.ts index 8d4532c8..22c81b31 100644 --- a/src/stateMachine/mutation/bql/enrichSteps/enrichChildren.ts +++ b/src/stateMachine/mutation/bql/enrichSteps/enrichChildren.ts @@ -59,6 +59,7 @@ export const enrichChildren = ( if (subNode.$thing && subNode.$thing !== player.thing) { throw new Error(`[Wrong format] The field ${field} can only be played by ${player.thing}.`); } + return { ...subNode, [EdgeSchema]: relFieldSchema, @@ -68,44 +69,43 @@ export const enrichChildren = ( $bzId, }; } - if (relFieldSchema.$things.length > 1) { - if (subNode.$thing) { - return [ - { - ...subNode, - [EdgeSchema]: relFieldSchema, - $thing: subNode.$thing, - $thingType: subNode.$thing in schema.entities ? 'entity' : 'relation', - $op, - $bzId, - }, - ]; - } - if (!subNode.$thing) { - if (subNode.$tempId) { - throw new Error( - '[Unsupported] Objects with $tempId and multiple potential players require to explicitly indicate the $thing type.', - ); - } - if ($op === 'create') { - throw new Error( - `[Wrong format] The field ${field} can be played by multiple things, please specify one on creation.`, - ); - } - return relFieldSchema.$things.map((thing) => { - return { - ...subNode, - [EdgeSchema]: relFieldSchema, - $thing: thing, - $thingType: thing in schema.entities ? 'entity' : 'relation', - $op, - $bzId: get$bzId(subNode, thing), - //[QueryContext]: { ...subNode[QueryContext], $multiThing: true }, //multiThing is used so the arcs of this manual split are merged in a single arc - }; - }); - } + if (subNode.$thing) { + return [ + { + ...subNode, + [EdgeSchema]: relFieldSchema, + $thing: subNode.$thing, + $thingType: subNode.$thing in schema.entities ? 
'entity' : 'relation', + $op, + $bzId, + }, + ]; + } + + if (subNode.$tempId) { + throw new Error( + '[Unsupported] Objects with $tempId and multiple potential players require to explicitly indicate the $thing type.', + ); } + + if ($op === 'create') { + throw new Error( + `[Wrong format] The field ${field} can be played by multiple things, please specify one on creation.`, + ); + } + + return relFieldSchema.$things.map((thing) => { + return { + ...subNode, + [EdgeSchema]: relFieldSchema, + $thing: thing, + $thingType: thing in schema.entities ? 'entity' : 'relation', + $op, + $bzId: get$bzId(subNode, thing), + //[QueryContext]: { ...subNode[QueryContext], $multiThing: true }, //multiThing is used so the arcs of this manual split are merged in a single arc + }; + }); //#endregion nested nodes }); diff --git a/src/stateMachine/mutation/bql/enrichSteps/preHookDependencies.ts b/src/stateMachine/mutation/bql/enrichSteps/preHookDependencies.ts index 7753cb4b..11e5b74a 100644 --- a/src/stateMachine/mutation/bql/enrichSteps/preHookDependencies.ts +++ b/src/stateMachine/mutation/bql/enrichSteps/preHookDependencies.ts @@ -13,12 +13,14 @@ import type { EnrichedRoleField, FilledBQLMutationBlock, } from '../../../../types'; +import type { DRAFT_EnrichedBormSchema } from '../../../../types/schema/enriched.draft'; import { DBNode } from '../../../../types/symbols'; import { runQueryMachine } from '../../../query/queryMachine'; export const preHookDependencies = async ( blocks: EnrichedBQLMutationBlock | EnrichedBQLMutationBlock[], schema: EnrichedBormSchema, + draftSchema: DRAFT_EnrichedBormSchema, config: BormConfig, dbHandles: DBHandles, ) => { @@ -28,6 +30,7 @@ export const preHookDependencies = async ( // @ts-expect-error todo transformationPreQueryReq, schema, + draftSchema, config, dbHandles, ); diff --git a/src/stateMachine/mutation/bql/enrichSteps/preHookValidations.ts b/src/stateMachine/mutation/bql/enrichSteps/preHookValidations.ts index f6ad2e57..b5dd7a77 100644 --- 
a/src/stateMachine/mutation/bql/enrichSteps/preHookValidations.ts +++ b/src/stateMachine/mutation/bql/enrichSteps/preHookValidations.ts @@ -12,46 +12,48 @@ export const preHookValidations = ( config: BormConfig, ) => { const subNodes = isArray(node[field]) ? node[field] : [node[field]]; - subNodes.forEach((subNode: EnrichedBQLMutationBlock) => { + for (const subNode of subNodes as EnrichedBQLMutationBlock[]) { if ('$thing' in subNode) { if (subNode.$fields) { - ///change machine context so we are sure we run preQueryDeps before coming back to here - return subNode; + // change machine context so we are sure we run preQueryDeps before coming back to here + continue; } const { requiredFields, enumFields, fnValidatedFields, dataFields } = getCurrentSchema(schema, subNode); /// Required fields if ('$op' in subNode && subNode.$op === 'create') { - requiredFields.forEach((field) => { + for (const field of requiredFields) { if (!(field in subNode)) { throw new Error(`[Validations] Required field "${field}" is missing.`); } - }); + } } + /// Enums if (('$op' in subNode && subNode.$op === 'update') || subNode.$op === 'create') { - enumFields.forEach((field) => { + for (const field of enumFields) { if (field in subNode) { const enumOptions = dataFields?.find((df) => df.path === field)?.validations?.enum; if (!enumOptions) { throw new Error(`[Validations] Enum field "${field}" is missing enum options.`); } if (isArray(subNode[field])) { - subNode[field].some((val: any) => { + for (const val of subNode[field]) { if (val !== null && !enumOptions.includes(val)) { throw new Error(`[Validations] Option "${val}" is not a valid option for field "${field}".`); } - }); + } } else if (enumOptions && !enumOptions.includes(subNode[field]) && !(subNode[field] === null)) { throw new Error(`[Validations] Option "${subNode[field]}" is not a valid option for field "${field}".`); } } - }); + } } + /// fn if (('$op' in subNode && subNode.$op === 'update') || subNode.$op === 'create') { - 
fnValidatedFields.forEach((field: string) => { + for (const field of fnValidatedFields as string[]) { if (field in subNode) { try { const fn = dataFields?.find((df) => df.path === field)?.validations?.fn; @@ -66,7 +68,7 @@ export const preHookValidations = ( throw new Error(`[Validations:attribute:${field}] ${error.message}`); } } - }); + } } /// Node validations @@ -82,10 +84,10 @@ export const preHookValidations = ( | Record; const triggeredActions = getTriggeredActions(value, schema); - triggeredActions.forEach((action) => { + for (const action of triggeredActions) { if (action.type === 'validate') { if (action.severity !== 'error') { - return; // in borm we only use the errors + continue; // in borm we only use the errors } try { @@ -102,8 +104,8 @@ export const preHookValidations = ( throw new Error(`[Validations:thing:${currentThing}] ${error.message}`); } } - }); + } } } - }); + } }; diff --git a/src/stateMachine/mutation/bql/flatter.ts b/src/stateMachine/mutation/bql/flatter.ts index 7dfd1909..80af6247 100644 --- a/src/stateMachine/mutation/bql/flatter.ts +++ b/src/stateMachine/mutation/bql/flatter.ts @@ -75,20 +75,22 @@ export const flattenBQLMutation = ( (k: string | symbol) => isSymbol(k) || k.startsWith('$'), ) as EnrichedBQLMutationBlock; - usedRoleFields.forEach((role) => { + for (const role of usedRoleFields) { //1 traverse them as well - isArray(block[role]) - ? block[role].forEach((child: EnrichedBQLMutationBlock) => - traverse(child, { bzId: $bzId, edgeField: role, tempId: $tempId }), - ) - : traverse(block[role], { bzId: $bzId, edgeField: role, tempId: $tempId }); + if (isArray(block[role])) { + for (const child of block[role] as EnrichedBQLMutationBlock[]) { + traverse(child, { bzId: $bzId, edgeField: role, tempId: $tempId }); + } + } else { + traverse(block[role], { bzId: $bzId, edgeField: role, tempId: $tempId }); + } //2 fill the arrays const edges = (isArray(block[role]) ? 
block[role] : [block[role]]).filter( Boolean, ) as EnrichedBQLMutationBlock[]; //pre-queries add some undefineds - Object.entries(operationMap).forEach(([operation, opTypes]) => { + for (const [operation, opTypes] of Object.entries(operationMap)) { const filteredEdges = edges.filter((edge) => opTypes.includes(edge.$op)).map((edge) => edge.$bzId); if (filteredEdges.length > 0) { @@ -98,17 +100,19 @@ export const flattenBQLMutation = ( $op: operation as BormOperation, }); } - }); - }); + } + } } if (usedLinkFields) { - usedLinkFields.forEach((ulf) => { + for (const ulf of usedLinkFields) { //1 traverse them - isArray(block[ulf]) - ? block[ulf].forEach((child: EnrichedBQLMutationBlock) => - traverse(child, { bzId: $bzId, edgeField: ulf, tempId: $tempId }), - ) - : traverse(block[ulf], { bzId: $bzId, edgeField: ulf, tempId: $tempId }); + if (isArray(block[ulf])) { + for (const child of block[ulf] as EnrichedBQLMutationBlock[]) { + traverse(child, { bzId: $bzId, edgeField: ulf, tempId: $tempId }); + } + } else { + traverse(block[ulf], { bzId: $bzId, edgeField: ulf, tempId: $tempId }); + } //2 fill the arrays const edgeSchema = currentSchema.linkFields?.find((lf) => lf.path === ulf) as EnrichedLinkField; @@ -117,10 +121,10 @@ export const flattenBQLMutation = ( //case 2.2 indirect edges if (edgeSchema.target === 'relation') { - Object.entries(operationMap).forEach(([operation, opTypes]) => { + for (const [operation, opTypes] of Object.entries(operationMap)) { const filteredEdges = edges.filter((edge) => opTypes.includes(edge.$op)); - filteredEdges.forEach((edge) => { + for (const edge of filteredEdges) { const edgeMeta = oFilter( edge, (k: string | symbol) => isSymbol(k) || k.startsWith('$'), @@ -131,8 +135,8 @@ export const flattenBQLMutation = ( [edgeSchema.plays]: $bzId, $op: operation as BormOperation, }); - }); - }); + } + } } // 3. 
INFERRED EDGES if (edgeSchema.target === 'role') { @@ -142,14 +146,14 @@ export const flattenBQLMutation = ( replace: ['replace'], }; - Object.entries(arcOperationMap).forEach(([operation, opTypes]) => { + for (const [operation, opTypes] of Object.entries(arcOperationMap)) { const filteredEdges = edges.filter((edge) => opTypes.includes(edge.$op)); if (filteredEdges.length === 0) { - return; + continue; } - filteredEdges.forEach((edge) => { + for (const edge of filteredEdges) { const arc = { //technically is a multi-arc $thing: edgeSchema.relation, @@ -161,20 +165,22 @@ export const flattenBQLMutation = ( }; result.arcs.push(arc); - }); - }); + } + } } - }); + } } if (usedRefFields) { - usedRefFields.forEach((urf) => { + for (const urf of usedRefFields) { //const { contentType } = currentSchema.refFields[urf]; //1 traverse them to push the nested items - isArray(block[urf]) - ? block[urf].forEach((child: EnrichedBQLMutationBlock) => - traverse(child, { bzId: $bzId, edgeField: urf, tempId: $tempId }), - ) - : traverse(block[urf], { bzId: $bzId, edgeField: urf, tempId: $tempId }); + if (isArray(block[urf])) { + for (const child of block[urf] as EnrichedBQLMutationBlock[]) { + traverse(child, { bzId: $bzId, edgeField: urf, tempId: $tempId }); + } + } else { + traverse(block[urf], { bzId: $bzId, edgeField: urf, tempId: $tempId }); + } //2 fill the arrays. We need this with refFields as well because in surrealdb we need to apply link operations at the end in case the order is incorrect const children = (isArray(block[urf]) ? block[urf] : [block[urf]]).filter( @@ -194,15 +200,17 @@ export const flattenBQLMutation = ( result.references.push({ ...childMeta, [urf]: filteredChildren, - $op: 'replace' as BormOperation, //Probably add / replace/ remove byt lets do only replaces for now + $op: 'replace' as BormOperation, //Probably add / replace/ remove but lets do only replaces for now }); } - }); + } } }; const treeItems = Array.isArray(tree) ? 
tree : [tree]; - treeItems.forEach((item) => traverse(item)); + for (const item of treeItems) { + traverse(item); + } //order by $Op, first unlink, then link const orderedEdges = [...result.edges].sort((a, b) => { diff --git a/src/stateMachine/mutation/bql/parse.ts b/src/stateMachine/mutation/bql/parse.ts index 91da8573..8ae6d269 100644 --- a/src/stateMachine/mutation/bql/parse.ts +++ b/src/stateMachine/mutation/bql/parse.ts @@ -3,9 +3,8 @@ import { traverse } from 'object-traversal'; import { isArray, isObject, mapEntries, pick, shake } from 'radash'; import { v4 as uuidv4 } from 'uuid'; -import { deepRemoveMetaData } from '../../../../tests/helpers/matchers'; import { computeField } from '../../../engine/compute'; -import { getCurrentFields, getCurrentSchema, getParentNode, oFilter } from '../../../helpers'; +import { deepRemoveMetaData, getCurrentFields, getCurrentSchema, getParentNode, oFilter } from '../../../helpers'; import type { BormOperation, BQLMutationBlock, diff --git a/src/stateMachine/mutation/bql/preQuery.ts b/src/stateMachine/mutation/bql/preQuery.ts index db2d3e19..0f37d6f9 100644 --- a/src/stateMachine/mutation/bql/preQuery.ts +++ b/src/stateMachine/mutation/bql/preQuery.ts @@ -12,6 +12,7 @@ import type { EnrichedBQLMutationBlock, FilledBQLMutationBlock, } from '../../../types'; +import type { DRAFT_EnrichedBormSchema } from '../../../types/schema/enriched.draft'; import { runQueryMachine } from '../../query/queryMachine'; export const preQueryPathSeparator = '___'; @@ -22,6 +23,7 @@ const grandChildOfCreateSymbol = Symbol.for('grandChildOfCreate'); export const mutationPreQuery = async ( blocks: EnrichedBQLMutationBlock | EnrichedBQLMutationBlock[], schema: EnrichedBormSchema, + draftSchema: DRAFT_EnrichedBormSchema, config: BormConfig, dbHandles: DBHandles, ) => { @@ -152,6 +154,7 @@ export const mutationPreQuery = async ( // @ts-expect-error todo preQueryReq, schema, + draftSchema, { ...config, query: { ...config.query, returnNulls: true } }, 
dbHandles, ); diff --git a/src/stateMachine/mutation/bql/stringify.ts b/src/stateMachine/mutation/bql/stringify.ts index 9d7e2558..ed6cea25 100644 --- a/src/stateMachine/mutation/bql/stringify.ts +++ b/src/stateMachine/mutation/bql/stringify.ts @@ -34,16 +34,18 @@ const tObject = ( return; } if (Array.isArray(tree)) { - tree.forEach((i) => tObject(schema, i, $thing)); + for (const i of tree) { + tObject(schema, i, $thing); + } return; } const thing = getSchemaByThing(schema, $thing || tree.$entity || tree.$relation || tree.$thing); - Object.entries(tree).forEach(([k]) => { + for (const [k] of Object.entries(tree)) { if (k.startsWith('$') || k.startsWith('%')) { - return; + continue; } tField(schema, tree, k, thing); - }); + } }; const tField = ( diff --git a/src/stateMachine/mutation/mutationMachine.ts b/src/stateMachine/mutation/mutationMachine.ts index ae3ddb6a..ad801d08 100644 --- a/src/stateMachine/mutation/mutationMachine.ts +++ b/src/stateMachine/mutation/mutationMachine.ts @@ -8,6 +8,7 @@ import type { EnrichedBormSchema, EnrichedBQLMutationBlock, } from '../../types'; +import type { DRAFT_EnrichedBormSchema } from '../../types/schema/enriched.draft'; import { VERSION } from '../../version'; import { enrichBQLMutation } from './bql/enrich'; import { preHookDependencies } from './bql/enrichSteps/preHookDependencies'; @@ -34,6 +35,7 @@ export type bqlMutationContext = { type MachineContext = { bql: bqlMutationContext; schema: EnrichedBormSchema; + draftSchema: DRAFT_EnrichedBormSchema; config: BormConfig; handles: DBHandles; depthLevel: number; @@ -101,12 +103,12 @@ const enrich = async (ctx: MachineContext) => { const preQuery = async (ctx: MachineContext) => { logDebug(`>>> mutationMachine/preQuery[${VERSION}]`, JSON.stringify(ctx.bql.enriched)); - return mutationPreQuery(ctx.bql.enriched, ctx.schema, ctx.config, ctx.handles); + return mutationPreQuery(ctx.bql.enriched, ctx.schema, ctx.draftSchema, ctx.config, ctx.handles); }; const preQueryDependencies = 
async (ctx: MachineContext) => { logDebug(`>>> mutationMachine/preQueryDependencies[${VERSION}]`, JSON.stringify(ctx.bql.enriched)); - return preHookDependencies(ctx.bql.enriched, ctx.schema, ctx.config, ctx.handles); + return preHookDependencies(ctx.bql.enriched, ctx.schema, ctx.draftSchema, ctx.config, ctx.handles); }; const parseBQL = async (ctx: MachineContext) => { @@ -252,6 +254,7 @@ export const awaitMachine = async (context: MachineContext) => { export const runMutationMachine = async ( mutation: BQLMutation, schema: EnrichedBormSchema, + draftSchema: DRAFT_EnrichedBormSchema, config: BormConfig, handles: DBHandles, ) => { @@ -270,6 +273,7 @@ export const runMutationMachine = async ( res: [], }, schema: schema as EnrichedBormSchema, + draftSchema: draftSchema, config: config, handles: handles, depthLevel: 0, diff --git a/src/stateMachine/mutation/surql/build.ts b/src/stateMachine/mutation/surql/build.ts index 00ce2bc1..ec7c5c41 100644 --- a/src/stateMachine/mutation/surql/build.ts +++ b/src/stateMachine/mutation/surql/build.ts @@ -262,7 +262,6 @@ export const buildSURQLMutation = async (flat: FlatBqlMutation, schema: Enriched const { $thing, $bzId, $op, $tempId } = block; const currentSchema = getSchemaByThing(schema, $thing); const { usedRefFields } = getCurrentFields(currentSchema, block); - const VAR = `$⟨${$tempId || $bzId}⟩`; const refFields = usedRefFields.flatMap((rf) => { @@ -273,7 +272,6 @@ export const buildSURQLMutation = async (flat: FlatBqlMutation, schema: Enriched const { cardinality, contentType } = refFieldSchema; if (contentType === 'REF') { const asArrayOfVars = isArray(block[rf]) ? block[rf] : [`${block[rf]}`]; - if (cardinality === 'ONE') { if (asArrayOfVars.length > 1) { //This is ok as long as only one is a match, but we can link to several in card ONE. 
This is practical if we don't know the $thing for instance, we can try multiple ones @@ -298,6 +296,7 @@ export const buildSURQLMutation = async (flat: FlatBqlMutation, schema: Enriched throw new Error(`Unsupported operation ${$op} for ONE cardinality`); } } + if (cardinality === 'MANY') { const nodesString = `array::flatten([${asArrayOfVars}])`; switch ($op) { @@ -311,14 +310,19 @@ export const buildSURQLMutation = async (flat: FlatBqlMutation, schema: Enriched throw new Error(`Unsupported operation ${$op} for MANY cardinality`); } } + throw new Error(`Unsupported cardinality ${cardinality}`); } + if (contentType === 'FLEX') { //todo: card one check len 1 //todo: add/remove etc return `${rf} = ${cardinality === 'ONE' ? `array::flatten([${block[rf]}])[0]` : `array::flatten([${block[rf]}])`}`; } + + throw new Error(`Unsupported contentType ${contentType}`); }); + const refFieldsString = refFields.length > 0 ? `${refFields.join(', ')}` : ''; const SET = refFieldsString ? `SET ${refFieldsString}` : ''; diff --git a/src/stateMachine/query/queryMachine.ts b/src/stateMachine/query/queryMachine.ts index edf399d4..e703bae9 100644 --- a/src/stateMachine/query/queryMachine.ts +++ b/src/stateMachine/query/queryMachine.ts @@ -1,23 +1,23 @@ import type { TypeDBDriver } from 'typedb-driver'; import type { SimpleSurrealClient } from '../../adapters/surrealDB/client'; -import { assertDefined, getSchemaByThing } from '../../helpers'; +import { getSchemaByThing } from '../../helpers'; import { logDebug } from '../../logger'; import { createMachine, interpret, invoke, reduce, state, transition } from '../../robot3'; -import type { BormConfig, DBHandles, EnrichedBormSchema, EnrichedBQLQuery, RawBQLQuery } from '../../types'; +import type { BormConfig, DBHandles, EnrichedBormSchema, RawBQLQuery } from '../../types'; +import type { DRAFT_EnrichedBormSchema } from '../../types/schema/enriched.draft'; import { VERSION } from '../../version'; -import { cleanQueryRes } from './bql/clean'; 
import { enrichBQLQuery } from './bql/enrich'; -import { postHooks } from './postHook'; import { runSurrealDbQueryMachine } from './surql/machine'; +import { runSurrealDbQueryMachine2 } from './surql2/run'; import { runTypeDbQueryMachine } from './tql/machine'; type MachineContext = { bql: { raw: RawBQLQuery[]; - queries?: EnrichedBQLQuery[]; res?: any[]; // TODO }; schema: EnrichedBormSchema; + draftSchema: DRAFT_EnrichedBormSchema; config: BormConfig; handles: DBHandles; error: string | null; @@ -58,7 +58,6 @@ type TypeDBAdapter = { db: 'typeDB'; client: TypeDBDriver; rawBql: RawBQLQuery[]; - bqlQueries: EnrichedBQLQuery[]; indices: number[]; }; @@ -66,7 +65,6 @@ type SurrealDBAdapter = { db: 'surrealDB'; client: SimpleSurrealClient; rawBql: RawBQLQuery[]; - bqlQueries: EnrichedBQLQuery[]; indices: number[]; }; @@ -87,9 +85,14 @@ export const queryMachine = createMachine( async (ctx: MachineContext) => { const adapters: Record = {}; - ctx.bql.queries?.forEach((q, i) => { + ctx.bql.raw?.forEach((q, i) => { const raw = ctx.bql.raw[i]; - const thing = getSchemaByThing(ctx.schema, q.$thing); + const $thing = + '$thing' in q ? q.$thing : '$entity' in q ? q.$entity : '$relation' in q ? 
q.$relation : undefined; + if (!$thing) { + throw new Error(`No $thing found in query ${JSON.stringify(q, null, 2)}`); + } + const thing = getSchemaByThing(ctx.schema, $thing); const { id } = thing.defaultDBConnector; if (thing.db === 'typeDB') { @@ -102,7 +105,6 @@ export const queryMachine = createMachine( db: 'typeDB', client, rawBql: [], - bqlQueries: [], indices: [], }; } @@ -116,7 +118,6 @@ export const queryMachine = createMachine( db: 'surrealDB', client, rawBql: [], - bqlQueries: [], indices: [], }; } @@ -125,18 +126,20 @@ export const queryMachine = createMachine( } const adapter = adapters[id]; adapter.rawBql.push(raw); - adapter.bqlQueries.push(q); adapter.indices.push(i); }); const adapterList = Object.values(adapters); const proms = adapterList.map((a) => { if (a.db === 'typeDB') { // TODO: Replace DBHandles with TypeDBAdapter - return runTypeDbQueryMachine(a.rawBql, a.bqlQueries, ctx.schema, ctx.config, ctx.handles); + return runTypeDbQueryMachine(a.rawBql, ctx.schema, ctx.config, ctx.handles); } if (a.db === 'surrealDB') { - return runSurrealDbQueryMachine(a.bqlQueries, ctx.schema, ctx.config, a.client); + if (ctx.config.query?.legacySurrealDBAdapter) { + return runSurrealDbQueryMachine(a.rawBql, ctx.schema, ctx.config, a.client); + } + return runSurrealDbQueryMachine2(a.rawBql, ctx.draftSchema, ctx.config, a.client); } throw new Error(`Unsupported DB "${JSON.stringify(a, null, 2)}"`); @@ -150,16 +153,6 @@ export const queryMachine = createMachine( const result = orderedResults.map(({ result }) => result); return result; }, - transition('done', 'postHooks', reduce(updateBqlRes)), - errorTransition, - ), - postHooks: invoke( - async (ctx: MachineContext) => postHooks(ctx.schema, assertDefined(ctx.bql.queries), assertDefined(ctx.bql.res)), - transition('done', 'clean', reduce(updateBqlRes)), - errorTransition, - ), - clean: invoke( - async (ctx: MachineContext) => cleanQueryRes(ctx.config, assertDefined(ctx.bql.res)), transition('done', 'success', 
reduce(updateBqlRes)), errorTransition, ), @@ -189,6 +182,7 @@ export const awaitQueryMachine = async (context: MachineContext) => { export const runQueryMachine = async ( bql: RawBQLQuery[], schema: EnrichedBormSchema, + draftSchema: DRAFT_EnrichedBormSchema, config: BormConfig, handles: DBHandles, ) => { @@ -197,6 +191,7 @@ export const runQueryMachine = async ( raw: bql, }, schema: schema, + draftSchema, config: config, handles: handles, error: null, diff --git a/src/stateMachine/query/surql/machine.ts b/src/stateMachine/query/surql/machine.ts index 76f6b996..881ddf59 100644 --- a/src/stateMachine/query/surql/machine.ts +++ b/src/stateMachine/query/surql/machine.ts @@ -1,8 +1,13 @@ import type { SimpleSurrealClient } from '../../../adapters/surrealDB/client'; import { assertDefined } from '../../../helpers'; +import { logDebug } from '../../../logger'; import { createMachine, interpret, invoke, reduce, state, transition } from '../../../robot3'; -import type { BormConfig, EnrichedBormSchema, EnrichedBQLQuery } from '../../../types'; +import type { BormConfig, EnrichedBormSchema, EnrichedBQLQuery, RawBQLQuery } from '../../../types'; import type { SurrealDBProviderObject } from '../../../types/config/surrealdb'; +import { VERSION } from '../../../version'; +import { cleanQueryRes } from '../bql/clean'; +import { enrichBQLQuery } from '../bql/enrich'; +import { postHooks } from '../postHook'; import { build } from './build'; import { buildRefs } from './buildRefs'; import { parse } from './parse'; @@ -10,7 +15,8 @@ import { run } from './run'; export type SurrealDbMachineContext = { bql: { - queries: EnrichedBQLQuery[]; + raw: RawBQLQuery[]; + queries?: EnrichedBQLQuery[]; res?: any[]; }; surql: { @@ -23,6 +29,26 @@ export type SurrealDbMachineContext = { error?: string | null; }; +const updateBqlReq = (ctx: SurrealDbMachineContext, event: any) => { + if (!event.data) { + return ctx; + } + return { + ...ctx, + bql: { ...ctx.bql, queries: event.data }, + }; +}; + 
+const updateBqlRes = (ctx: SurrealDbMachineContext, event: any) => { + if (!event.data) { + return ctx; + } + return { + ...ctx, + bql: { ...ctx.bql, res: event.data }, + }; +}; + const errorTransition = transition( 'error', 'error', @@ -35,8 +61,16 @@ const errorTransition = transition( ); const surrealDbQueryMachine = createMachine( - 'build', + 'enrich', { + enrich: invoke( + async (ctx: SurrealDbMachineContext) => { + logDebug(`originalBQLQuery[${VERSION}]`, JSON.stringify(ctx.bql.raw)); + return enrichBQLQuery(ctx.bql.raw, ctx.schema); + }, + transition('done', 'build', reduce(updateBqlReq)), + errorTransition, + ), build: invoke( async (ctx: SurrealDbMachineContext) => { // todo: This works only if there is a single surrealDB connector @@ -44,10 +78,10 @@ const surrealDbQueryMachine = createMachine( ctx.config.dbConnectors.find((c) => c.provider === 'surrealDB') as SurrealDBProviderObject ).providerConfig; if (linkMode === 'edges') { - return build({ queries: ctx.bql.queries, schema: ctx.schema }); + return build({ queries: ctx.bql.queries ?? [], schema: ctx.schema }); } if (linkMode === 'refs') { - return buildRefs({ queries: ctx.bql.queries, schema: ctx.schema }); + return buildRefs({ queries: ctx.bql.queries ?? [], schema: ctx.schema }); } }, transition( @@ -88,14 +122,14 @@ const surrealDbQueryMachine = createMachine( async (ctx: SurrealDbMachineContext) => { return parse({ res: assertDefined(ctx.surql.res), - queries: ctx.bql.queries, + queries: ctx.bql.queries ?? 
[], schema: ctx.schema, config: ctx.config, }); }, transition( 'done', - 'success', + 'postHooks', reduce( (ctx: SurrealDbMachineContext, event: any): SurrealDbMachineContext => ({ ...ctx, @@ -108,6 +142,17 @@ const surrealDbQueryMachine = createMachine( ), errorTransition, ), + postHooks: invoke( + async (ctx: SurrealDbMachineContext) => + postHooks(ctx.schema, assertDefined(ctx.bql.queries), assertDefined(ctx.bql.res)), + transition('done', 'clean', reduce(updateBqlRes)), + errorTransition, + ), + clean: invoke( + async (ctx: SurrealDbMachineContext) => cleanQueryRes(ctx.config, assertDefined(ctx.bql.res)), + transition('done', 'success', reduce(updateBqlRes)), + errorTransition, + ), success: state(), error: state(), }, @@ -133,15 +178,13 @@ const awaitQueryMachine = async (context: SurrealDbMachineContext) => { }; export const runSurrealDbQueryMachine = async ( - enrichedBql: EnrichedBQLQuery[], + bql: RawBQLQuery[], schema: EnrichedBormSchema, config: BormConfig, client: SimpleSurrealClient, ) => { return awaitQueryMachine({ - bql: { - queries: enrichedBql, - }, + bql: { raw: bql }, surql: {}, schema: schema, config: config, diff --git a/src/stateMachine/query/surql2/buildLogical.ts b/src/stateMachine/query/surql2/buildLogical.ts new file mode 100644 index 00000000..386080ac --- /dev/null +++ b/src/stateMachine/query/surql2/buildLogical.ts @@ -0,0 +1,677 @@ +import z from 'zod/v4'; +import { + type BQLField, + type BQLFilter, + type BQLFilterValue, + type BQLFilterValueList, + type BQLQuery, + type NestedBQLFilter, + NestedBQLFilterParser, + StrictBQLValueFilterParser, +} from '../../../types/requests/parser'; +import type { + DRAFT_EnrichedBormDataField, + DRAFT_EnrichedBormEntity, + DRAFT_EnrichedBormField, + DRAFT_EnrichedBormLinkField, + DRAFT_EnrichedBormRefField, + DRAFT_EnrichedBormRelation, + DRAFT_EnrichedBormRoleField, + DRAFT_EnrichedBormSchema, +} from '../../../types/schema/enriched.draft'; +import type { + DataSource, + Filter, + ListFilter, + 
LogicalQuery, + Projection, + ProjectionField, + ScalarFilter, + Sort, +} from './logical'; + +export const buildLogicalQuery = ( + query: BQLQuery, + schema: DRAFT_EnrichedBormSchema, + metadata: boolean, +): LogicalQuery => { + const thingSchema = schema[query.$thing]; + const projection = buildProjection({ fields: query.$fields, thing: thingSchema, schema, metadata }); + const filter = query.$filter ? buildFilter(query.$filter, thingSchema, schema) : undefined; + const ids = Array.isArray(query.$id) ? query.$id : query.$id ? [query.$id] : []; + const cardinality = ids.length === 1 || isUniqueFilter(thingSchema, filter) ? 'ONE' : 'MANY'; + const source: DataSource = + ids.length > 0 + ? { + type: 'record_pointer', + thing: [thingSchema.name, ...thingSchema.subTypes], + ids, + } + : { + type: 'table_scan', + thing: [thingSchema.name, ...thingSchema.subTypes], + }; + + return { + source, + projection, + filter, + limit: validateLimit(query.$limit), + offset: validateOffset(query.$offset), + sort: validateSort(projection, buildSort(query.$sort)), + cardinality, + }; +}; + +const buildProjection = (params: { + fields?: BQLField[]; + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation; + schema: DRAFT_EnrichedBormSchema; + metadata: boolean; +}): Projection => { + const { fields, thing, schema, metadata } = params; + const projectionFields: ProjectionField[] = []; + + if (metadata) { + projectionFields.push({ + type: 'metadata', + path: '$id', + }); + projectionFields.push({ + type: 'metadata', + path: '$thing', + }); + } + + // No fields specified. Project all fields. 
+ if (!fields) { + for (const field of Object.values(thing.fields)) { + projectionFields.push(buildSimpleFieldProjection(field)); + } + return { fields: projectionFields }; + } + + for (const field of fields) { + if (typeof field === 'string') { + if (field === '$id' || field === '$thing') { + projectionFields.push({ + type: 'metadata', + path: field, + }); + continue; + } + + const fieldSchema = thing.fields[field]; + if (!fieldSchema) { + throw new Error(`Field ${field} not found in ${thing.name}`); + } + projectionFields.push(buildSimpleFieldProjection(fieldSchema)); + continue; + } + + const alias = validateAlias(field.$as); + + if (field.$path === '$id' || field.$path === '$thing') { + projectionFields.push({ + type: 'metadata', + path: field.$path, + alias, + }); + continue; + } + + const fieldSchema = thing.fields[field.$path]; + if (!fieldSchema) { + throw new Error(`Field ${field} not found in ${thing.name}`); + } + + if (fieldSchema.type === 'constant' || fieldSchema.type === 'computed') { + continue; + } + + if (fieldSchema.type === 'data' || fieldSchema.type === 'ref') { + projectionFields.push(buildSimpleFieldProjection(fieldSchema, alias)); + continue; + } + + const oppositeThingSchema = schema[fieldSchema.opposite.thing]; + const oppositeProjection = buildProjection({ fields: field.$fields, thing: oppositeThingSchema, schema, metadata }); + const filter = + '$filter' in field && field.$filter ? buildFilter(field.$filter, oppositeThingSchema, schema) : undefined; + projectionFields.push({ + type: 'nested_reference', + path: field.$path, + projection: oppositeProjection, + cardinality: + typeof field.$id === 'string' || isUniqueFilter(oppositeThingSchema, filter) ? 'ONE' : fieldSchema.cardinality, + alias, + ids: typeof field.$id === 'string' ? 
[field.$id] : field.$id, + filter, + limit: validateLimit(field.$limit), + offset: validateOffset(field.$offset), + sort: validateSort(oppositeProjection, buildSort(field.$sort)), + }); + } + + return { + fields: projectionFields, + }; +}; + +const buildSimpleFieldProjection = (field: DRAFT_EnrichedBormField, alias?: string): ProjectionField => { + if (field.type === 'data') { + return { + type: 'data', + path: field.name, + alias, + }; + } + if (field.type === 'ref' && field.contentType === 'FLEX') { + return { + type: 'flex', + path: field.name, + cardinality: field.cardinality, + alias, + }; + } + return { + type: 'reference', + path: field.name, + cardinality: field.cardinality, + alias, + }; +}; + +const buildFilter = ( + filter: BQLFilter | BQLFilter[], + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, + schema: DRAFT_EnrichedBormSchema, +): Filter | undefined => { + if (Array.isArray(filter)) { + const filters = filter.map((f) => buildFilter(f, thing, schema)).filter((f) => !!f); + return { + type: 'or', + filters: filters, + }; + } + + const filters = buildFilters(filter, thing, schema); + return { + type: 'and', + filters, + }; +}; + +const buildFilters = ( + filter: BQLFilter, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, + schema: DRAFT_EnrichedBormSchema, +): Filter[] => { + return Object.entries(filter) + .map(([key, value]): Filter | undefined => { + if (key === '$not' && filter.$not) { + return buildNotFilter(filter.$not, thing, schema); + } + + if (key === '$or' && filter.$or) { + return buildOrFilter(filter.$or, thing, schema); + } + + const fieldSchema = thing.fields[key]; + + if (!fieldSchema) { + throw new Error(`Field ${key} not found in ${thing.name}`); + } + + if (fieldSchema.type === 'constant' || fieldSchema.type === 'computed') { + throw new Error(`Filtering on constant or computed field ${key} is not supported`); + } + + if (value === undefined) { + return undefined; + } + + if (fieldSchema.type === 'data') 
{ + return buildDataFieldFilter(fieldSchema, value as BQLFilterValue | BQLFilterValueList | NestedBQLFilter); + } + + if (fieldSchema.type === 'ref') { + return buildRefFieldFilter(fieldSchema, value); + } + + return buildLinkFieldFilter(fieldSchema, value, schema); + }) + .filter((f): f is Filter => f !== undefined); +}; + +const buildNotFilter = ( + $not: BQLFilter, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, + schema: DRAFT_EnrichedBormSchema, +): Filter | undefined => { + const inner = buildFilter($not, thing, schema); + return inner + ? { + type: 'not', + filter: inner, + } + : undefined; +}; + +const buildOrFilter = ( + $or: BQLFilter[], + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, + schema: DRAFT_EnrichedBormSchema, +): Filter | undefined => { + return buildFilter($or, thing, schema); +}; + +const buildDataFieldFilter = ( + field: DRAFT_EnrichedBormDataField, + filter: BQLFilterValue | BQLFilterValueList | NestedBQLFilter, +): Filter => { + // No-sub field. Only scalar and list filters are allowed. + // If `right` is not of the same type as the field, the query will return an empty result. + // Ideally SurrealDB's query planner should skip the query. + + // scalar and list operators + const result = StrictBQLValueFilterParser.safeParse(filter); + if (result.success) { + const filters: Filter[] = []; + for (const [op, right] of Object.entries(result.data)) { + if (op === '$exists') { + filters.push({ + type: 'null', + op: right ? 'IS NOT' : 'IS', + left: field.name, + tunnel: false, + }); + continue; + } + if ((op === '$eq' || op === '$ne') && right === null) { + filters.push({ + type: 'null', + op: op === '$eq' ? 
'IS' : 'IS NOT', + left: field.name, + tunnel: false, + }); + continue; + } + const scalarOp = scalarOpMap[op]; + if (scalarOp) { + filters.push({ + type: 'scalar', + op: scalarOp, + left: field.name, + right: right as BQLFilterValue, + }); + continue; + } + const listOp = listOpMap[op]; + if (listOp) { + filters.push({ + type: 'list', + op: listOp, + left: field.name, + right: right as BQLFilterValueList, + }); + continue; + } + throw new Error(`Invalid filter operation: ${op}`); + } + return { + type: 'and', + filters, + }; + } + + // List value + if (Array.isArray(filter)) { + if (field.cardinality === 'ONE') { + return { + type: 'list', + op: 'IN', + left: field.name, + right: filter, + }; + } + + return { + type: 'list', + op: 'CONTAINSANY', + left: field.name, + right: filter, + }; + } + + // Single value + if (field.cardinality === 'ONE') { + if (filter === null) { + return { + type: 'null', + op: 'IS', + left: field.name, + tunnel: false, + }; + } + return { + type: 'scalar', + op: '=', + left: field.name, + right: filter as BQLFilterValue, + }; + } + + return { + type: 'scalar', + op: 'CONTAINS', + left: field.name, + right: filter as BQLFilterValue, + }; +}; + +const buildRefFieldFilter = ( + field: DRAFT_EnrichedBormRefField, + filter: BQLFilterValue | BQLFilterValueList | NestedBQLFilter | BQLFilter[], +): Filter | undefined => { + if (field.contentType === 'REF') { + if (field.cardinality === 'ONE') { + if (typeof filter === 'string') { + return { + type: 'ref', + op: 'IN', + left: field.name, + right: [filter], + tunnel: false, + }; + } + if (StringArrayParser.safeParse(filter).success) { + return { + type: 'ref', + op: 'IN', + left: field.name, + right: filter as string[], + tunnel: false, + }; + } + throw new Error(`Invalid filter value for ref field ${field.name}: ${JSON.stringify(filter)}`); + } + if (typeof filter === 'string') { + return { + type: 'ref', + op: 'CONTAINSANY', + left: field.name, + right: [filter], + tunnel: false, + }; + } + if 
(StringArrayParser.safeParse(filter).success) { + return { + type: 'ref', + op: 'CONTAINSANY', + left: field.name, + right: filter as string[], + tunnel: false, + }; + } + throw new Error(`Invalid filter value for ref field ${field.name}: ${JSON.stringify(filter)}`); + } + // The cast can't be determined. + throw new Error('Filtering by FLEX reference is not supported'); +}; + +const buildLinkFieldFilter = ( + field: DRAFT_EnrichedBormLinkField | DRAFT_EnrichedBormRoleField, + filter: BQLFilterValue | BQLFilterValueList | NestedBQLFilter | BQLFilter[], + schema: DRAFT_EnrichedBormSchema, +): Filter => { + const tunnel = field.type === 'link' && field.target === 'role'; + + if (filter === null) { + return { + type: 'null', + op: 'IS', + left: field.name, + tunnel, + }; + } + + if (typeof filter === 'string') { + return { + type: 'ref', + op: field.cardinality === 'ONE' ? 'IN' : 'CONTAINSANY', + left: field.name, + right: [filter], + tunnel, + }; + } + + if (StringArrayParser.safeParse(filter).success) { + return { + type: 'ref', + op: field.cardinality === 'ONE' ? 
'IN' : 'CONTAINSANY', + left: field.name, + right: filter as string[], + tunnel, + }; + } + + const nestedFilter = z.union([NestedBQLFilterParser, z.array(NestedBQLFilterParser)]).safeParse(filter); + + if (nestedFilter.error) { + throw new Error(`Invalid nested filter: ${nestedFilter.error.message}`); + } + + const oppositeThingSchema = schema[field.opposite.thing]; + + if (!oppositeThingSchema) { + throw new Error(`Opposite thing ${field.opposite.thing} not found`); + } + + const oppositeThings: [string, ...string[]] = [field.opposite.thing, ...oppositeThingSchema.subTypes]; + + if (Array.isArray(nestedFilter.data)) { + const filters = nestedFilter.data.map((f) => buildLinkFieldFilter(field, f, schema)); + return { + type: 'or', + filters, + }; + } + + const { + $eq: _eq, + $ne: _ne, + $contains: _contains, + $containsNot: _containsNot, + $in: _in, + $nin: _nin, + $containsAll: _containsAll, + $containsAny: _containsAny, + $containsNone: _containsNone, + ...rest + } = nestedFilter.data; + + for (const unsupportedOp of ['$gt', '$lt', '$gte', '$lte']) { + if (rest[unsupportedOp]) { + throw new Error(`Filtering ${field.type} field with ${unsupportedOp} operator is not supported`); + } + } + + const filters: Filter[] = []; + + for (const op of ['$exists', '$eq', '$ne', '$contains', '$containsNot']) { + const value = nestedFilter.data[op]; + if (value === undefined) { + continue; + } + if (op === '$exists') { + filters.push({ + type: 'null', + op: value ? 'IS NOT' : 'IS', + left: field.name, + tunnel, + }); + continue; + } + if ((op === '$eq' || op === '$ne') && value === null) { + filters.push({ + type: 'null', + op: op === '$eq' ? 'IS' : 'IS NOT', + left: field.name, + tunnel, + }); + continue; + } + if (typeof value !== 'string') { + throw new Error(`Filter value for ${field.type} field with operator ${op} must be a string`); + } + filters.push({ + type: 'ref', + op: op === '$eq' || op === '$contains' ? 
'IN' : 'NOT IN', + left: field.name, + right: [value], + thing: oppositeThings, + tunnel, + }); + } + + for (const op of ['$in', '$nin', '$containsAll', '$containsAny', '$containsNone']) { + const value = nestedFilter.data[op]; + if (value === undefined) { + continue; + } + const stringArray = StringArrayParser.safeParse(value); + if (!stringArray.success) { + throw new Error(`Filter value for ${field.type} field with operator ${op} must be a string array`); + } + const listOp = listOpMap[op]; + if (!listOp) { + throw new Error(`Invalid list operator: ${op}`); + } + filters.push({ + type: 'ref', + op: listOp, + left: field.name, + right: stringArray.data, + thing: oppositeThings, + tunnel, + }); + } + + const oppositeSchema = schema[field.opposite.thing]; + if (!oppositeSchema) { + throw new Error(`Unknown thing: ${field.opposite.thing}`); + } + + const nestedLogicalFilter = buildFilter(rest, oppositeSchema, schema); + if (nestedLogicalFilter) { + filters.push({ + type: 'nested', + path: field.name, + filter: nestedLogicalFilter, + cardinality: field.cardinality, + }); + } + + return { + type: 'and', + filters, + }; +}; + +const isUniqueFilter = (thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, filter?: Filter): boolean => { + if (!filter) { + return false; + } + if (filter.type === 'scalar') { + if (filter.op !== '=') { + return false; + } + const field = thing.fields[filter.left]; + if (!field) { + throw new Error(`Field ${filter.left} not found in ${thing.name}`); + } + return field.type === 'data' && field.unique; + } + if (filter.type === 'list') { + if (filter.op !== 'IN' || filter.right.length > 1) { + return false; + } + const field = thing.fields[filter.left]; + if (!field) { + throw new Error(`Field ${filter.left} not found in ${thing.name}`); + } + return field.type === 'data' && field.unique; + } + if (filter.type === 'and') { + return filter.filters.some((f) => isUniqueFilter(thing, f)); + } + return false; +}; + +const buildSort = (sort?: 
({ field: string; desc?: boolean } | string)[]): Sort[] | undefined => { + if (!sort || sort.length === 0) { + return undefined; + } + return sort.map((s) => { + if (typeof s === 'string') { + return { field: s, desc: false }; + } + return { field: s.field, desc: s.desc ?? false }; + }); +}; + +const scalarOpMap: Record = { + $eq: '=', + $ne: '!=', + $gt: '>', + $lt: '<', + $gte: '>=', + $lte: '<=', + $contains: 'CONTAINS', + $containsNot: 'CONTAINSNOT', +}; + +const listOpMap: Record = { + $in: 'IN', + $nin: 'NOT IN', + $containsAll: 'CONTAINSALL', + $containsAny: 'CONTAINSANY', + $containsNone: 'CONTAINSNONE', +}; + +const StringArrayParser = z.array(z.string()); + +const validateAlias = (alias?: string): string | undefined => { + if (alias !== undefined && !/^[a-zA-Z0-9_-]+$/.test(alias)) { + throw new Error(`Invalid alias: ${alias}`); + } + return alias; +}; + +const validateLimit = (limit?: number): number | undefined => { + if (limit !== undefined && (typeof limit !== 'number' || limit < 0)) { + throw new Error(`Invalid limit: ${limit}`); + } + return limit; +}; + +const validateOffset = (offset?: number): number | undefined => { + if (offset !== undefined && (typeof offset !== 'number' || offset < 0)) { + throw new Error(`Invalid offset: ${offset}`); + } + return offset; +}; + +const validateSort = (projection: Projection, sort?: Sort[]): Sort[] | undefined => { + if (!sort || sort.length === 0) { + return undefined; + } + const projectionSet = new Set(projection.fields.map((f) => (f.type === 'metadata' ? f.path : (f.alias ?? 
f.path)))); + for (const s of sort) { + if (!projectionSet.has(s.field)) { + throw new Error(`Missing sorter field in the selected fields: ${s.field}`); + } + } + return sort; +}; diff --git a/src/stateMachine/query/surql2/buildSurql.ts b/src/stateMachine/query/surql2/buildSurql.ts new file mode 100644 index 00000000..dcb4412e --- /dev/null +++ b/src/stateMachine/query/surql2/buildSurql.ts @@ -0,0 +1,331 @@ +import { genAlphaId } from '../../../helpers'; +import type { + DataField, + DataSource, + Filter, + FlexField, + LogicalQuery, + MetadataField, + NestedReferenceField, + Projection, + ReferenceField, + Sort, +} from './logical'; + +export type SurqlParams = Record; + +export const buildSurql = (query: LogicalQuery, mutParams: SurqlParams): string => { + const lines: string[] = []; + const level = query.cardinality === 'MANY' ? 0 : 1; + + if (query.cardinality === 'ONE') { + lines.push('array::first('); + } + lines.push(buildProjection(query.projection, level, mutParams)); + lines.push(buildFrom(query.source, level, mutParams)); + const filter = query.filter && buildFilter(query.filter, mutParams); + if (filter) { + lines.push(indent(`WHERE ${filter}`, level)); + } + if (query.sort && query.sort.length > 0) { + lines.push(buildOrderBy(query.sort, level)); + } + if (query.limit !== undefined) { + lines.push(indent(`LIMIT ${query.limit}`, level)); + } + if (query.offset !== undefined) { + lines.push(indent(`START ${query.offset}`, level)); + } + if (query.cardinality === 'ONE') { + lines.push(')'); + } + + return lines.join('\n'); +}; + +const buildProjection = (projection: Projection, level: number, mutParams: SurqlParams): string => { + const fieldLines: string[] = []; + const fieldLevel = level + 1; + for (const field of projection.fields) { + if (field.type === 'metadata') { + fieldLines.push(buildMetadataFieldProjection(field, fieldLevel)); + } else if (field.type === 'data') { + fieldLines.push(buildDataFieldProjection(field, fieldLevel)); + } else if 
(field.type === 'reference') { + fieldLines.push(buildReferenceFieldProjection(field, fieldLevel)); + } else if (field.type === 'nested_reference') { + fieldLines.push(buildNestedFieldProjection(field, fieldLevel, mutParams)); + } else if (field.type === 'flex') { + fieldLines.push(buildFlexFieldProjection(field, fieldLevel)); + } + } + + const lines: string[] = []; + lines.push(indent('SELECT', level)); + lines.push(fieldLines.join(',\n')); + + return lines.join('\n'); +}; + +const buildMetadataFieldProjection = (field: MetadataField, level: number) => { + if (field.path === '$id') { + return indent(`record::id(id) AS ${esc(field.alias ?? '$id')}`, level); + } else if (field.path === '$thing') { + return indent(`record::tb(id) AS ${esc(field.alias ?? '$thing')}`, level); + } + throw new Error(`Unsupported metadata field: ${field.path}`); +}; + +const buildDataFieldProjection = (field: DataField, level: number) => { + if (field.path === 'id') { + return indent(`record::id(id) AS ${esc(field.alias ?? 
'id')}`, level); + } + const escapedPath = esc(field.path); + if (field.alias) { + return indent(`${escapedPath} AS ${esc(field.alias)}`, level); + } + return indent(escapedPath, level); +}; + +const buildReferenceFieldProjection = (field: ReferenceField, level: number) => { + const { path, alias, cardinality } = field; + const escapedPath = esc(path); + const escapedAlias = esc(alias || path); + if (cardinality === 'ONE') { + return indent(`array::first(SELECT VALUE record::id(id) FROM $this.${escapedPath}[*]) AS ${escapedAlias}`, level); + } + return indent(`(SELECT VALUE record::id(id) FROM $this.${escapedPath}[*]) AS ${escapedAlias}`, level); +}; + +const buildNestedFieldProjection = (field: NestedReferenceField, level: number, mutParams: SurqlParams) => { + const lines: string[] = []; + if (field.cardinality === 'MANY') { + lines.push(indent('(', level)); + } else { + lines.push(indent('array::first(', level)); + } + lines.push(buildProjection(field.projection, level + 1, mutParams)); + const filter = field.filter ? 
buildFilter(field.filter, mutParams) : undefined; + lines.push(indent(`FROM $this.${esc(field.path)}[*]`, level + 1)); + const conditions: string[] = []; + if (field.ids && field.ids.length > 0) { + const ids = field.ids.map((i) => `$${insertParam(mutParams, i)}`); + if (ids.length === 1) { + conditions.push(`record::id(id) = ${ids[0]}`); + } else { + conditions.push(`record::id(id) IN [${ids.join(', ')}]`); + } + } + if (filter) { + conditions.push(filter); + } + if (conditions.length > 0) { + lines.push(indent(`WHERE ${conditions.join(' AND ')}`, level + 1)); + } + if (field.sort && field.sort.length > 0) { + lines.push(buildOrderBy(field.sort, level + 1)); + } + if (field.limit !== undefined) { + lines.push(indent(`LIMIT ${field.limit}`, level + 1)); + } + if (field.offset !== undefined) { + lines.push(indent(`START ${field.offset}`, level + 1)); + } + lines.push(indent(`) AS ${esc(field.alias || field.path)}`, level)); + return lines.join('\n'); +}; + +const buildFlexFieldProjection = (field: FlexField, level: number) => { + const { path, alias, cardinality } = field; + const escapedPath = esc(path); + const escapedAlias = esc(alias || path); + if (cardinality === 'ONE') { + return indent( + `${escapedPath} && IF type::is::record(${escapedPath}) { record::id(${escapedPath}) } ELSE { ${escapedPath} } AS ${escapedAlias}`, + level, + ); + } + return indent( + `${escapedPath} && ${escapedPath}.map(|$i| IF type::is::record($i) { record::id($i)} ELSE { $i }) AS ${escapedAlias}`, + level, + ); +}; + +const buildFrom = (source: DataSource, level: number, mutParams: SurqlParams): string => { + const lines: string[] = []; + switch (source.type) { + case 'table_scan': { + lines.push(indent(`FROM ${source.thing.map(esc)}`, level)); + break; + } + case 'record_pointer': { + const pointers = source.thing + .flatMap((t) => source.ids.map((i) => `${esc(t)}:${esc(i)}`)) + .map((p) => `type::record($${insertParam(mutParams, p)})`) + .join(', '); + lines.push(indent(`FROM 
${pointers}`, level)); + break; + } + case 'subquery': { + lines.push(indent(source.cardinality === 'MANY' ? 'FROM array::distinct(array::flatten(' : 'FROM (', level)); + source.oppositePath; /* FIXME(review): no-op expression statement — dead code, safe to delete */ + lines.push(indent(`SELECT VALUE ${esc(source.oppositePath)}`, level + 1)); + lines.push(buildFrom(source.source, level + 1, mutParams)); + const filter = source.filter ? buildFilter(source.filter, mutParams) : undefined; + if (filter) { + lines.push(indent(`WHERE ${filter}`, level + 1)); + } + lines.push(indent(source.cardinality === 'MANY' ? '))' : ')', level)); + break; + } + } + return lines.join('\n'); +}; + /* Render a Filter tree as a SurrealQL condition string. Literal values are never inlined: each one is registered in mutParams via insertParam and referenced as $key. Returns undefined when the filter collapses to nothing (see the and/or/not/nested cases). */ +const buildFilter = (filter: Filter, mutParams: Record, prefix?: string): string | undefined => { + const _prefix = prefix ?? ''; /* parent path prefix (e.g. "field.") supplied by the ONE-cardinality nested case; empty at top level */ + switch (filter.type) { + case 'scalar': { + const path = filter.left === 'id' ? `record::id(${_prefix}id)` : `${_prefix}${esc(filter.left)}`; + const key = insertParam(mutParams, filter.right); + return `${path} ${filter.op} $${key}`; + } + case 'list': { + const items = filter.right.map((i) => `$${insertParam(mutParams, i)}`).join(', '); + const path = `${_prefix}${esc(filter.left)}`; + return `${path} ${filter.op} [${items}]`; + } + case 'ref': { + const path = filter.left === 'id' ? 
`record::id(${_prefix}id)` : `${_prefix}${esc(filter.left)}`; + if (filter.thing) { + const right = filter.thing.flatMap((t) => + filter.right.map((i) => { + const pointer = `${esc(t)}:${esc(i)}`; + const key = insertParam(mutParams, pointer); + return `type::record($${key})`; + }), + ); + if (right.length === 1) { + const r = right[0]; + if (filter.op === 'IN') { + return `${path} = ${r}`; + } + if (filter.op === 'NOT IN') { + return `${path} != ${r}`; + } + if (filter.op === 'CONTAINSANY') { + return `${r} IN ${path}`; + } + if (filter.op === 'CONTAINSNONE') { + return `${r} NOT IN ${path}`; + } + } + return `${path} ${filter.op} [${right.join(', ')}]`; + } + if (filter.right.length === 1) { + if (filter.op === 'IN') { + if (filter.tunnel) { + return `(array::first(${path}) && record::id(array::first(${path})) = $${insertParam(mutParams, filter.right[0])})`; + } + return `${path} && record::id(${path}) = $${insertParam(mutParams, filter.right[0])}`; + } + if (filter.op === 'NOT IN') { + if (filter.tunnel) { + return `(!array::first(${path}) || record::id(array::first(${path})) != $${insertParam(mutParams, filter.right[0])})`; + } + return `${path} && record::id(${path}) != $${insertParam(mutParams, filter.right[0])}`; + } + if (filter.op === 'CONTAINSANY') { + if (filter.tunnel) { + return `$${insertParam(mutParams, filter.right[0])} IN ${path}.map(|$i| record::id($i))`; + } + return `$${insertParam(mutParams, filter.right[0])} IN (${path} ?: []).map(|$i| record::id($i))`; + } + if (filter.op === 'CONTAINSNONE') { + if (filter.tunnel) { + return `$${insertParam(mutParams, filter.right[0])} NOT IN ${path}.map(|$i| record::id($i))`; + } + return `$${insertParam(mutParams, filter.right[0])} NOT IN (${path} ?: []).map(|$i| record::id($i))`; + } + } + if (filter.tunnel) { + return `${path}.map(|$i| record::id($i)) ${filter.op} [${filter.right.map((i) => `$${insertParam(mutParams, i)}`).join(', ')}]`; + } + return `(${path} ?: []).map(|$i| record::id($i)) ${filter.op} 
[${filter.right.map((i) => `$${insertParam(mutParams, i)}`).join(', ')}]`; + } + case 'null': { + if (filter.tunnel) { + return `array::len(${_prefix}${esc(filter.left)}) = 0`; + } + return `${_prefix}${esc(filter.left)} ${filter.op} NONE`; + } + case 'and': { + const conditions = filter.filters + .map((f) => { + const condition = buildFilter(f, mutParams, prefix); + return condition ? `(${condition})` : undefined; + }) + .filter((i) => !!i); + return conditions.length > 0 ? conditions.join(' AND ') : undefined; + } + case 'or': { + const conditions = filter.filters + .map((f) => { + const condition = buildFilter(f, mutParams, prefix); + return condition ? `(${condition})` : undefined; + }) + .filter((i) => !!i); + return conditions.length > 0 ? conditions.join(' OR ') : undefined; + } + case 'not': { + const subFilter = buildFilter(filter.filter, mutParams, prefix); + if (!subFilter) { + return undefined; + } + return `NOT(${subFilter})`; + } + case 'nested': { + const path = `${_prefix}${esc(filter.path)}`; + if (filter.cardinality === 'ONE') { + return buildFilter(filter.filter, mutParams, `${path}.`); + } + const subFilter = buildFilter(filter.filter, mutParams); + if (!subFilter) { + return undefined; + } + return `${path}[WHERE ${subFilter}]`; + } + } +}; + +const buildOrderBy = (sort: Sort[], level: number): string => { + const sorters = sort.map((s) => `${esc(s.field)} ${s.desc ? 'DESC' : 'ASC'}`).join(', '); + return indent(`ORDER BY ${sorters}`, level); +}; + +const indent = (text: string, level: number) => { + const spaces = ' '.repeat(level * 2); + return `${spaces}${text}`; +}; + +/** + * Insert `value` into `params` and return the param key. + */ +const insertParam = (mutParams: Record, value: unknown): string => { + let key = genAlphaId(); + while (mutParams[key] !== undefined) { + key = genAlphaId(); + } + mutParams[key] = value; + return key; +}; + +/** + * Escape identifier. 
Only escapes when identifier contains non-alphanumeric and non-underscore characters or starts with non-alphabetic character. + */ +const esc = (identifier: string): string => { + const escaped = identifier.replace(/⟩/g, '\\⟩'); /* backslash-escape embedded closing angle quotes so they cannot terminate the ⟨…⟩ wrapper */ + // Check if identifier starts with a non-alphabetic character or contains non-alphanumeric characters (excluding underscore) + const needsEscaping = /^[^a-zA-Z]/.test(escaped) || /[^a-zA-Z0-9_]/.test(escaped); + return needsEscaping ? `⟨${identifier}⟩` : identifier; /* FIXME(review): wraps the RAW identifier — the backslash-escaped variant computed above is discarded, so an identifier containing ⟩ can still break out of the ⟨…⟩ quoting (potential surql injection). Should wrap escaped instead. */ +}; diff --git a/src/stateMachine/query/surql2/logical.ts b/src/stateMachine/query/surql2/logical.ts new file mode 100644 index 00000000..2f823693 --- /dev/null +++ b/src/stateMachine/query/surql2/logical.ts @@ -0,0 +1,163 @@ +import type { BQLFilterValue, BQLFilterValueList } from '../../../types/requests/parser'; + +export interface LogicalQuery { + source: DataSource; + projection: Projection; + filter?: Filter; + limit?: number; + offset?: number; + sort?: Sort[]; + cardinality: 'MANY' | 'ONE'; +} + +export type DataSource = TableScan | RecordPointer | SubQuery; + +export interface TableScan { + type: 'table_scan'; + thing: [string, ...string[]]; +} + +export interface RecordPointer { + type: 'record_pointer'; + thing: [string, ...string[]]; + ids: string[]; +} + +export interface SubQuery { + type: 'subquery'; + source: DataSource; + /** + * The link/role field path of the `source` thing to the parent thing. Example: If the parent thing is "User" and the source thing is "Post" then the opposite path is "author", not "authoredPosts". + */ + oppositePath: string; + filter?: Filter; + /** + * The cardinality of the reference in DB. If the surql sub-query returns an array the cardinality is 'MANY'. Otherwise it is 'ONE'. + * For COMPUTED REFERENCE it is always 'MANY'. 
+ */ + cardinality: 'MANY' | 'ONE'; +} + +export interface Projection { + fields: ProjectionField[]; +} + +export type ProjectionField = MetadataField | DataField | ReferenceField | NestedReferenceField | FlexField; + +export interface MetadataField { + type: 'metadata'; + path: '$id' | '$thing'; + alias?: string; +} + +export interface DataField { + type: 'data'; + path: string; + alias?: string; +} + +export interface ReferenceField { + type: 'reference'; + path: string; + alias?: string; + cardinality: 'MANY' | 'ONE'; +} + +export interface NestedReferenceField { + type: 'nested_reference'; + path: string; + projection: Projection; + ids?: string[]; + filter?: Filter; + alias?: string; + cardinality: 'MANY' | 'ONE'; + limit?: number; + offset?: number; + sort?: Sort[]; +} + +export interface FlexField { + type: 'flex'; + path: string; + alias?: string; + cardinality: 'MANY' | 'ONE'; +} + +export type Filter = + | ScalarFilter + | ListFilter + | RefFilter + | LogicalOp + | NotOp + | NestedFilter + | NullFilter + | FalsyFilter; + +export interface ScalarFilter { + type: 'scalar'; + op: '=' | '!=' | '>' | '<' | '>=' | '<=' | 'CONTAINS' | 'CONTAINSNOT'; + left: string; + right: BQLFilterValue; +} + +export interface ListFilter { + type: 'list'; + op: 'IN' | 'NOT IN' | 'CONTAINSALL' | 'CONTAINSANY' | 'CONTAINSNONE'; + left: string; + right: BQLFilterValueList; +} + +export interface RefFilter { + type: 'ref'; + op: 'IN' | 'NOT IN' | 'CONTAINSALL' | 'CONTAINSANY' | 'CONTAINSNONE'; + left: string; + right: string[]; + /** + * Used for reference filter optimization when the values are pointers. If specified the execution may use indexes. + * If not specified the filter will be transformed into `record::id() IN [, ...]`, + * which is a little bit slower than ` IN [type::record(), ...]` when both are executed without indexes. + */ + thing?: [string, ...string[]]; + /** + * True if it's a link field with target "role". 
+ */ + tunnel: boolean; +} + +export interface NullFilter { + type: 'null'; + op: 'IS' | 'IS NOT'; + left: string; + /** + * True if it's a link field with target "role". + */ + tunnel: boolean; +} + +export interface FalsyFilter { + type: 'falsy'; +} +export interface LogicalOp { + type: 'and' | 'or'; + filters: Filter[]; +} + +export interface NotOp { + type: 'not'; + filter: Filter; +} + +export interface NestedFilter { + type: 'nested'; + filter: Filter; + path: string; + cardinality: 'MANY' | 'ONE'; +} + +export type ScalarList = Scalar[]; +export type Scalar = string | number | boolean | null; + +export interface Sort { + field: string; + desc: boolean; +} diff --git a/src/stateMachine/query/surql2/optimize.ts b/src/stateMachine/query/surql2/optimize.ts new file mode 100644 index 00000000..e95d4bf8 --- /dev/null +++ b/src/stateMachine/query/surql2/optimize.ts @@ -0,0 +1,579 @@ +import z from 'zod/v4'; +import type { + DRAFT_EnrichedBormEntity, + DRAFT_EnrichedBormField, + DRAFT_EnrichedBormRelation, + DRAFT_EnrichedBormRoleField, + DRAFT_EnrichedBormSchema, + Index, +} from '../../../types/schema/enriched.draft'; +import type { + DataSource, + Filter, + ListFilter, + LogicalQuery, + NestedFilter, + Projection, + ProjectionField, + RecordPointer, + RefFilter, + ScalarFilter, + SubQuery, + TableScan, +} from './logical'; + +export const optimizeLogicalQuery = (query: LogicalQuery, schema: DRAFT_EnrichedBormSchema): LogicalQuery => { + const thing = getSourceThing(query.source, schema); + const filter = query.filter ? 
optimizeLocalFilter(query.filter) : undefined; + const { source, filter: optimizedFilter } = optimizeSource({ source: query.source, filter, schema, thing }); + + return { + source, + projection: optimizeProjection(query.projection, schema, thing), + filter: optimizedFilter, + cardinality: query.cardinality, + limit: query.limit, + offset: query.offset, + sort: query.sort, + }; +}; + +/** + * If the source is a table scan, convert a filter to a pointer or a relationship traversal. + * Return the optimized source and the other filters that are not converted. + */ +const optimizeSource = (params: { + source: DataSource; + filter?: Filter; + schema: DRAFT_EnrichedBormSchema; + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation; +}): { source: DataSource; filter?: Filter } => { + const { source, filter, schema, thing } = params; + + if (source.type !== 'table_scan') { + return { source, filter }; + } + + // TODO: If we use SurrealDB(v3) REFERENCE, convert computed reference filter into relationship traversal. + + const filters = filter?.type === 'and' ? filter.filters : filter ? 
[filter] : []; + + for (let i = 0; i < filters.length; i++) { /* pass 1: an id =/IN filter becomes a direct record pointer — the cheapest possible source */ + const f = filters[i]; + if (f?.type !== 'scalar' && f?.type !== 'list') { + continue; + } + const recordPointer = convertIdFilterToRecordPointer(f, source); + if (recordPointer) { + return { + source: recordPointer, + filter: mergeFilters(filters.filter((_, j) => j !== i)), /* NOTE(review): mergeFilters is defined outside this chunk — presumably ANDs the remaining filters; confirm */ + }; + } + } + + for (let i = 0; i < filters.length; i++) { /* pass 2: a ref filter becomes a relationship-traversal sub-query */ + const f = filters[i]; + if (f?.type !== 'ref') { + continue; + } + const subQuery = convertRefFilterToRelationshipTraversal(f, schema, thing); + if (subQuery) { + return { + source: subQuery, + filter: mergeFilters(filters.filter((_, j) => j !== i)), + }; + } + } + + for (let i = 0; i < filters.length; i++) { /* pass 3: a nested filter becomes a traversal carrying its own (recursively optimized) filter */ + const f = filters[i]; + if (f?.type !== 'nested') { + continue; + } + const subQuery = convertNestedFilterToRelationshipTraversal(f, schema, thing); + if (subQuery) { + return { + source: subQuery, + filter: mergeFilters(filters.filter((_, j) => j !== i)), + }; + } + } + + return { + source, + filter: filter ? pushDownIndexedFilter(filter, thing) : undefined, /* nothing convertible: keep the table scan and reorder filters to favor indexed ones */ + }; +}; + +const convertIdFilterToRecordPointer = ( + filter: ScalarFilter | ListFilter, + source: TableScan, +): RecordPointer | undefined => { + if (filter.left !== 'id') { + return undefined; + } + if (filter.op === '=' && typeof filter.right === 'string') { + return { + type: 'record_pointer', + thing: [source.thing[0], ...source.thing.slice(1)], /* re-pack to satisfy the [string, ...string[]] tuple type */ + ids: [filter.right], + }; + } + if (filter.op === 'IN' && z.array(z.string()).safeParse(filter.right).success) { + return { + type: 'record_pointer', + thing: [source.thing[0], ...source.thing.slice(1)], + ids: filter.right as string[], + }; + } + return undefined; +}; + +/** + * Return sub query if the filter can be converted to a relationship traversal. 
+ */ +const convertRefFilterToRelationshipTraversal = ( + filter: RefFilter, + schema: DRAFT_EnrichedBormSchema, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, +): SubQuery | undefined => { + const field = thing.fields[filter.left]; + if (!field) { + throw new Error(`Field ${filter.left} not found in ${thing.name}`); + } + const player = getRolePlayer(field, schema, thing); + if (!player) { + return undefined; + } + const { thing: oppositeThing, path: oppositePath, cardinality } = player; + const oppositeThingSchema = getThingSchema(oppositeThing, schema); + const source: RecordPointer = { + type: 'record_pointer', + thing: [oppositeThing, ...oppositeThingSchema.subTypes], + ids: filter.right, + }; + const traversal: SubQuery = { + type: 'subquery', + source, + oppositePath, + cardinality, + }; + return traversal; +}; + +const convertNestedFilterToRelationshipTraversal = ( + filter: NestedFilter, + schema: DRAFT_EnrichedBormSchema, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, +): SubQuery | undefined => { + const field = thing.fields[filter.path]; + if (!field) { + throw new Error(`Field ${filter.path} not found in ${thing.name}`); + } + const player = getRolePlayer(field, schema, thing); + if (!player) { + return undefined; + } + const { thing: oppositeThing, path: oppositePath, cardinality } = player; + const oppositeThingSchema = getThingSchema(oppositeThing, schema); + const source: TableScan = { type: 'table_scan', thing: [oppositeThing, ...oppositeThingSchema.subTypes] }; + const optimized = optimizeSource({ source, filter: filter.filter, schema, thing: oppositeThingSchema }); + const traversal: SubQuery = { + type: 'subquery', + source: optimized.source, + oppositePath, + cardinality, + filter: optimized.filter, + }; + return traversal; +}; + +/** + * Return the opposite role player that target the relation. + * Return undefined if `field` is not a role field or it doesn't have a player that target the relation. 
+ */ +const getRolePlayer = ( + field: DRAFT_EnrichedBormField, + schema: DRAFT_EnrichedBormSchema, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, +): DRAFT_EnrichedBormRoleField['opposite'] | undefined => { + if (field.type !== 'link' && field.type !== 'role') { + return undefined; + } + if (field.type === 'role') { + // We can't convert filter to relationship traversal for role fields that are not played by a link field with target 'relation'. + // This relation is only used as intermediary relation. + const oppositeLinkField = schema[field.opposite.thing]?.fields?.[field.opposite.path]; + if (oppositeLinkField?.type !== 'link') { + throw new Error(`Role field ${field.name} in relation ${thing.name} is not played by a link field`); + } + if (oppositeLinkField.target !== 'relation') { + return undefined; + } + } + return field.opposite; +}; + +const optimizeProjection = ( + projection: Projection, + schema: DRAFT_EnrichedBormSchema, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, +): Projection => { + return { + fields: projection.fields.map((field) => optimizeProjectionField(field, schema, thing)), + }; +}; + +const optimizeProjectionField = ( + field: ProjectionField, + schema: DRAFT_EnrichedBormSchema, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, +): ProjectionField => { + if (field.type === 'metadata' || field.type === 'data' || field.type === 'flex' || field.type === 'reference') { + return field; + } + + const fieldSchema = thing.fields[field.path]; + if (!fieldSchema) { + throw new Error(`Field ${field.path} not found in ${thing.name}`); + } + if (fieldSchema.type !== 'link' && fieldSchema.type !== 'role') { + throw new Error(`Field ${field.path} is not a link or role field and can't be projected as nested reference`); + } + const oppositeThing = schema[fieldSchema.opposite.thing]; + if (!oppositeThing) { + throw new Error(`Thing ${fieldSchema.opposite.thing} not found in schema`); + } + + return { + type: 
'nested_reference', + path: field.path, + projection: optimizeProjection(field.projection, schema, oppositeThing), + ids: field.ids, + filter: field.filter ? optimizeLocalFilter(field.filter) : undefined, + alias: field.alias, + cardinality: field.cardinality, + limit: field.limit, + offset: field.offset, + sort: field.sort, + }; +}; + +/** + * Flatten "and" and "or" filters into a single filter. Order the filters by cost. + * This optimization doesn't consider indexes. + */ +const optimizeLocalFilter = (filter: Filter): Filter | undefined => { + if (filter.type === 'list') { + if (filter.right.length === 0) { + if (filter.op === 'IN' || filter.op === 'CONTAINSANY') { + return { type: 'falsy' }; + } + return undefined; + } + if (filter.right.length === 1) { + switch (filter.op) { + case 'IN': + return { + type: 'scalar', + op: '=', + left: filter.left, + right: filter.right[0], + }; + case 'NOT IN': + return { + type: 'scalar', + op: '!=', + left: filter.left, + right: filter.right[0], + }; + case 'CONTAINSALL': + case 'CONTAINSANY': + return { + type: 'scalar', + op: 'CONTAINS', + left: filter.left, + right: filter.right[0], + }; + case 'CONTAINSNONE': + return { + type: 'scalar', + op: 'CONTAINSNOT', + left: filter.left, + right: filter.right[0], + }; + } + } + } + + if (filter.type === 'ref' && filter.right.length === 0) { + if (filter.op === 'IN' || filter.op === 'CONTAINSALL' || filter.op === 'CONTAINSANY') { + return { type: 'falsy' }; + } + return undefined; + } + + if (filter.type === 'and' || filter.type === 'or') { + let filters = filter.filters.flatMap((f) => { + const optimized = optimizeLocalFilter(f); + if (optimized === undefined) { + return []; + } + // Flatten nested "and" and "or" filters. 
+ if (optimized.type === filter.type) { + return optimized.filters; + } + return [optimized]; + }); + if (filter.type === 'and' && filters.some((f) => f.type === 'falsy')) { + return { type: 'falsy' }; + } + if (filter.type === 'or') { + filters = filters.filter((f) => f.type !== 'falsy'); + // TODO: Combine multiple "=" of the same field inside "or" filter into "in" filter. + } + if (filters.length === 0) { + return undefined; + } + if (filters.length === 1) { + return filters[0]; + } + // TODO: Improve the scoring. + const scored = filters.map((i): { filter: Filter; score: number } => { + if (i.type === 'scalar') { + return { filter: i, score: filterOpScoreMap[i.op] ?? 0 }; + } + if (i.type === 'list') { + const baseScore = filterOpScoreMap[i.op] ?? 0; + return { filter: i, score: baseScore ** i.right.length }; + } + if (i.type === 'ref') { + const baseScore = filterOpScoreMap[i.op] ?? 0; + if (i.thing) { + return { filter: i, score: baseScore ** (i.right.length * i.thing.length) }; + } + // Without thing the filter is a bit slower because we need to call record::id() + return { filter: i, score: baseScore ** i.right.length * 0.9 }; + } + return { filter: i, score: 0 }; + }); + const sorted = scored.sort((a, b) => b.score - a.score); + return { + type: filter.type, + filters: sorted.map((i) => i.filter), + }; + } + + if (filter.type === 'not') { + const inner = optimizeLocalFilter(filter.filter); + if (inner === undefined) { + return undefined; + } + if (inner.type === 'not') { + return inner.filter; + } + if (inner.type === 'scalar') { + if (inner.op === '=') { + return { + type: 'scalar', + op: '!=', + left: inner.left, + right: inner.right, + }; + } + if (inner.op === '!=') { + return { + type: 'scalar', + op: '=', + left: inner.left, + right: inner.right, + }; + } + } + return { + type: 'not', + filter: inner, + }; + } + + if (filter.type === 'nested') { + const optimizedSubFilter = optimizeLocalFilter(filter.filter); + if (!optimizedSubFilter) { + return 
undefined; + } + return { + type: 'nested', + filter: optimizedSubFilter, + path: filter.path, + cardinality: filter.cardinality, + }; + } + + return filter; +}; + +const filterOpScoreMap: Record = { + '=': 0.9, + '>': 0.5, + '<': 0.5, + '>=': 0.5, + '<=': 0.5, + IN: 0.5, + 'NOT IN': 0.5, + CONTAINSALL: 0.3, + CONTAINSANY: 0.4, + CONTAINSNONE: 0.3, +}; + +/** + * Put indexed filters first. Only one set of indexed filter is pushed down. + * This function assumes all link/role fields are indexed. + */ +const pushDownIndexedFilter = ( + filter: Filter, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, +): Filter => { + // Push down indexed filters from "and" filter with composite indexes. + if (filter.type === 'and') { + const filterMap = Object.fromEntries( + filter.filters + .map((f, i): [string, { index: number; filter: Filter; score: number }] | undefined => { + if (f.type !== 'scalar') { + return undefined; + } + const score = filterOpScoreMap[f.op]; + if (!score) { + return undefined; + } + return [f.left, { filter: f, index: i, score }]; + }) + .filter((i) => i !== undefined), + ); + // Longest composite indexes first. + const compositeIndexes = thing.indexes + .filter((index) => index.type !== 'single') + .sort((a, b) => b.fields.length - a.fields.length); + if (compositeIndexes.length > 0) { + const compositeFilters: { filters: { index: number; filter: Filter }[]; score: number }[] = []; + for (const index of compositeIndexes) { + const fs: { index: number; filter: Filter; score: number }[] = []; + for (const field of index.fields) { + const filter = filterMap[field]; + if (!filter || fs.some((f) => f.index === filter.index)) { + // Avoid duplicate filters. 
+ break; + } + fs.push(filter); + } + if (fs.length > 0) { + compositeFilters.push({ filters: fs, score: fs.reduce((a, b) => a + a * b.score, 1) }); + } + } + compositeFilters.sort((a, b) => b.score - a.score); + const [longestCompositeFilter] = compositeFilters; + if (longestCompositeFilter && longestCompositeFilter.score > 1) { + return { + type: 'and', + filters: [ + ...longestCompositeFilter.filters.map((f) => f.filter), + ...filter.filters.filter((_, i) => !longestCompositeFilter.filters.some((f) => f.index === i)), + ], + }; + } + } + } + + // Push down indexed filters from "and" or "or" filter with single indexes. + if (filter.type === 'and' || filter.type === 'or') { + const scored = filter.filters.map((f, index) => { + if (f.type === 'scalar' && f.op === '=') { + const field = thing.fields[f.left]; + if (isIndexed(field, thing.indexes)) { + return { filter: f, score: 1, index }; + } + } else if (f.type === 'list' && f.op === 'IN') { + const field = thing.fields[f.left]; + if (isIndexed(field, thing.indexes)) { + const score = 0.5 ** (f.right.length - 1); + return { filter: f, score, index }; + } + } + return { filter: f, score: 0, index }; + }); + const sorted = scored.sort((a, b) => b.score - a.score); + const [first] = sorted; + const indexed = first && first.score !== 0 ? first.filter : undefined; + // Convert indexed filter with IN operator to an OR filter of "=" scalar filters. + const optimized: Filter | undefined = + indexed?.type === 'list' && indexed.op === 'IN' + ? { + type: 'or', + filters: indexed.right.map((r) => ({ type: 'scalar', op: '=', left: indexed.left, right: r })), + } + : indexed; + return { + type: filter.type, + filters: optimized ? 
[optimized, ...filter.filters.filter((_, i) => i !== first.index)] : filter.filters, + }; + } + + return filter; +}; + +const mergeFilters = (filters: Filter[]): Filter | undefined => { + if (filters.length === 0) { + return undefined; + } + if (filters.length === 1) { + return filters[0]; + } + return { type: 'and', filters }; +}; + +const isIndexed = (field: DRAFT_EnrichedBormField, indexes: Index[]): boolean => { + // SurrealDB reference fields are assumed to be indexed. + return ( + field.type === 'role' || + field.type === 'link' || + indexes.some( + (i) => + (i.type === 'single' && i.field === field.name) || (i.type === 'composite' && i.fields.includes(field.name)), + ) + ); +}; + +const getThingSchema = ( + thing: string, + schema: DRAFT_EnrichedBormSchema, +): DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation => { + const thingSchema = schema[thing]; + if (!thingSchema) { + throw new Error(`Thing ${thing} not found in schema`); + } + return thingSchema; +}; + +const getSourceThing = ( + source: DataSource, + schema: DRAFT_EnrichedBormSchema, +): DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation => { + if (source.type === 'table_scan' || source.type === 'record_pointer') { + const thingSchema = getThingSchema(source.thing[0], schema); + return thingSchema; + } + + const subThing = getSourceThing(source.source, schema); + const field = subThing.fields[source.oppositePath]; + if (!field) { + throw new Error(`Field ${source.oppositePath} not found in ${subThing.name}`); + } + if (field.type === 'constant' || field.type === 'computed' || field.type === 'data' || field.type === 'ref') { + throw new Error(`Invalid source: ${JSON.stringify(source)}`); + } + const thing = schema[field.opposite.thing]; + if (!thing) { + throw new Error(`Thing ${field.opposite.thing} not found in schema`); + } + return thing; +}; diff --git a/src/stateMachine/query/surql2/processResults.ts b/src/stateMachine/query/surql2/processResults.ts new file mode 100644 index 
00000000..771012f7 --- /dev/null +++ b/src/stateMachine/query/surql2/processResults.ts @@ -0,0 +1,155 @@ +import type { BQLQuery, NestedBQL } from '../../../types/requests/parser'; +import type { + DRAFT_EnrichedBormEntity, + DRAFT_EnrichedBormRelation, + DRAFT_EnrichedBormSchema, +} from '../../../types/schema/enriched.draft'; + +type ResultObject = Record; + +const isResultObject = (value: unknown): value is ResultObject => { + return value !== null && typeof value === 'object' && !Array.isArray(value); +}; + +const isNullish = (value: unknown): value is null | undefined => { + return value === null || value === undefined; +}; + +const isEmptyArray = (value: unknown): boolean => { + return Array.isArray(value) && value.length === 0; +}; + +export const processResults = (params: { + batch: BQLQuery[]; + results: unknown[]; + schema: DRAFT_EnrichedBormSchema; + metadata: boolean; + returnNulls: boolean; +}) => { + const { batch, results, schema, metadata, returnNulls } = params; + return batch.map((query, i) => processQueryResult({ query, result: results[i], schema, metadata, returnNulls })); +}; + +const processQueryResult = (params: { + query: BQLQuery; + result: unknown; + schema: DRAFT_EnrichedBormSchema; + metadata: boolean; + returnNulls: boolean; +}) => { + const { query, result, schema, metadata, returnNulls } = params; + if (!result) { + return result ?? 
null; + } + const thing = schema[query.$thing]; + if (!thing) { + throw new Error(`Thing ${query.$thing} not found in schema`); + } + if (Array.isArray(result)) { + return result.map((r) => transformResultObject({ query, result: r, thing, schema, metadata, returnNulls })); + } + return transformResultObject({ query, result, thing, schema, metadata, returnNulls }); +}; + +const processNestedResult = (params: { + query: NestedBQL; + result: unknown; + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation; + schema: DRAFT_EnrichedBormSchema; + metadata: boolean; + returnNulls: boolean; +}) => { + const { query, result, thing, schema, metadata, returnNulls } = params; + if (Array.isArray(result)) { + if (result.length === 0) { + return null; + } + return result.map((r) => transformResultObject({ query, result: r, thing, schema, metadata, returnNulls })); + } + return transformResultObject({ query, result, thing, schema, metadata, returnNulls }); +}; + +const transformResultObject = (params: { + query: BQLQuery | NestedBQL; + result: unknown; + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation; + schema: DRAFT_EnrichedBormSchema; + metadata: boolean; + returnNulls: boolean; +}): ResultObject | null => { + const { query, result, thing, schema, metadata, returnNulls } = params; + if (!isResultObject(result)) { + return null; + } + + const newResult: ResultObject = {}; + + if (metadata) { + newResult.$thing = result.$thing ?? null; + newResult.$id = result.$id ?? null; + newResult.$thingType = thing.type; + } + + const fieldsToProcess = query.$fields ?? Object.keys(thing.fields); + + for (const fieldQuery of fieldsToProcess) { + const path = typeof fieldQuery === 'string' ? fieldQuery : fieldQuery.$path; + const alias = typeof fieldQuery === 'string' ? fieldQuery : (fieldQuery.$as ?? path); + + if (query.$excludedFields?.includes(path)) { + continue; + } + + if (path === '$id' || path === '$thing') { + newResult[alias] = result[alias] ?? 
null; + continue; + } + + const field = thing.fields[path]; + + if (!field) { + throw new Error(`Field ${path} not found in ${thing.name}`); + } + + if (field.type === 'constant') { + newResult[alias] = field.value; + continue; + } + + if (field.type === 'computed') { + newResult[alias] = field.fn(result); + continue; + } + + const value = result[alias]; + + if (field.type === 'data') { + if (!returnNulls && isNullish(value)) { + continue; + } + newResult[alias] = value ?? null; + continue; + } + + if (!returnNulls && (isNullish(value) || isEmptyArray(value))) { + continue; + } + + if (typeof fieldQuery === 'string' || field.type === 'ref') { + newResult[alias] = isEmptyArray(value) ? null : (value ?? null); + continue; + } + + const opposite = schema[field.opposite.thing]; + newResult[alias] = processNestedResult({ + query: fieldQuery, + result: value, + thing: opposite, + schema, + metadata, + returnNulls, + }); + } + + return newResult; +}; diff --git a/src/stateMachine/query/surql2/query.ts b/src/stateMachine/query/surql2/query.ts new file mode 100644 index 00000000..2c6b137a --- /dev/null +++ b/src/stateMachine/query/surql2/query.ts @@ -0,0 +1,24 @@ +import type { SimpleSurrealClient } from '../../../adapters/surrealDB/client'; +import { logDebug } from '../../../logger'; +import type { BormConfig } from '../../../types'; +import { VERSION } from '../../../version'; +import type { SurqlParams } from './buildSurql'; + +export const query = async (props: { + client: SimpleSurrealClient; + queries: string[]; + config: BormConfig; + params: SurqlParams; +}): Promise => { + const { client, queries, config, params } = props; + const batchedQuery = `BEGIN TRANSACTION; +${queries.join(';\n')}; +COMMIT TRANSACTION;`; + + if (config.query?.debugger) { + logDebug(`batchedQuery[${VERSION}]`, JSON.stringify({ batchedQuery })); + } + + const result = await client.query(batchedQuery, params); + return result as any[][]; +}; diff --git a/src/stateMachine/query/surql2/run.ts 
b/src/stateMachine/query/surql2/run.ts new file mode 100644 index 00000000..ac4bdc4d --- /dev/null +++ b/src/stateMachine/query/surql2/run.ts @@ -0,0 +1,34 @@ +import type { SimpleSurrealClient } from '../../../adapters/surrealDB/client'; +import type { BormConfig } from '../../../types'; +import { BQLQueryParser } from '../../../types/requests/parser'; +import type { DRAFT_EnrichedBormSchema } from '../../../types/schema/enriched.draft'; +import { buildLogicalQuery } from './buildLogical'; +import { buildSurql, type SurqlParams } from './buildSurql'; +import { optimizeLogicalQuery } from './optimize'; +import { processResults } from './processResults'; +import { query } from './query'; + +export const runSurrealDbQueryMachine2 = async ( + bql: unknown[], + schema: DRAFT_EnrichedBormSchema, + config: BormConfig, + client: SimpleSurrealClient, +) => { + if (bql.length === 0) { + return []; + } + const bqlQueries = bql.map((q) => BQLQueryParser.parse(q)); + const logicalQueries = bqlQueries.map((q) => buildLogicalQuery(q, schema, !config.query?.noMetadata)); + const optimizedQueries = logicalQueries.map((q) => optimizeLogicalQuery(q, schema)); + const params: SurqlParams = {}; + const surqlQueries = optimizedQueries.map((q) => buildSurql(q, params)); + const result = await query({ client, queries: surqlQueries, config, params }); + const finalResult = processResults({ + batch: bqlQueries, + results: result, + schema, + metadata: !config.query?.noMetadata, + returnNulls: !!config.query?.returnNulls, + }); + return finalResult; +}; diff --git a/src/stateMachine/query/tql/machine.ts b/src/stateMachine/query/tql/machine.ts index a21c56bc..41a33dd5 100644 --- a/src/stateMachine/query/tql/machine.ts +++ b/src/stateMachine/query/tql/machine.ts @@ -1,6 +1,11 @@ import { assertDefined } from '../../../helpers'; +import { logDebug } from '../../../logger'; import { createMachine, interpret, invoke, reduce, state, transition } from '../../../robot3'; import type { BormConfig, 
DBHandles, EnrichedBormSchema, EnrichedBQLQuery, RawBQLQuery } from '../../../types'; +import { VERSION } from '../../../version'; +import { cleanQueryRes } from '../bql/clean'; +import { enrichBQLQuery } from '../bql/enrich'; +import { postHooks } from '../postHook'; import { buildTQLQuery } from './build'; import { parseTQLQuery } from './parse'; import { runTQLQuery } from './run'; @@ -8,7 +13,7 @@ import { runTQLQuery } from './run'; export type TypeDbMachineContext = { bql: { raw: RawBQLQuery[]; - queries: EnrichedBQLQuery[]; + queries?: EnrichedBQLQuery[]; res?: any[]; }; tql: { @@ -21,6 +26,16 @@ export type TypeDbMachineContext = { error?: string | null; }; +const updateBqlReq = (ctx: TypeDbMachineContext, event: any) => { + if (!event.data) { + return ctx; + } + return { + ...ctx, + bql: { ...ctx.bql, queries: event.data }, + }; +}; + const updateBqlRes = (ctx: TypeDbMachineContext, event: any): TypeDbMachineContext => { if (!event.data) { return ctx; @@ -63,10 +78,18 @@ const errorTransition = transition( ); export const typeDbQueryMachine = createMachine( - 'build', + 'enrich', { + enrich: invoke( + async (ctx: TypeDbMachineContext) => { + logDebug(`originalBQLQuery[${VERSION}]`, JSON.stringify(ctx.bql.raw)); + return enrichBQLQuery(ctx.bql.raw, ctx.schema); + }, + transition('done', 'build', reduce(updateBqlReq)), + errorTransition, + ), build: invoke( - async (ctx: TypeDbMachineContext) => buildTQLQuery({ queries: ctx.bql.queries, schema: ctx.schema }), + async (ctx: TypeDbMachineContext) => buildTQLQuery({ queries: ctx.bql.queries ?? [], schema: ctx.schema }), transition('done', 'run', reduce(updateTqlReq)), errorTransition, ), @@ -85,11 +108,22 @@ export const typeDbQueryMachine = createMachine( async (ctx: TypeDbMachineContext) => parseTQLQuery({ rawBqlRequest: ctx.bql.raw, - enrichedBqlQuery: ctx.bql.queries, + enrichedBqlQuery: ctx.bql.queries ?? 
[], schema: ctx.schema, config: ctx.config, rawTqlRes: assertDefined(ctx.tql.res), }), + transition('done', 'postHooks', reduce(updateBqlRes)), + errorTransition, + ), + postHooks: invoke( + async (ctx: TypeDbMachineContext) => + postHooks(ctx.schema, assertDefined(ctx.bql.queries), assertDefined(ctx.bql.res)), + transition('done', 'clean', reduce(updateBqlRes)), + errorTransition, + ), + clean: invoke( + async (ctx: TypeDbMachineContext) => cleanQueryRes(ctx.config, assertDefined(ctx.bql.res)), transition('done', 'success', reduce(updateBqlRes)), errorTransition, ), @@ -119,16 +153,12 @@ const awaitQueryMachine = async (context: TypeDbMachineContext) => { export const runTypeDbQueryMachine = async ( bql: RawBQLQuery[], - enrichedBql: EnrichedBQLQuery[], schema: EnrichedBormSchema, config: BormConfig, handles: DBHandles, ) => { return awaitQueryMachine({ - bql: { - raw: bql, - queries: enrichedBql, - }, + bql: { raw: bql }, tql: {}, schema: schema, config: config, diff --git a/src/stateMachine/query/tql/parse.ts b/src/stateMachine/query/tql/parse.ts index d6944679..202cfbae 100644 --- a/src/stateMachine/query/tql/parse.ts +++ b/src/stateMachine/query/tql/parse.ts @@ -94,12 +94,12 @@ const parseFields = (obj: any, schema: EnrichedBormSchema) => { //if there are multiValKeys, we replace it in the Object if (multiValKeys?.length > 0) { - multiValKeys.forEach((multiValKey) => { + for (const multiValKey of multiValKeys) { const multiValKeyWithout$multiVal = multiValKey.replace(/\.\$multiVal$/, ''); const realValue = obj[multiValKey][0][multiValKeyWithout$multiVal].attribute; //there is an easier way for sure // eslint-disable-next-line no-param-reassign obj[dataFieldsKey][multiValKeyWithout$multiVal] = realValue; - }); + } } const dataFields = obj[dataFieldsKey]; @@ -228,12 +228,12 @@ const parseRoleFields = ( ) => { const roleFieldsRes: Record = {}; - roleFields.forEach((roleField) => { + for (const roleField of roleFields) { const { $roleFields, $metaData, 
$cardinality } = roleField; const { as, justId, idNotIncluded, filterByUnique } = parseMetaData($metaData); if (as === null) { - return; + continue; } const items = $roleFields.map((item) => { @@ -247,7 +247,11 @@ const parseRoleFields = ( const parsedRoleFields = parseRoleFields(roleFields, schema, config); const resDataFields = { ...parsedDataFields }; if (idNotIncluded === 'true') { - currentSchema?.idFields?.forEach((field) => delete resDataFields[field]); + if (currentSchema?.idFields) { + for (const field of currentSchema.idFields) { + delete resDataFields[field]; + } + } } return { ...resDataFields, @@ -262,7 +266,7 @@ const parseRoleFields = ( } else if (config.query?.returnNulls) { roleFieldsRes[as] = null; } - }); + } return roleFieldsRes; }; @@ -274,12 +278,12 @@ const parseLinkFields = ( ) => { const linkFieldsRes: Record = {}; - linkFields.forEach((linkField) => { + for (const linkField of linkFields) { const { $linkFields, $metaData, $cardinality } = linkField; const { as, justId, idNotIncluded, filterByUnique } = parseMetaData($metaData); if (as === null) { - return; + continue; } const items = $linkFields.map((item) => { @@ -294,7 +298,9 @@ const parseLinkFields = ( const resDataFields = { ...parsedDataFields }; if (idNotIncluded === 'true') { - currentSchema.idFields?.forEach((field) => delete resDataFields[field]); + for (const field of currentSchema.idFields ?? []) { + delete resDataFields[field]; + } } return { @@ -314,7 +320,7 @@ const parseLinkFields = ( : config.query?.returnNulls ? 
null : undefined; - }); + } return linkFieldsRes; }; diff --git a/src/types/config/base.ts b/src/types/config/base.ts index 95e328ac..413ad736 100644 --- a/src/types/config/base.ts +++ b/src/types/config/base.ts @@ -6,6 +6,7 @@ export type QueryConfig = { returnNulls?: boolean; simplifiedLinks?: boolean; debugger?: boolean; + legacySurrealDBAdapter?: boolean; }; export type MutationConfig = { diff --git a/src/types/requests/parser.ts b/src/types/requests/parser.ts new file mode 100644 index 00000000..5cdda8b7 --- /dev/null +++ b/src/types/requests/parser.ts @@ -0,0 +1,172 @@ +import { z } from 'zod/v4'; + +export const BQLFilterValueParser = z.json(); + +export type BQLFilterValue = z.infer; + +export const BQLFilterValueListParser = z.array(BQLFilterValueParser); + +export type BQLFilterValueList = z.infer; + +export type BQLFilter = { + $or?: BQLFilter[]; + $not?: BQLFilter; + /** + * Depending on the field type, only a subset of this type is allowed. + */ + [key: string]: BQLFilterValue | BQLFilterValueList | NestedBQLFilter | NestedBQLFilter[] | undefined; +}; + +export interface NestedBQLFilter extends BQLFilter { + $exists?: boolean; + // Scalar Operators + $eq?: BQLFilterValue; + $neq?: BQLFilterValue; + $gt?: BQLFilterValue; + $lt?: BQLFilterValue; + $gte?: BQLFilterValue; + $lte?: BQLFilterValue; + $contains?: BQLFilterValue; + $containsNot?: BQLFilterValue; + // List Operators + $in?: BQLFilterValueList; + $nin?: BQLFilterValueList; + $containsAll?: BQLFilterValueList; + $containsAny?: BQLFilterValueList; + $containsNone?: BQLFilterValueList; +} + +export const StrictBQLValueFilterParser = z.strictObject({ + $exists: z.boolean().optional(), + $eq: BQLFilterValueParser.optional(), + $neq: BQLFilterValueParser.optional(), + $gt: BQLFilterValueParser.optional(), + $lt: BQLFilterValueParser.optional(), + $gte: BQLFilterValueParser.optional(), + $lte: BQLFilterValueParser.optional(), + $contains: BQLFilterValueParser.optional(), + $containsNot: 
BQLFilterValueParser.optional(), + $in: BQLFilterValueListParser.optional(), + $nin: BQLFilterValueListParser.optional(), + $containsAll: BQLFilterValueListParser.optional(), + $containsAny: BQLFilterValueListParser.optional(), + $containsNone: BQLFilterValueListParser.optional(), +}); + +export const BQLFilterParser: z.ZodType = z.lazy(() => + z + .object({ + // Recursive Operators + $or: z.array(z.lazy(() => BQLFilterParser)).optional(), + $not: z.lazy(() => BQLFilterParser).optional(), + }) + .catchall( + // "Everything else" (Custom fields) + z.union([ + BQLFilterValueParser, + BQLFilterValueListParser, + z.lazy(() => z.union([NestedBQLFilterParser, z.array(NestedBQLFilterParser)])), + ]), + ), +); + +export const NestedBQLFilterParser: z.ZodType = z.lazy(() => + z + .object({ + // Recursive Operators + $or: z.array(z.lazy(() => BQLFilterParser)).optional(), + $not: z.lazy(() => BQLFilterParser).optional(), + // Exists Operators + $exists: z.boolean().optional(), + // Scalar Value Operators + $eq: BQLFilterValueParser.optional(), + $neq: BQLFilterValueParser.optional(), + $gt: BQLFilterValueParser.optional(), + $lt: BQLFilterValueParser.optional(), + $gte: BQLFilterValueParser.optional(), + $lte: BQLFilterValueParser.optional(), + $contains: BQLFilterValueParser.optional(), + $containsNot: BQLFilterValueParser.optional(), + // List Value Operators + $in: BQLFilterValueListParser.optional(), + $nin: BQLFilterValueListParser.optional(), + $containsAll: BQLFilterValueListParser.optional(), + $containsAny: BQLFilterValueListParser.optional(), + $containsNone: BQLFilterValueListParser.optional(), + }) + .catchall( + // "Everything else" (Custom fields) + z.union([ + BQLFilterValueParser, + BQLFilterValueListParser, + z.lazy(() => z.union([NestedBQLFilterParser, z.array(NestedBQLFilterParser)])), + ]), + ), +); + +const BaseBQLParser = z.object({ + $id: z.union([z.string(), z.array(z.string())]).optional(), + $filter: z.union([BQLFilterParser, 
z.array(BQLFilterParser)]).optional(), + $fields: z.array(z.union([z.string(), z.lazy(() => NestedBQLParser)])).optional(), + $excludedFields: z.array(z.string()).optional(), + $limit: z.number().optional(), + $offset: z.number().optional(), + $sort: z + .array( + z.union([ + z.object({ + field: z.string(), + desc: z.boolean().optional(), + }), + z.string(), + ]), + ) + .optional(), +}); + +interface BaseBQL { + $id?: string | string[]; + $filter?: BQLFilter | BQLFilter[]; + $fields?: (string | NestedBQL)[]; + $excludedFields?: string[]; + $limit?: number; + $offset?: number; + $sort?: ({ field: string; desc?: boolean } | string)[]; +} + +export const NestedBQLParser: z.ZodType = BaseBQLParser.extend({ + $path: z.string(), + $as: z.string().optional(), +}); + +export interface NestedBQL extends BaseBQL { + $path: string; + $as?: string; +} + +export type BQLField = string | NestedBQL; + +export const BQLQueryParser = BaseBQLParser.extend({ + $thing: z.string().optional(), + $entity: z.string().optional(), + $relation: z.string().optional(), +}) + .superRefine((data, ctx) => { + if (!data.$thing && !data.$entity && !data.$relation) { + ctx.addIssue({ + code: 'custom', + message: 'Query must contain at least one of: $thing, $entity, or $relation', + path: ['$thing'], + }); + } + }) + .transform((data) => { + const { $thing, $entity, $relation, ...rest } = data; + + return { + ...rest, + $thing: $thing ?? $entity ?? 
($relation as string), // Guaranteed to exist by superRefine + }; + }); + +export type BQLQuery = z.infer; diff --git a/src/types/schema/base.ts b/src/types/schema/base.ts index 770b8b57..9d1e0d3c 100644 --- a/src/types/schema/base.ts +++ b/src/types/schema/base.ts @@ -1,4 +1,5 @@ import type { DataField, DBConnector, EnrichedBQLMutationBlock, LinkField, RefField, RoleField } from '..'; +import type { Index } from './enriched.draft'; export type BormSchema = { entities: { [s: string]: BormEntity }; @@ -14,6 +15,7 @@ export type BormEntity = linkFields?: readonly LinkField[]; refFields?: { [key: string]: RefField }; hooks?: Hooks; + indexes?: Index[]; } | { idFields: readonly string[]; @@ -22,11 +24,13 @@ export type BormEntity = linkFields?: readonly LinkField[]; refFields?: { [key: string]: RefField }; hooks?: Hooks; + indexes?: Index[]; }; export type BormRelation = BormEntity & { defaultDBConnector: DBConnector & { path: string }; /// mandatory in relations roles?: { [key: string]: RoleField }; + indexes?: Index[]; }; export type BormOperation = 'create' | 'update' | 'delete' | 'link' | 'unlink' | 'replace' | 'match'; diff --git a/src/types/schema/enriched.draft.ts b/src/types/schema/enriched.draft.ts new file mode 100644 index 00000000..72f38933 --- /dev/null +++ b/src/types/schema/enriched.draft.ts @@ -0,0 +1,104 @@ +/** + * These types are design for SurrealDB query in mind. For other DBs or for mutation, they may be missing some fields. 
+ */ + +import type { DataField, DiscreteCardinality } from './fields'; + +export type DRAFT_EnrichedBormSchema = Record; + +export interface DRAFT_EnrichedBormEntity extends EnrichedBormThing { + type: 'entity'; + fields: Record; +} + +export interface DRAFT_EnrichedBormRelation extends EnrichedBormThing { + type: 'relation'; + fields: Record; +} + +interface EnrichedBormThing { + name: string; + idFields: [string, ...string[]]; + extends?: string; + subTypes: string[]; + indexes: Index[]; +} + +export type DRAFT_EnrichedBaseBormField = + | DRAFT_EnrichedBormConstantField + | DRAFT_EnrichedBormComputedField + | DRAFT_EnrichedBormDataField + | DRAFT_EnrichedBormLinkField + | DRAFT_EnrichedBormRefField; + +export type DRAFT_EnrichedBormField = DRAFT_EnrichedBaseBormField | DRAFT_EnrichedBormRoleField; + +export interface DRAFT_EnrichedBormConstantField { + type: 'constant'; + name: string; + contentType: DataField['contentType']; + cardinality: DiscreteCardinality; + value: unknown; +} + +export interface DRAFT_EnrichedBormComputedField { + type: 'computed'; + name: string; + contentType: DataField['contentType']; + cardinality: DiscreteCardinality; + fn: (currentNode: Record) => unknown; +} + +export interface DRAFT_EnrichedBormDataField { + type: 'data'; + name: string; + contentType: DataField['contentType']; + cardinality: DiscreteCardinality; + unique: boolean; +} + +export interface DRAFT_EnrichedBormRoleField { + type: 'role'; + name: string; + cardinality: DiscreteCardinality; + opposite: { + thing: string; + path: string; + cardinality: DiscreteCardinality; + }; +} + +export interface DRAFT_EnrichedBormLinkField { + type: 'link'; + name: string; + cardinality: DiscreteCardinality; + target: 'relation' | 'role'; + opposite: { + thing: string; + path: string; + cardinality: DiscreteCardinality; + }; +} + +/** + * Content type REF allows referencing any record in the database. 
+ * Content type FLEX allows storing any type of data including reference to any record in the database. + */ +export interface DRAFT_EnrichedBormRefField { + type: 'ref'; + name: string; + contentType: 'REF' | 'FLEX'; + cardinality: 'ONE' | 'MANY'; +} + +export type Index = SingleIndex | CompositeIndex; + +export interface SingleIndex { + type: 'single'; + field: string; +} + +export interface CompositeIndex { + type: 'composite'; + fields: [string, ...string[]]; +} diff --git a/src/types/schema/fields.ts b/src/types/schema/fields.ts index 9d6c3e5e..216ea271 100644 --- a/src/types/schema/fields.ts +++ b/src/types/schema/fields.ts @@ -35,6 +35,7 @@ export type LinkField = BormField & { | { target: 'role'; filter?: Filter | Filter[]; + targetRole: string; } | { target: 'relation'; diff --git a/tests/bench.sh b/tests/bench.sh index 047b21f4..03a210ae 100755 --- a/tests/bench.sh +++ b/tests/bench.sh @@ -59,9 +59,9 @@ DATA_FILE="./tests/adapters/surrealDB/mocks/${LINK}Data.surql" NAMESPACE="test_${LINK}" # Start the container -docker run --detach --rm --pull always -v $(pwd)/tests:/tests -p 8000:8000 --name $CONTAINER_NAME surrealdb/surrealdb:latest start --allow-all -u $USER -p $PASSWORD --bind 0.0.0.0:8000 || { echo "Failed to start SurrealDB container"; exit 1; } +docker run --detach --rm --pull always -v "$(pwd)/tests":/tests -p 8000:8000 --name $CONTAINER_NAME surrealdb/surrealdb:latest start --allow-all -u $USER -p $PASSWORD --bind 0.0.0.0:8000 || { echo "Failed to start SurrealDB container"; exit 1; } -until [ "`docker inspect -f {{.State.Running}} $CONTAINER_NAME`"=="true" ]; do +until [ "$(docker inspect -f {{.State.Running}} $CONTAINER_NAME)" == "true" ]; do sleep 0.1; done; diff --git a/tests/benchTests.sh b/tests/benchTests.sh new file mode 100755 index 00000000..2bbd6fc7 --- /dev/null +++ b/tests/benchTests.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash + +set -e + +CONTAINER_NAME=borm_bench +USER=test +PASSWORD=test + + +# Function to clean up the container 
+cleanup() { + echo "Stopping and removing container..." + docker stop ${CONTAINER_NAME} >/dev/null 2>&1 + exit ${EXIT_CODE:-1} +} + +# Set up trap to call cleanup function on script exit +trap cleanup EXIT INT TERM + +# Function to parse command line arguments +parse_args() { + VITEST_ARGS=() + for arg in "$@" + do + case $arg in + -link=*) + # We'll ignore this parameter now + ;; + *) + VITEST_ARGS+=("$arg") + ;; + esac + done +} + +# Parse the command line arguments +parse_args "$@" + +# Check if BORM_TEST_SURREALDB_LINK_MODE is set and valid +if [ -z "$BORM_TEST_SURREALDB_LINK_MODE" ]; then + echo "Error: BORM_TEST_SURREALDB_LINK_MODE environment variable is not set" + exit 1 +elif [ "$BORM_TEST_SURREALDB_LINK_MODE" != "edges" ] && [ "$BORM_TEST_SURREALDB_LINK_MODE" != "refs" ]; then + echo "Error: BORM_TEST_SURREALDB_LINK_MODE must be either 'edges' or 'refs'" + exit 1 +fi + +# Set LINK based on BORM_TEST_SURREALDB_LINK_MODE +if [ "$BORM_TEST_SURREALDB_LINK_MODE" == "edges" ]; then + LINK="edges" +else + LINK="refs" +fi + +# Set variables based on LINK +SCHEMA_FILE="./tests/adapters/surrealDB/mocks/${LINK}Schema.surql" +DATA_FILE="./tests/adapters/surrealDB/mocks/${LINK}Data.surql" +NAMESPACE="test_${LINK}" + +# Start the container +docker run --detach --rm --pull always -v "$(pwd)/tests":/tests -p 8000:8000 --name $CONTAINER_NAME surrealdb/surrealdb:latest start --allow-all -u $USER -p $PASSWORD --bind 0.0.0.0:8000 || { echo "Failed to start SurrealDB container"; exit 1; } + +until [ "$(docker inspect -f {{.State.Running}} $CONTAINER_NAME)" == "true" ]; do + sleep 0.1; +done; + + + + +# Setup surrealdb database for the surrealdb test +# Create the namespace, database, and user +docker exec -i $CONTAINER_NAME ./surreal sql -u $USER -p $PASSWORD <(obj: T, key = '$id'): T => { @@ -48,19 +45,6 @@ export const deepSort = (obj: T, key = '$id'): T => { return obj; }; -export const deepRemoveMetaData = (obj: object) => { - const removeMeta = ({ value }: 
TraversalCallbackContext) => { - if (value && typeof value === 'object' && '$id' in value) { - const metas = Object.keys(value).filter((k) => k.startsWith('$')); - metas.forEach((k) => delete value[k]); - const symbols = Object.keys(value).filter((s) => typeof s === 'symbol'); - symbols.forEach((s) => delete value[s]); - } - return value; - }; - return produce(obj, (draft) => traverse(draft, removeMeta)); -}; - const checkRecursive = (obj: T): T => { if (Array.isArray(obj)) { return expect.arrayContaining(obj.map(checkRecursive)) as unknown as T; diff --git a/tests/mocks/schema.ts b/tests/mocks/schema.ts index 2f954914..36d3e8a4 100644 --- a/tests/mocks/schema.ts +++ b/tests/mocks/schema.ts @@ -55,6 +55,7 @@ export const schema: BormSchema = { cardinality: 'MANY', plays: 'user', target: 'role', + targetRole: 'accounts', }, { path: 'sessions', @@ -62,6 +63,7 @@ export const schema: BormSchema = { cardinality: 'MANY', plays: 'user', target: 'role', + targetRole: 'sessions', }, { path: 'spaces', @@ -69,6 +71,7 @@ export const schema: BormSchema = { cardinality: 'MANY', plays: 'users', target: 'role', + targetRole: 'spaces', }, { path: 'user-tags', @@ -275,6 +278,7 @@ export const schema: BormSchema = { relation: 'Space-User', plays: 'spaces', target: 'role', + targetRole: 'users', }, { path: 'objects', @@ -362,6 +366,7 @@ export const schema: BormSchema = { relation: 'User-Accounts', plays: 'accounts', target: 'role', + targetRole: 'user', /// rights => Either you want to make it 1) read only 2)replace only 3) update only 4) delete only 5) create only ... 
}, ], @@ -381,6 +386,7 @@ export const schema: BormSchema = { relation: 'User-Sessions', plays: 'sessions', target: 'role', + targetRole: 'user', }, ], }, @@ -518,6 +524,7 @@ export const schema: BormSchema = { relation: 'UserTagGroup', plays: 'color', target: 'role', + targetRole: 'tags', }, { path: 'group', @@ -638,6 +645,7 @@ export const schema: BormSchema = { relation: 'HookATag', plays: 'hookTypeA', target: 'role', + targetRole: 'otherHooks', isVirtual: true, dbValue: { surrealDB: @@ -650,6 +658,7 @@ export const schema: BormSchema = { relation: 'HookATag', plays: 'otherHooks', target: 'role', + targetRole: 'hookTypeA', isVirtual: true, dbValue: { surrealDB: @@ -755,6 +764,7 @@ export const schema: BormSchema = { { path: 'color', target: 'role', + targetRole: 'color', cardinality: 'ONE', plays: 'tags', relation: 'UserTagGroup', diff --git a/tests/multidb/mocks/schema.ts b/tests/multidb/mocks/schema.ts index 3b3078df..f32cda2b 100644 --- a/tests/multidb/mocks/schema.ts +++ b/tests/multidb/mocks/schema.ts @@ -33,6 +33,7 @@ const typeDBSchema: BormSchema = { relation: 'SpaceOwner', plays: 'owner', target: 'role', + targetRole: 'space', }, { path: 'spaces', @@ -40,6 +41,7 @@ const typeDBSchema: BormSchema = { relation: 'SpaceMember', plays: 'member', target: 'role', + targetRole: 'space', }, { path: 'projects', @@ -47,6 +49,7 @@ const typeDBSchema: BormSchema = { relation: 'ProjectExecutor', plays: 'executor', target: 'role', + targetRole: 'project', }, ], }, @@ -67,6 +70,7 @@ const typeDBSchema: BormSchema = { relation: 'SpaceOwner', plays: 'space', target: 'role', + targetRole: 'owner', }, { path: 'members', @@ -74,6 +78,7 @@ const typeDBSchema: BormSchema = { relation: 'SpaceMember', plays: 'space', target: 'role', + targetRole: 'member', }, { path: 'projects', @@ -81,6 +86,7 @@ const typeDBSchema: BormSchema = { relation: 'SpaceProject', plays: 'space', target: 'role', + targetRole: 'projects', }, ], }, @@ -101,6 +107,7 @@ const typeDBSchema: BormSchema = { 
relation: 'SpaceProject', plays: 'project', target: 'role', + targetRole: 'space', }, { path: 'executors', @@ -108,6 +115,7 @@ const typeDBSchema: BormSchema = { relation: 'ProjectExecutor', plays: 'project', target: 'role', + targetRole: 'executor', }, ], }, diff --git a/tests/test.sh b/tests/test.sh index b593fc45..2f77dd7e 100755 --- a/tests/test.sh +++ b/tests/test.sh @@ -56,9 +56,9 @@ DATA_FILE="./tests/adapters/surrealDB/mocks/${LINK}Data.surql" NAMESPACE="test_${LINK}" # Start the container -docker run --detach --rm --pull always -v $(pwd)/tests:/tests -p 8000:8000 --name $CONTAINER_NAME surrealdb/surrealdb:v2.3.7 start --allow-all -u $USER -p $PASSWORD --bind 0.0.0.0:8000 +docker run --detach --rm --pull always -v "$(pwd)/tests":/tests -p 8000:8000 --name $CONTAINER_NAME surrealdb/surrealdb:v2.3.7 start --allow-all -u $USER -p $PASSWORD --bind 0.0.0.0:8000 -until [ "`docker inspect -f {{.State.Running}} $CONTAINER_NAME`"=="true" ]; do +until [ "$(docker inspect -f {{.State.Running}} $CONTAINER_NAME)" == "true" ]; do sleep 0.1; done; diff --git a/tests/unit/bench/bench.ts b/tests/unit/bench/bench.ts index bb60911b..2763f52a 100644 --- a/tests/unit/bench/bench.ts +++ b/tests/unit/bench/bench.ts @@ -1,10 +1,11 @@ import { v4 as uuidv4 } from 'uuid'; import { bench, expect } from 'vitest'; +import { deepRemoveMetaData } from '../../../src/helpers'; import type { WithBormMetadata } from '../../../src/index'; import type { TypeGen } from '../../../src/types/typeGen'; import { createTest } from '../../helpers/createTest'; -import { deepRemoveMetaData, deepSort, expectArraysInObjectToContainSameElements } from '../../helpers/matchers'; +import { deepSort, expectArraysInObjectToContainSameElements } from '../../helpers/matchers'; import type { typesSchema } from '../../mocks/generatedSchema'; import type { UserType } from '../../types/testTypes'; diff --git a/tests/unit/bench/testsBench.ts b/tests/unit/bench/testsBench.ts new file mode 100644 index 
00000000..091be599 --- /dev/null +++ b/tests/unit/bench/testsBench.ts @@ -0,0 +1,717 @@ +import { Bench } from 'tinybench'; +import type { QueryConfig, RawBQLQuery } from '../../../src'; +import { init } from '../../helpers/init'; + +const LEGACY_SURREALDB_ADAPTER = process.env.LEGACY_SURREALDB_ADAPTER?.toLocaleLowerCase() === 'true'; + +type QueryFn = (query: RawBQLQuery | RawBQLQuery[], queryConfig?: QueryConfig) => Promise; + +const tasks: Record Promise> = { + 'v1[validation] - $entity missing': async (query) => { + try { + // @ts-expect-error - $entity is missing + await query({}); + } catch { + // No op + } + }, + 'v2[validation] - $entity not in schema': async (query) => { + try { + await query({ $entity: 'fakeEntity' }); + } catch { + // No op + } + }, + 'v3[validation] - $id not existing': async (query) => { + await query({ $entity: 'User', $id: 'nonExisting' }); + }, + 'e1[entity] - basic and direct link to relation': async (query) => { + await query({ $entity: 'User' }); + }, + 'e1.b[entity] - basic and direct link to relation sub entity': async (query) => { + await query({ $entity: 'God' }); + }, + 'e2[entity] - filter by single $id': async (query) => { + await query({ $entity: 'User', $id: 'user1' }); + }, + 'e3[entity, nested] - direct link to relation, query nested': async (query) => { + await query({ $entity: 'User', $fields: ['id', { $path: 'user-tags' }] }); + }, + 'opt1[options, noMetadata': async (query) => { + await query( + { $entity: 'User', $id: 'user1' }, + { + noMetadata: true, + }, + ); + }, + // 'TODO{TS}:opt2[options, debugger': async (query) => { + // await query({ $entity: 'User', $id: 'user1' }, { + // debugger: true, + // }); + // }, + 'opt3a[options, returnNull] - empty fields option in entity': async (query) => { + await query( + { + $entity: 'User', + $id: 'user4', + $fields: ['spaces', 'email', 'user-tags'], + }, + { returnNulls: true }, + ); + }, + 'opt3b[options, returnNull] - empty fields option in entity, dont return 
explicit': async (query) => { + await query( + { + $entity: 'User', + $id: 'user4', + $fields: ['spaces', 'email'], + }, + { returnNulls: true }, + ); + }, + 'r1[relation] - basic': async (query) => { + const q = { $relation: 'User-Accounts' }; + await query(q); + await query(q, { + noMetadata: true, + }); + }, + 'r2[relation] - filtered fields': async (query) => { + const q = { $relation: 'User-Accounts', $fields: ['user'] }; + await query(q); + await query(q, { + noMetadata: true, + }); + }, + 'r3[relation, nested] - nested entity': async (query) => { + const q = { + $relation: 'User-Accounts', + $fields: ['id', { $path: 'user', $fields: ['name'] }], + }; + await query(q); + await query(q, { + noMetadata: true, + }); + }, + 'r4[relation, nested, direct] - nested relation direct on relation': async (query) => { + const q = { + $relation: 'UserTag', + $fields: [ + 'id', + { $path: 'users', $fields: ['id'] }, + { $path: 'group', $fields: ['id'] }, + { $path: 'color', $fields: ['id'] }, + ], + }; + await query(q); + await query(q, { + noMetadata: true, + }); + }, + 'r5[relation nested] - that has both role, and linkfield pointing to same role': async (query) => { + const q = { + $entity: 'Color', + $fields: ['id', 'user-tags', 'group'], + }; + await query(q); + await query(q, { + noMetadata: true, + }); + }, + 'r6[relation nested] - relation connected to relation and a tunneled relation': async (query) => { + const q = { + $relation: 'UserTag', + }; + await query(q); + await query(q, { + noMetadata: true, + }); + }, + 'r7[relation, nested, direct] - nested on nested': async (query) => { + const q = { + $relation: 'UserTag', + $fields: [ + 'id', + { $path: 'users', $fields: ['id', 'spaces'] }, + { $path: 'group' }, + { $path: 'color', $fields: ['id', 'user-tags', 'group'] }, + ], + }; + await query(q); + await query(q, { + noMetadata: true, + }); + }, + 'r8[relation, nested, deep] - deep nested': async (query) => { + const q = { + $entity: 'Space', + $id: 'space-2', + 
$fields: [ + 'id', + { + $path: 'users', + $id: 'user2', + $fields: [ + 'id', + { $path: 'user-tags', $fields: [{ $path: 'color', $fields: ['id', 'user-tags', 'group'] }, 'id'] }, + ], + }, + ], + }; + await query(q); + await query(q, { + noMetadata: true, + }); + }, + 'r9[relation, nested, ids]': async (query) => { + await query({ + $relation: 'UserTagGroup', + $id: 'utg-1', + $fields: ['tags', 'color'], + }); + }, + 'ef1[entity] - $id single': async (query) => { + await query({ $entity: 'User', $id: 'non-existing-uuid-for-bench' }); + await query({ + $entity: 'User', + $id: 'user1', + $fields: ['id'], + }); + }, + 'ef2[entity] - $id multiple': async (query) => { + await query({ + $entity: 'User', + $id: ['user1', 'user2'], + $fields: ['id'], + }); + }, + 'ef3[entity] - $fields single': async (query) => { + await query({ $entity: 'User', $fields: ['id'] }); + }, + 'ef4[entity] - $fields multiple': async (query) => { + await query({ + $entity: 'User', + $id: 'user1', + $fields: ['name', 'email'], + }); + }, + 'ef5[entity,filter] - $filter single': async (query) => { + await query({ + $entity: 'User', + $filter: { name: 'Antoine' }, + $fields: ['name'], + }); + }, + 'ef6[entity,filter,id] - $filter by id in filter': async (query) => { + await query({ + $entity: 'User', + $filter: { id: 'user1' }, + $fields: ['name'], + }); + }, + 'ef7[entity,unique] - $filter by unique field': async (query) => { + await query({ + $entity: 'User', + $filter: { email: 'antoine@test.com' }, + $fields: ['name', 'email'], + }); + }, + 'n1[nested] Only ids': async (query) => { + await query({ + $entity: 'User', + $id: 'user1', + $fields: ['name', 'accounts'], + }); + }, + 'n2[nested] First level all fields': async (query) => { + const q = { + $entity: 'User', + $id: 'user1', + $fields: ['name', { $path: 'accounts' }], + }; + await query(q); + await query(q, { noMetadata: true }); + }, + 'n3[nested, $fields] First level filtered fields': async (query) => { + await query({ + $entity: 
'User', + $id: 'user1', + $fields: ['name', { $path: 'accounts', $fields: ['provider'] }], + }); + }, + 'n4a[nested, $id] Local filter on nested, by id': async (query) => { + await query({ + $entity: 'User', + $id: ['user1', 'user2', 'user3'], + $fields: [ + 'name', + { + $path: 'accounts', + $id: 'account3-1', + $fields: ['provider'], + }, + ], + }); + }, + 'n4b[nested, $id] Local filter on nested depth two, by id': async (query) => { + await query({ + $entity: 'User', + $id: 'user1', + $fields: [ + { + $path: 'spaces', + $id: 'space-1', + $fields: [{ $path: 'users', $id: 'user1', $fields: ['$id'] }], + }, + ], + }); + }, + 'nf1[nested, $filters] Local filter on nested, single id': async (query) => { + await query({ + $entity: 'User', + $id: 'user1', + $fields: ['name', { $path: 'accounts', $filter: { provider: { $eq: 'github' } } }], + }); + }, + 'nf2[nested, $filters] Local filter on nested, by field, multiple sources, some are empty': async (query) => { + await query({ + $entity: 'User', + $id: ['user1', 'user2', 'user3'], + $fields: [ + 'name', + { + $path: 'accounts', + $filter: { provider: 'google' }, + $fields: ['provider'], + }, + ], + }); + }, + 'nf3[nested, $filters] Local filter on nested, by link field, multiple sources': async (query) => { + await query({ + $entity: 'Space', + $fields: [ + 'name', + { + $path: 'users', + $filter: { 'user-tags': ['tag-1', 'tag-2'] }, + $fields: ['name'], + }, + ], + }); + }, + 'nf4[nested, $filters] Local filter on nested, by link field, multiple sources': async (query) => { + await query({ + $relation: 'UserTag', + $fields: [ + 'name', + { + $path: 'users', + $filter: { spaces: ['space-1', 'space-2'] }, + $fields: ['name'], + }, + ], + }); + }, + 'lf1[$filter] Filter by a link field with cardinality ONE': async (query) => { + await query( + { + $relation: 'User-Accounts', + $filter: { user: 'user1' }, + $fields: ['id'], + }, + { noMetadata: true }, + ); + }, + 'lf2[$filter, $not] Filter out by a link field with 
cardinality ONE': async (query) => { + await query( + { + $relation: 'User-Accounts', + $filter: { + $not: { user: ['user1'] }, + }, + $fields: ['id'], + }, + { noMetadata: true }, + ); + }, + 'lf3[$filter] Filter by a link field with cardinality MANY': async (query) => { + await query( + { + $entity: 'User', + $filter: { spaces: ['space-1'] }, + $fields: ['id'], + }, + { noMetadata: true }, + ); + }, + 'TODO{T}:lf4[$filter, $or] Filter by a link field with cardinality MANY': async (query) => { + await query( + { + $entity: 'User', + // @ts-expect-error - TODO: This is valid syntax but requires refactoring the filters + $filter: [{ spaces: ['space-1'] }, { email: 'ann@test.com' }], + $fields: ['id'], + }, + { noMetadata: true }, + ); + }, + 'slo1[$sort, $limit, $offset] root': async (query) => { + await query( + { + $entity: 'Account', + $sort: [{ field: 'provider', desc: false }, 'id'], + $offset: 1, + $limit: 2, + $fields: ['id', 'provider'], + }, + { noMetadata: true }, + ); + }, + 'slo2[$sort, $limit, $offset] sub level': async (query) => { + await query( + { + $entity: 'User', + $id: 'user1', + $fields: [ + 'id', + { + $path: 'accounts', + $fields: ['id', 'provider'], + $sort: ['provider'], + $offset: 1, + $limit: 1, + }, + ], + }, + { noMetadata: true }, + ); + }, + // 'TODO{S}:slo3[$sort, $limit, $offset] with an empty attribute': async (query) => { + // await query( + // { + // $entity: 'User', + // $fields: ['id', 'email'], + // $sort: ['email'], + // }, + // { noMetadata: true }, + // ); + // }, + 'i1[inherited, attributes] Entity with inherited attributes': async (query) => { + await query({ $entity: 'God', $id: 'god1' }, { noMetadata: true }); + }, + // 'TODO{TS}:i2[inherited, attributes] Entity with inherited attributes should fetch them even when querying from parent class': async (query) => { + // await query({ $entity: 'User', $id: 'god1' }, { noMetadata: true }); + // }, + 's1[self] Relation playing a a role defined by itself': async (query) => { + 
await query({ $relation: 'Self' }, { noMetadata: true }); + }, + 'ex1[extends] Query where an object plays 3 different roles because it extends 2 types': async (query) => { + await query({ $entity: 'Space', $id: 'space-2' }, { noMetadata: true }); + }, + 'ex2[extends] Query of the parent': async (query) => { + await query({ $entity: 'Space', $id: 'space-2', $fields: ['objects'] }, { noMetadata: true }); + }, + // 'TODO{TS}:re1[repeated] Query with repeated path, different nested ids': async (query) => { + // await query( + // { + // $entity: 'Space', + // $id: 'space-2', + // $fields: [ + // { $path: 'users', $id: 'user2', $fields: ['id', 'name'] }, + // { $path: 'users', $id: 'user3', $fields: ['id', { $path: 'accounts', $fields: ['id', 'provider'] }] }, + // ], + // }, + // { noMetadata: true }, + // ); + // }, + // 'TODO{TS}:re2[repeated] Query with repeated path, different nested patterns': async (query) => { + // await query( + // { + // $entity: 'Space', + // $id: 'space-2', + // $fields: ['users', { $path: 'users', $id: 'user3', $fields: ['id', 'name'] }], + // }, + // { noMetadata: true }, + // ); + // }, + 'xf1[excludedFields] Testing excluded fields': async (query) => { + await query( + { + $entity: 'God', + $id: 'god1', + $excludedFields: ['email', 'isEvil'], + }, + { noMetadata: true }, + ); + }, + 'xf2[excludedFields, deep] - deep nested': async (query) => { + const q = { + $entity: 'Space', + $id: 'space-2', + $fields: [ + 'id', + { + $path: 'users', + $id: 'user2', + $fields: [ + 'id', + { $path: 'user-tags', $fields: [{ $path: 'color', $excludedFields: ['id', 'totalUserTags'] }, 'id'] }, + ], + }, + ], + }; + await query(q); + await query(q, { noMetadata: true }); + }, + 'xf3[excludedFields, deep] - Exclude virtual field': async (query) => { + const q = { + $entity: 'User', + $id: 'user2', + $fields: [ + 'id', + { $path: 'user-tags', $fields: [{ $path: 'color', $excludedFields: ['isBlue', 'totalUserTags'] }, 'id'] }, + ], + }; + await query(q, { 
noMetadata: true }); + await query(q, { noMetadata: true }); + }, + 'vi1[virtual, attribute] Virtual DB field': async (query) => { + await query({ $entity: 'Account', $fields: ['id', 'isSecureProvider'] }, { noMetadata: true }); + }, + 'vi2[virtual, edge] Virtual DB edge field': async (query) => { + await query({ $entity: 'Hook' }, { noMetadata: true }); + }, + 'co1[computed] Virtual computed field': async (query) => { + await query({ $entity: 'Color', $id: ['blue', 'yellow'], $fields: ['id', 'isBlue'] }, { noMetadata: true }); + }, + 'co2[computed] Computed virtual field depending on edge id': async (query) => { + await query( + { $entity: 'Color', $id: ['blue', 'yellow'], $fields: ['id', 'user-tags', 'totalUserTags'] }, + { noMetadata: true }, + ); + }, + // 'TODO{TS}:co3[computed], Computed virtual field depending on edge id, missing dependencies': async (query) => { + // await query( + // { $entity: 'Color', $id: ['blue', 'yellow'], $fields: ['id', 'totalUserTags'] }, + // { noMetadata: true }, + // ); + // }, + 'mv1[multiVal, query, ONE], get multiVal': async (query) => { + await query({ $entity: 'Color', $fields: ['id', 'freeForAll'] }, { noMetadata: true }); + }, + 'TODO{T}:mv2[multiVal, query, ONE], filter by multiVal': async (query) => { + await query( + { $entity: 'Color', $filter: { freeForAll: 'hey' }, $fields: ['id', 'freeForAll'] }, + { noMetadata: true }, + ); + }, + 'a1[$as] - as for attributes and roles and links': async (query) => { + await query( + { + $entity: 'User', + $id: 'user1', + $fields: [ + 'id', + { $path: 'email', $as: 'email_as' }, + { + $path: 'user-tags', + $as: 'user-tags_as', + $fields: ['id', { $path: 'users', $as: 'users_as', $fields: ['id', 'name'] }], + }, + ], + }, + { noMetadata: true }, + ); + }, + 'bq1[batched query] - as for attributes and roles and links': async (query) => { + await query( + [ + { + $entity: 'User', + $fields: ['id'], + $id: 'user1', + }, + { + $entity: 'Space', + $fields: ['id'], + $id: 'space-1', + }, 
+ ], + { noMetadata: true }, + ); + }, + 'j1[json] Query a thing with a JSON attribute': async (query) => { + await query({ + $entity: 'Account', + $id: 'account1-1', + $fields: ['profile'], + }); + }, + 'j2[json] Query a thing with an empty JSON attribute': async (query) => { + await query({ + $entity: 'Account', + $id: 'account1-2', + $fields: ['profile'], + }); + }, + // 'TODO{TS}:bq2[batched query with $as] - as for attributes and roles and links': async (query) => { + // await query( + // { + // // @ts-expect-error change RawBQLQuery type + // $queryType: 'batched', + // users: { + // $entity: 'User', + // $fields: ['id'], + // $id: 'user1', + // }, + // spaces: { + // $entity: 'Space', + // $fields: ['id'], + // $id: 'space-1', + // }, + // }, + // { noMetadata: true }, + // ); + // }, + 'dn1[deep nested] ridiculously deep nested query': async (query) => { + await query({ + $entity: 'Color', + $fields: [ + 'id', + { + $path: 'user-tags', + $fields: [ + 'id', + { + $path: 'users', + $fields: [ + 'id', + { + $path: 'spaces', + $fields: ['id', { $path: 'users', $fields: ['id', { $path: 'accounts', $fields: ['id'] }] }], + }, + ], + }, + ], + }, + ], + }); + }, + 'TODO{T}:dn2[deep numbers] Big numbers': async (query) => { + await query( + { + $entity: 'Company', + $filter: { employees: { name: ['Employee 78f', 'Employee 187f', 'Employee 1272f', 'Employee 9997f'] } }, + $fields: ['id'], + }, + { noMetadata: true }, + ); + }, + 'TODO{T}:dn3[deep numbers] Big numbers nested': async (query) => { + await query( + { + $entity: 'Company', + $filter: { employees: { name: ['Employee 78f'] } }, + $fields: ['id', { $path: 'employees' }], + }, + { noMetadata: true }, + ); + }, + 'fk1[filter, keywords, exists], filter by undefined/null property': async (query) => { + await query({ $entity: 'User', $filter: { email: { $exists: false } } }, { noMetadata: true }); + }, + 'fk2[filter, keywords, exists], filter by undefined/null property': async (query) => { + await query({ 
$entity: 'User', $filter: { email: { $exists: true } } }, { noMetadata: true }); + }, + 'TODO{T}:ref1[ref, ONE] Get reference, id only': async (query) => { + await query({ $entity: 'FlexRef', $id: 'fr1', $fields: ['id', 'reference'] }, { noMetadata: true }); + }, + // 'TODO{T}:ref1n[ref, ONE, nested] Get also nested data': async (query) => { + // await query( + // { + // $entity: 'FlexRef', + // $id: 'fr1', + // $fields: ['id', { $path: 'reference' }], + // }, + // { noMetadata: true }, + // ); + // }, + // 'TODO{T}:ref1nf[ref, ONE, nested, someFields] Get also nested data but only some fields': async (query) => { + // await query( + // { + // $entity: 'FlexRef', + // $id: 'fr1', + // $fields: ['id', { $path: 'reference', $fields: ['id', 'accounts', 'email'] }], + // }, + // { noMetadata: true }, + // ); + // }, + 'TODO{T}:ref2[ref, MANY] Get references, id only': async (query) => { + await query({ $entity: 'FlexRef', $id: 'fr2' }, { noMetadata: true }); + }, + 'TODO{T}:ref3[ref, flex, ONE] Get flexReference': async (query) => { + await query({ $entity: 'FlexRef', $id: ['fr3', 'fr4'] }, { noMetadata: true }); + }, + 'TODO{T}:ref4[ref, flex, MANY] Get flexReferences': async (query) => { + await query({ $entity: 'FlexRef', $id: 'fr5' }, { noMetadata: true }); + }, + // 'TODO{T}:ref4nf[ref, flex, MANY, nested] Get flexReferences with nested data': async (query) => { + // await query( + // { $entity: 'FlexRef', $id: 'fr5', $fields: ['id', { $path: 'flexReferences' }] }, + // { noMetadata: true }, + // ); + // }, + // 'TODO{T}:ref4n[ref, flex, MANY, nested, $fields] Get flexReferences with nested data but only some fields': async (query) => { + // await query( + // { + // $entity: 'FlexRef', + // $id: 'fr5', + // $fields: ['id', { $path: 'flexReferences', $fields: ['id', 'name', 'user-tags'] }], + // }, + // { noMetadata: true }, + // ); + // }, +}; + +const main = async () => { + const { client, cleanup } = await init(); + + const query: QueryFn = async (query, config) 
=> { + return client.query(query, { ...config, legacySurrealDBAdapter: LEGACY_SURREALDB_ADAPTER }); + }; + + const result: (Record | null)[] = []; + + for (const [name, task] of Object.entries(tasks)) { + const bench = new Bench({ + concurrency: null, + retainSamples: false, + time: 1, + iterations: 10, + }); + bench.add(name, async () => { + await task(query); + }); + await bench.run(); + const r = bench.table(); + if (r) { + result.push(...r); + } + bench.reset(); + } + + await cleanup(); + + console.table(result); +}; + +main() + .then(() => { + process.exit(0); + }) + .catch((error) => { + console.error(error); + process.exit(1); + }); diff --git a/tests/unit/queries/query.ts b/tests/unit/queries/query.ts index c4580d0b..e9347871 100644 --- a/tests/unit/queries/query.ts +++ b/tests/unit/queries/query.ts @@ -1,10 +1,11 @@ import { v4 as uuidv4 } from 'uuid'; import { expect, it } from 'vitest'; +import { deepRemoveMetaData } from '../../../src/helpers'; import type { WithBormMetadata } from '../../../src/index'; import type { TypeGen } from '../../../src/types/typeGen'; import { createTest } from '../../helpers/createTest'; -import { deepRemoveMetaData, deepSort, expectArraysInObjectToContainSameElements } from '../../helpers/matchers'; +import { deepSort, expectArraysInObjectToContainSameElements } from '../../helpers/matchers'; import type { typesSchema } from '../../mocks/generatedSchema'; import type { UserType } from '../../types/testTypes'; @@ -1479,8 +1480,9 @@ export const testQuery = createTest('Query', (ctx) => { const res = await ctx.query( { $entity: 'User', - //@ts-expect-error - TODO: This is valid syntax but requires refactoring the filters + // @ts-expect-error - TODO: This is valid syntax but requires refactoring the filters $filter: [{ spaces: ['space-1'] }, { email: 'ann@test.com' }], + // $filter: { $or: { spaces: ['space-1'], email: 'ann@test.com' } }, $fields: ['id'], }, { noMetadata: true }, @@ -2827,6 +2829,22 @@ export const testQuery = 
createTest('Query', (ctx) => { }); it('TODO{T}:ref1n[ref, ONE, nested] Get also nested data', async () => { + // SELECT + // "0" AS `$$queryPath`, + // (id && record::id(id)) || null AS `$id`, + // (id && record::tb(id)) || null AS `$thing`, + // record::id(id) AS id, + // ( + // SELECT + // "0.$fields.1" AS `$$queryPath`, + // (id && record::id(id)) || null AS `$id`, + // (id && record::tb(id)) || null AS `$thing`, + // (id && null) || $this AS `$value`, + // * + // FROM $parent.reference + // ) AS reference + // FROM FlexRef + // WHERE id AND record::id(id) IN ['fr1']; const res = await ctx.query( { $entity: 'FlexRef', @@ -2852,6 +2870,25 @@ export const testQuery = createTest('Query', (ctx) => { }); it('TODO{T}:ref1nf[ref, ONE, nested, someFields] Get also nested data but only some fields', async () => { + // SELECT + // "0" AS `$$queryPath`, + // id && record::id(id) || null AS `$id`, + // id && record::tb(id) || null AS `$thing`, + // record::id(id) AS id, + // ( + // SELECT + // "0.$fields.1" AS `$$queryPath`, + // id && record::id(id) || null AS `$id`, + // id && record::tb(id) || null AS `$thing`, + // id && null || $this AS `$value`, + // , + // , + // + // FROM $parent.reference + // ) AS reference + // FROM FlexRef + // WHERE id + // AND (record::id(id) IN ['fr1']); const res = await ctx.query( { $entity: 'FlexRef', @@ -2901,6 +2938,22 @@ export const testQuery = createTest('Query', (ctx) => { }); it('TODO{T}:ref4nf[ref, flex, MANY, nested] Get flexReferences with nested data', async () => { + // SELECT + // "0" AS `$$queryPath`, + // id && record::id(id) || null AS `$id`, + // id && record::tb(id) || null AS `$thing`, + // record::id(id) AS id, + // ( + // SELECT + // "0.$fields.1" AS `$$queryPath`, + // id && record::id(id) || null AS `$id`, + // id && record::tb(id) || null AS `$thing`, + // id && null || $this AS `$value`, + // * + // FROM $parent.flexReferences + // ) AS flexReferences + // FROM FlexRef + // WHERE id AND (record::id(id) IN ['fr5']); 
const res = await ctx.query( { $entity: 'FlexRef', $id: 'fr5', $fields: ['id', { $path: 'flexReferences' }] }, { noMetadata: true }, @@ -2926,6 +2979,25 @@ export const testQuery = createTest('Query', (ctx) => { }); it('TODO{T}:ref4n[ref, flex, MANY, nested, $fields] Get flexReferences with nested data but only some fields', async () => { + // SELECT + // "0" AS `$$queryPath`, + // (id && record::id(id)) || null AS `$id`, + // (id && record::tb(id)) || null AS `$thing`, + // record::id(id) AS id, + // ( + // SELECT + // "0.$fields.1" AS `$$queryPath`, + // (id && record::id(id)) || null AS `$id`, + // (id && record::tb(id)) || null AS `$thing`, + // (id && null) || $this AS `$value`, + // ⟨id⟩, + // ⟨name⟩, + // ⟨user-tags⟩ + // FROM $parent.`flexReferences` + // ) AS `flexReferences` + // FROM FlexRef + // WHERE id + // AND record::id(id) IN ['fr5']; const res = await ctx.query( { $entity: 'FlexRef',