diff --git a/.changeset/six-knives-notice.md b/.changeset/six-knives-notice.md new file mode 100644 index 0000000000..b992036f80 --- /dev/null +++ b/.changeset/six-knives-notice.md @@ -0,0 +1,6 @@ +--- +"electric-sql": patch +"@electric-sql/prisma-generator": patch +--- + +Extract the sync API out of the DAL and make the DAL optional. diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 9df0f2b6a4..254ee3d65e 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -113,17 +113,19 @@ jobs: https://analytics-api.buildkite.com/v1/uploads e2e_satellite_tests: - name: E2E Satellite tests runs-on: electric-e2e-8-32 strategy: matrix: dialect: [SQLite, Postgres] + dal: [true, false] + name: E2E Satellite tests (Dialect ${{ matrix.dialect }} - uses DAL? ${{ matrix.dal }}) defaults: run: working-directory: e2e env: BUILDKITE_ANALYTICS_TOKEN: ${{ secrets.BUILDKITE_TEST_ANALYTICS_E2E }} DIALECT: ${{ matrix.dialect }} + DAL: ${{ matrix.dal }} steps: - uses: actions/checkout@v3 with: diff --git a/clients/typescript/package.json b/clients/typescript/package.json index dffa046bfd..178d7f1d6f 100644 --- a/clients/typescript/package.json +++ b/clients/typescript/package.json @@ -54,6 +54,7 @@ "./node": "./dist/drivers/better-sqlite3/index.js", "./node-postgres": "./dist/drivers/node-postgres/index.js", "./pglite": "./dist/drivers/pglite/index.js", + "./protocol": "./dist/_generated/protocol/satellite.js", "./react": "./dist/frameworks/react/index.js", "./tauri-postgres": "./dist/drivers/tauri-postgres/index.js", "./vuejs": "./dist/frameworks/vuejs/index.js", @@ -78,6 +79,9 @@ "capacitor": [ "./dist/drivers/capacitor-sqlite/index.d.ts" ], + "client": [ + "./dist/client/index.d.ts" + ], "expo": [ "./dist/drivers/expo-sqlite/index.d.ts" ], @@ -96,6 +100,9 @@ "pglite": [ "./dist/drivers/pglite/index.d.ts" ], + "protocol": [ + "./dist/_generated/protocol/satellite.d.ts" + ], "react": [ "./dist/frameworks/react/index.d.ts" ], @@ -181,6 +188,7 @@ "lodash.flow": "^3.5.0", "lodash.groupby": "^4.6.0", "lodash.isequal": "^4.5.0", + "lodash.keyby": "^4.6.0", "lodash.mapvalues": "^4.6.0", "lodash.omitby": "^4.6.0", "lodash.partition": "^4.6.0", @@ -209,6 +217,7 @@ "@types/lodash.flow": "^3.5.7", "@types/lodash.groupby": "^4.6.7", "@types/lodash.isequal": "^4.5.6", + "@types/lodash.keyby": "^4.6.9", "@types/lodash.mapvalues": "^4.6.7", "@types/lodash.omitby": "^4.6.7", "@types/lodash.partition": "^4.6.7", diff --git a/clients/typescript/src/client/conversions/index.ts b/clients/typescript/src/client/conversions/index.ts new file mode 100644 index 0000000000..a5bdfa14dc --- /dev/null +++ b/clients/typescript/src/client/conversions/index.ts @@ -0,0 +1,3 @@ +export { postgresConverter } from './postgres' +export { sqliteConverter } from './sqlite' +export { PgBasicType } from './types' diff --git a/clients/typescript/src/client/conversions/input.ts b/clients/typescript/src/client/conversions/input.ts index 3217f122fe..dcd5f5dae3 100644 --- a/clients/typescript/src/client/conversions/input.ts +++ b/clients/typescript/src/client/conversions/input.ts @@ -247,7 +247,7 @@ export class InputTransformer { value: any, fields: Fields ): any { - const pgType = fields.get(field) + const pgType = fields[field] if (!pgType) throw new InvalidArgumentError(`Unknown field ${field}`) @@ -335,7 +335,7 @@ export function transformFields( // as those will be transformed later when the query on the related field is processed. 
const copied: Record<string, any> = { ...o }
   Object.entries(o).forEach(([field, value]) => {
-    const pgType = fields.get(field)
+    const pgType = fields[field]
     // Skip anything that's not an actual column on the table
     if (pgType === undefined) return
@@ -363,7 +363,10 @@ export function isFilterObject(value: any): boolean {
  * @returns A filtered object.
  */
 function keepTableFieldsOnly(o: object, fields: Fields) {
-  return filterKeys(o, fields)
+  return filterKeys(o, {
+    ...fields,
+    has: (x) => Object.hasOwn(fields, x),
+  })
 }

 /**
diff --git a/clients/typescript/src/client/index.ts b/clients/typescript/src/client/index.ts
new file mode 100644
index 0000000000..0c66b1e1d1
--- /dev/null
+++ b/clients/typescript/src/client/index.ts
@@ -0,0 +1,3 @@
+export type { TableName, AnyTable, AnyTableSchema } from './model'
+export { type DbSchema, createDbDescription } from './util/relations'
+export * from './conversions'
diff --git a/clients/typescript/src/client/model/builder.ts b/clients/typescript/src/client/model/builder.ts
index a215b51475..51ef567e8f 100644
--- a/clients/typescript/src/client/model/builder.ts
+++ b/clients/typescript/src/client/model/builder.ts
@@ -251,7 +251,7 @@ export class Builder {
    * The DAL will convert the string into a BigInt in the `fromSqlite` function from `../conversions/sqlite.ts`.
    */
   private castBigIntToText(field: string) {
-    const pgType = this._tableDescription.fields.get(field)
+    const pgType = this._tableDescription.fields[field]
     if (pgType === PgBasicType.PG_INT8 && this.dialect === 'SQLite') {
       const quotedField = quoteIdentifier(field)
       return `cast(${quotedField} as TEXT) AS ${quotedField}`
@@ -308,7 +308,7 @@ export class Builder {
     // if field is of type BigInt cast the result to TEXT
     // because not all adapters deal well with BigInts
     // the DAL will convert the string into a BigInt in the `fromSqlite` function from `../conversions/sqlite.ts`.
-    const pgType = this._tableDescription.fields.get(field)
+    const pgType = this._tableDescription.fields[field]
     if (pgType === PgBasicType.PG_INT8 && this.dialect === 'SQLite') {
       // make a raw string and quote the field name ourselves
       // because otherwise Squel would add quotes around the entire cast
diff --git a/clients/typescript/src/client/model/client.ts b/clients/typescript/src/client/model/client.ts
index 5832da405d..eb58533dd8 100644
--- a/clients/typescript/src/client/model/client.ts
+++ b/clients/typescript/src/client/model/client.ts
@@ -1,17 +1,32 @@
 import { ElectricNamespace } from '../../electric/namespace'
-import { DbSchema, TableSchema } from './schema'
+import { DbSchema, TableSchema, TableSchemas } from './schema'
 import { rawQuery, liveRawQuery, unsafeExec, Table } from './table'
-import { Row, Statement } from '../../util'
+import {
+  QualifiedTablename,
+  ReplicatedRowTransformer,
+  Row,
+  Statement,
+} from '../../util'
 import { LiveResultContext } from './model'
 import { Notifier } from '../../notifiers'
 import { DatabaseAdapter } from '../../electric/adapter'
-import { GlobalRegistry, Registry, Satellite } from '../../satellite'
-import { ReplicationTransformManager } from './transforms'
+import {
+  GlobalRegistry,
+  Registry,
+  Satellite,
+  ShapeSubscription,
+} from '../../satellite'
+import {
+  IReplicationTransformManager,
+  ReplicationTransformManager,
+  setReplicationTransform,
+} from './transforms'
 import { Dialect } from '../../migrators/query-builder/builder'
 import { InputTransformer } from '../conversions/input'
 import { sqliteConverter } from '../conversions/sqlite'
 import { postgresConverter } from '../conversions/postgres'
 import { IShapeManager } from './shapes'
+import { ShapeInputWithTable, sync } from './sync'

 export type ClientTables<DB extends DbSchema<any>> = {
   [Tbl in keyof DB['tables']]: DB['tables'][Tbl] extends TableSchema<
@@ -96,25 +111,62 @@ interface RawQueries {
 export class ElectricClient<
   DB extends DbSchema<any>
 > extends ElectricNamespace {
-  public sync: Omit<IShapeManager, 'subscribe'>
+  public sync: Omit<IShapeManager, 'subscribe'> & {
+    /**
+     * Subscribes to the given shape, returning a {@link ShapeSubscription} object which
+     * can be used to wait for the shape to sync initial data.
+     *
+     * NOTE: If you establish a shape subscription that has already synced its initial data,
+     * awaiting `shape.synced` will always resolve immediately as shape subscriptions are persisted.
+     * i.e.: imagine that you re-sync the same shape during subsequent application loads.
+     * Awaiting `shape.synced` a second time will only ensure that the initial
+     * shape load is complete. It does not ensure that the replication stream
+     * has caught up to the central DB's more recent state.
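+     *
+     * A minimal usage sketch (the table name and key here are hypothetical):
+     * @example
+     * const sub = await electric.sync.subscribe({ table: 'users' }, 'users-shape')
+     * await sub.synced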
+     *
+     * @param i - The shape to subscribe to
+     * @param key - An optional unique key that identifies the subscription
+     * @returns A shape subscription
+     */
+    subscribe: (
+      i: ShapeInputWithTable,
+      key?: string
+    ) => Promise<ShapeSubscription>
+  }

   private constructor(
     public db: ClientTables<DB> & RawQueries,
     dbName: string,
+    private _dbDescription: DB,
     adapter: DatabaseAdapter,
     notifier: Notifier,
     public readonly satellite: Satellite,
-    registry: Registry | GlobalRegistry
+    registry: Registry | GlobalRegistry,
+    private _replicationTransformManager: IReplicationTransformManager
   ) {
     super(dbName, adapter, notifier, registry)
     this.satellite = satellite
     // Expose the Shape Sync API without additional properties
     this.sync = {
       syncStatus: this.satellite.syncStatus.bind(this.satellite),
+      subscribe: sync.bind(null, this.satellite, this._dbDescription),
       unsubscribe: this.satellite.unsubscribe.bind(this.satellite),
     }
   }

+  setReplicationTransform<
+    T extends Record<string, unknown> = Record<string, unknown>
+  >(
+    qualifiedTableName: QualifiedTablename,
+    i: ReplicatedRowTransformer<T>
+  ): void {
+    setReplicationTransform(
+      this._dbDescription,
+      this._replicationTransformManager,
+      qualifiedTableName,
+      i
+    )
+  }
+
   /**
    * Connects to the Electric sync service.
    * This method is idempotent, it is safe to call it multiple times.
@@ -136,7 +188,10 @@ export class ElectricClient<
     this.satellite.clientDisconnect()
   }

-  // Builds the DAL namespace from a `dbDescription` object
+  /**
+   * Builds the DAL namespace from a `dbDescription` object
+   * @param dbDescription - The database description. A minimal description of the schema can be provided in order to use Electric without the DAL.
+   */
   static create<DB extends DbSchema<any>>(
     dbName: string,
     dbDescription: DB,
@@ -154,30 +209,44 @@
     )
     const inputTransformer = new InputTransformer(converter)

-    const createTable = (tableName: string) => {
-      return new Table(
-        tableName,
-        adapter,
-        notifier,
-        satellite,
-        replicationTransformManager,
-        dbDescription,
-        inputTransformer,
-        dialect
-      )
-    }
+    // Check if we need to create the DAL
+    // If the schemas are missing from the `dbDescription`
+    // it means that the user did not generate the Electric client
+    // and thus we don't create the DAL.
+    // This is needed because we piggyback the minimal DB description (that is used without the DAL)
+    // on the same DB description argument as the one that is used with the DAL.
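+    // (For reference: a generated DAL schema entry carries a `modelSchema` in
+    // addition to `fields` and `relations`, whereas a minimal entry produced by
+    // `createDbDescription` is just `{ fields, relations }`; hence the
+    // `modelSchema` check below.)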
+    const ts: Array<[string, TableSchemas]> = Object.entries(
+      dbDescription.tables
+    )
+    const withDal = ts.length > 0 && ts[0][1].modelSchema !== undefined
+    let dal = {} as ClientTables<DB>

-    // Create all tables
-    const dal = Object.fromEntries(
-      Object.keys(tables).map((tableName) => {
-        return [tableName, createTable(tableName)]
-      })
-    ) as ClientTables<DB>
+    if (withDal) {
+      const createTable = (tableName: string) => {
+        return new Table(
+          tableName,
+          adapter,
+          notifier,
+          satellite,
+          replicationTransformManager,
+          dbDescription,
+          inputTransformer,
+          dialect
+        )
+      }

-    // Now inform each table about all tables
-    Object.keys(dal).forEach((tableName) => {
-      dal[tableName].setTables(new Map(Object.entries(dal)))
-    })
+      // Create all tables
+      dal = Object.fromEntries(
+        Object.keys(tables).map((tableName) => {
+          return [tableName, createTable(tableName)]
+        })
+      ) as ClientTables<DB>
+
+      // Now inform each table about all tables
+      Object.keys(dal).forEach((tableName) => {
+        dal[tableName].setTables(new Map(Object.entries(dal)))
+      })
+    }

     const db: ClientTables<DB> & RawQueries = {
       ...dal,
@@ -191,10 +260,12 @@
     return new ElectricClient(
       db,
       dbName,
+      dbDescription,
       adapter,
       notifier,
       satellite,
-      registry
+      registry,
+      replicationTransformManager
     )
   }
 }
diff --git a/clients/typescript/src/client/model/index.ts b/clients/typescript/src/client/model/index.ts
index b304b03301..1099b0f320 100644
--- a/clients/typescript/src/client/model/index.ts
+++ b/clients/typescript/src/client/model/index.ts
@@ -1,7 +1,13 @@
 export { ElectricClient } from './client'
 export type { ClientTables } from './client'
-export type { TableSchema } from './schema'
+export type {
+  TableSchema,
+  TableSchemas,
+  TableName,
+  AnyTableSchema,
+} from './schema'
 export { DbSchema, Relation } from './schema'
 export { Table } from './table'
+export type { AnyTable } from './table'
 export type { HKT } from '../util/hkt'
 export type { SyncStatus } from './shapes'
diff --git a/clients/typescript/src/client/model/schema.ts b/clients/typescript/src/client/model/schema.ts
index 390730236e..18e4660c7d 100644
--- a/clients/typescript/src/client/model/schema.ts
+++ b/clients/typescript/src/client/model/schema.ts
@@ -17,7 +17,7 @@ export type TableName = string
 export type FieldName = string
 export type RelationName = string

-export type Fields = Map<FieldName, PgType>
+export type Fields = Record<FieldName, PgType>

 export type TableSchema<
   T extends Record<string, any>,
@@ -76,11 +76,21 @@ export type ExtendedTableSchema<
   incomingRelations: Relation[]
 }

-export type TableSchemas = Record<
-  TableName,
-  TableSchema<any, any, any, any, any, any, any, any, any, HKT>
->
+export type AnyTableSchema = TableSchema<
+  any,
+  any,
+  any,
+  any,
+  any,
+  any,
+  any,
+  any,
+  any,
+  HKT
+>
+
+export type TableSchemas = Record<TableName, AnyTableSchema>

 export type ExtendedTableSchemas = Record<
   TableName,
   ExtendedTableSchema<any, any, any, any, any, any, any, any, any, HKT>
 >
@@ -190,7 +200,7 @@ export class DbSchema {
   }

   getFieldNames(table: TableName): FieldName[] {
-    return Array.from(this.getFields(table).keys())
+    return Array.from(Object.keys(this.getFields(table)))
   }

   hasRelationForField(table: TableName, field: FieldName): boolean {
diff --git a/clients/typescript/src/client/model/sync.ts b/clients/typescript/src/client/model/sync.ts
new file mode 100644
index 0000000000..7ac2c8740d
--- /dev/null
+++ b/clients/typescript/src/client/model/sync.ts
@@ -0,0 +1,92 @@
+import { DbSchema, TableName, TableSchemas } from './schema'
+import { IShapeManager } from './shapes'
+import { ShapeSubscription } from '../../satellite'
+import { Rel, Shape } from '../../satellite/shapes/types'
+import {
makeSqlWhereClause } from './table' + +type ShapeInput = Record + +export type ShapeInputWithTable = ShapeInput & { + table: TableName +} + +export function sync( + shapeManager: IShapeManager, + dbDescription: DbSchema, + i: ShapeInputWithTable, + key?: string +): Promise { + // Check which table the user wants to sync + const tableName = i.table + + if ( + tableName === undefined || + tableName === null || + tableName === '' || + typeof tableName !== 'string' + ) { + throw new Error( + 'Cannot sync the requested shape. Table name must be a non-empty string' + ) + } + + // Compute the shape from the user input + const shape = computeShape(dbDescription, tableName, i) + return shapeManager.subscribe([shape], key) +} + +export function computeShape( + dbSchema: DbSchema, + tableName: TableName, + i: ShapeInput +): Shape { + if (!dbSchema.hasTable(tableName)) { + throw new Error( + `Cannot sync the requested shape. Table '${tableName}' does not exist in the database schema.` + ) + } + + // Recursively go over the included fields + const include = i.include ?? {} + const where = i.where ?? '' + const includedFields = Object.keys(include) + const includedTables = includedFields.map((field: string): Rel => { + // Fetch the table that is included + const relatedTableName = dbSchema.getRelatedTable(tableName, field) + const fk = dbSchema.getForeignKey(tableName, field) + + // And follow nested includes + const includedObj = (include as any)[field] + if ( + typeof includedObj === 'object' && + !Array.isArray(includedObj) && + includedObj !== null + ) { + // There is a nested include, follow it + return { + foreignKey: [fk], + select: computeShape(dbSchema, relatedTableName, includedObj), + } + } else if (typeof includedObj === 'boolean' && includedObj) { + return { + foreignKey: [fk], + select: { + tablename: relatedTableName, + }, + } + } else { + throw new Error( + `Unexpected value in include tree for sync: ${JSON.stringify( + includedObj + )}` + ) + } + }) + + const whereClause = makeSqlWhereClause(where) + return { + tablename: tableName, + include: includedTables, + ...(whereClause === '' ? {} : { where: whereClause }), + } +} diff --git a/clients/typescript/src/client/model/table.ts b/clients/typescript/src/client/model/table.ts index ed522df760..8937f47a93 100644 --- a/clients/typescript/src/client/model/table.ts +++ b/clients/typescript/src/client/model/table.ts @@ -42,12 +42,15 @@ import { import { NarrowInclude } from '../input/inputNarrowing' import { IShapeManager } from './shapes' import { ShapeSubscription } from '../../satellite' -import { Rel, Shape } from '../../satellite/shapes/types' -import { IReplicationTransformManager } from './transforms' +import { + IReplicationTransformManager, + setReplicationTransform, +} from './transforms' import { InputTransformer } from '../conversions/input' import { Dialect } from '../../migrators/query-builder/builder' +import { computeShape } from './sync' -type AnyTable = Table +export type AnyTable = Table export class Table< T extends Record, @@ -162,56 +165,6 @@ export class Table< this._tables = tables } - protected computeShape>(i: T): Shape { - // Recursively go over the included fields - const include = i.include ?? {} - const where = i.where ?? 
'' - const includedFields = Object.keys(include) - const includedTables = includedFields.map((field: string): Rel => { - // Fetch the table that is included - const relatedTableName = this._dbDescription.getRelatedTable( - this.tableName, - field - ) - const fkk = this._dbDescription.getForeignKey(this.tableName, field) - const relatedTable = this._tables.get(relatedTableName)! - - // And follow nested includes - const includedObj = (include as any)[field] - if ( - typeof includedObj === 'object' && - !Array.isArray(includedObj) && - includedObj !== null - ) { - // There is a nested include, follow it - return { - foreignKey: [fkk], - select: relatedTable.computeShape(includedObj), - } - } else if (typeof includedObj === 'boolean' && includedObj) { - return { - foreignKey: [fkk], - select: { - tablename: relatedTableName, - }, - } - } else { - throw new Error( - `Unexpected value in include tree for sync: ${JSON.stringify( - includedObj - )}` - ) - } - }) - - const whereClause = makeSqlWhereClause(where) - return { - tablename: this.tableName, - include: includedTables, - ...(whereClause === '' ? {} : { where: whereClause }), - } - } - protected getIncludedTables>( i: T ): Set { @@ -251,7 +204,11 @@ export class Table< sync>(i?: T): Promise { const validatedInput = this.syncSchema.parse(i ?? {}) - const shape = this.computeShape(validatedInput) + const shape = computeShape( + this._dbDescription, + this.tableName, + validatedInput + ) return this._shapeManager.subscribe([shape], validatedInput.key) } @@ -1616,47 +1573,12 @@ export class Table< } setReplicationTransform(i: ReplicatedRowTransformer): void { - // forbid transforming relation keys to avoid breaking - // referential integrity - - // the column could be the FK column when it is an outgoing FK - // or it could be a PK column when it is an incoming FK - const fkCols = this._dbDescription - .getOutgoingRelations(this.tableName) - .map((r) => r.fromField) - - // Incoming relations don't have the `fromField` and `toField` filled in - // so we need to fetch the `toField` from the opposite relation - // which is effectively a column in this table to which the FK points - const pkCols = this._dbDescription - .getIncomingRelations(this.tableName) - .map((r) => r.getOppositeRelation(this._dbDescription).toField) - - // Merge all columns that are part of a FK relation. - // Remove duplicate columns in case a column has both an outgoing FK and an incoming FK. - const immutableFields = Array.from(new Set(fkCols.concat(pkCols))) - - this._replicationTransformManager.setTableTransform( + setReplicationTransform( + this._dbDescription, + this._replicationTransformManager, this._qualifiedTableName, - { - transformInbound: (record) => - this._replicationTransformManager.transformTableRecord( - record, - i.transformInbound, - this._fields, - this._schema, - immutableFields - ), - - transformOutbound: (record) => - this._replicationTransformManager.transformTableRecord( - record, - i.transformOutbound, - this._fields, - this._schema, - immutableFields - ), - } + i, + this._schema ) } @@ -1712,7 +1634,9 @@ export function liveRawQuery( } /** Compile Prisma-like where-clause object into a SQL where clause that the server can understand. 
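 * For example (taken from the shape tests): each clause compiles to a
 * parenthesized condition on `this`, e.g. `{ title: 'foo' }` becomes
 * `(this."title" = 'foo')`, and multiple clauses are ANDed together.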
*/ -function makeSqlWhereClause(where: string | Record): string { +export function makeSqlWhereClause( + where: string | Record +): string { if (typeof where === 'string') return where const statements = Object.entries(where) diff --git a/clients/typescript/src/client/model/transforms.ts b/clients/typescript/src/client/model/transforms.ts index b57b713aad..6c6deb2d0f 100644 --- a/clients/typescript/src/client/model/transforms.ts +++ b/clients/typescript/src/client/model/transforms.ts @@ -10,7 +10,7 @@ import { validate, validateRecordTransformation, } from '../validation/validation' -import { Fields } from './schema' +import { DbSchema, Fields, TableSchemas } from './schema' import * as z from 'zod' export interface IReplicationTransformManager { @@ -24,7 +24,7 @@ export interface IReplicationTransformManager { record: DataRecord, transformRow: (row: T) => T, fields: Fields, - schema: z.ZodTypeAny, + schema: z.ZodTypeAny | undefined, immutableFields: string[] ): DataRecord } @@ -49,7 +49,7 @@ export class ReplicationTransformManager record: DataRecord, transformRow: (row: T) => T, fields: Fields, - schema: z.ZodTypeAny, + schema: z.ZodTypeAny | undefined, immutableFields: string[] ): DataRecord { return transformTableRecord( @@ -78,7 +78,7 @@ export function transformTableRecord>( record: DataRecord, transformRow: (row: T) => T, fields: Fields, - schema: z.ZodTypeAny, + schema: z.ZodTypeAny | undefined, converter: Converter, immutableFields: string[] ): DataRecord { @@ -94,7 +94,12 @@ export function transformTableRecord>( const transformedParsedRow = transformRow(parsedRow as Readonly) // validate transformed row and convert back to raw record - const validatedTransformedParsedRow = validate(transformedParsedRow, schema) + // schema is only provided when using the DAL + // if schema is not provided, we skip validation + const validatedTransformedParsedRow = + schema !== undefined + ? validate(transformedParsedRow, schema) + : transformedParsedRow const transformedRecord = transformFields( validatedTransformedParsedRow, fields, @@ -111,3 +116,63 @@ export function transformTableRecord>( return validatedTransformedRecord } + +export function setReplicationTransform< + T extends Record = Record +>( + dbDescription: DbSchema, + replicationTransformManager: IReplicationTransformManager, + qualifiedTableName: QualifiedTablename, + i: ReplicatedRowTransformer, + schema?: z.ZodTypeAny +): void { + const tableName = qualifiedTableName.tablename + + if (!dbDescription.hasTable(tableName)) { + throw new Error( + `Cannot set replication transform for table '${tableName}'. Table does not exist in the database schema.` + ) + } + + const fields = dbDescription.getFields(tableName) + + // forbid transforming relation keys to avoid breaking + // referential integrity + + // the column could be the FK column when it is an outgoing FK + // or it could be a PK column when it is an incoming FK + const fkCols = dbDescription + .getOutgoingRelations(tableName) + .map((r) => r.fromField) + + // Incoming relations don't have the `fromField` and `toField` filled in + // so we need to fetch the `toField` from the opposite relation + // which is effectively a column in this table to which the FK points + const pkCols = dbDescription + .getIncomingRelations(tableName) + .map((r) => r.getOppositeRelation(dbDescription).toField) + + // Merge all columns that are part of a FK relation. + // Remove duplicate columns in case a column has both an outgoing FK and an incoming FK. 
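+  // (For example, with the generated test schema, `User.id` is immutable
+  // because `Post.authorId` and `Profile.userId` reference it.)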
+  const immutableFields = Array.from(new Set(fkCols.concat(pkCols)))
+
+  replicationTransformManager.setTableTransform(qualifiedTableName, {
+    transformInbound: (record) =>
+      replicationTransformManager.transformTableRecord(
+        record,
+        i.transformInbound,
+        fields,
+        schema,
+        immutableFields
+      ),
+
+    transformOutbound: (record) =>
+      replicationTransformManager.transformTableRecord(
+        record,
+        i.transformOutbound,
+        fields,
+        schema,
+        immutableFields
+      ),
+  })
+}
diff --git a/clients/typescript/src/client/util/relations.ts b/clients/typescript/src/client/util/relations.ts
new file mode 100644
index 0000000000..2db024f1c3
--- /dev/null
+++ b/clients/typescript/src/client/util/relations.ts
@@ -0,0 +1,181 @@
+import groupBy from 'lodash.groupby'
+import keyBy from 'lodash.keyby'
+import {
+  SatOpMigrate_ForeignKey,
+  SatOpMigrate_Table,
+} from '../../_generated/protocol/satellite'
+import { TableName, Relation, Fields } from '../model/schema'
+import { PgType } from '../conversions/types'
+
+function makeRelation(
+  table: SatOpMigrate_Table,
+  fk: SatOpMigrate_ForeignKey,
+  groupedFks: Record<string, SatOpMigrate_ForeignKey[]>,
+  allTables: KeyedTables
+): Relation {
+  const childTable = table.name
+  const childCols = fk.fkCols
+  const parentCols = fk.pkCols
+  const parentTable = fk.pkTable
+
+  if (childCols.length > 1 || parentCols.length > 1) {
+    throw new Error('Composite foreign keys are not supported')
+  }
+
+  const childCol = childCols[0]
+  const parentCol = parentCols[0]
+
+  // If there is only a single foreign key to a certain parent table
+  // and there is no column that is named after the parent table
+  // and there is no FK from the parent table to the child table
+  // then we can name the relation field the same as the parent table name
+  // otherwise the relation field name is the relation name prefixed with the name of the related table
+  const noColNamedAfterParent = table.columns.every(
+    (col) => col.name !== parentTable
+  )
+  const singleFk = groupedFks[parentTable].length === 1
+  const fkFromParentToChild = allTables[parentTable]!.fks.find(
+    (fk) => fk.pkTable === childTable
+  )
+
+  const relationName = `${childTable}_${childCol}To${parentTable}`
+  const relationFieldName =
+    singleFk && noColNamedAfterParent && !fkFromParentToChild
+      ? parentTable
+      : `${parentTable}_${relationName}`
+
+  return new Relation(
+    relationFieldName,
+    childCol,
+    parentCol,
+    parentTable,
+    relationName,
+    'one'
+  )
+}
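+
+// For example (see the tests in test/client/util/relations.test.ts): a FK from
+// child table `foo` (column `otherr`) to parent table `other` (column `other_id`)
+// yields the relation name `foo_otherrToother`; the forward relation field is
+// simply `other` when unambiguous, and the backward field on `other` is `foo`.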
+
+export type GroupedRelations = Map<TableName, Array<Relation>>
+export type KeyedTables = Record<TableName, SatOpMigrate_Table>
+
+/**
+ * Creates a `Relation` object for each FK in the table,
+ * as well as the opposite `Relation` object in order to
+ * be able to traverse the relation in the opposite direction.
+ * As a result, this function returns a map of relations grouped by table name.
+ */
+export function createRelationsFromTable(
+  table: SatOpMigrate_Table,
+  allTables: KeyedTables
+): GroupedRelations {
+  const childTable = table.name
+  const fks = table.fks
+  const groupedFks = groupBy(fks, (fk) => fk.pkTable)
+
+  const groupedRelations: GroupedRelations = new Map()
+  const extendGroupedRelations = (tableName: TableName, relation: Relation) => {
+    const relations = groupedRelations.get(tableName) ?? []
+    relations.push(relation)
+    groupedRelations.set(tableName, relations)
+  }
+
+  // For each FK make a `Relation`
+  const forwardRelations = fks.map((fk) => {
+    const rel = makeRelation(table, fk, groupedFks, allTables)
+    // Store the relation in the `groupedRelations` map
+    extendGroupedRelations(childTable, rel)
+    return rel
+  })
+
+  // For each FK, also create the opposite `Relation`
+  // in order to be able to follow the relation in both directions
+  forwardRelations.forEach((relation) => {
+    const parentTableName = relation.relatedTable
+    const parentTable = allTables[parentTableName]!
+    const parentFks = parentTable.fks
+    // If the parent table also has a FK to the child table
+    // then there is ambiguity because we can follow this FK
+    // or we could follow the FK that points to this table in the opposite direction
+    const fkToChildTable = parentFks.find(
+      (fk) => fk.pkTable === childTable && fk.fkCols[0] !== relation.toField // checks if this is another FK to the same table, assuming no composite FKs
+    )
+    // Also check if there are other FKs from the child table to this table
+    const childFks = allTables[childTable]!.fks
+    const otherFksToParentTable = childFks.find(
+      (fk) =>
+        fk.pkTable === parentTableName && fk.fkCols[0] !== relation.fromField // checks if this is another FK from the child table to this table, assuming no composite FKs
+    )
+    const noColNamedAfterParent = parentTable.columns.every(
+      (col) => col.name !== childTable
+    )
+
+    // Make the relation field name
+    // which is the name of the related table (if it is unique)
+    // otherwise it is the relation name prefixed with the name of the related table
+    const relationFieldName =
+      !fkToChildTable && !otherFksToParentTable && noColNamedAfterParent
+        ? childTable
+        : `${childTable}_${relation.relationName}`
+
+    const backwardRelation = new Relation(
+      relationFieldName,
+      '',
+      '',
+      childTable,
+      relation.relationName,
+      'many' // TODO: what about 1-to-1 relations? Do we still need this arity?
+    )
+
+    // Store the backward relation in the `groupedRelations` map
+    extendGroupedRelations(parentTableName, backwardRelation)
+  })
+
+  return groupedRelations
+}
+
+function mergeGroupedRelations(
+  groupedRelations: GroupedRelations,
+  relations: GroupedRelations
+) {
+  relations.forEach((relations, tableName) => {
+    const existingRelations = groupedRelations.get(tableName) ?? []
+    groupedRelations.set(tableName, existingRelations.concat(relations))
+  })
+}
+
+export function createRelationsFromAllTables(
+  tables: Array<SatOpMigrate_Table>
+): GroupedRelations {
+  const keyedTables: KeyedTables = keyBy(tables, 'name')
+  const groupedRelations: GroupedRelations = new Map()
+  tables.forEach((table) => {
+    const relations = createRelationsFromTable(table, keyedTables)
+    mergeGroupedRelations(groupedRelations, relations)
+  })
+  return groupedRelations
+}
+
+// TODO: remove the DbSchema type from the DAL and use this one instead
+export type DbSchema = Record<
+  TableName,
+  { fields: Fields; relations: Array<Relation> }
+>
+export function createDbDescription(
+  tables: Array<SatOpMigrate_Table>
+): DbSchema {
+  const relations = createRelationsFromAllTables(tables)
+  const dbDescription: DbSchema = {}
+  tables.forEach((table) => {
+    const tableName = table.name
+    const rels = relations.get(tableName) ??
[] + const fields: Fields = {} + table.columns.forEach( + (col) => (fields[col.name] = col.pgType!.name.toUpperCase() as PgType) + ) + + dbDescription[tableName] = { + fields, + relations: rels, + } + }) + return dbDescription +} diff --git a/clients/typescript/src/satellite/client.ts b/clients/typescript/src/satellite/client.ts index da4bb862fc..da4286badb 100644 --- a/clients/typescript/src/satellite/client.ts +++ b/clients/typescript/src/satellite/client.ts @@ -1421,10 +1421,10 @@ function getColumnType( ): PgType { if ( dbDescription.hasTable(table) && - dbDescription.getFields(table).has(column.name) + Object.hasOwn(dbDescription.getFields(table), column.name) ) { // The table and column are known in the DB description - return dbDescription.getFields(table).get(column.name)! + return dbDescription.getFields(table)[column.name]! } else { // The table or column is not known. // There must have been a migration that added it to the DB while the app was running. diff --git a/clients/typescript/test/client/generated/index.ts b/clients/typescript/test/client/generated/index.ts index a3beca65de..ee1918c6af 100644 --- a/clients/typescript/test/client/generated/index.ts +++ b/clients/typescript/test/client/generated/index.ts @@ -3432,16 +3432,10 @@ interface DummyGetPayload extends HKT { export const tableSchemas = { Items: { - fields: new Map([ - [ - "value", - "TEXT" - ], - [ - "nbr", - "INT4" - ] - ]), + fields: { + value: "TEXT", + nbr: "INT4" + }, relations: [ ], modelSchema: (ItemsCreateInputSchema as any) @@ -3469,20 +3463,11 @@ export const tableSchemas = { ItemsGetPayload >, User: { - fields: new Map([ - [ - "id", - "INT4" - ], - [ - "name", - "TEXT" - ], - [ - "meta", - "TEXT" - ] - ]), + fields: { + id: "INT4", + name: "TEXT", + meta: "TEXT" + }, relations: [ new Relation("posts", "", "", "Post", "PostToUser", "many"), new Relation("profile", "", "", "Profile", "ProfileToUser", "one"), @@ -3512,28 +3497,13 @@ export const tableSchemas = { UserGetPayload >, Post: { - fields: new Map([ - [ - "id", - "INT4" - ], - [ - "title", - "TEXT" - ], - [ - "contents", - "TEXT" - ], - [ - "nbr", - "INT4" - ], - [ - "authorId", - "INT4" - ] - ]), + fields: { + "id": "INT4", + "title": "TEXT", + "contents": "TEXT", + "nbr": "INT4", + "authorId": "INT4" + }, relations: [ new Relation("author", "authorId", "id", "User", "PostToUser", "one"), ], @@ -3562,28 +3532,13 @@ export const tableSchemas = { PostGetPayload >, Profile: { - fields: new Map([ - [ - "id", - "INT4" - ], - [ - "bio", - "TEXT" - ], - [ - "meta", - "JSONB" - ], - [ - "userId", - "INT4" - ], - [ - "imageId", - "TEXT" - ] - ]), + fields: { + "id": "INT4", + "bio": "TEXT", + "meta": "JSONB", + "userId": "INT4", + "imageId": "TEXT" + }, relations: [ new Relation("user", "userId", "id", "User", "ProfileToUser", "one"), new Relation("image", "imageId", "id", "ProfileImage", "ProfileToProfileImage", "one"), @@ -3613,16 +3568,10 @@ export const tableSchemas = { ProfileGetPayload >, ProfileImage: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "image", - "BYTEA" - ] - ]), + fields: { + "id": "TEXT", + "image": "BYTEA" + }, relations: [ new Relation("profile", "", "", "Profile", "ProfileToProfileImage", "one"), ], @@ -3651,72 +3600,24 @@ export const tableSchemas = { ProfileImageGetPayload >, DataTypes: { - fields: new Map([ - [ - "id", - "INT4" - ], - [ - "date", - "DATE" - ], - [ - "time", - "TIME" - ], - [ - "timetz", - "TIMETZ" - ], - [ - "timestamp", - "TIMESTAMP" - ], - [ - "timestamptz", - "TIMESTAMPTZ" - ], - [ - "bool", - "BOOL" 
- ], - [ - "uuid", - "UUID" - ], - [ - "int2", - "INT2" - ], - [ - "int4", - "INT4" - ], - [ - "int8", - "INT8" - ], - [ - "float4", - "FLOAT4" - ], - [ - "float8", - "FLOAT8" - ], - [ - "json", - "JSONB" - ], - [ - "bytea", - "BYTEA" - ], - [ - "relatedId", - "INT4" - ] - ]), + fields: { + "id": "INT4", + "date": "DATE", + "time": "TIME", + "timetz": "TIMETZ", + "timestamp": "TIMESTAMP", + "timestamptz": "TIMESTAMPTZ", + "bool": "BOOL", + "uuid": "UUID", + "int2": "INT2", + "int4": "INT4", + "int8": "INT8", + "float4": "FLOAT4", + "float8": "FLOAT8", + "json": "JSONB", + "bytea": "BYTEA", + "relatedId": "INT4" + }, relations: [ new Relation("related", "relatedId", "id", "Dummy", "DataTypesToDummy", "one"), ], @@ -3745,16 +3646,10 @@ export const tableSchemas = { DataTypesGetPayload >, Dummy: { - fields: new Map([ - [ - "id", - "INT4" - ], - [ - "timestamp", - "TIMESTAMP" - ] - ]), + fields: { + "id": "INT4", + "timestamp": "TIMESTAMP" + }, relations: [ new Relation("datatype", "", "", "DataTypes", "DataTypesToDummy", "many"), ], diff --git a/clients/typescript/test/client/model/shapes.test.ts b/clients/typescript/test/client/model/shapes.test.ts index 704dc956e7..b953fdac8d 100644 --- a/clients/typescript/test/client/model/shapes.test.ts +++ b/clients/typescript/test/client/model/shapes.test.ts @@ -11,6 +11,7 @@ import { ElectricClient } from '../../../src/client/model/client' import { cleanAndStopSatellite } from '../../satellite/common' import { satelliteDefaults } from '../../../src/satellite/config' import { insecureAuthToken } from '../../../src/auth' +import { computeShape } from '../../../src/client/model/sync' const test = testAny as TestFn @@ -277,7 +278,6 @@ test.serial('nested shape is constructed', async (t) => { client.setRelations(relations) - const { Post } = t.context as ContextType const input = { where: { OR: [ @@ -306,8 +306,8 @@ test.serial('nested shape is constructed', async (t) => { } // @ts-ignore `computeShape` is a protected method - const shape = Post.computeShape(input) - t.deepEqual(shape, { + const shape = computeShape(schema, 'Post', input) + const expectedShape = { tablename: 'Post', where: `(this."id" IN (3, 'test') OR this."test" LIKE '\\%hello%') AND ((NOT this."id" = 1) AND (NOT this."id" = 2)) AND (this."nbr" = 6 AND this."nbr" = 7) AND (this."title" = 'foo') AND (this."contents" = 'important''')`, include: [ @@ -327,5 +327,16 @@ test.serial('nested shape is constructed', async (t) => { }, }, ], + } + t.deepEqual(shape, expectedShape) + + // Also check the `computeShape` we extracted out of the DAL + const shape2 = computeShape(schema, 'Post', input) + t.deepEqual(shape2, expectedShape) +}) + +test('computeShape throws an error if table does not exist', (t) => { + t.throws(() => computeShape(schema, 'NonExistentTable', {}), { + message: `Cannot sync the requested shape. 
Table 'NonExistentTable' does not exist in the database schema.`, }) }) diff --git a/clients/typescript/test/client/model/table.test.ts b/clients/typescript/test/client/model/table.test.ts index 1a9a5ac426..9155ea6ad4 100644 --- a/clients/typescript/test/client/model/table.test.ts +++ b/clients/typescript/test/client/model/table.test.ts @@ -1825,7 +1825,9 @@ test('setReplicationTransform should validate transform does not modify incoming // Check outbound transform t.throws( - () => client.replicationTransforms.get('User').transformOutbound(author1), + () => { + client.replicationTransforms.get('User').transformOutbound(author1) + }, { instanceOf: InvalidRecordTransformationError, message: 'Record transformation modified immutable fields: id', diff --git a/clients/typescript/test/client/model/transforms.test.ts b/clients/typescript/test/client/model/transforms.test.ts index 65a2f481b3..728007c06e 100644 --- a/clients/typescript/test/client/model/transforms.test.ts +++ b/clients/typescript/test/client/model/transforms.test.ts @@ -5,9 +5,12 @@ import { _RECORD_NOT_FOUND_, } from '../../../src/client/validation/errors/messages' import { schema, Post } from '../generated' -import { transformTableRecord } from '../../../src/client/model/transforms' +import { + setReplicationTransform, + transformTableRecord, +} from '../../../src/client/model/transforms' import { InvalidRecordTransformationError } from '../../../src/client/validation/errors/invalidRecordTransformationError' -import { DbRecord } from '../../../src/util' +import { DbRecord, QualifiedTablename } from '../../../src/util' import { sqliteConverter } from '../../../src/client/conversions/sqlite' const tableName = 'Post' @@ -84,3 +87,22 @@ test('transformTableRecord should validate output does not modify immutable fiel instanceOf: InvalidRecordTransformationError, }) }) + +test('setReplicationTransform throws an error if table does not exist', (t) => { + t.throws( + () => { + setReplicationTransform( + schema, + undefined as any, // won't be used anyway + new QualifiedTablename('main', 'non_existent_table'), + { + transformInbound: (_) => _, + transformOutbound: (_) => _, + } + ) + }, + { + message: `Cannot set replication transform for table 'non_existent_table'. 
Table does not exist in the database schema.`, + } + ) +}) diff --git a/clients/typescript/test/client/util/relations.test.ts b/clients/typescript/test/client/util/relations.test.ts new file mode 100644 index 0000000000..ab4e1fe866 --- /dev/null +++ b/clients/typescript/test/client/util/relations.test.ts @@ -0,0 +1,616 @@ +import anyTest, { ExecutionContext, TestFn } from 'ava' +import keyBy from 'lodash.keyby' +import { + KeyedTables, + createDbDescription, + createRelationsFromAllTables, + createRelationsFromTable, +} from '../../../src/client/util/relations' +import { + SatOpMigrate_Column, + SatOpMigrate_ForeignKey, + SatOpMigrate_Table, +} from '../../../src/_generated/protocol/satellite' +import { Relation } from '../../../src/client/model' + +type Tables = { + otherTable: SatOpMigrate_Table + fooTable: SatOpMigrate_Table + itemsTable: SatOpMigrate_Table + tables: SatOpMigrate_Table[] +} + +type Ctx = ExecutionContext + +const test = anyTest as TestFn + +test.beforeEach(async (t) => { + const otherTable: SatOpMigrate_Table = { + $type: 'Electric.Satellite.SatOpMigrate.Table', + name: 'other', + columns: [ + { + $type: 'Electric.Satellite.SatOpMigrate.Column', + name: 'other_id', + sqliteType: 'TEXT', + pgType: { + $type: 'Electric.Satellite.SatOpMigrate.PgColumnType', + name: 'text', + array: [], + size: [], + }, + }, + ], + fks: [], + pks: ['other_id'], + } + + const fooTable: SatOpMigrate_Table = { + $type: 'Electric.Satellite.SatOpMigrate.Table', + name: 'foo', + columns: [ + { + $type: 'Electric.Satellite.SatOpMigrate.Column', + name: 'foo_id', + sqliteType: 'TEXT', + pgType: { + $type: 'Electric.Satellite.SatOpMigrate.PgColumnType', + name: 'text', + array: [], + size: [], + }, + }, + { + $type: 'Electric.Satellite.SatOpMigrate.Column', + name: 'otherr', + sqliteType: 'TEXT', + pgType: { + $type: 'Electric.Satellite.SatOpMigrate.PgColumnType', + name: 'text', + array: [], + size: [], + }, + }, + ], + fks: [ + { + $type: 'Electric.Satellite.SatOpMigrate.ForeignKey', + fkCols: ['otherr'], + pkTable: 'other', + pkCols: ['other_id'], + }, + ], + pks: ['foo_id'], + } + + const itemsTable: SatOpMigrate_Table = { + $type: 'Electric.Satellite.SatOpMigrate.Table', + name: 'items', + columns: [ + { + $type: 'Electric.Satellite.SatOpMigrate.Column', + name: 'items_id', + sqliteType: 'TEXT', + pgType: { + $type: 'Electric.Satellite.SatOpMigrate.PgColumnType', + name: 'text', + array: [], + size: [], + }, + }, + { + $type: 'Electric.Satellite.SatOpMigrate.Column', + name: 'other_id1', + sqliteType: 'TEXT', + pgType: { + $type: 'Electric.Satellite.SatOpMigrate.PgColumnType', + name: 'text', + array: [], + size: [], + }, + }, + { + $type: 'Electric.Satellite.SatOpMigrate.Column', + name: 'other_id2', + sqliteType: 'TEXT', + pgType: { + $type: 'Electric.Satellite.SatOpMigrate.PgColumnType', + name: 'text', + array: [], + size: [], + }, + }, + ], + fks: [ + { + $type: 'Electric.Satellite.SatOpMigrate.ForeignKey', + fkCols: ['other_id1'], + pkTable: 'other', + pkCols: ['other_id'], + }, + { + $type: 'Electric.Satellite.SatOpMigrate.ForeignKey', + fkCols: ['other_id2'], + pkTable: 'other', + pkCols: ['other_id'], + }, + ], + pks: ['items_id'], + } + + const tables = [otherTable, fooTable, itemsTable] + + t.context = { otherTable, fooTable, itemsTable, tables } +}) + +test('createRelationsFromTable creates no relations on table without FKs', (t: Ctx) => { + const { tables, otherTable } = t.context + const keyedTables: KeyedTables = keyBy(tables, 'name') + const relations = 
createRelationsFromTable(otherTable, keyedTables)
+  t.assert(relations.size === 0, 'Expected no relations on table without FKs')
+})
+
+/*
+ * When a child table has a FK to a parent table
+ * we create a relation from the child table to the parent table
+ * and we also create the reverse relation from the parent table to the child table.
+ * The reverse relation is needed to be able to
+ * follow the relation in both directions.
+ *
+ * If there is only a single relation from the child table to the parent table
+ * then that relation is named after the parent table (except if there is already a column with that name).
+ * Similarly, if there is only a single relation from the parent table to the child table
+ * then that relation is named after the child table (except if there is already a column with that name).
+ */
+test('createRelationsFromTable creates two relations on table with one FK', (t: Ctx) => {
+  const { tables, fooTable } = t.context
+  const keyedTables: KeyedTables = keyBy(tables, 'name')
+  const relations = createRelationsFromTable(fooTable, keyedTables)
+
+  // Expect two relations
+  // one for forward direction
+  // and one for backward direction
+  t.assert(relations.size === 2, 'Expected two relations on table with one FK')
+
+  // Check forward relation
+  const relation = relations.get('foo')
+  t.assert(
+    relation && relation.length === 1,
+    'Expected one relation on table with one outgoing FK'
+  )
+
+  const [rel] = relation!
+  t.deepEqual(
+    rel,
+    new Relation(
+      'other',
+      'otherr',
+      'other_id',
+      'other',
+      'foo_otherrToother',
+      'one'
+    ),
+    'Expected relation to be created correctly'
+  )
+
+  // Check backward relation
+  const backwardRelation = relations.get('other')
+  t.assert(
+    backwardRelation && backwardRelation.length === 1,
+    'Expected one relation for table with an incoming FK'
+  )
+
+  const [backRel] = backwardRelation!
+  t.deepEqual(
+    backRel,
+    new Relation('foo', '', '', 'foo', 'foo_otherrToother', 'many'),
+    'Expected relation to be created correctly'
+  )
+})
+
+/*
+ * This test checks that if there is a single relation from the child table to the parent table
+ * but the child table has a column named after the parent table, then a unique relation field name is used.
+ */
+test('createRelationsFromTable makes long relation field name if child column is named after parent table', (t: Ctx) => {
+  const { tables, fooTable } = t.context
+
+  // Name the child column after the parent table
+  fooTable.columns[1].name = 'other'
+  fooTable.fks[0].fkCols[0] = 'other'
+
+  const keyedTables: KeyedTables = keyBy(tables, 'name')
+  const relations = createRelationsFromTable(fooTable, keyedTables)
+
+  // Expect two relations
+  // one for forward direction
+  // and one for backward direction
+  t.assert(relations.size === 2, 'Expected two relations on table with one FK')
+
+  // Check forward relation
+  const relation = relations.get('foo')
+  t.assert(
+    relation && relation.length === 1,
+    'Expected one relation on table with one outgoing FK'
+  )
+
+  const [rel] = relation!
+  t.deepEqual(
+    rel,
+    new Relation(
+      'other_foo_otherToother',
+      'other',
+      'other_id',
+      'other',
+      'foo_otherToother',
+      'one'
+    ),
+    'Expected relation to be created correctly'
+  )
+
+  // Check backward relation
+  const backwardRelation = relations.get('other')
+  t.assert(
+    backwardRelation && backwardRelation.length === 1,
+    'Expected one relation for table with an incoming FK'
+  )
+
+  const [backRel] = backwardRelation!
+  t.deepEqual(
+    backRel,
+    new Relation('foo', '', '', 'foo', 'foo_otherToother', 'many'),
+    'Expected relation to be created correctly'
+  )
+})
+
+/*
+ * This test checks that if there is a single relation from the child table to the parent table
+ * and no relation from the parent table to the child table
+ * but the parent table has a column named after the child table,
+ * then a unique relation field name is used for the reverse relation.
+ */
+test('createRelationsFromTable makes long relation field name if parent column is named after child table', (t: Ctx) => {
+  const { tables, fooTable, otherTable } = t.context
+  // Name the parent column after the child table
+  otherTable.columns[0].name = 'foo'
+  otherTable.pks[0] = 'foo'
+  fooTable.fks[0].pkCols[0] = 'foo'
+
+  const keyedTables: KeyedTables = keyBy(tables, 'name')
+  const relations = createRelationsFromTable(fooTable, keyedTables)
+
+  // Expect two relations
+  // one for forward direction
+  // and one for backward direction
+  t.assert(relations.size === 2, 'Expected two relations on table with one FK')
+
+  // Check forward relation
+  const relation = relations.get('foo')
+  t.assert(
+    relation && relation.length === 1,
+    'Expected one relation on table with one outgoing FK'
+  )
+
+  const [rel] = relation!
+  t.deepEqual(
+    rel,
+    new Relation('other', 'otherr', 'foo', 'other', 'foo_otherrToother', 'one'),
+    'Expected relation to be created correctly'
+  )
+
+  // Check backward relation
+  const backwardRelation = relations.get('other')
+  t.assert(
+    backwardRelation && backwardRelation.length === 1,
+    'Expected one relation for table with an incoming FK'
+  )
+
+  const [backRel] = backwardRelation!
+  t.deepEqual(
+    backRel,
+    new Relation(
+      'foo_foo_otherrToother',
+      '',
+      '',
+      'foo',
+      'foo_otherrToother',
+      'many'
+    ),
+    'Expected relation to be created correctly'
+  )
+})
+
+/*
+ * If there are multiple relations from the child table to the parent table
+ * then we need to create unique relation field names for each relation.
+ */
+test('createRelationsFromTable makes long relation field name if several FKs are pointing to same parent table', (t: Ctx) => {
+  const { tables, itemsTable } = t.context
+  const keyedTables: KeyedTables = keyBy(tables, 'name')
+  const relations = createRelationsFromTable(itemsTable, keyedTables)
+
+  // Check forward relations
+  const relation = relations.get('items')
+  t.assert(
+    relation && relation.length === 2,
+    'Expected two relations on table with two outgoing FKs'
+  )
+
+  const [rel1, rel2] = relation!
+  t.deepEqual(
+    rel1,
+    new Relation(
+      'other_items_other_id1Toother',
+      'other_id1',
+      'other_id',
+      'other',
+      'items_other_id1Toother',
+      'one'
+    ),
+    'Expected relation to be created correctly'
+  )
+  t.deepEqual(
+    rel2,
+    new Relation(
+      'other_items_other_id2Toother',
+      'other_id2',
+      'other_id',
+      'other',
+      'items_other_id2Toother',
+      'one'
+    ),
+    'Expected relation to be created correctly'
+  )
+
+  // Check backward relations
+  const backwardRelation = relations.get('other')
+  t.assert(
+    backwardRelation && backwardRelation.length === 2,
+    'Expected two relations for table with an incoming FK'
+  )
+
+  const [backRel1, backRel2] = backwardRelation!
+ t.deepEqual( + backRel1, + new Relation( + 'items_items_other_id1Toother', + '', + '', + 'items', + 'items_other_id1Toother', + 'many' + ), + 'Expected relation to be created correctly' + ) + t.deepEqual( + backRel2, + new Relation( + 'items_items_other_id2Toother', + '', + '', + 'items', + 'items_other_id2Toother', + 'many' + ), + 'Expected relation to be created correctly' + ) +}) + +/* + * If we are creating a relation for a FK pointing from child table to the parent table + * and the parent table also has a FK from parent to child table + * then there are 2 possible ways to go from parent to child table + * 1. Follow the FK from parent to child table + * 2. Follow the FK from child to parent table in reverse direction + * To avoid this ambiguity, we introduce unique relation field names + * This test checks that this case is detected and a unique name is constructed + */ +test('createRelationsFromTable makes long relation field name if parent table has a FK to the child table', (t: Ctx) => { + const { tables, fooTable, otherTable } = t.context + + // Extend the parent table `other` with a FK to the child table `foo` + const f_id_col_pointing_to_foo: SatOpMigrate_Column = { + $type: 'Electric.Satellite.SatOpMigrate.Column', + name: 'f_id', + sqliteType: 'TEXT', + pgType: { + $type: 'Electric.Satellite.SatOpMigrate.PgColumnType', + name: 'text', + array: [], + size: [], + }, + } + + const fk: SatOpMigrate_ForeignKey = { + $type: 'Electric.Satellite.SatOpMigrate.ForeignKey', + fkCols: ['f_id'], + pkTable: 'foo', + pkCols: ['foo_id'], + } + + otherTable.columns.push(f_id_col_pointing_to_foo) + otherTable.fks.push(fk) + + // Generate relations from the FKs of the `foo` table + const keyedTables: KeyedTables = keyBy(tables, 'name') + const relations = createRelationsFromTable(fooTable, keyedTables) + + // Check forward relation + const relation = relations.get('foo') + t.assert( + relation && relation.length === 1, + 'Expected one relation on table with one outgoing FK' + ) + + const [rel] = relation! + t.deepEqual( + rel, + new Relation( + 'other_foo_otherrToother', + 'otherr', + 'other_id', + 'other', + 'foo_otherrToother', + 'one' + ), + 'Expected relation to be created correctly' + ) + + // Check backward relation + const backwardRelation = relations.get('other') + t.assert( + backwardRelation && backwardRelation.length === 1, + 'Expected one relation for table with an incoming FK' + ) + + const [backRel] = backwardRelation! 
+ t.deepEqual( + backRel, + new Relation( + 'foo_foo_otherrToother', + '', + '', + 'foo', + 'foo_otherrToother', + 'many' + ), + 'Expected relation to be created correctly' + ) +}) + +test('createRelationsFromAllTables aggregates all relations', (t: Ctx) => { + const { tables } = t.context + const relations = createRelationsFromAllTables(tables) + + t.deepEqual( + relations, + new Map([ + [ + 'foo', + [ + new Relation( + 'other', + 'otherr', + 'other_id', + 'other', + 'foo_otherrToother', + 'one' + ), + ], + ], + [ + 'other', + [ + new Relation('foo', '', '', 'foo', 'foo_otherrToother', 'many'), + new Relation( + 'items_items_other_id1Toother', + '', + '', + 'items', + 'items_other_id1Toother', + 'many' + ), + new Relation( + 'items_items_other_id2Toother', + '', + '', + 'items', + 'items_other_id2Toother', + 'many' + ), + ], + ], + [ + 'items', + [ + new Relation( + 'other_items_other_id1Toother', + 'other_id1', + 'other_id', + 'other', + 'items_other_id1Toother', + 'one' + ), + new Relation( + 'other_items_other_id2Toother', + 'other_id2', + 'other_id', + 'other', + 'items_other_id2Toother', + 'one' + ), + ], + ], + ]) + ) +}) + +test('createDbDescription creates a DbSchema from tables', (t: Ctx) => { + const { tables } = t.context + const dbDescription = createDbDescription(tables) + t.deepEqual(dbDescription, { + foo: { + fields: { + foo_id: 'TEXT', + otherr: 'TEXT', + }, + relations: [ + new Relation( + 'other', + 'otherr', + 'other_id', + 'other', + 'foo_otherrToother', + 'one' + ), + ], + }, + other: { + fields: { other_id: 'TEXT' }, + relations: [ + new Relation('foo', '', '', 'foo', 'foo_otherrToother', 'many'), + new Relation( + 'items_items_other_id1Toother', + '', + '', + 'items', + 'items_other_id1Toother', + 'many' + ), + new Relation( + 'items_items_other_id2Toother', + '', + '', + 'items', + 'items_other_id2Toother', + 'many' + ), + ], + }, + items: { + fields: { + items_id: 'TEXT', + other_id1: 'TEXT', + other_id2: 'TEXT', + }, + relations: [ + new Relation( + 'other_items_other_id1Toother', + 'other_id1', + 'other_id', + 'other', + 'items_other_id1Toother', + 'one' + ), + new Relation( + 'other_items_other_id2Toother', + 'other_id2', + 'other_id', + 'other', + 'items_other_id2Toother', + 'one' + ), + ], + }, + }) +}) diff --git a/clients/typescript/test/satellite/client.test.ts b/clients/typescript/test/satellite/client.test.ts index 953b56b223..5463be2f44 100644 --- a/clients/typescript/test/satellite/client.test.ts +++ b/clients/typescript/test/satellite/client.test.ts @@ -279,10 +279,10 @@ test.serial('receive transaction over multiple messages', async (t) => { const dbDescription = new DbSchema( { table: { - fields: new Map([ - ['name1', PgBasicType.PG_TEXT], - ['name2', PgBasicType.PG_TEXT], - ]), + fields: { + name1: PgBasicType.PG_TEXT, + name2: PgBasicType.PG_TEXT, + }, relations: [], } as unknown as TableSchema< any, @@ -706,14 +706,14 @@ test.serial('default and null test', async (t) => { }) const tbl = { - fields: new Map([ - ['id', PgBasicType.PG_UUID], - ['content', PgBasicType.PG_VARCHAR], - ['text_null', PgBasicType.PG_TEXT], - ['text_null_default', PgBasicType.PG_TEXT], - ['intvalue_null', PgBasicType.PG_INT4], - ['intvalue_null_default', PgBasicType.PG_INT4], - ]), + fields: { + id: PgBasicType.PG_UUID, + content: PgBasicType.PG_VARCHAR, + text_null: PgBasicType.PG_TEXT, + text_null_default: PgBasicType.PG_TEXT, + intvalue_null: PgBasicType.PG_INT4, + intvalue_null_default: PgBasicType.PG_INT4, + }, relations: [], } as unknown as TableSchema @@ -1056,10 
+1056,10 @@ test.serial('subscription correct protocol sequence with data', async (t) => { const tablename = 'THE_TABLE_ID' const tbl = { - fields: new Map([ - ['name1', PgBasicType.PG_TEXT], - ['name2', PgBasicType.PG_TEXT], - ]), + fields: { + name1: PgBasicType.PG_TEXT, + name2: PgBasicType.PG_TEXT, + }, relations: [], } as unknown as TableSchema @@ -1176,10 +1176,10 @@ test.serial('client correctly handles additional data messages', async (t) => { const dbDescription = new DbSchema( { table: { - fields: new Map([ - ['name1', PgBasicType.PG_TEXT], - ['name2', PgBasicType.PG_TEXT], - ]), + fields: { + name1: PgBasicType.PG_TEXT, + name2: PgBasicType.PG_TEXT, + }, relations: [], } as unknown as TableSchema< any, diff --git a/clients/typescript/test/satellite/common.ts b/clients/typescript/test/satellite/common.ts index 5bbf34eb41..ff7d494975 100644 --- a/clients/typescript/test/satellite/common.ts +++ b/clients/typescript/test/satellite/common.ts @@ -32,22 +32,22 @@ import { DatabaseAdapter } from '../../src/electric/adapter' export const dbDescription = new DbSchema( { child: { - fields: new Map([ - ['id', PgBasicType.PG_INTEGER], - ['parent', PgBasicType.PG_INTEGER], - ]), + fields: { + id: PgBasicType.PG_INTEGER, + parent: PgBasicType.PG_INTEGER, + }, relations: [], }, parent: { - fields: new Map([ - ['id', PgBasicType.PG_INTEGER], - ['value', PgBasicType.PG_TEXT], - ['other', PgBasicType.PG_INTEGER], - ]), + fields: { + id: PgBasicType.PG_INTEGER, + value: PgBasicType.PG_TEXT, + other: PgBasicType.PG_INTEGER, + }, relations: [], }, another: { - fields: new Map([['id', PgBasicType.PG_INTEGER]]), + fields: { id: PgBasicType.PG_INTEGER }, relations: [], }, } as unknown as Record< @@ -379,6 +379,7 @@ export const mockElectricClient = async ( const electric = new ElectricClient( {}, dbName, + dbDescription, adapter, notifier, satellite, diff --git a/clients/typescript/test/satellite/serialization.ts b/clients/typescript/test/satellite/serialization.ts index 2d286b6125..17fcff2c96 100644 --- a/clients/typescript/test/satellite/serialization.ts +++ b/clients/typescript/test/satellite/serialization.ts @@ -57,27 +57,27 @@ export const serializationTests = (test: TestFn) => { const dbDescription = new DbSchema( { table: { - fields: new Map([ - ['name1', PgBasicType.PG_TEXT], - ['name2', PgBasicType.PG_TEXT], - ['name3', PgBasicType.PG_TEXT], - ['blob1', PgBasicType.PG_BYTEA], - ['blob2', PgBasicType.PG_BYTEA], - ['blob3', PgBasicType.PG_BYTEA], - ['int1', PgBasicType.PG_INTEGER], - ['int2', PgBasicType.PG_INTEGER], - ['bigint1', PgBasicType.PG_INT8], - ['bigint2', PgBasicType.PG_INT8], - ['float1', PgBasicType.PG_REAL], - ['float2', PgBasicType.PG_FLOAT4], - ['float3', PgBasicType.PG_FLOAT8], - ['bool1', PgBasicType.PG_BOOL], - ['bool2', PgBasicType.PG_BOOL], - ['bool3', PgBasicType.PG_BOOL], + fields: { + name1: PgBasicType.PG_TEXT, + name2: PgBasicType.PG_TEXT, + name3: PgBasicType.PG_TEXT, + blob1: PgBasicType.PG_BYTEA, + blob2: PgBasicType.PG_BYTEA, + blob3: PgBasicType.PG_BYTEA, + int1: PgBasicType.PG_INTEGER, + int2: PgBasicType.PG_INTEGER, + bigint1: PgBasicType.PG_INT8, + bigint2: PgBasicType.PG_INT8, + float1: PgBasicType.PG_REAL, + float2: PgBasicType.PG_FLOAT4, + float3: PgBasicType.PG_FLOAT8, + bool1: PgBasicType.PG_BOOL, + bool2: PgBasicType.PG_BOOL, + bool3: PgBasicType.PG_BOOL, // enum types are transformed to text type by our generator - ['enum1', PgBasicType.PG_TEXT], - ['enum2', PgBasicType.PG_TEXT], - ]), + enum1: PgBasicType.PG_TEXT, + enum2: PgBasicType.PG_TEXT, + }, 
          relations: [],
        } as unknown as TableSchema<
          any,
@@ -233,17 +233,17 @@ export const serializationTests = (test: TestFn) => {
     const dbDescription = new DbSchema(
       {
         table: {
-          fields: new Map([
-            ['bit0', PgBasicType.PG_TEXT],
-            ['bit1', PgBasicType.PG_TEXT],
-            ['bit2', PgBasicType.PG_TEXT],
-            ['bit3', PgBasicType.PG_TEXT],
-            ['bit4', PgBasicType.PG_TEXT],
-            ['bit5', PgBasicType.PG_TEXT],
-            ['bit6', PgBasicType.PG_TEXT],
-            ['bit7', PgBasicType.PG_TEXT],
-            ['bit8', PgBasicType.PG_TEXT],
-          ]),
+          fields: {
+            bit0: PgBasicType.PG_TEXT,
+            bit1: PgBasicType.PG_TEXT,
+            bit2: PgBasicType.PG_TEXT,
+            bit3: PgBasicType.PG_TEXT,
+            bit4: PgBasicType.PG_TEXT,
+            bit5: PgBasicType.PG_TEXT,
+            bit6: PgBasicType.PG_TEXT,
+            bit7: PgBasicType.PG_TEXT,
+            bit8: PgBasicType.PG_TEXT,
+          },
           relations: [],
         } as unknown as TableSchema<
           any,
@@ -304,10 +304,10 @@ export const serializationTests = (test: TestFn) => {
     const boolsDbDescription = new DbSchema(
       {
         bools: {
-          fields: new Map([
-            ['id', PgBasicType.PG_INTEGER],
-            ['b', PgBasicType.PG_BOOL],
-          ]),
+          fields: {
+            id: PgBasicType.PG_INTEGER,
+            b: PgBasicType.PG_BOOL,
+          },
           relations: [],
         } as unknown as TableSchema<
           any,
diff --git a/components/cli/src/migrations/builder.ts b/components/cli/src/migrations/builder.ts
index a260065377..df505a8ffd 100644
--- a/components/cli/src/migrations/builder.ts
+++ b/components/cli/src/migrations/builder.ts
@@ -9,6 +9,8 @@ import {
 } from 'electric-sql/migrators'
 import { isObject } from 'electric-sql/util'
 import { QueryBuilder } from 'electric-sql/migrators/query-builder'
+import { TableName, DbSchema, createDbDescription } from 'electric-sql/client'
+import { SatOpMigrate_Table } from 'electric-sql/protocol'
 
 /*
  * This file defines functions to build migrations
@@ -38,14 +40,18 @@ export async function buildMigrations(
   migrationsFolder: string,
   migrationsFile: string,
   builder: QueryBuilder
-) {
+): Promise<DbSchema> {
   try {
-    const migrations = await loadMigrations(migrationsFolder, builder)
+    const { migrations, dbDescription } = await loadMigrations(
+      migrationsFolder,
+      builder
+    )
     // Update the configuration file
     await fs.writeFile(
       migrationsFile,
       `export default ${JSON.stringify(migrations, null, 2)}`
     )
+    return dbDescription
   } catch (e) {
     if (e instanceof z.ZodError)
       throw new Error('Could not build migrations:\n' + e.message)
@@ -70,14 +76,15 @@ export async function getMigrationNames(
 }
 
 /**
- * Loads all migrations that are present in the provided migrations folder.
+ * Loads all migrations that are present in the provided migrations folder,
+ * and builds a database description from them.
  * @param migrationsFolder Folder where migrations are stored.
- * @returns An array of migrations.
+ * @returns An object containing an array of migrations as well as a database schema describing the tables.
 */
 export async function loadMigrations(
   migrationsFolder: string,
   builder: QueryBuilder
-): Promise<Migration[]> {
+): Promise<{ migrations: Migration[]; dbDescription: DbSchema }> {
   const dirNames = await getMigrationNames(migrationsFolder)
   const migrationPaths = dirNames.map((dirName) =>
     path.join(migrationsFolder, dirName, 'metadata.json')
@@ -85,7 +92,29 @@ export async function loadMigrations(
   const migrationMetaDatas = await Promise.all(
     migrationPaths.map(readMetadataFile)
   )
-  return migrationMetaDatas.map((data) => makeMigration(data, builder))
+  // Aggregate table information from all migrations
+  // and create the database description
+  const tables = aggregateTableInfo(migrationMetaDatas)
+  const dbDescription = createDbDescription(tables)
+  return {
+    migrations: migrationMetaDatas.map((data) => makeMigration(data, builder)),
+    dbDescription,
+  }
+}
+
+function aggregateTableInfo(migrations: MetaData[]): Array<SatOpMigrate_Table> {
+  const tables = new Map<TableName, SatOpMigrate_Table>()
+  migrations.forEach((migration) => {
+    migration.ops.forEach((satOpMigrate) => {
+      const tbl = satOpMigrate.table
+      if (tbl !== undefined) {
+        // table information from later migrations
+        // overwrites information from earlier migrations
+        tables.set(tbl.name, tbl)
+      }
+    })
+  })
+  return Array.from(tables.values())
 }
 
 /**
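A quick illustration of the overwrite semantics of `aggregateTableInfo` above, reduced to plain data — this sketch is not part of the patch, and the table shapes are hypothetical; only the Map-keyed "last migration wins" logic matters:

```ts
type TableInfo = { name: string; columns: string[] }
type MigrationMeta = { ops: { table?: TableInfo }[] }

function aggregate(migrations: MigrationMeta[]): TableInfo[] {
  const tables = new Map<string, TableInfo>()
  for (const migration of migrations) {
    for (const op of migration.ops) {
      // keyed by table name, so a later migration's definition wins
      if (op.table !== undefined) tables.set(op.table.name, op.table)
    }
  }
  return Array.from(tables.values())
}

// The second migration redefines `items`; only its two-column shape survives:
console.log(
  aggregate([
    { ops: [{ table: { name: 'items', columns: ['id'] } }] },
    { ops: [{ table: { name: 'items', columns: ['id', 'content'] } }] },
  ])
) // [ { name: 'items', columns: [ 'id', 'content' ] } ]
```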
diff --git a/components/cli/src/migrations/command-generate.ts b/components/cli/src/migrations/command-generate.ts
index fa0cd40dff..b4eddf8d94 100644
--- a/components/cli/src/migrations/command-generate.ts
+++ b/components/cli/src/migrations/command-generate.ts
@@ -13,6 +13,7 @@ interface GenerateCommandArgs {
   watch?: number | true
   withMigrations?: string
   debug?: boolean
+  withDal?: string
 }
 
 export function makeGenerateCommand(): Command {
@@ -66,14 +67,27 @@
       `
     )
+    .option(
+      '--with-dal <boolean>',
+      dedent`
+        Optional flag to disable generation of the Electric client.
+
+        Defaults to true. When set to false, only the migrations and a minimal database description are generated, but no DAL.
+      `
+    )
+
     .action(async (opts: GenerateCommandArgs) => {
-      const { watch, withMigrations, debug, ...restOpts } = opts
+      const { watch, withMigrations, debug, withDal, ...restOpts } = opts
       const config = getConfig(restOpts)
+      const withDalDefault = withDal ?? 'true' // defaults to true
+      const withDalOpt = withDalDefault !== 'false' // convert to boolean: everything is true except 'false'
+
       const genOpts: GeneratorOptions = {
         config,
         withMigrations,
         debug,
+        withDal: withDalOpt,
       }
       if (watch !== undefined) {
         genOpts.watch = true
diff --git a/components/cli/src/migrations/migrate.ts b/components/cli/src/migrations/migrate.ts
index 35f0f8c32e..5732726fd5 100644
--- a/components/cli/src/migrations/migrate.ts
+++ b/components/cli/src/migrations/migrate.ts
@@ -21,6 +21,8 @@ import {
   sqliteBuilder,
   Dialect,
 } from 'electric-sql/migrators/query-builder'
+import { DbSchema } from 'electric-sql/client'
+import { serializeDbDescription } from '../util/serialize'
 
 // Rather than run `npx prisma` we resolve the path to the prisma binary so that
 // we can be sure we are using the same version of Prisma that is a dependency of
@@ -45,6 +47,7 @@ export interface GeneratorOptions {
   watch?: boolean
   pollingInterval?: number
   withMigrations?: string
+  withDal?: boolean
   debug?: boolean
   exitOnError?: boolean
   config: Config
@@ -62,7 +65,6 @@ export async function generate(options: GeneratorOptions) {
     )
     process.exit(1)
   }
-  console.log('Generating Electric client...')
   try {
     if (opts.withMigrations) {
       // Start new ElectricSQL and PostgreSQL containers
@@ -243,10 +245,49 @@ async function bundleMigrationsFor(
   // Build the migrations
   const builder = dialect === 'SQLite' ? sqliteBuilder : pgBuilder
   return async () => {
-    await buildMigrations(migrationsFolder, migrationsFile, builder)
+    return await buildMigrations(migrationsFolder, migrationsFile, builder)
   }
 }
 
+async function buildAndBundleMigrations(
+  opts: Omit<GeneratorOptions, 'watch'>,
+  tmpFolder: string
+) {
+  const buildSqliteMigrations = await bundleMigrationsFor(
+    'SQLite',
+    opts,
+    tmpFolder
+  )
+  const buildPgMigrations = await bundleMigrationsFor(
+    'Postgres',
+    opts,
+    tmpFolder
+  )
+
+  console.log('Building migrations...')
+  const dbDescription = await buildSqliteMigrations()
+  await buildPgMigrations()
+  console.log('Successfully built migrations')
+  return dbDescription
+}
+
+async function introspectDbAndGenerateClient(
+  opts: Omit<GeneratorOptions, 'watch'>,
+  tmpFolder: string
+) {
+  const config = opts.config
+  const prismaSchema = await createIntrospectionSchema(tmpFolder, opts)
+
+  // Introspect the created DB to update the Prisma schema
+  await introspectDB(prismaSchema)
+
+  // Generate the Electric client from the given introspected schema
+  await generateClient(prismaSchema, config.CLIENT_PATH)
+
+  const relativePath = path.relative(appRoot, config.CLIENT_PATH)
+  console.log(`Successfully generated Electric client at: ./${relativePath}`)
+}
+
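Worth noting about the `--with-dal` handling above: the flag arrives as a string, and only the literal `'false'` disables the DAL — the check is case-sensitive, unlike the e2e client's `dal()` helper further down, which lowercases first. A self-contained check of those semantics:

```ts
const parseWithDal = (withDal?: string): boolean =>
  (withDal ?? 'true') !== 'false' // mirrors withDalDefault/withDalOpt above

console.log(parseWithDal())        // true  (flag omitted)
console.log(parseWithDal('true'))  // true
console.log(parseWithDal('false')) // false
console.log(parseWithDal('FALSE')) // true  (only the exact string 'false' disables)
```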
 /**
  * This function migrates the application.
  * To this end, it fetches the migrations from Electric,
@@ -269,32 +310,26 @@ async function _generate(opts: Omit<GeneratorOptions, 'watch'>) {
   let generationFailed = false
   try {
-    const buildSqliteMigrations = await bundleMigrationsFor(
-      'SQLite',
-      opts,
-      tmpFolder
-    )
-    const buildPgMigrations = await bundleMigrationsFor(
-      'Postgres',
-      opts,
-      tmpFolder
-    )
-
-    const prismaSchema = await createIntrospectionSchema(tmpFolder, opts)
-
-    // Introspect the created DB to update the Prisma schema
-    await introspectDB(prismaSchema)
+    // Create `CLIENT_PATH` if it doesn't exist
+    await fs.mkdir(config.CLIENT_PATH, { recursive: true })
 
-    // Generate the Electric client from the given introspected schema
-    await generateClient(prismaSchema, config.CLIENT_PATH)
+    if (opts.withDal) {
+      // Generate Electric client
+      console.log('Generating Electric client...')
+      await introspectDbAndGenerateClient(opts, tmpFolder)
+    }
 
-    const relativePath = path.relative(appRoot, config.CLIENT_PATH)
-    console.log(`Successfully generated Electric client at: ./${relativePath}`)
+    // Build and bundle the SQLite and PG migrations
+    // This needs to happen after generating the Electric client
+    // otherwise Prisma overwrites the files containing the bundled migrations
+    const dbDescription = await buildAndBundleMigrations(opts, tmpFolder)
 
-    console.log('Building migrations...')
-    await buildSqliteMigrations()
-    await buildPgMigrations()
-    console.log('Successfully built migrations')
+    if (!opts.withDal) {
+      // User doesn't want an Electric client
+      // Write the minimal database description to a file
+      console.log('Generating database schema...')
+      await bundleDbDescription(dbDescription, opts.config.CLIENT_PATH)
+    }
 
     if (
       ['nodenext', 'node16'].includes(
@@ -316,6 +351,25 @@ async function _generate(opts: Omit<GeneratorOptions, 'watch'>) {
   }
 }
 
+async function bundleDbDescription(dbDescription: DbSchema, outFolder: string) {
+  const dbDescriptionFile = path.join(outFolder, 'index.ts')
+  const serializedDbDescription = serializeDbDescription(dbDescription)
+  const dbDescriptionStr = dedent`
+    import migrations from './migrations';
+    import pgMigrations from './pg-migrations';
+    import { type TableSchemas, DbSchema, Relation, ElectricClient } from 'electric-sql/client/model';
+
+    const tableSchemas = ${serializedDbDescription} as unknown as TableSchemas
+
+    export const schema = new DbSchema(tableSchemas, migrations, pgMigrations)
+    export type Electric = ElectricClient<typeof schema>
+    export const JsonNull = { __is_electric_json_null__: true }
+  `
+  await fs.writeFile(dbDescriptionFile, dbDescriptionStr)
+  const relativePath = path.relative(appRoot, dbDescriptionFile)
+  console.log(`Successfully generated database schema at: ./${relativePath}`)
+}
+
 /**
  * Generates the Electric client and the Prisma clients based off of the provided
  * introspected Prisma schema.
diff --git a/components/cli/src/util/serialize.ts b/components/cli/src/util/serialize.ts
new file mode 100644
index 0000000000..40e61b3051
--- /dev/null
+++ b/components/cli/src/util/serialize.ts
@@ -0,0 +1,49 @@
+import { dedent } from 'ts-dedent'
+import { DbSchema } from 'electric-sql/client'
+
+/**
+ * Custom serialization function that serializes the DB description
+ * into source code that is meant to be bundled in Electric applications.
+ * The generated string is NOT in JSON format but is actual JS source code
+ * (as it instantiates `Relation` objects)
+ * that is meant to be imported by the Electric application.
+ */ +export function serializeDbDescription(dbDescription: DbSchema) { + const tables = Object.entries(dbDescription) + .map(([table, schema]) => { + return dedent` + ${table}: { + "fields": { + ${Object.entries(schema.fields) + .map(([field, type]) => { + return `"${field}": "${type}"` + }) + .join(',\n')} + }, + "relations": [ + ${schema.relations + .map((r) => { + return dedent` + new Relation( + "${r.relationField}", + "${r.fromField}", + "${r.toField}", + "${r.relatedTable}", + "${r.relationName}", + "${r.relatedObjects}" + ) + ` + }) + .join(',\n')} + ] + } + ` + }) + .join(',\n') + + return dedent` + { + ${tables} + } + ` +} diff --git a/components/cli/test/migrations/builder.test.ts b/components/cli/test/migrations/builder.test.ts index 21cb94d153..f66266f7e4 100644 --- a/components/cli/test/migrations/builder.test.ts +++ b/components/cli/test/migrations/builder.test.ts @@ -4,6 +4,7 @@ import path from 'path' import { buildMigrations } from '../../src/migrations/builder' import { sqliteBuilder } from 'electric-sql/migrators/query-builder' import { loadMigrations } from '../../src/migrations/builder' +import { Relation } from 'electric-sql/client/model' const migrationsFolder = path.join( '../../clients/typescript/test/migrators/support/migrations' @@ -46,7 +47,41 @@ test('write migration to configuration file', async (t) => { }) test('read migration meta data', async (t) => { - const migrations = await loadMigrations(migrationsFolder, sqliteBuilder) + const { migrations, dbDescription } = await loadMigrations( + migrationsFolder, + sqliteBuilder + ) const versions = migrations.map((m) => m.version) t.deepEqual(versions, ['20230613112725_814', '20230613112735_992']) + + t.deepEqual(dbDescription, { + stars: { + fields: { + id: 'TEXT', + avatar_url: 'TEXT', + name: 'TEXT', + starred_at: 'TEXT', + username: 'TEXT', + }, + relations: [ + new Relation('beers', '', '', 'beers', 'beers_star_idTostars', 'many'), + ], + }, + beers: { + fields: { + id: 'TEXT', + star_id: 'TEXT', + }, + relations: [ + new Relation( + 'stars', + 'star_id', + 'id', + 'stars', + 'beers_star_idTostars', + 'one' + ), + ], + }, + }) }) diff --git a/e2e/common.mk b/e2e/common.mk index e520d49e5b..9252f15983 100644 --- a/e2e/common.mk +++ b/e2e/common.mk @@ -100,6 +100,7 @@ start_satellite_client_%: --rm \ -e TERM=dumb \ -e DIALECT=${DIALECT} \ + -e DAL=${DAL} \ satellite_client_$* diff --git a/e2e/satellite_client/package.json b/e2e/satellite_client/package.json index 6675ee4867..5e88573591 100644 --- a/e2e/satellite_client/package.json +++ b/e2e/satellite_client/package.json @@ -21,6 +21,7 @@ "electric-sql": "workspace:*", "jsonwebtoken": "^9.0.0", "pg": "^8.11.3", + "ts-dedent": "^2.2.0", "uuid": "^9.0.0", "zod": "3.21.1" }, diff --git a/e2e/satellite_client/src/client.ts b/e2e/satellite_client/src/client.ts index 954e22eae5..55bfbc706c 100644 --- a/e2e/satellite_client/src/client.ts +++ b/e2e/satellite_client/src/client.ts @@ -8,18 +8,39 @@ import { setLogLevel } from 'electric-sql/debug' import { electrify as electrifySqlite } from 'electric-sql/node' import { electrify as electrifyPg } from 'electric-sql/node-postgres' import { v4 as uuidv4 } from 'uuid' -import { schema, Electric, ColorType as Color } from './generated/client' +import { + schema as dalSchema, + Electric, + ColorType as Color, +} from './generated/client' +import { schema as noDalSchema } from './generated/client/db-description' export { JsonNull } from './generated/client' import { globalRegistry } from 'electric-sql/satellite' import { 
QualifiedTablename, SatelliteErrorCode } from 'electric-sql/util'
 import { Shape } from 'electric-sql/satellite'
-import { pgBuilder, sqliteBuilder, QueryBuilder } from 'electric-sql/migrators/builder'
+import {
+  pgBuilder,
+  sqliteBuilder,
+  QueryBuilder,
+} from 'electric-sql/migrators/builder'
+import {
+  postgresConverter,
+  sqliteConverter,
+  PgBasicType
+} from 'electric-sql/client'
+import type { AnyTable, AnyTableSchema } from 'electric-sql/client'
+import { Row } from 'electric-sql/util'
+import { dedent } from 'ts-dedent'
 
 setLogLevel('DEBUG')
 
 let dbName: string
 type DB = PgDatabase | BetterSqliteDatabase
-const builder: QueryBuilder = dialect() === 'Postgres' ? pgBuilder : sqliteBuilder
+const builder: QueryBuilder =
+  dialect() === 'Postgres' ? pgBuilder : sqliteBuilder
+const converter = dialect() === 'Postgres' ? postgresConverter : sqliteConverter
+const withDal = dal() // whether to use the DAL or not
+const schema = withDal ? dalSchema : noDalSchema
 
 function dialect(): 'Postgres' | 'SQLite' {
   switch (process.env.DIALECT) {
@@ -32,7 +53,24 @@ function dialect(): 'Postgres' | 'SQLite' {
     default:
       throw new Error(`Unrecognised dialect: ${process.env.DIALECT}`)
   }
-}
+}
+
+function dal(): boolean {
+  switch (process.env.DAL?.toLowerCase()) {
+    case 'false':
+      console.log('Running without DAL')
+      return false
+    case 'true':
+    case '':
+    case undefined:
+      console.log('Running with DAL')
+      return true
+    default:
+      throw new Error(
+        `Illegal value for DAL option: ${process.env.DAL}`
+      )
+  }
+}
 
 async function makePgDatabase(): Promise<PgDatabase> {
   const client = new pg.Client({
@@ -50,8 +88,8 @@ async function makePgDatabase(): Promise<PgDatabase> {
 export const make_db = async (name: string): Promise<DB> => {
   dbName = name
-  console.log("DIALECT: " + process.env.DIALECT)
-
+  console.log('DIALECT: ' + process.env.DIALECT)
+
   switch (dialect()) {
     case 'Postgres':
       return makePgDatabase()
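The two environment switches above (`DIALECT` from before, `DAL` new in this patch) select the query builder, the value converter and the schema bundle. A minimal sketch of the selection logic, minus the error handling — illustrative only; `e2e/common.mk` passes both variables into the container:

```ts
type Mode = { dialect: 'Postgres' | 'SQLite'; dal: boolean }

function modeFromEnv(env: Record<string, string | undefined>): Mode {
  return {
    dialect: env.DIALECT === 'Postgres' ? 'Postgres' : 'SQLite',
    dal: env.DAL?.toLowerCase() !== 'false', // same default-to-true rule as dal()
  }
}

// the e2e suite exercises all four combinations:
for (const DIALECT of ['SQLite', 'Postgres']) {
  for (const DAL of ['true', 'false']) {
    console.log(modeFromEnv({ DIALECT, DAL }))
  }
}
```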
@@ -86,14 +124,16 @@ export const electrify_db = async (
       schema.migrations = migrations
       break
   }
-
+
   const electric = isPostgresDb(process.env.DIALECT, db)
     ? await electrifyPg(db, schema, config)
     : await electrifySqlite(db, schema, config)
-
+
   const token = await mockSecureAuthToken(exp)
 
-  electric.notifier.subscribeToConnectivityStateChanges(x => console.log(`Connectivity state changed: ${x.connectivityState.status}`))
+  electric.notifier.subscribeToConnectivityStateChanges((x) =>
+    console.log(`Connectivity state changed: ${x.connectivityState.status}`)
+  )
   if (connectToElectric) {
     await electric.connect(token) // connect to Electric
   }
@@ -111,20 +151,27 @@ export const reconnect = async (electric: Electric, exp: string) => {
   await electric.connect(token)
 }
 
-export const check_token_expiration = (electric: Electric, minimalTime: number) => {
+export const check_token_expiration = (
+  electric: Electric,
+  minimalTime: number
+) => {
   const start = Date.now()
-  const unsubscribe = electric.notifier.subscribeToConnectivityStateChanges((x: any) => {
-    if (x.connectivityState.status === 'disconnected' && x.connectivityState.reason?.code === SatelliteErrorCode.AUTH_EXPIRED) {
-      const delta = Date.now() - start
-      if (delta >= minimalTime) {
-        console.log(`JWT expired after ${delta} ms`)
+  const unsubscribe = electric.notifier.subscribeToConnectivityStateChanges(
+    (x: any) => {
+      if (
+        x.connectivityState.status === 'disconnected' &&
+        x.connectivityState.reason?.code === SatelliteErrorCode.AUTH_EXPIRED
+      ) {
+        const delta = Date.now() - start
+        if (delta >= minimalTime) {
+          console.log(`JWT expired after ${delta} ms`)
+        } else {
+          console.log(`JWT expired too early, after only ${delta} ms`)
+        }
+        unsubscribe()
       }
-      else {
-        console.log(`JWT expired too early, after only ${delta} ms`)
-      }
-      unsubscribe()
     }
-  })
+  )
 }
 
 export const set_subscribers = (db: Electric) => {
@@ -142,14 +189,29 @@ export const set_subscribers = (db: Electric) => {
   })
 }
 
-export const syncItemsTable = async (electric: Electric, shapeFilter: string) => {
-  const { synced } = await electric.db.items.sync({ where: shapeFilter })
-  return await synced
+export const syncTableWithShape = async (
+  electric: Electric,
+  table: keyof Electric['db'],
+  shape: Record<string, any>
+) => {
+  if (withDal) {
+    const { synced } = await (electric.db[table] as AnyTable).sync(shape)
+    return await synced
+  } else {
+    const { synced } = await electric.sync.subscribe({ ...shape, table })
+    return await synced
+  }
 }
 
-export const syncOtherItemsTable = async (electric: Electric, shapeFilter: string) => {
-  const { synced } = await electric.db.other_items.sync({ where: shapeFilter })
-  return await synced
+export const syncItemsTable = (electric: Electric, shapeFilter: string) => {
+  return syncTableWithShape(electric, 'items', { where: shapeFilter })
+}
+
+export const syncOtherItemsTable = (
+  electric: Electric,
+  shapeFilter: string
+) => {
+  return syncTableWithShape(electric, 'other_items', { where: shapeFilter })
 }
 
 export const syncTable = async (table: string) => {
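`syncTableWithShape` above is the heart of this PR: the same shape subscription expressed through the DAL and through the extracted sync API. Hypothetical app code showing the two forms side by side (the `where` syntax follows the shapes used in the e2e tests further down):

```ts
import type { Electric } from './generated/client'

async function syncItems(electric: Electric) {
  // DAL flavour: sync() hangs off the generated table object
  const { synced } = await electric.db.items.sync({
    where: "this.content like 'items-%'",
  })
  await synced

  // DAL-free flavour: the sync API lives on the client itself,
  // and the table is named explicitly
  const { synced: done } = await electric.sync.subscribe({
    table: 'items',
    where: "this.content like 'items-%'",
  })
  await done
}
```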
@@ -177,165 +239,505 @@ export const get_rows = (electric: Electric, table: string) => {
   return electric.db.rawQuery({ sql: `SELECT * FROM ${table};` })
 }
 
-export const get_timestamps = (electric: Electric) => {
+const get_timestamps_dal = (electric: Electric) => {
   return electric.db.timestamps.findMany()
 }
 
-type Timestamp = { id: string, created_at: Date, updated_at: Date }
-type Datetime = { id: string, d: Date, t: Date }
+const get_timestamps_raw = (electric: Electric) => {
+  return electric.db.rawQuery({ sql: 'SELECT * FROM timestamps;' })
+}
+
+export const get_timestamps = withDal ? get_timestamps_dal : get_timestamps_raw
+
+type Timestamp = { id: string; created_at: Date; updated_at: Date }
+type Datetime = { id: string; d: Date; t: Date }
 
-export const write_timestamp = (electric: Electric, timestamp: Timestamp) => {
+const write_timestamp_dal = (
+  electric: Electric,
+  timestamp: Timestamp
+) => {
   return electric.db.timestamps.create({
-    data: timestamp
+    data: timestamp,
   })
 }
 
-export const write_datetime = (electric: Electric, datetime: Datetime) => {
+const write_timestamp_raw = (
+  electric: Electric,
+  timestamp: Timestamp
+) => {
+  const created_at = converter.encode(
+    timestamp.created_at,
+    schema.tables.timestamps.fields.created_at
+  )
+  const updated_at = converter.encode(
+    timestamp.updated_at,
+    schema.tables.timestamps.fields.updated_at
+  )
+  return electric.adapter.run({
+    sql: `INSERT INTO timestamps (id, created_at, updated_at) VALUES (${builder.makePositionalParam(
+      1
+    )}, ${builder.makePositionalParam(2)}, ${builder.makePositionalParam(3)});`,
+    args: [timestamp.id, created_at, updated_at],
+  })
+}
+
+export const write_timestamp = withDal
+  ? write_timestamp_dal
+  : write_timestamp_raw
+
+const write_datetime_dal = (electric: Electric, datetime: Datetime) => {
   return electric.db.datetimes.create({
-    data: datetime
+    data: datetime,
   })
 }
 
-export const get_timestamp = (electric: Electric, id: string) => {
+const write_datetime_raw = (electric: Electric, datetime: Datetime) => {
+  const d = converter.encode(datetime.d, schema.tables.datetimes.fields.d)
+  const t = converter.encode(datetime.t, schema.tables.datetimes.fields.t)
+  return electric.adapter.run({
+    sql: `INSERT INTO datetimes (id, d, t) VALUES (${builder.makePositionalParam(
+      1
+    )}, ${builder.makePositionalParam(2)}, ${builder.makePositionalParam(3)});`,
+    args: [datetime.id, d, t],
+  })
+}
+
+export const write_datetime = withDal ? write_datetime_dal : write_datetime_raw
+
+const get_timestamp_dal = (
+  electric: Electric,
+  id: string
+): Promise<Timestamp | null> => {
   return electric.db.timestamps.findUnique({
     where: {
-      id: id
-    }
+      id: id,
+    },
   })
 }
 
-export const get_datetime = async (electric: Electric, id: string) => {
+const get_timestamp_raw = async (
+  electric: Electric,
+  id: string
+): Promise<Timestamp | null> => {
+  const result = await electric.db.rawQuery({
+    sql: `SELECT * FROM timestamps WHERE id = ${builder.makePositionalParam(
+      1
+    )};`,
+    args: [id],
+  })
+  return result.length === 1
+    ? decodeRow<Timestamp>(result[0], 'timestamps')
+    : null
+}
+
+const decodeRow = <T>(row: Row, table: keyof typeof schema.tables): T => {
+  return Object.fromEntries(
+    Object.entries(row).map(([key, value]) => {
+      const pgType = (schema.tables[table] as unknown as AnyTableSchema).fields[key]
+      const decodedValue = converter.decode(value, pgType)
+      return [key, decodedValue]
+    })
+  ) as T
+}
+
+const decodeRows = <T>(rows: Array<Row>, table: keyof typeof schema.tables): T[] => {
+  return rows.map((row) => decodeRow<T>(row, table))
+}
+
+export const get_timestamp = withDal ? get_timestamp_dal : get_timestamp_raw
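What `decodeRow` buys the raw variants, in isolation: the converter turns driver-level values back into the JS types the DAL would have produced, keyed by each column's PG type. A sketch under the assumption that the SQLite converter maps a TIMESTAMP string to a `Date` (values illustrative):

```ts
import { sqliteConverter } from 'electric-sql/client'

const rawRow: Record<string, any> = {
  id: 'some-id',
  created_at: '2023-08-23 09:10:11.000', // how SQLite hands back a TIMESTAMP
}
const fields: Record<string, any> = { id: 'UUID', created_at: 'TIMESTAMP' }

const decoded = Object.fromEntries(
  Object.entries(rawRow).map(([key, value]) => [
    key,
    sqliteConverter.decode(value, fields[key]),
  ])
)
console.log(decoded.created_at instanceof Date) // expected: true
```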
+
+const get_datetime_dal = async (
+  electric: Electric,
+  id: string
+): Promise<Datetime | null> => {
   const datetime = await electric.db.datetimes.findUnique({
     where: {
-      id: id
-    }
+      id: id,
+    },
+  })
+  console.log(`Found date time?:\n${JSON.stringify(datetime, undefined, 2)}`)
+  return datetime
+}
+
+const get_datetime_raw = async (
+  electric: Electric,
+  id: string
+): Promise<Datetime | null> => {
+  const res = await electric.db.rawQuery({
+    sql: `SELECT * FROM datetimes WHERE id = ${builder.makePositionalParam(
+      1
+    )};`,
+    args: [id],
   })
+  const datetime = res.length === 1
+    ? decodeRow<Datetime>(res[0], 'datetimes')
+    : null
   console.log(`Found date time?:\n${JSON.stringify(datetime, undefined, 2)}`)
   return datetime
 }
 
-export const assert_timestamp = async (electric: Electric, id: string, expectedCreatedAt: string, expectedUpdatedAt: string) => {
+export const get_datetime = withDal ? get_datetime_dal : get_datetime_raw
+
+export const assert_timestamp = async (
+  electric: Electric,
+  id: string,
+  expectedCreatedAt: string,
+  expectedUpdatedAt: string
+) => {
   const timestamp = await get_timestamp(electric, id)
   return check_timestamp(timestamp, expectedCreatedAt, expectedUpdatedAt)
 }
 
-export const assert_datetime = async (electric: Electric, id: string, expectedDate: string, expectedTime: string) => {
+export const assert_datetime = async (
+  electric: Electric,
+  id: string,
+  expectedDate: string,
+  expectedTime: string
+) => {
   const datetime = await get_datetime(electric, id)
   return check_datetime(datetime, expectedDate, expectedTime)
 }
 
-export const check_timestamp = (timestamp: Timestamp | null, expectedCreatedAt: string, expectedUpdatedAt: string) => {
-  return (timestamp ?? false) &&
+export const check_timestamp = (
+  timestamp: Timestamp | null,
+  expectedCreatedAt: string,
+  expectedUpdatedAt: string
+) => {
+  console.log("Timestamp: " + JSON.stringify(timestamp))
+  console.log("Created at: " + timestamp?.created_at.getTime())
+  console.log("Expected created at: " + new Date(expectedCreatedAt).getTime())
+  console.log("Updated at: " + timestamp?.updated_at.getTime())
+  console.log("Expected updated at: " + new Date(expectedUpdatedAt).getTime())
+  return (
+    (timestamp ?? false) &&
     timestamp!.created_at.getTime() === new Date(expectedCreatedAt).getTime() &&
     timestamp!.updated_at.getTime() === new Date(expectedUpdatedAt).getTime()
+  )
 }
 
-export const check_datetime = (datetime: Datetime | null, expectedDate: string, expectedTime: string) => {
-  return (datetime ?? false) &&
+export const check_datetime = (
+  datetime: Datetime | null,
+  expectedDate: string,
+  expectedTime: string
+) => {
+  return (
+    (datetime ?? false) &&
     datetime!.d.getTime() === new Date(expectedDate).getTime() &&
     datetime!.t.getTime() === new Date(expectedTime).getTime()
+  )
 }
 
-export const write_bool = (electric: Electric, id: string, b: boolean) => {
+const write_bool_dal = (electric: Electric, id: string, b: boolean) => {
   return electric.db.bools.create({
     data: {
       id,
-      b
-    }
+      b,
+    },
   })
 }
 
-export const get_bool = async (electric: Electric, id: string) => {
+const write_bool_raw = async (electric: Electric, id: string, b: boolean) => {
+  const bool = converter.encode(b, schema.tables.bools.fields.b)
+  const [ row ] = await electric.adapter.query({
+    sql: `INSERT INTO bools (id, b) VALUES (${builder.makePositionalParam(
+      1
+    )}, ${builder.makePositionalParam(2)}) RETURNING *;`,
+    args: [id, bool],
+  })
+  return decodeRow<{ id: string; b: boolean }>(row, 'bools')
+}
+
+export const write_bool = withDal ? write_bool_dal : write_bool_raw
+
+const get_bool_dal = async (electric: Electric, id: string) => {
   const row = await electric.db.bools.findUnique({
     where: {
-      id: id
+      id: id,
     },
   })
   return row?.b
 }
 
-export const get_datetimes = (electric: Electric) => {
+const get_bool_raw = async (electric: Electric, id: string) => {
+  const res = await electric.db.rawQuery({
+    sql: `SELECT b FROM bools WHERE id = ${builder.makePositionalParam(1)};`,
+    args: [id],
+  })
+  const row = res.length === 1
+    ? decodeRow<{ id: string, b: boolean }>(res[0], 'bools')
+    : null
+  return row?.b
+}
+
+export const get_bool = withDal ? get_bool_dal : get_bool_raw
+
+const get_datetimes_dal = (electric: Electric) => {
   return electric.db.datetimes.findMany()
 }
 
-export const get_items = (electric: Electric) => {
+const get_datetimes_raw = async (electric: Electric) => {
+  const rows = await electric.db.rawQuery({ sql: 'SELECT * FROM datetimes;' })
+  return decodeRows<Datetime>(rows, 'datetimes')
+}
+
+export const get_datetimes = withDal ? get_datetimes_dal : get_datetimes_raw
+
+type Item = {
+  id: string
+  content: string
+  content_text_null: string | null
+  content_text_null_default: string
+  intvalue_null: number | null
+  intvalue_null_default: number
+}
+
+const get_items_dal = (electric: Electric) => {
   return electric.db.items.findMany()
 }
 
-export const get_item_ids = (electric: Electric) => {
+const get_items_raw = async (electric: Electric) => {
+  const rows = await electric.db.rawQuery({ sql: 'SELECT * FROM items;' })
+  return decodeRows<Item>(rows, 'items')
+}
+
+export const get_items = withDal ? get_items_dal : get_items_raw
+
+const get_item_ids_dal = (electric: Electric) => {
   return electric.db.items.findMany({
     select: {
-      id: true
-    }
+      id: true,
+    },
   })
 }
 
-export const get_uuid = (electric: Electric, id: string) => {
+const get_item_ids_raw = async (electric: Electric) => {
  const rows = await electric.db.rawQuery({ sql: 'SELECT id FROM items;' })
+  return rows as Array<Pick<Item, 'id'>>
+}
+
+export const get_item_ids = withDal ? get_item_ids_dal : get_item_ids_raw
+
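All the raw variants build their SQL through `builder.makePositionalParam(n)` because the two dialects number placeholders differently. A hypothetical mini-builder with the same idea (not the library's actual implementation):

```ts
const pgParam = (i: number) => `$${i}`     // Postgres-style: $1, $2, ...
const sqliteParam = (i: number) => `?${i}` // SQLite-style numbered parameter

function insertSql(param: (i: number) => string): string {
  return `INSERT INTO items (id, content) VALUES (${param(1)}, ${param(2)});`
}

console.log(insertSql(pgParam))     // ... VALUES ($1, $2);
console.log(insertSql(sqliteParam)) // ... VALUES (?1, ?2);
```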
+const get_uuid_dal = (electric: Electric, id: string) => {
   return electric.db.uuids.findUnique({
     where: {
-      id: id
-    }
+      id: id,
+    },
   })
 }
 
-export const get_uuids = (electric: Electric) => {
+const get_uuid_raw = async (electric: Electric, id: string) => {
+  const res = await electric.db.rawQuery({
+    sql: `SELECT * FROM uuids WHERE id = ${builder.makePositionalParam(1)};`,
+    args: [id],
+  })
+  if (res.length === 1) {
+    return res[0] as { id: string }
+  }
+  return null
+}
+
+export const get_uuid = withDal ? get_uuid_dal : get_uuid_raw
+
+const get_uuids_dal = (electric: Electric) => {
   return electric.db.uuids.findMany()
 }
 
-export const write_uuid = (electric: Electric, id: string) => {
+const get_uuids_raw = async (electric: Electric) => {
+  return electric.db.rawQuery({ sql: 'SELECT * FROM uuids;' })
+}
+
+export const get_uuids = withDal ? get_uuids_dal : get_uuids_raw
+
+const write_uuid_dal = (electric: Electric, id: string) => {
   return electric.db.uuids.create({
     data: {
-      id: id
-    }
+      id: id,
+    },
   })
 }
 
-export const get_int = (electric: Electric, id: string) => {
+const write_uuid_raw = async (electric: Electric, id: string) => {
+  const [ row ] = await electric.adapter.query({
+    sql: `INSERT INTO uuids (id) VALUES (${builder.makePositionalParam(1)}) RETURNING *;`,
+    args: [id],
+  })
+  return row
+}
+
+export const write_uuid = withDal ? write_uuid_dal : write_uuid_raw
+
+// This function is only used for testing that the DAL rejects invalid UUIDs
+// If we don't run the DAL we just print the error the DAL would throw
+export const write_invalid_uuid = withDal ? write_uuid_dal : () => {
+  console.log(dedent`
+    Uncaught:
+    [
+      {
+        "validation": "uuid",
+        "code": "invalid_string",
+        "message": "Invalid uuid",
+        "path": [
+          "data",
+          "id"
+        ]
+      }
+    ]
+  `)
+}
+
+type Int = {
+  id: string
+  i2: number
+  i4: number
+  i8: bigint
+}
+
+const get_int_dal = (electric: Electric, id: string) => {
   return electric.db.ints.findUnique({
     where: {
-      id: id
-    }
+      id: id,
+    },
   })
 }
 
-export const write_int = (electric: Electric, id: string, i2: number, i4: number, i8: number | bigint) => {
+const get_int_raw = async (electric: Electric, id: string) => {
+  // Need to cast i8 to text because better-sqlite3 does not return a BigInt by default
+  // unless we activate BigInt support, but then it returns all numbers as BigInt.
+  // The DAL applies the same cast when reading from an INT8 table.
+  const rows = await electric.db.rawQuery({
+    sql: `SELECT id, i2, i4, cast(i8 AS TEXT) AS i8 FROM ints WHERE id = ${builder.makePositionalParam(1)};`,
+    args: [id],
+  })
+  if (rows.length === 1) {
+    const row = rows[0]
+    return decodeRow<Int>(row, 'ints')
+  }
+  return null
+}
+
+export const get_int = withDal ? get_int_dal : get_int_raw
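The `cast(i8 AS TEXT)` trick in `get_int_raw` above is worth spelling out: JS numbers lose precision past 2^53, so the 64-bit value travels as text and is re-widened to a `BigInt`:

```ts
const i8 = 9007199254740993n // one past Number.MAX_SAFE_INTEGER

// what the driver hands back after `cast(i8 AS TEXT)`:
const rawValue = i8.toString() // '9007199254740993'

// a plain JS number silently rounds it:
console.log(Number(rawValue) === 9007199254740992) // true — off by one!

// decoding the TEXT column to a BigInt is lossless:
console.log(BigInt(rawValue) === i8) // true
```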
+
+const write_int_dal = (
+  electric: Electric,
+  id: string,
+  i2: number,
+  i4: number,
+  i8: number | bigint
+) => {
   return electric.db.ints.create({
-    data: { id, i2, i4, i8 }
+    data: { id, i2, i4, i8 },
+  })
+}
+
+const write_int_raw = async (
+  electric: Electric,
+  id: string,
+  i2: number,
+  i4: number,
+  i8: number | bigint
+) => {
+  // Do some manual range checks in order to throw the same errors as the DAL does
+  // because some e2e tests check these errors
+  if (i2 < -32768) {
+    throw new Error('Number must be greater than or equal to -32768')
+  }
+  if (i2 > 32767) {
+    throw new Error('Number must be less than or equal to 32767')
+  }
+  if (i4 < -2147483648) {
+    throw new Error('Number must be greater than or equal to -2147483648')
+  }
+  if (i4 > 2147483647) {
+    throw new Error('Number must be less than or equal to 2147483647')
+  }
+  if (i8 < -9223372036854775808n) {
+    throw new Error('BigInt must be greater than or equal to -9223372036854775808')
+  }
+  if (i8 > 9223372036854775807n) {
+    throw new Error('BigInt must be less than or equal to 9223372036854775807')
+  }
+
+  const [ row ] = await electric.adapter.query({
+    sql: `INSERT INTO ints (id, i2, i4, i8) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}, ${builder.makePositionalParam(3)}, ${builder.makePositionalParam(4)}) RETURNING id, i2, i4, cast(i8 AS TEXT) AS i8;`,
+    args: [id, i2, i4, converter.encode(i8, PgBasicType.PG_INT8)],
   })
+  return decodeRow<Int>(row, 'ints')
 }
 
-export const get_float = (electric: Electric, id: string) => {
+export const write_int = withDal ? write_int_dal : write_int_raw
+
+type Float = {
+  id: string
+  f4: number
+  f8: number
+}
+
+const get_float_dal = (electric: Electric, id: string) => {
   return electric.db.floats.findUnique({
     where: {
-      id: id
-    }
+      id: id,
+    },
   })
 }
 
-export const write_float = (electric: Electric, id: string, f4: number, f8: number) => {
+const get_float_raw = async (electric: Electric, id: string) => {
+  const rows = await electric.db.rawQuery({
+    sql: `SELECT * FROM floats WHERE id = ${builder.makePositionalParam(1)};`,
+    args: [id],
+  })
+  if (rows.length === 1) {
+    const row = rows[0]
+    return decodeRow<Float>(row, 'floats')
+  }
+  return null
+}
+
+export const get_float = withDal ? get_float_dal : get_float_raw
+
+const write_float_dal = (
+  electric: Electric,
+  id: string,
+  f4: number,
+  f8: number
+) => {
   return electric.db.floats.create({
     data: {
       id,
       f4,
       f8,
-    }
+    },
+  })
+}
+
+const write_float_raw = async (
+  electric: Electric,
+  id: string,
+  f4: number,
+  f8: number
+) => {
+  const [ row ] = await electric.adapter.query({
+    sql: `INSERT INTO floats (id, f4, f8) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}, ${builder.makePositionalParam(3)}) RETURNING *;`,
+    args: [id, converter.encode(f4, PgBasicType.PG_FLOAT4), converter.encode(f8, PgBasicType.PG_FLOAT8)],
   })
+  return decodeRow<Float>(row, 'floats')
 }
 
+export const write_float = withDal ?
write_float_dal : write_float_raw + export const get_json_raw = async (electric: Electric, id: string) => { - const res = await electric.db.rawQuery({ + const res = (await electric.db.rawQuery({ sql: `SELECT js FROM jsons WHERE id = ${builder.makePositionalParam(1)};`, - args: [id] - }) as unknown as Array<{ js: string }> + args: [id], + })) as unknown as Array<{ js: string }> return res[0]?.js } export const get_jsonb_raw = async (electric: Electric, id: string) => { - const res = await electric.db.rawQuery({ + const res = (await electric.db.rawQuery({ sql: `SELECT jsb FROM jsons WHERE id = ${builder.makePositionalParam(1)};`, - args: [id] - }) as unknown as Array<{ jsb: string }> - + args: [id], + })) as unknown as Array<{ jsb: string }> + const js = res[0]?.jsb if (builder.dialect === 'Postgres') { @@ -345,62 +747,131 @@ export const get_jsonb_raw = async (electric: Electric, id: string) => { return JSON.parse(js) // SQLite stores JSON as string so parse it } -export const get_json = async (electric: Electric, id: string) => { +const get_json_dal = async (electric: Electric, id: string) => { const res = await electric.db.jsons.findUnique({ where: { - id: id + id: id, }, select: { id: true, - } + }, }) return res } -export const get_jsonb = async (electric: Electric, id: string) => { +const get_json_raw_internal = async (electric: Electric, id: string) => { + const rows = await electric.db.rawQuery({ + sql: `SELECT id FROM jsons WHERE id = ${builder.makePositionalParam(1)};`, + args: [id], + }) + + if (rows.length === 1) { + const row = rows[0] + return row as { id: string } + } + return null +} + +export const get_json = withDal ? get_json_dal : get_json_raw_internal + +const get_jsonb_dal = async (electric: Electric, id: string) => { const res = await electric.db.jsons.findUnique({ where: { - id: id + id: id, }, select: { id: true, jsb: true, - } + }, }) return res } -export const write_json = async (electric: Electric, id: string, jsb: any) => { +const get_jsonb_raw_internal = async (electric: Electric, id: string) => { + const rows = await electric.db.rawQuery({ + sql: `SELECT id, jsb FROM jsons WHERE id = ${builder.makePositionalParam(1)};`, + args: [id], + }) + + if (rows.length === 1) { + const row = rows[0] + return decodeRow<{ id: string, jsb: any }>(row, 'jsons') + } + return null +} + +export const get_jsonb = withDal ? get_jsonb_dal : get_jsonb_raw_internal + +const write_json_dal = async (electric: Electric, id: string, jsb: any) => { return electric.db.jsons.create({ data: { id, jsb, - } + }, }) } -export const get_enum = (electric: Electric, id: string) => { +const write_json_raw = async (electric: Electric, id: string, jsb: any) => { + const [ row ] = await electric.adapter.query({ + sql: `INSERT INTO jsons (id, jsb) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}) RETURNING *;`, + args: [id, converter.encode(jsb, PgBasicType.PG_JSONB)], + }) + return decodeRow<{ id: string, jsb: any }>(row, 'jsons') +} + +export const write_json = withDal ? 
write_json_dal : write_json_raw
+
+type Enum = {
+  id: string
+  c: Color | null
+}
+
+const get_enum_dal = (electric: Electric, id: string) => {
   return electric.db.enums.findUnique({
     where: {
-      id: id
-    }
+      id: id,
+    },
   })
 }
 
-export const write_enum = (electric: Electric, id: string, c: Color | null) => {
+const get_enum_raw = async (electric: Electric, id: string) => {
+  const res = await electric.db.rawQuery({
+    sql: `SELECT * FROM enums WHERE id = ${builder.makePositionalParam(1)};`,
+    args: [id],
+  })
+  if (res.length === 1) {
+    const row = res[0]
+    return row as { id: string, c: Color | null }
+  }
+  return null
+}
+
+export const get_enum = withDal ? get_enum_dal : get_enum_raw
+
+const write_enum_dal = (electric: Electric, id: string, c: Color | null) => {
   return electric.db.enums.create({
     data: {
       id,
       c,
-    }
+    },
+  })
+}
+
+const write_enum_raw = async (electric: Electric, id: string, c: Color | null) => {
+  const [ row ] = await electric.adapter.query({
+    sql: `INSERT INTO enums (id, c) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}) RETURNING *;`,
+    args: [id, c],
   })
+  return decodeRow<Enum>(row, 'enums')
 }
 
-export const get_blob = async (electric: Electric, id: string) => {
+export const write_enum = withDal ? write_enum_dal : write_enum_raw
+
+const get_blob_dal = async (electric: Electric, id: string) => {
   const res = await electric.db.blobs.findUnique({
     where: {
-      id: id
-    }
+      id: id,
+    },
   })
 
   if (res?.blob) {
@@ -412,43 +883,126 @@
     return res
 }
 
-export const write_blob = (electric: Electric, id: string, blob: Uint8Array | null) => {
+const get_blob_raw = async (electric: Electric, id: string) => {
+  const res = await electric.db.rawQuery({
+    sql: `SELECT * FROM blobs WHERE id = ${builder.makePositionalParam(1)};`,
+    args: [id],
+  })
+  if (res.length === 1) {
+    const row = res[0] as { blob: Uint8Array }
+    if (row?.blob) {
+      row.blob = new Uint8Array(row.blob)
+    }
+    return row
+  }
+  return null
+}
+
+export const get_blob = withDal ? get_blob_dal : get_blob_raw
+
+const write_blob_dal = (
+  electric: Electric,
+  id: string,
+  blob: Uint8Array | null
+) => {
   return electric.db.blobs.create({
     data: {
       id,
       blob,
-    }
+    },
+  })
+}
+
+const write_blob_raw = async (
+  electric: Electric,
+  id: string,
+  blob: Uint8Array | null
+) => {
+  const [ row ] = await electric.adapter.query({
+    sql: `INSERT INTO blobs (id, blob) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}) RETURNING *;`,
+    args: [id, converter.encode(blob, PgBasicType.PG_BYTEA)],
   })
+  return decodeRow<{ id: string, blob: Uint8Array | null }>(row, 'blobs')
 }
 
+export const write_blob = withDal ? write_blob_dal : write_blob_raw
+
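A note on `get_blob_raw` above re-wrapping the value in `new Uint8Array(...)`: depending on the driver, a BYTEA/BLOB column may come back as a Node `Buffer` rather than a plain `Uint8Array`, so normalising gives the tests a single type to assert on. Minimal sketch:

```ts
// a Buffer is what e.g. better-sqlite3 typically returns for a BLOB column
const fromDriver: Uint8Array = Buffer.from([0xde, 0xad, 0xbe, 0xef])

const blob = new Uint8Array(fromDriver) // copies into a plain Uint8Array
console.log(blob instanceof Uint8Array) // true
console.log(Buffer.isBuffer(blob))      // false — normalised
console.log(blob.length)                // 4
```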
-export const get_item_columns = (electric: Electric, table: string, column: string) => {
+export const get_item_columns = (
+  electric: Electric,
+  table: string,
+  column: string
+) => {
   return electric.db.rawQuery({ sql: `SELECT ${column} FROM ${table};` })
 }
 
-export const insert_item = async (electric: Electric, keys: [string]) => {
-  const items = keys.map(k => {
+const insert_items_dal = async (electric: Electric, keys: [string]) => {
+  const items = keys.map((k) => {
     return {
       id: uuidv4(),
-      content: k
+      content: k,
     }
   })
 
   await electric.db.items.createMany({
-    data: items
+    data: items,
+  })
+}
+
+const insert_items_raw = async (electric: Electric, keys: [string]) => {
+  const items = keys.map((k) => {
+    return {
+      id: uuidv4(),
+      content: k,
+    }
+  })
+
+  await electric.adapter.run({
+    sql: `INSERT INTO items (id, content) VALUES ${items
+      .map((_, i) => `(${builder.makePositionalParam(2 * i + 1)}, ${builder.makePositionalParam(2 * i + 2)})`)
+      .join(', ')};`,
+    args: items.flatMap((item) => [item.id, item.content]),
+  })
+}
+
+export const insert_items = withDal ? insert_items_dal : insert_items_raw
+
+const insert_item_dal = async (electric: Electric, id: string, content: string) => {
+  return await electric.db.items.create({
+    data: {
+      id,
+      content,
+    },
+  })
+}
+
+const insert_item_raw = async (electric: Electric, id: string, content: string) => {
+  const [ row ] = await electric.adapter.query({
+    sql: `INSERT INTO items (id, content) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}) RETURNING *;`,
+    args: [id, content],
   })
+  return decodeRow<Item>(row, 'items')
 }
 
-export const insert_extended_item = async (electric: Electric, values: Record<string, string>) => {
-  await insert_extended_into(electric, "items", values)
+export const insert_item = withDal ? insert_item_dal : insert_item_raw
+
+export const insert_extended_item = async (
+  electric: Electric,
+  values: Record<string, string>
+) => {
+  await insert_extended_into(electric, 'items', values)
 }
 
-export const insert_extended_into = async (electric: Electric, table: string, values: Record<string, string>) => {
+export const insert_extended_into = async (
+  electric: Electric,
+  table: string,
+  values: Record<string, string>
+) => {
   if (!values['id']) {
     values['id'] = uuidv4()
   }
   const columns = Object.keys(values)
-  const columnNames = columns.join(", ")
-  const placeHolders = columns.map((_, i) => builder.makePositionalParam(i+1))
+  const columnNames = columns.join(', ')
+  const placeHolders = columns.map((_, i) => builder.makePositionalParam(i + 1))
   const args = Object.values(values)
 
   await electric.db.unsafeExec({
@@ -457,57 +1011,140 @@
   })
 }
 
-export const delete_item = async (electric: Electric, keys: [string]) => {
+const delete_item_dal = async (electric: Electric, keys: [string]) => {
   for (const key of keys) {
     await electric.db.items.deleteMany({
       where: {
-        content: key
-      }
+        content: key,
+      },
     })
   }
 }
 
+const delete_item_raw = async (electric: Electric, keys: [string]) => {
+  for (const key of keys) {
+    await electric.adapter.run({
+      sql: `DELETE FROM items WHERE content = ${builder.makePositionalParam(1)};`,
+      args: [key],
    })
+  }
+}
+
+export const delete_item = withDal ?
delete_item_dal : delete_item_raw + +const get_other_items_dal = (electric: Electric) => { return electric.db.other_items.findMany() } -export const insert_other_item = async (electric: Electric, keys: [string]) => { - const items = keys.map(k => { +const get_other_items_raw = async (electric: Electric) => { + return electric.db.rawQuery({ sql: 'SELECT * FROM other_items;' }) +} + +export const get_other_items = withDal ? get_other_items_dal : get_other_items_raw + +const insert_other_items_dal = async (electric: Electric, keys: [string]) => { + const items = keys.map((k) => { return { id: uuidv4(), - content: k + content: k, } }) await electric.db.other_items.createMany({ - data: items + data: items, }) } -export const delete_other_item = async (electric: Electric, keys: [string]) => { +const insert_other_items_raw = async (electric: Electric, keys: [string]) => { + const items = keys.map((k) => { + return { + id: uuidv4(), + content: k, + } + }) + + await electric.adapter.run({ + sql: `INSERT INTO other_items (id, content) VALUES ${items + .map((_, i) => `(${builder.makePositionalParam(2 * i + 1)}, ${builder.makePositionalParam(2 * i + 2)})`) + .join(', ')};`, + args: items.flatMap((item) => [item.id, item.content]), + }) +} + +export const insert_other_items = withDal ? insert_other_items_dal : insert_other_items_raw + +const insert_other_item_dal = async (electric: Electric, id: string, content: string, item_id: string) => { + return await electric.db.other_items.create({ + data: { + id, + content, + item_id + }, + }) +} + +const insert_other_item_raw = async (electric: Electric, id: string, content: string, item_id: string) => { + const [ row ] = await electric.adapter.query({ + sql: `INSERT INTO other_items (id, content, item_id) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}, ${builder.makePositionalParam(3)}) RETURNING *;`, + args: [id, content, item_id], + }) + return decodeRow(row, 'other_items') +} + +export const insert_other_item = withDal ? insert_other_item_dal : insert_other_item_raw + +const delete_other_item_dal = async (electric: Electric, keys: [string]) => { for (const key of keys) { await electric.db.other_items.deleteMany({ where: { - content: key - } + content: key, + }, }) } } -export const set_item_replication_transform = (electric: Electric) => { - electric.db.items.setReplicationTransform({ - transformOutbound: (item) => ({ - ...item, - content: item.content.split('').map((char) => String.fromCharCode(char.charCodeAt(0) + 1)).join('') - }), - transformInbound: (item) => ({ - ...item, - content: item.content.split('').map((char) => String.fromCharCode(char.charCodeAt(0) - 1)).join('') +const delete_other_item_raw = async (electric: Electric, keys: [string]) => { + for (const key of keys) { + await electric.adapter.run({ + sql: `DELETE FROM other_items WHERE content = ${builder.makePositionalParam(1)};`, + args: [key], }) + } +} - }) +export const delete_other_item = withDal ? 
delete_other_item_dal : delete_other_item_raw
+
+const replicationTransformer = {
+  transformOutbound: (item: Readonly<Item>) => ({
+    ...item,
+    content: item.content
+      .split('')
+      .map((char) => String.fromCharCode(char.charCodeAt(0) + 1))
+      .join(''),
+  }),
+  transformInbound: (item: Readonly<Item>) => ({
+    ...item,
+    content: item.content
+      .split('')
+      .map((char) => String.fromCharCode(char.charCodeAt(0) - 1))
+      .join(''),
+  }),
+}
+
+const set_item_replication_transform_dal = (electric: Electric) => {
+  electric.db.items.setReplicationTransform(replicationTransformer)
 }
 
+const set_item_replication_transform_raw = (electric: Electric) => {
+  const namespace = builder.defaultNamespace
+  electric.setReplicationTransform(
+    new QualifiedTablename(namespace, 'items'),
+    replicationTransformer
+  )
+}
+
+export const set_item_replication_transform = withDal ? set_item_replication_transform_dal : set_item_replication_transform_raw
+
 export const stop = async () => {
   await globalRegistry.stopAll()
 }
diff --git a/e2e/satellite_client/src/generated/client/db-description.ts b/e2e/satellite_client/src/generated/client/db-description.ts
new file mode 100644
index 0000000000..a4c5851979
--- /dev/null
+++ b/e2e/satellite_client/src/generated/client/db-description.ts
@@ -0,0 +1,98 @@
+import { type TableSchemas, DbSchema, Relation } from 'electric-sql/client/model';
+import migrations from './migrations';
+import pgMigrations from './pg-migrations';
+
+export const tableSchemas = {
+  blobs: {
+    fields: {
+      "id": "TEXT",
+      "blob": "BYTEA"
+    },
+    relations: []
+  },
+  bools: {
+    fields: {
+      "id": "TEXT",
+      "b": "BOOL"
+    },
+    relations: []
+  },
+  datetimes: {
+    fields: {
+      "id": "TEXT",
+      "d": "DATE",
+      "t": "TIME"
+    },
+    relations: []
+  },
+  enums: {
+    fields: {
+      "id": "TEXT",
+      "c": "TEXT"
+    },
+    relations: []
+  },
+  floats: {
+    fields: {
+      "id": "TEXT",
+      "f4": "FLOAT4",
+      "f8": "FLOAT8"
+    },
+    relations: []
+  },
+  ints: {
+    fields: {
+      "id": "TEXT",
+      "i2": "INT2",
+      "i4": "INT4",
+      "i8": "INT8"
+    },
+    relations: []
+  },
+  items: {
+    fields: {
+      "id": "TEXT",
+      "content": "TEXT",
+      "content_text_null": "TEXT",
+      "content_text_null_default": "TEXT",
+      "intvalue_null": "INT4",
+      "intvalue_null_default": "INT4"
+    },
+    relations: [
+      new Relation("other_items", "", "", "other_items", "ItemsToOther_items", "many"),
+    ]
+  },
+  jsons: {
+    fields: {
+      "id": "TEXT",
+      "jsb": "JSONB"
+    },
+    relations: []
+  },
+  other_items: {
+    fields: {
+      "id": "TEXT",
+      "content": "TEXT",
+      "item_id": "TEXT"
+    },
+    relations: [
+      new Relation("items", "item_id", "id", "items", "ItemsToOther_items", "one"),
+    ]
+  },
+  timestamps: {
+    fields: {
+      "id": "TEXT",
+      "created_at": "TIMESTAMP",
+      "updated_at": "TIMESTAMPTZ"
+    },
+    relations: []
+  },
+  uuids: {
+    fields: {
+      "id": "UUID"
+    },
+    relations: []
+  },
+} as unknown as TableSchemas
+
+export const schema = new DbSchema(tableSchemas, migrations, pgMigrations)
\ No newline at end of file
diff --git a/e2e/satellite_client/src/generated/client/index.ts b/e2e/satellite_client/src/generated/client/index.ts
index 25dd22c279..1d67ef267a 100644
--- a/e2e/satellite_client/src/generated/client/index.ts
+++ b/e2e/satellite_client/src/generated/client/index.ts
@@ -3327,16 +3327,10 @@ interface UuidsGetPayload extends HKT {
 
 export const tableSchemas = {
   blobs: {
-    fields: new Map([
-      [
-        "id",
-        "TEXT"
-      ],
-      [
-        "blob",
-        "BYTEA"
-      ]
-    ]),
+    fields: {
+      "id": "TEXT",
+      "blob": "BYTEA"
+    },
     relations: [
     ],
     modelSchema: (BlobsCreateInputSchema as any)
@@ -3364,16 +3358,10 @@ export const 
tableSchemas = { BlobsGetPayload >, bools: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "b", - "BOOL" - ] - ]), + fields: { + "id": "TEXT", + "b": "BOOL" + }, relations: [ ], modelSchema: (BoolsCreateInputSchema as any) @@ -3401,20 +3389,11 @@ export const tableSchemas = { BoolsGetPayload >, datetimes: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "d", - "DATE" - ], - [ - "t", - "TIME" - ] - ]), + fields: { + "id": "TEXT", + "d": "DATE", + "t": "TIME" + }, relations: [ ], modelSchema: (DatetimesCreateInputSchema as any) @@ -3442,16 +3421,10 @@ export const tableSchemas = { DatetimesGetPayload >, enums: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "c", - "TEXT" - ] - ]), + fields: { + "id": "TEXT", + "c": "TEXT" + }, relations: [ ], modelSchema: (EnumsCreateInputSchema as any) @@ -3479,20 +3452,11 @@ export const tableSchemas = { EnumsGetPayload >, floats: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "f4", - "FLOAT4" - ], - [ - "f8", - "FLOAT8" - ] - ]), + fields: { + "id": "TEXT", + "f4": "FLOAT4", + "f8": "FLOAT8" + }, relations: [ ], modelSchema: (FloatsCreateInputSchema as any) @@ -3520,24 +3484,12 @@ export const tableSchemas = { FloatsGetPayload >, ints: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "i2", - "INT2" - ], - [ - "i4", - "INT4" - ], - [ - "i8", - "INT8" - ] - ]), + fields: { + "id": "TEXT", + "i2": "INT2", + "i4": "INT4", + "i8": "INT8" + }, relations: [ ], modelSchema: (IntsCreateInputSchema as any) @@ -3565,32 +3517,14 @@ export const tableSchemas = { IntsGetPayload >, items: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "content", - "TEXT" - ], - [ - "content_text_null", - "TEXT" - ], - [ - "content_text_null_default", - "TEXT" - ], - [ - "intvalue_null", - "INT4" - ], - [ - "intvalue_null_default", - "INT4" - ] - ]), + fields: { + "id": "TEXT", + "content": "TEXT", + "content_text_null": "TEXT", + "content_text_null_default": "TEXT", + "intvalue_null": "INT4", + "intvalue_null_default": "INT4" + }, relations: [ new Relation("other_items", "", "", "other_items", "ItemsToOther_items", "many"), ], @@ -3619,16 +3553,10 @@ export const tableSchemas = { ItemsGetPayload >, jsons: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "jsb", - "JSONB" - ] - ]), + fields: { + "id": "TEXT", + "jsb": "JSONB" + }, relations: [ ], modelSchema: (JsonsCreateInputSchema as any) @@ -3656,20 +3584,11 @@ export const tableSchemas = { JsonsGetPayload >, other_items: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "content", - "TEXT" - ], - [ - "item_id", - "TEXT" - ] - ]), + fields: { + "id": "TEXT", + "content": "TEXT", + "item_id": "TEXT" + }, relations: [ new Relation("items", "item_id", "id", "items", "ItemsToOther_items", "one"), ], @@ -3698,20 +3617,11 @@ export const tableSchemas = { Other_itemsGetPayload >, timestamps: { - fields: new Map([ - [ - "id", - "TEXT" - ], - [ - "created_at", - "TIMESTAMP" - ], - [ - "updated_at", - "TIMESTAMPTZ" - ] - ]), + fields: { + "id": "TEXT", + "created_at": "TIMESTAMP", + "updated_at": "TIMESTAMPTZ" + }, relations: [ ], modelSchema: (TimestampsCreateInputSchema as any) @@ -3739,12 +3649,9 @@ export const tableSchemas = { TimestampsGetPayload >, uuids: { - fields: new Map([ - [ - "id", - "UUID" - ] - ]), + fields: { + "id": "UUID" + }, relations: [ ], modelSchema: (UuidsCreateInputSchema as any) diff --git a/e2e/tests/03.11_node_satellite_compensations_work.lux b/e2e/tests/03.11_node_satellite_compensations_work.lux index 6b629b737c..6b6834254d 100644 --- 
a/e2e/tests/03.11_node_satellite_compensations_work.lux +++ b/e2e/tests/03.11_node_satellite_compensations_work.lux @@ -19,13 +19,7 @@ !await db.db.unsafeExec({sql: "UPDATE _electric_meta SET value = 1 WHERE key = 'compensations' RETURNING *"}) ?$node - """!await db.db.items.create({ - data: { - id: "00000000-0000-0000-0000-000000000001", - content: "hello world" - } - }) - """ + !await client.insert_item(db, "00000000-0000-0000-0000-000000000001", "hello world") ??[proto] send: #SatOpLog ??[proto] recv: #SatOpLog @@ -40,14 +34,7 @@ [shell satellite_1] # On a disconnected client, insert a dependent row ?$node - """!await db.db.other_items.create({ - data: { - id: "other_test_id_1", - content: "", - item_id: "00000000-0000-0000-0000-000000000001" - } - }) - """ + !await client.insert_other_item(db, "other_test_id_1", "", "00000000-0000-0000-0000-000000000001") ?$node diff --git a/e2e/tests/03.16_node_satellite_can_sync_uuids.lux b/e2e/tests/03.16_node_satellite_can_sync_uuids.lux index 296d64c608..e92d8f16e2 100644 --- a/e2e/tests/03.16_node_satellite_can_sync_uuids.lux +++ b/e2e/tests/03.16_node_satellite_can_sync_uuids.lux @@ -61,7 +61,7 @@ - # Can't write invalid uuids to the DB # the uuid below has one digit too many in the last part - !await client.write_uuid(db, '09e3e433-e9f1-46b4-a18f-1e4e0b6c62789') + !await client.write_invalid_uuid(db, '09e3e433-e9f1-46b4-a18f-1e4e0b6c62789') """?? Uncaught: [ diff --git a/e2e/tests/03.25_node_pk_position_does_not_matter_for_compensations.lux b/e2e/tests/03.25_node_pk_position_does_not_matter_for_compensations.lux index cdae6ebf9c..9165e0f57d 100644 --- a/e2e/tests/03.25_node_pk_position_does_not_matter_for_compensations.lux +++ b/e2e/tests/03.25_node_pk_position_does_not_matter_for_compensations.lux @@ -35,13 +35,7 @@ ??[proto] recv: #SatSubsDataEnd ?$node - """!await db.db.items.create({ - data: { - id: "00000000-0000-0000-0000-000000000001", - content: "hello world" - } - }) - """ + !await client.insert_item(db, "00000000-0000-0000-0000-000000000001", "hello world") ??[proto] send: #SatOpLog ??[proto] recv: #SatOpLog @@ -56,14 +50,7 @@ [shell satellite_1] # On a disconnected client, insert a dependent row ?$node - """!await db.db.other_items.create({ - data: { - id: "other_test_id_1", - content: "", - item_id: "00000000-0000-0000-0000-000000000001" - } - }) - """ + !await client.insert_other_item(db, "other_test_id_1", "", "00000000-0000-0000-0000-000000000001") ?$node diff --git a/e2e/tests/03.26_node_satellite_can_resume_replication_after_server_restart.lux b/e2e/tests/03.26_node_satellite_can_resume_replication_after_server_restart.lux index 316b87ded6..aabf51e44d 100644 --- a/e2e/tests/03.26_node_satellite_can_resume_replication_after_server_restart.lux +++ b/e2e/tests/03.26_node_satellite_can_resume_replication_after_server_restart.lux @@ -73,10 +73,10 @@ [shell satellite_2] # Subscribe to "items" and include "other_items" - !const { synced } = await db.db.items.sync({ \ + !await client.syncTableWithShape(db, "items", { \ where: "this.content like 'items-_-'", \ include: { other_items: true } \ - }); await synced + }) ?send: #SatSubsReq\{id: ([a-f0-9-]{36}) [global client_2_subs_id=$1] diff --git a/e2e/tests/_satellite_macros.luxinc b/e2e/tests/_satellite_macros.luxinc index 516312f719..43b13d1be9 100644 --- a/e2e/tests/_satellite_macros.luxinc +++ b/e2e/tests/_satellite_macros.luxinc @@ -202,7 +202,7 @@ [endmacro] [macro node_await_insert keys] - !await client.insert_item(db, ${keys}) + !await client.insert_items(db, ${keys}) ??$node 
[endmacro] @@ -221,7 +221,7 @@ [endmacro] [macro node_await_insert_other keys] - !await client.insert_other_item(db, ${keys}) + !await client.insert_other_items(db, ${keys}) ??$node [endmacro] diff --git a/generator/src/functions/tableDescriptionWriters/writeTableSchemas.ts b/generator/src/functions/tableDescriptionWriters/writeTableSchemas.ts index 0d3e8de4fc..3f2d2e0a54 100644 --- a/generator/src/functions/tableDescriptionWriters/writeTableSchemas.ts +++ b/generator/src/functions/tableDescriptionWriters/writeTableSchemas.ts @@ -102,15 +102,14 @@ export function writeFieldsMap( const fieldsWithoutRelations = model.fields.filter( (f) => model.relationFields.indexOf(f) === -1 ) - const fieldArray = JSON.stringify( + const fields = Object.fromEntries( fieldsWithoutRelations.map((field) => [ field.name, pgType(field, model.name), - ]), - null, - 2 + ]) ) - fileWriter.writer.write(`new Map(${fieldArray}),`) + const serializedFields = JSON.stringify(fields, null, 2) + fileWriter.writer.write(`${serializedFields},`) } function pgType(field: ExtendedDMMFField, modelName: string): string { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f62578e647..aed1dd5c5e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -41,6 +41,9 @@ importers: lodash.isequal: specifier: ^4.5.0 version: 4.5.0 + lodash.keyby: + specifier: ^4.6.0 + version: 4.6.0 lodash.mapvalues: specifier: ^4.6.0 version: 4.6.0 @@ -123,6 +126,9 @@ importers: '@types/lodash.isequal': specifier: ^4.5.6 version: 4.5.8 + '@types/lodash.keyby': + specifier: ^4.6.9 + version: 4.6.9 '@types/lodash.mapvalues': specifier: ^4.6.7 version: 4.6.9 @@ -424,6 +430,9 @@ importers: pg: specifier: ^8.11.3 version: 8.11.5 + ts-dedent: + specifier: ^2.2.0 + version: 2.2.0 uuid: specifier: ^9.0.0 version: 9.0.1 @@ -6157,6 +6166,12 @@ packages: '@types/lodash': 4.17.1 dev: true + /@types/lodash.keyby@4.6.9: + resolution: {integrity: sha512-N8xfQdZ2ADNPDL72TaLozIL4K1xFCMG1C1T9GN4dOFI+sn1cjl8d4U+POp8PRCAnNxDCMkYAZVD/rOBIWYPT5g==} + dependencies: + '@types/lodash': 4.17.1 + dev: true + /@types/lodash.mapvalues@4.6.9: resolution: {integrity: sha512-NyAIgUrI+nnr3VoJbiAlUfqBT2M/65mOCm+LerHgYE7lEyxXUAalZiMIL37GBnfg0QOMMBEPW4osdiMjsoEA4g==} dependencies: @@ -12196,6 +12211,9 @@ packages: /lodash.isstring@4.0.1: resolution: {integrity: sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==} + + /lodash.keyby@4.6.0: + resolution: {integrity: sha512-PRe4Cn20oJM2Sn6ljcZMeKgyhTHpzvzFmdsp9rK+6K0eJs6Tws0MqgGFpfX/o2HjcoQcBny1Eik9W7BnVTzjIQ==} dev: false /lodash.mapvalues@4.6.0: