diff --git a/changelogs/drizzle-kit/0.25.0.md b/changelogs/drizzle-kit/0.25.0.md
new file mode 100644
index 000000000..e33879d97
--- /dev/null
+++ b/changelogs/drizzle-kit/0.25.0.md
@@ -0,0 +1,180 @@
+## Breaking changes and migrate guide for Turso users
+
+If you are using Turso and libsql, you will need to upgrade your `drizzle.config` and `@libsql/client` package.
+
+1. This version of drizzle-orm will only work with `@libsql/client@0.10.0` or higher if you are using the `migrate` function. For other use cases, you can continue using previous versions (but we suggest upgrading).
To install the latest version, use the command:
+To install the latest version, use the command:
+
+```bash
+npm i @libsql/client@latest
+```
+
+2. Previously, we had a common `drizzle.config` for SQLite and Turso users, which allowed a shared strategy for both dialects. Starting with this release, we are introducing the turso dialect in drizzle-kit. We will evolve and improve Turso as a separate dialect with its own migration strategies.
+
+**Before**
+
+```ts
+import { defineConfig } from "drizzle-kit";
+
+export default defineConfig({
+ dialect: "sqlite",
+ schema: "./schema.ts",
+ out: "./drizzle",
+ dbCredentials: {
+ url: "database.db",
+ },
+ breakpoints: true,
+ verbose: true,
+ strict: true,
+});
+```
+
+**After**
+
+```ts
+import { defineConfig } from "drizzle-kit";
+
+export default defineConfig({
+ dialect: "turso",
+ schema: "./schema.ts",
+ out: "./drizzle",
+ dbCredentials: {
+ url: "database.db",
+ },
+ breakpoints: true,
+ verbose: true,
+ strict: true,
+});
+```
+
+If you are using only SQLite, you can use `dialect: "sqlite"`
+
+## LibSQL/Turso and SQLite migration updates
+
+### SQLite "generate" and "push" statements updates
+
+Starting from this release, we will no longer generate comments like this:
+
+```sql
+ '/*\n SQLite does not support "Changing existing column type" out of the box, we do not generate automatic migration for that, so it has to be done manually'
+ + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
+ + '\n https://www.sqlite.org/lang_altertable.html'
+ + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
+ + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
+ + '\n*/'
+```
+
+We will generate a set of statements, and you can decide if it's appropriate to create data-moving statements instead. Here is an example of the SQL file you'll receive now:
+
+```sql
+PRAGMA foreign_keys=OFF;
+--> statement-breakpoint
+CREATE TABLE `__new_worker` (
+ `id` integer PRIMARY KEY NOT NULL,
+ `name` text NOT NULL,
+ `salary` text NOT NULL,
+ `job_id` integer,
+ FOREIGN KEY (`job_id`) REFERENCES `job`(`id`) ON UPDATE no action ON DELETE no action
+);
+--> statement-breakpoint
+INSERT INTO `__new_worker`("id", "name", "salary", "job_id") SELECT "id", "name", "salary", "job_id" FROM `worker`;
+--> statement-breakpoint
+DROP TABLE `worker`;
+--> statement-breakpoint
+ALTER TABLE `__new_worker` RENAME TO `worker`;
+--> statement-breakpoint
+PRAGMA foreign_keys=ON;
+```
+
+### LibSQL/Turso "generate" and "push" statements updates
+
+Since LibSQL supports more ALTER statements than SQLite, we can generate more statements without recreating your schema and moving all the data, which can be potentially dangerous for production environments.
+
+LibSQL and Turso will now have a separate dialect in the Drizzle config file, meaning that we will evolve Turso and LibSQL independently from SQLite and will aim to support as many features as Turso/LibSQL offer.
+
+With the updated LibSQL migration strategy, you will have the ability to:
+
+- **Change Data Type**: Set a new data type for existing columns.
+- **Set and Drop Default Values**: Add or remove default values for existing columns.
+- **Set and Drop NOT NULL**: Add or remove the NOT NULL constraint on existing columns.
+- **Add References to Existing Columns**: Add foreign key references to existing columns
+
+You can find more information in the [LibSQL documentation](https://github.com/tursodatabase/libsql/blob/main/libsql-sqlite3/doc/libsql_extensions.md#altering-columns)
+
+### LIMITATIONS
+
+- Dropping or altering an index will cause table recreation.
+
+This is because LibSQL/Turso does not support dropping this type of index.
+
+```sql
+CREATE TABLE `users` (
+ `id` integer NOT NULL,
+ `name` integer,
+ `age` integer PRIMARY KEY NOT NULL
+ FOREIGN KEY (`name`) REFERENCES `users1`("id") ON UPDATE no action ON DELETE no action
+);
+```
+
+- If the table has indexes, altering columns will cause table recreation.
+- Drizzle-Kit will drop the indexes, modify the columns, and then recreate the indexes.
+- Adding or dropping composite foreign keys is not supported and will cause table recreation
+
+### NOTES
+
+- You can create a reference on any column type, but if you want to insert values, the referenced column must have a unique index or primary key.
+
+```sql
+CREATE TABLE parent(a PRIMARY KEY, b UNIQUE, c, d, e, f);
+CREATE UNIQUE INDEX i1 ON parent(c, d);
+CREATE INDEX i2 ON parent(e);
+CREATE UNIQUE INDEX i3 ON parent(f COLLATE nocase);
+
+CREATE TABLE child1(f, g REFERENCES parent(a)); -- Ok
+CREATE TABLE child2(h, i REFERENCES parent(b)); -- Ok
+CREATE TABLE child3(j, k, FOREIGN KEY(j, k) REFERENCES parent(c, d)); -- Ok
+CREATE TABLE child4(l, m REFERENCES parent(e)); -- Error!
+CREATE TABLE child5(n, o REFERENCES parent(f)); -- Error!
+CREATE TABLE child6(p, q, FOREIGN KEY(p, q) REFERENCES parent(b, c)); -- Error!
+CREATE TABLE child7(r REFERENCES parent(c)); -- Error!
+```
+
+> **NOTE**: The foreign key for the table child5 is an error because, although the parent key column has a unique index, the index uses a different collating sequence.
+
+See more: https://www.sqlite.org/foreignkeys.html
+
+## New `casing` param in `drizzle-orm` and `drizzle-kit`
+
+There are more improvements you can make to your schema definition. The most common way to name your variables in a database and in TypeScript code is usually `snake_case` in the database and `camelCase` in the code. For this case, in Drizzle, you can now define a naming strategy in your database to help Drizzle map column keys automatically. Let's take a table from the previous example and make it work with the new casing API in Drizzle
+
+Table can now become:
+```ts
+import { pgTable } from "drizzle-orm/pg-core";
+
+export const ingredients = pgTable("ingredients", (t) => ({
+ id: t.uuid().defaultRandom().primaryKey(),
+ name: t.text().notNull(),
+ description: t.text(),
+ inStock: t.boolean().default(true),
+}));
+```
+As you can see, `inStock` doesn't have a database name alias, but by defining the casing configuration at the connection level, all queries will automatically map it to `snake_case`
+
+```ts
+const db = await drizzle('node-postgres', { connection: '', casing: 'snake_case' })
+```
+
+For `drizzle-kit` migration generation, you should also specify the `casing` param in the Drizzle config, so you can be sure your casing strategy will be applied to drizzle-kit as well
+
+```ts
+import { defineConfig } from "drizzle-kit";
+
+export default defineConfig({
+ dialect: "postgresql",
+ schema: "./schema.ts",
+ dbCredentials: {
+ url: "postgresql://postgres:password@localhost:5432/db",
+ },
+ casing: "snake_case",
+});
+```
\ No newline at end of file
diff --git a/changelogs/drizzle-orm/0.34.0.md b/changelogs/drizzle-orm/0.34.0.md
new file mode 100644
index 000000000..cfc3e4e38
--- /dev/null
+++ b/changelogs/drizzle-orm/0.34.0.md
@@ -0,0 +1,345 @@
+## Breaking changes and migrate guide for Turso users
+
+If you are using Turso and libsql, you will need to upgrade your `drizzle.config` and `@libsql/client` package.
+
+1. This version of drizzle-orm will only work with `@libsql/client@0.10.0` or higher if you are using the `migrate` function. For other use cases, you can continue using previous versions (but we suggest upgrading).
To install the latest version, use the command:
+To install the latest version, use the command:
+
+```bash
+npm i @libsql/client@latest
+```
+
+2. Previously, we had a common `drizzle.config` for SQLite and Turso users, which allowed a shared strategy for both dialects. Starting with this release, we are introducing the turso dialect in drizzle-kit. We will evolve and improve Turso as a separate dialect with its own migration strategies.
+
+**Before**
+
+```ts
+import { defineConfig } from "drizzle-kit";
+
+export default defineConfig({
+ dialect: "sqlite",
+ schema: "./schema.ts",
+ out: "./drizzle",
+ dbCredentials: {
+ url: "database.db",
+ },
+ breakpoints: true,
+ verbose: true,
+ strict: true,
+});
+```
+
+**After**
+
+```ts
+import { defineConfig } from "drizzle-kit";
+
+export default defineConfig({
+ dialect: "turso",
+ schema: "./schema.ts",
+ out: "./drizzle",
+ dbCredentials: {
+ url: "database.db",
+ },
+ breakpoints: true,
+ verbose: true,
+ strict: true,
+});
+```
+
+If you are using only SQLite, you can use `dialect: "sqlite"`
+
+## LibSQL/Turso and SQLite migration updates
+
+### SQLite "generate" and "push" statements updates
+
+Starting from this release, we will no longer generate comments like this:
+
+```sql
+ '/*\n SQLite does not support "Changing existing column type" out of the box, we do not generate automatic migration for that, so it has to be done manually'
+ + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
+ + '\n https://www.sqlite.org/lang_altertable.html'
+ + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
+ + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
+ + '\n*/'
+```
+
+We will generate a set of statements, and you can decide if it's appropriate to create data-moving statements instead. Here is an example of the SQL file you'll receive now:
+
+```sql
+PRAGMA foreign_keys=OFF;
+--> statement-breakpoint
+CREATE TABLE `__new_worker` (
+ `id` integer PRIMARY KEY NOT NULL,
+ `name` text NOT NULL,
+ `salary` text NOT NULL,
+ `job_id` integer,
+ FOREIGN KEY (`job_id`) REFERENCES `job`(`id`) ON UPDATE no action ON DELETE no action
+);
+--> statement-breakpoint
+INSERT INTO `__new_worker`("id", "name", "salary", "job_id") SELECT "id", "name", "salary", "job_id" FROM `worker`;
+--> statement-breakpoint
+DROP TABLE `worker`;
+--> statement-breakpoint
+ALTER TABLE `__new_worker` RENAME TO `worker`;
+--> statement-breakpoint
+PRAGMA foreign_keys=ON;
+```
+
+### LibSQL/Turso "generate" and "push" statements updates
+
+Since LibSQL supports more ALTER statements than SQLite, we can generate more statements without recreating your schema and moving all the data, which can be potentially dangerous for production environments.
+
+LibSQL and Turso will now have a separate dialect in the Drizzle config file, meaning that we will evolve Turso and LibSQL independently from SQLite and will aim to support as many features as Turso/LibSQL offer.
+
+With the updated LibSQL migration strategy, you will have the ability to:
+
+- **Change Data Type**: Set a new data type for existing columns.
+- **Set and Drop Default Values**: Add or remove default values for existing columns.
+- **Set and Drop NOT NULL**: Add or remove the NOT NULL constraint on existing columns.
+- **Add References to Existing Columns**: Add foreign key references to existing columns
+
+You can find more information in the [LibSQL documentation](https://github.com/tursodatabase/libsql/blob/main/libsql-sqlite3/doc/libsql_extensions.md#altering-columns)
+
+### LIMITATIONS
+
+- Dropping or altering an index will cause table recreation.
+
+This is because LibSQL/Turso does not support dropping this type of index.
+
+```sql
+CREATE TABLE `users` (
+ `id` integer NOT NULL,
+ `name` integer,
+ `age` integer PRIMARY KEY NOT NULL
+ FOREIGN KEY (`name`) REFERENCES `users1`("id") ON UPDATE no action ON DELETE no action
+);
+```
+
+- If the table has indexes, altering columns will cause table recreation.
+- Drizzle-Kit will drop the indexes, modify the columns, and then recreate the indexes.
+- Adding or dropping composite foreign keys is not supported and will cause table recreation
+
+### NOTES
+
+- You can create a reference on any column type, but if you want to insert values, the referenced column must have a unique index or primary key.
+
+```sql
+CREATE TABLE parent(a PRIMARY KEY, b UNIQUE, c, d, e, f);
+CREATE UNIQUE INDEX i1 ON parent(c, d);
+CREATE INDEX i2 ON parent(e);
+CREATE UNIQUE INDEX i3 ON parent(f COLLATE nocase);
+
+CREATE TABLE child1(f, g REFERENCES parent(a)); -- Ok
+CREATE TABLE child2(h, i REFERENCES parent(b)); -- Ok
+CREATE TABLE child3(j, k, FOREIGN KEY(j, k) REFERENCES parent(c, d)); -- Ok
+CREATE TABLE child4(l, m REFERENCES parent(e)); -- Error!
+CREATE TABLE child5(n, o REFERENCES parent(f)); -- Error!
+CREATE TABLE child6(p, q, FOREIGN KEY(p, q) REFERENCES parent(b, c)); -- Error!
+CREATE TABLE child7(r REFERENCES parent(c)); -- Error!
+```
+
+> **NOTE**: The foreign key for the table child5 is an error because, although the parent key column has a unique index, the index uses a different collating sequence.
+
+See more: https://www.sqlite.org/foreignkeys.html
+
+## A new and easy way to start using drizzle
+
+Currently, the only way to do this is to define the client yourself and pass it to drizzle
+
+```ts
+const client = new Pool({ url: '' });
+drizzle(client, { logger: true });
+```
+
+But we want to introduce you to a new API, which is a simplified method in addition to the existing one.
+
+Most clients will have a few options to connect, starting with the easiest and most common one, and allowing you to control your client connection as needed.
+
+Let's use `node-postgres` as an example, but the same pattern can be applied to all other clients
+
+```ts
+// Finally, one import for all available clients and dialects!
+import { drizzle } from 'drizzle-orm'
+
+// Choose a client and use a connection URL — nothing else is needed!
+const db1 = await drizzle("node-postgres", process.env.POSTGRES_URL);
+
+// If you need to pass a logger, schema, or other configurations, you can use an object and specify the client-specific URL in the connection
+const db2 = await drizzle("node-postgres", {
+ connection: process.env.POSTGRES_URL,
+ logger: true
+});
+
+// And finally, if you need to use full client/driver-specific types in connections, you can use a URL or host/port/etc. as an object inferred from the underlying client connection types
+const db3 = await drizzle("node-postgres", {
+ connection: {
+ connectionString: process.env.POSTGRES_URL,
+ },
+});
+
+const db4 = await drizzle("node-postgres", {
+ connection: {
+ user: process.env.DB_USER,
+ password: process.env.DB_PASSWORD,
+ host: process.env.DB_HOST,
+ port: process.env.DB_PORT,
+ database: process.env.DB_NAME,
+ ssl: true,
+ },
+});
+```
+
+A few clients will have a slightly different API due to their specific behavior. Let's take a look at them:
+
+For `aws-data-api-pg`, Drizzle will require `resourceArn`, `database`, and `secretArn`, along with any other AWS Data API client types for the connection, such as credentials, region, etc.
+
+```ts
+drizzle("aws-data-api-pg", {
+ connection: {
+ resourceArn: "",
+ database: "",
+ secretArn: "",
+ },
+});
+```
+
+For `d1`, the Cloudflare Worker types, as described in the [documentation](https://developers.cloudflare.com/d1/get-started/), will be required.
+
+```ts
+drizzle("d1", {
+ connection: env.DB // CloudFlare Worker Types
+})
+```
+
+For `vercel-postgres`, nothing is needed since Vercel automatically retrieves the `POSTGRES_URL` from the `.env` file. You can check this [documentation](https://vercel.com/docs/storage/vercel-postgres/quickstart) for more info
+
+```ts
+drizzle("vercel-postgres")
+```
+
+> Note that the first example with the client is still available and not deprecated. You can use it if you don't want to await the drizzle object. The new way of defining drizzle is designed to make it easier to import from one place and get autocomplete for all the available clients
+
+## Optional names for columns and callback in drizzle table
+
+We believe that schema definition in Drizzle is extremely powerful and aims to be as close to SQL as possible while adding more helper functions for JS runtime values.
+
+However, there are a few areas that could be improved, which we addressed in this release. These include:
+
+- Unnecessary database column names when TypeScript keys are essentially just copies of them
+- A callback that provides all column types available for a specific table.
+
+Let's look at an example with PostgreSQL (this applies to all the dialects supported by Drizzle)
+
+**Previously**
+```ts
+import { boolean, pgTable, text, uuid } from "drizzle-orm/pg-core";
+
+export const ingredients = pgTable("ingredients", {
+ id: uuid("id").defaultRandom().primaryKey(),
+ name: text("name").notNull(),
+ description: text("description"),
+ inStock: boolean("in_stock").default(true),
+});
+```
+
+The previous table definition will still be valid in the new release, but it can be replaced with this instead
+
+```ts
+import { pgTable } from "drizzle-orm/pg-core";
+
+export const ingredients = pgTable("ingredients", (t) => ({
+ id: t.uuid().defaultRandom().primaryKey(),
+ name: t.text().notNull(),
+ description: t.text(),
+ inStock: t.boolean("in_stock").default(true),
+}));
+```
+
+## New `casing` param in `drizzle-orm` and `drizzle-kit`
+
+There are more improvements you can make to your schema definition. The most common way to name your variables in a database and in TypeScript code is usually `snake_case` in the database and `camelCase` in the code. For this case, in Drizzle, you can now define a naming strategy in your database to help Drizzle map column keys automatically. Let's take a table from the previous example and make it work with the new casing API in Drizzle
+
+Table can now become:
+```ts
+import { pgTable } from "drizzle-orm/pg-core";
+
+export const ingredients = pgTable("ingredients", (t) => ({
+ id: t.uuid().defaultRandom().primaryKey(),
+ name: t.text().notNull(),
+ description: t.text(),
+ inStock: t.boolean().default(true),
+}));
+```
+As you can see, `inStock` doesn't have a database name alias, but by defining the casing configuration at the connection level, all queries will automatically map it to `snake_case`
+
+```ts
+const db = await drizzle('node-postgres', { connection: '', casing: 'snake_case' })
+```
+
+For `drizzle-kit` migration generation, you should also specify the `casing` param in the Drizzle config, so you can be sure your casing strategy will be applied to drizzle-kit as well
+
+```ts
+import { defineConfig } from "drizzle-kit";
+
+export default defineConfig({
+ dialect: "postgresql",
+ schema: "./schema.ts",
+ dbCredentials: {
+ url: "postgresql://postgres:password@localhost:5432/db",
+ },
+ casing: "snake_case",
+});
+```
+
+## New "count" API
+
+Before this release to count entities in a table, you would need to do this:
+
+```ts
+const res = await db.select({ count: sql`count(*)` }).from(users);
+const count = res[0].count;
+```
+
+The new API will look like this:
+
+```ts
+// how many users are in the database
+const count: number = await db.$count(users);
+
+// how many users with the name "Dan" are in the database
+const count: number = await db.$count(users, eq(name, "Dan"));
+```
+
+This can also work as a subquery and within relational queries
+
+```ts
+const users = await db.select({
+ ...users,
+ postsCount: db.$count(posts, eq(posts.authorId, users.id))
+});
+
+const users = await db.query.users.findMany({
+ extras: {
+ postsCount: db.$count(posts, eq(posts.authorId, users.id))
+ }
+})
+```
+
+## Ability to execute raw strings instead of using SQL templates for raw queries
+
+Previously, you would have needed to do this to execute a raw query with Drizzle
+
+```ts
+import { sql } from 'drizzle-orm'
+
+db.execute(sql`select * from ${users}`);
+// or
+db.execute(sql.raw(`select * from ${users}`));
+```
+
+You can now do this as well
+
+```ts
+db.execute('select * from users')
+```
diff --git a/changelogs/drizzle-orm/0.34.1.md b/changelogs/drizzle-orm/0.34.1.md
new file mode 100644
index 000000000..e314b5fd2
--- /dev/null
+++ b/changelogs/drizzle-orm/0.34.1.md
@@ -0,0 +1 @@
+- Fixed dynamic imports for CJS and MJS in the `/connect` module
\ No newline at end of file
diff --git a/drizzle-kit/build.ts b/drizzle-kit/build.ts
index 701e9c84c..ec7fc76c0 100644
--- a/drizzle-kit/build.ts
+++ b/drizzle-kit/build.ts
@@ -1,3 +1,4 @@
+///
import * as esbuild from 'esbuild';
import { readFileSync, writeFileSync } from 'node:fs';
import * as tsup from 'tsup';
@@ -16,6 +17,7 @@ const driversPackages = [
// sqlite drivers
'@libsql/client',
'better-sqlite3',
+ 'bun:sqlite',
];
esbuild.buildSync({
@@ -82,6 +84,7 @@ const main = async () => {
await tsup.build({
entryPoints: ['./src/index.ts', './src/api.ts'],
outDir: './dist',
+ external: ['bun:sqlite'],
splitting: false,
dts: true,
format: ['cjs', 'esm'],
diff --git a/drizzle-kit/package.json b/drizzle-kit/package.json
index 9d9e1d227..cf771296a 100644
--- a/drizzle-kit/package.json
+++ b/drizzle-kit/package.json
@@ -1,6 +1,6 @@
{
"name": "drizzle-kit",
- "version": "0.24.2",
+ "version": "0.25.0",
"homepage": "https://orm.drizzle.team",
"keywords": [
"drizzle",
@@ -54,7 +54,7 @@
"@electric-sql/pglite": "^0.1.5",
"@hono/node-server": "^1.9.0",
"@hono/zod-validator": "^0.2.1",
- "@libsql/client": "^0.4.2",
+ "@libsql/client": "^0.10.0",
"@neondatabase/serverless": "^0.9.1",
"@originjs/vite-plugin-commonjs": "^1.0.3",
"@planetscale/database": "^1.16.0",
@@ -74,12 +74,13 @@
"@vercel/postgres": "^0.8.0",
"ava": "^5.1.0",
"better-sqlite3": "^9.4.3",
+ "bun-types": "^0.6.6",
"camelcase": "^7.0.1",
"chalk": "^5.2.0",
"commander": "^12.1.0",
"dockerode": "^3.3.4",
"dotenv": "^16.0.3",
- "drizzle-kit": "0.21.2",
+ "drizzle-kit": "0.25.0-b1faa33",
"drizzle-orm": "workspace:./drizzle-orm/dist",
"env-paths": "^3.0.0",
"esbuild-node-externals": "^1.9.0",
diff --git a/drizzle-kit/schema.ts b/drizzle-kit/schema.ts
deleted file mode 100644
index e69de29bb..000000000
diff --git a/drizzle-kit/src/api.ts b/drizzle-kit/src/api.ts
index 3922da088..128e1cd53 100644
--- a/drizzle-kit/src/api.ts
+++ b/drizzle-kit/src/api.ts
@@ -15,6 +15,7 @@ import { pgSuggestions } from './cli/commands/pgPushUtils';
import { updateUpToV6 as upPgV6, updateUpToV7 as upPgV7 } from './cli/commands/pgUp';
import { sqlitePushIntrospect } from './cli/commands/sqliteIntrospect';
import { logSuggestionsAndReturn } from './cli/commands/sqlitePushUtils';
+import type { CasingType } from './cli/validations/common';
import { originUUID } from './global';
import { fillPgSnapshot } from './migrationPreparator';
import { MySqlSchema as MySQLSchemaKit, mysqlSchema, squashMysqlScheme } from './serializer/mysqlSchema';
@@ -40,6 +41,7 @@ export const generateDrizzleJson = (
imports: Record,
prevId?: string,
schemaFilters?: string[],
+ casing?: CasingType,
): PgSchemaKit => {
const prepared = prepareFromExports(imports);
@@ -50,6 +52,7 @@ export const generateDrizzleJson = (
prepared.enums,
prepared.schemas,
prepared.sequences,
+ casing,
schemaFilters,
);
@@ -147,6 +150,7 @@ export const pushSchema = async (
export const generateSQLiteDrizzleJson = async (
imports: Record,
prevId?: string,
+ casing?: CasingType,
): Promise => {
const { prepareFromExports } = await import('./serializer/sqliteImports');
@@ -154,7 +158,7 @@ export const generateSQLiteDrizzleJson = async (
const id = randomUUID();
- const snapshot = generateSqliteSnapshot(prepared.tables);
+ const snapshot = generateSqliteSnapshot(prepared.tables, casing);
return {
...snapshot,
@@ -250,6 +254,7 @@ export const pushSQLiteSchema = async (
export const generateMySQLDrizzleJson = async (
imports: Record,
prevId?: string,
+ casing?: CasingType,
): Promise => {
const { prepareFromExports } = await import('./serializer/mysqlImports');
@@ -257,7 +262,7 @@ export const generateMySQLDrizzleJson = async (
const id = randomUUID();
- const snapshot = generateMySqlSnapshot(prepared.tables);
+ const snapshot = generateMySqlSnapshot(prepared.tables, casing);
return {
...snapshot,
diff --git a/drizzle-kit/src/cli/commands/introspect.ts b/drizzle-kit/src/cli/commands/introspect.ts
index 3558bf83c..7e7185a58 100644
--- a/drizzle-kit/src/cli/commands/introspect.ts
+++ b/drizzle-kit/src/cli/commands/introspect.ts
@@ -25,6 +25,7 @@ import {
} from '../../snapshotsDiffer';
import { prepareOutFolder } from '../../utils';
import type { Casing, Prefix } from '../validations/common';
+import { LibSQLCredentials } from '../validations/libsql';
import type { MysqlCredentials } from '../validations/mysql';
import type { PostgresCredentials } from '../validations/postgres';
import { SingleStoreCredentials } from '../validations/singlestore';
@@ -469,6 +470,117 @@ export const introspectSqlite = async (
process.exit(0);
};
+export const introspectLibSQL = async (
+ casing: Casing,
+ out: string,
+ breakpoints: boolean,
+ credentials: LibSQLCredentials,
+ tablesFilter: string[],
+ prefix: Prefix,
+) => {
+ const { connectToLibSQL } = await import('../connections');
+ const db = await connectToLibSQL(credentials);
+
+ const matchers = tablesFilter.map((it) => {
+ return new Minimatch(it);
+ });
+
+ const filter = (tableName: string) => {
+ if (matchers.length === 0) return true;
+
+ let flags: boolean[] = [];
+
+ for (let matcher of matchers) {
+ if (matcher.negate) {
+ if (!matcher.match(tableName)) {
+ flags.push(false);
+ }
+ }
+
+ if (matcher.match(tableName)) {
+ flags.push(true);
+ }
+ }
+
+ if (flags.length > 0) {
+ return flags.every(Boolean);
+ }
+ return false;
+ };
+
+ const progress = new IntrospectProgress();
+ const res = await renderWithTask(
+ progress,
+ fromSqliteDatabase(db, filter, (stage, count, status) => {
+ progress.update(stage, count, status);
+ }),
+ );
+
+ const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema;
+ const ts = sqliteSchemaToTypeScript(schema, casing);
+ const relationsTs = relationsToTypeScript(schema, casing);
+
+ // check orm and orm-pg api version
+
+ const schemaFile = join(out, 'schema.ts');
+ writeFileSync(schemaFile, ts.file);
+ const relationsFile = join(out, 'relations.ts');
+ writeFileSync(relationsFile, relationsTs.file);
+ console.log();
+
+ const { snapshots, journal } = prepareOutFolder(out, 'sqlite');
+
+ if (snapshots.length === 0) {
+ const { sqlStatements, _meta } = await applySqliteSnapshotsDiff(
+ squashSqliteScheme(drySQLite),
+ squashSqliteScheme(schema),
+ tablesResolver,
+ columnsResolver,
+ drySQLite,
+ schema,
+ );
+
+ writeResult({
+ cur: schema,
+ sqlStatements,
+ journal,
+ _meta,
+ outFolder: out,
+ breakpoints,
+ type: 'introspect',
+ prefixMode: prefix,
+ });
+ } else {
+ render(
+ `[${
+ chalk.blue(
+ 'i',
+ )
+ }] No SQL generated, you already have migrations in project`,
+ );
+ }
+
+ render(
+ `[${
+ chalk.green(
+ '✓',
+ )
+ }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`,
+ );
+ render(
+ `[${
+ chalk.green(
+ '✓',
+ )
+ }] You relations file is ready ➜ ${
+ chalk.bold.underline.blue(
+ relationsFile,
+ )
+ } 🚀`,
+ );
+ process.exit(0);
+};
+
const withCasing = (value: string, casing: Casing) => {
if (casing === 'preserve') {
return value;
diff --git a/drizzle-kit/src/cli/commands/libSqlPushUtils.ts b/drizzle-kit/src/cli/commands/libSqlPushUtils.ts
new file mode 100644
index 000000000..01bb61334
--- /dev/null
+++ b/drizzle-kit/src/cli/commands/libSqlPushUtils.ts
@@ -0,0 +1,346 @@
+import chalk from 'chalk';
+
+import { JsonStatement } from 'src/jsonStatements';
+import { findAddedAndRemoved, SQLiteDB } from 'src/utils';
+import { SQLiteSchemaInternal, SQLiteSchemaSquashed, SQLiteSquasher } from '../../serializer/sqliteSchema';
+import {
+ CreateSqliteIndexConvertor,
+ fromJson,
+ LibSQLModifyColumn,
+ SQLiteCreateTableConvertor,
+ SQLiteDropTableConvertor,
+ SqliteRenameTableConvertor,
+} from '../../sqlgenerator';
+
+export const getOldTableName = (
+ tableName: string,
+ meta: SQLiteSchemaInternal['_meta'],
+) => {
+ for (const key of Object.keys(meta.tables)) {
+ const value = meta.tables[key];
+ if (`"${tableName}"` === value) {
+ return key.substring(1, key.length - 1);
+ }
+ }
+ return tableName;
+};
+
+export const _moveDataStatements = (
+ tableName: string,
+ json: SQLiteSchemaSquashed,
+ dataLoss: boolean = false,
+) => {
+ const statements: string[] = [];
+
+ const newTableName = `__new_${tableName}`;
+
+ // create table statement from a new json2 with proper name
+ const tableColumns = Object.values(json.tables[tableName].columns);
+ const referenceData = Object.values(json.tables[tableName].foreignKeys);
+ const compositePKs = Object.values(
+ json.tables[tableName].compositePrimaryKeys,
+ ).map((it) => SQLiteSquasher.unsquashPK(it));
+
+ const fks = referenceData.map((it) => SQLiteSquasher.unsquashPushFK(it));
+
+ // create new table
+ statements.push(
+ new SQLiteCreateTableConvertor().convert({
+ type: 'sqlite_create_table',
+ tableName: newTableName,
+ columns: tableColumns,
+ referenceData: fks,
+ compositePKs,
+ }),
+ );
+
+ // move data
+ if (!dataLoss) {
+ const columns = Object.keys(json.tables[tableName].columns).map(
+ (c) => `"${c}"`,
+ );
+
+ statements.push(
+ `INSERT INTO \`${newTableName}\`(${
+ columns.join(
+ ', ',
+ )
+ }) SELECT ${columns.join(', ')} FROM \`${tableName}\`;`,
+ );
+ }
+
+ statements.push(
+ new SQLiteDropTableConvertor().convert({
+ type: 'drop_table',
+ tableName: tableName,
+ schema: '',
+ }),
+ );
+
+ // rename table
+ statements.push(
+ new SqliteRenameTableConvertor().convert({
+ fromSchema: '',
+ tableNameFrom: newTableName,
+ tableNameTo: tableName,
+ toSchema: '',
+ type: 'rename_table',
+ }),
+ );
+
+ for (const idx of Object.values(json.tables[tableName].indexes)) {
+ statements.push(
+ new CreateSqliteIndexConvertor().convert({
+ type: 'create_index',
+ tableName: tableName,
+ schema: '',
+ data: idx,
+ }),
+ );
+ }
+ return statements;
+};
+
+export const libSqlLogSuggestionsAndReturn = async (
+ connection: SQLiteDB,
+ statements: JsonStatement[],
+ json1: SQLiteSchemaSquashed,
+ json2: SQLiteSchemaSquashed,
+ meta: SQLiteSchemaInternal['_meta'],
+) => {
+ let shouldAskForApprove = false;
+ const statementsToExecute: string[] = [];
+ const infoToPrint: string[] = [];
+
+ const tablesToRemove: string[] = [];
+ const columnsToRemove: string[] = [];
+ const tablesToTruncate: string[] = [];
+
+ for (const statement of statements) {
+ if (statement.type === 'drop_table') {
+ const res = await connection.query<{ count: string }>(
+ `select count(*) as count from \`${statement.tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to delete ${
+ chalk.underline(
+ statement.tableName,
+ )
+ } table with ${count} items`,
+ );
+ tablesToRemove.push(statement.tableName);
+ shouldAskForApprove = true;
+ }
+ const fromJsonStatement = fromJson([statement], 'turso', 'push', json2);
+ statementsToExecute.push(
+ ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]),
+ );
+ } else if (statement.type === 'alter_table_drop_column') {
+ const tableName = statement.tableName;
+
+ const res = await connection.query<{ count: string }>(
+ `select count(*) as count from \`${tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to delete ${
+ chalk.underline(
+ statement.columnName,
+ )
+ } column in ${tableName} table with ${count} items`,
+ );
+ columnsToRemove.push(`${tableName}_${statement.columnName}`);
+ shouldAskForApprove = true;
+ }
+
+ const fromJsonStatement = fromJson([statement], 'turso', 'push', json2);
+ statementsToExecute.push(
+ ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]),
+ );
+ } else if (
+ statement.type === 'sqlite_alter_table_add_column'
+ && statement.column.notNull
+ && !statement.column.default
+ ) {
+ const newTableName = statement.tableName;
+ const res = await connection.query<{ count: string }>(
+ `select count(*) as count from \`${newTableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to add not-null ${
+ chalk.underline(
+ statement.column.name,
+ )
+ } column without default value, which contains ${count} items`,
+ );
+
+ tablesToTruncate.push(newTableName);
+ statementsToExecute.push(`delete from \`${newTableName}\`;`);
+
+ shouldAskForApprove = true;
+ }
+
+ const fromJsonStatement = fromJson([statement], 'turso', 'push', json2);
+ statementsToExecute.push(
+ ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]),
+ );
+ } else if (statement.type === 'alter_table_alter_column_set_notnull') {
+ const tableName = statement.tableName;
+
+ if (
+ statement.type === 'alter_table_alter_column_set_notnull'
+ && typeof statement.columnDefault === 'undefined'
+ ) {
+ const res = await connection.query<{ count: string }>(
+ `select count(*) as count from \`${tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to add not-null constraint to ${
+ chalk.underline(
+ statement.columnName,
+ )
+ } column without default value, which contains ${count} items`,
+ );
+
+ tablesToTruncate.push(tableName);
+ statementsToExecute.push(`delete from \`${tableName}\``);
+ shouldAskForApprove = true;
+ }
+ }
+
+ const modifyStatements = new LibSQLModifyColumn().convert(statement, json2);
+
+ statementsToExecute.push(
+ ...(Array.isArray(modifyStatements) ? modifyStatements : [modifyStatements]),
+ );
+ } else if (statement.type === 'recreate_table') {
+ const tableName = statement.tableName;
+
+ let dataLoss = false;
+
+ const oldTableName = getOldTableName(tableName, meta);
+
+ const prevColumnNames = Object.keys(json1.tables[oldTableName].columns);
+ const currentColumnNames = Object.keys(json2.tables[tableName].columns);
+ const { removedColumns, addedColumns } = findAddedAndRemoved(
+ prevColumnNames,
+ currentColumnNames,
+ );
+
+ if (removedColumns.length) {
+ for (const removedColumn of removedColumns) {
+ const res = await connection.query<{ count: string }>(
+ `select count(\`${tableName}\`.\`${removedColumn}\`) as count from \`${tableName}\``,
+ );
+
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to delete ${
+ chalk.underline(
+ removedColumn,
+ )
+ } column in ${tableName} table with ${count} items`,
+ );
+ columnsToRemove.push(removedColumn);
+ shouldAskForApprove = true;
+ }
+ }
+ }
+
+ if (addedColumns.length) {
+ for (const addedColumn of addedColumns) {
+ const [res] = await connection.query<{ count: string }>(
+ `select count(*) as count from \`${tableName}\``,
+ );
+
+ const columnConf = json2.tables[tableName].columns[addedColumn];
+
+ const count = Number(res.count);
+ if (count > 0 && columnConf.notNull && !columnConf.default) {
+ dataLoss = true;
+
+ infoToPrint.push(
+ `· You're about to add not-null ${
+ chalk.underline(
+ addedColumn,
+ )
+ } column without default value to table, which contains ${count} items`,
+ );
+ shouldAskForApprove = true;
+ tablesToTruncate.push(tableName);
+
+ statementsToExecute.push(`DELETE FROM \`${tableName}\`;`);
+ }
+ }
+ }
+
+ // check if some tables referencing current for pragma
+ const tablesReferencingCurrent: string[] = [];
+
+ for (const table of Object.values(json2.tables)) {
+ const tablesRefs = Object.values(json2.tables[table.name].foreignKeys)
+ .filter((t) => SQLiteSquasher.unsquashPushFK(t).tableTo === tableName)
+ .map((it) => SQLiteSquasher.unsquashPushFK(it).tableFrom);
+
+ tablesReferencingCurrent.push(...tablesRefs);
+ }
+
+ if (!tablesReferencingCurrent.length) {
+ statementsToExecute.push(..._moveDataStatements(tableName, json2, dataLoss));
+ continue;
+ }
+
+ // recreate table
+ statementsToExecute.push(
+ ..._moveDataStatements(tableName, json2, dataLoss),
+ );
+ } else if (
+ statement.type === 'alter_table_alter_column_set_generated'
+ || statement.type === 'alter_table_alter_column_drop_generated'
+ ) {
+ const tableName = statement.tableName;
+
+ const res = await connection.query<{ count: string }>(
+ `select count("${statement.columnName}") as count from \`${tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to delete ${
+ chalk.underline(
+ statement.columnName,
+ )
+ } column in ${tableName} table with ${count} items`,
+ );
+ columnsToRemove.push(`${tableName}_${statement.columnName}`);
+ shouldAskForApprove = true;
+ }
+ const fromJsonStatement = fromJson([statement], 'turso', 'push', json2);
+ statementsToExecute.push(
+ ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]),
+ );
+ } else {
+ const fromJsonStatement = fromJson([statement], 'turso', 'push', json2);
+ statementsToExecute.push(
+ ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]),
+ );
+ }
+ }
+
+ return {
+ statementsToExecute: [...new Set(statementsToExecute)],
+ shouldAskForApprove,
+ infoToPrint,
+ columnsToRemove: [...new Set(columnsToRemove)],
+ tablesToTruncate: [...new Set(tablesToTruncate)],
+ tablesToRemove: [...new Set(tablesToRemove)],
+ };
+};
diff --git a/drizzle-kit/src/cli/commands/migrate.ts b/drizzle-kit/src/cli/commands/migrate.ts
index 726c8ed4b..3110f641d 100644
--- a/drizzle-kit/src/cli/commands/migrate.ts
+++ b/drizzle-kit/src/cli/commands/migrate.ts
@@ -20,6 +20,7 @@ import { MySqlSchema, mysqlSchema, squashMysqlScheme } from '../../serializer/my
import { PgSchema, pgSchema, squashPgScheme } from '../../serializer/pgSchema';
import { SQLiteSchema, sqliteSchema, squashSqliteScheme } from '../../serializer/sqliteSchema';
import {
+ applyLibSQLSnapshotsDiff,
applyMysqlSnapshotsDiff,
applyPgSnapshotsDiff,
applySingleStoreSnapshotsDiff,
@@ -36,7 +37,7 @@ import {
} from '../../snapshotsDiffer';
import { assertV1OutFolder, Journal, prepareMigrationFolder } from '../../utils';
import { prepareMigrationMetadata } from '../../utils/words';
-import { Prefix } from '../validations/common';
+import { CasingType, Prefix } from '../validations/common';
import { withStyle } from '../validations/outputs';
import {
isRenamePromptItem,
@@ -159,6 +160,7 @@ export const columnsResolver = async (
export const prepareAndMigratePg = async (config: GenerateConfig) => {
const outFolder = config.out;
const schemaPath = config.schema;
+ const casing = config.casing;
try {
assertV1OutFolder(outFolder);
@@ -171,6 +173,7 @@ export const prepareAndMigratePg = async (config: GenerateConfig) => {
const { prev, cur, custom } = await preparePgMigrationSnapshot(
snapshots,
schemaPath,
+ casing,
);
const validatedPrev = pgSchema.parse(prev);
@@ -223,10 +226,12 @@ export const preparePgPush = async (
schemaPath: string | string[],
snapshot: PgSchema,
schemaFilter: string[],
+ casing: CasingType | undefined,
) => {
const { prev, cur } = await preparePgDbPushSnapshot(
snapshot,
schemaPath,
+ casing,
schemaFilter,
);
@@ -307,11 +312,13 @@ function mysqlSchemaSuggestions(
export const prepareMySQLPush = async (
schemaPath: string | string[],
snapshot: MySqlSchema,
+ casing: CasingType | undefined,
) => {
try {
const { prev, cur } = await prepareMySqlDbPushSnapshot(
snapshot,
schemaPath,
+ casing,
);
const validatedPrev = mysqlSchema.parse(prev);
@@ -340,6 +347,7 @@ export const prepareMySQLPush = async (
export const prepareAndMigrateMysql = async (config: GenerateConfig) => {
const outFolder = config.out;
const schemaPath = config.schema;
+ const casing = config.casing;
try {
// TODO: remove
@@ -349,6 +357,7 @@ export const prepareAndMigrateMysql = async (config: GenerateConfig) => {
const { prev, cur, custom } = await prepareMySqlMigrationSnapshot(
snapshots,
schemaPath,
+ casing,
);
const validatedPrev = mysqlSchema.parse(prev);
@@ -542,6 +551,7 @@ export const prepareAndMigrateSingleStore = async (config: GenerateConfig) => {
export const prepareAndMigrateSqlite = async (config: GenerateConfig) => {
const outFolder = config.out;
const schemaPath = config.schema;
+ const casing = config.casing;
try {
assertV1OutFolder(outFolder);
@@ -550,6 +560,7 @@ export const prepareAndMigrateSqlite = async (config: GenerateConfig) => {
const { prev, cur, custom } = await prepareSqliteMigrationSnapshot(
snapshots,
schemaPath,
+ casing,
);
const validatedPrev = sqliteSchema.parse(prev);
@@ -598,11 +609,73 @@ export const prepareAndMigrateSqlite = async (config: GenerateConfig) => {
}
};
+export const prepareAndMigrateLibSQL = async (config: GenerateConfig) => {
+ const outFolder = config.out;
+ const schemaPath = config.schema;
+ const casing = config.casing;
+
+ try {
+ assertV1OutFolder(outFolder);
+
+ const { snapshots, journal } = prepareMigrationFolder(outFolder, 'sqlite');
+ const { prev, cur, custom } = await prepareSqliteMigrationSnapshot(
+ snapshots,
+ schemaPath,
+ casing,
+ );
+
+ const validatedPrev = sqliteSchema.parse(prev);
+ const validatedCur = sqliteSchema.parse(cur);
+
+ if (config.custom) {
+ writeResult({
+ cur: custom,
+ sqlStatements: [],
+ journal,
+ outFolder,
+ name: config.name,
+ breakpoints: config.breakpoints,
+ bundle: config.bundle,
+ type: 'custom',
+ prefixMode: config.prefix,
+ });
+ return;
+ }
+
+ const squashedPrev = squashSqliteScheme(validatedPrev);
+ const squashedCur = squashSqliteScheme(validatedCur);
+
+ const { sqlStatements, _meta } = await applyLibSQLSnapshotsDiff(
+ squashedPrev,
+ squashedCur,
+ tablesResolver,
+ columnsResolver,
+ validatedPrev,
+ validatedCur,
+ );
+
+ writeResult({
+ cur,
+ sqlStatements,
+ journal,
+ _meta,
+ outFolder,
+ name: config.name,
+ breakpoints: config.breakpoints,
+ bundle: config.bundle,
+ prefixMode: config.prefix,
+ });
+ } catch (e) {
+ console.error(e);
+ }
+};
+
export const prepareSQLitePush = async (
schemaPath: string | string[],
snapshot: SQLiteSchema,
+ casing: CasingType | undefined,
) => {
- const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath);
+ const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing);
const validatedPrev = sqliteSchema.parse(prev);
const validatedCur = sqliteSchema.parse(cur);
@@ -629,6 +702,38 @@ export const prepareSQLitePush = async (
};
};
+export const prepareLibSQLPush = async (
+ schemaPath: string | string[],
+ snapshot: SQLiteSchema,
+ casing: CasingType | undefined,
+) => {
+ const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing);
+
+ const validatedPrev = sqliteSchema.parse(prev);
+ const validatedCur = sqliteSchema.parse(cur);
+
+ const squashedPrev = squashSqliteScheme(validatedPrev, 'push');
+ const squashedCur = squashSqliteScheme(validatedCur, 'push');
+
+ const { sqlStatements, statements, _meta } = await applyLibSQLSnapshotsDiff(
+ squashedPrev,
+ squashedCur,
+ tablesResolver,
+ columnsResolver,
+ validatedPrev,
+ validatedCur,
+ 'push',
+ );
+
+ return {
+ sqlStatements,
+ statements,
+ squashedPrev,
+ squashedCur,
+ meta: _meta,
+ };
+};
+
const freeeeeeze = (obj: any) => {
Object.freeze(obj);
for (let key in obj) {
diff --git a/drizzle-kit/src/cli/commands/push.ts b/drizzle-kit/src/cli/commands/push.ts
index 464e574c2..8440f94ce 100644
--- a/drizzle-kit/src/cli/commands/push.ts
+++ b/drizzle-kit/src/cli/commands/push.ts
@@ -2,11 +2,14 @@ import chalk from 'chalk';
import { render } from 'hanji';
import { fromJson } from '../../sqlgenerator';
import { Select } from '../selector-ui';
+import { CasingType } from '../validations/common';
+import { LibSQLCredentials } from '../validations/libsql';
import type { MysqlCredentials } from '../validations/mysql';
import { withStyle } from '../validations/outputs';
import type { PostgresCredentials } from '../validations/postgres';
import { SingleStoreCredentials } from '../validations/singlestore';
import type { SqliteCredentials } from '../validations/sqlite';
+import { libSqlLogSuggestionsAndReturn } from './libSqlPushUtils';
import {
filterStatements as mySqlFilterStatements,
logSuggestionsAndReturn as mySqlLogSuggestionsAndReturn,
@@ -25,6 +28,7 @@ export const mysqlPush = async (
strict: boolean,
verbose: boolean,
force: boolean,
+ casing: CasingType | undefined,
) => {
const { connectToMySQL } = await import('../connections');
const { mysqlPushIntrospect } = await import('./mysqlIntrospect');
@@ -34,7 +38,7 @@ export const mysqlPush = async (
const { schema } = await mysqlPushIntrospect(db, database, tablesFilter);
const { prepareMySQLPush } = await import('./migrate');
- const statements = await prepareMySQLPush(schemaPath, schema);
+ const statements = await prepareMySQLPush(schemaPath, schema, casing);
const filteredStatements = mySqlFilterStatements(
statements.statements ?? [],
@@ -215,7 +219,6 @@ export const singlestorePush = async (
if (verbose) {
console.log();
- // console.log(chalk.gray('Verbose logs:'));
console.log(
withStyle.warning('You are about to execute current statements:'),
);
@@ -304,6 +307,7 @@ export const pgPush = async (
tablesFilter: string[],
schemasFilter: string[],
force: boolean,
+ casing: CasingType | undefined,
) => {
const { preparePostgresDB } = await import('../connections');
const { pgPushIntrospect } = await import('./pgIntrospect');
@@ -313,7 +317,7 @@ export const pgPush = async (
const { preparePgPush } = await import('./migrate');
- const statements = await preparePgPush(schemaPath, schema, schemasFilter);
+ const statements = await preparePgPush(schemaPath, schema, schemasFilter, casing);
try {
if (statements.sqlStatements.length === 0) {
@@ -413,6 +417,7 @@ export const sqlitePush = async (
credentials: SqliteCredentials,
tablesFilter: string[],
force: boolean,
+ casing: CasingType | undefined,
) => {
const { connectToSQLite } = await import('../connections');
const { sqlitePushIntrospect } = await import('./sqliteIntrospect');
@@ -421,7 +426,7 @@ export const sqlitePush = async (
const { schema } = await sqlitePushIntrospect(db, tablesFilter);
const { prepareSQLitePush } = await import('./migrate');
- const statements = await prepareSQLitePush(schemaPath, schema);
+ const statements = await prepareSQLitePush(schemaPath, schema, casing);
if (statements.sqlStatements.length === 0) {
render(`\n[${chalk.blue('i')}] No changes detected`);
@@ -436,8 +441,8 @@ export const sqlitePush = async (
} = await sqliteSuggestions(
db,
statements.statements,
- statements.squashedCur,
statements.squashedPrev,
+ statements.squashedCur,
statements.meta!,
);
@@ -517,10 +522,115 @@ export const sqlitePush = async (
await db.query('rollback');
process.exit(1);
}
- } else if (credentials.driver === 'turso') {
- await db.batch!(statementsToExecute.map((it) => ({ query: it })));
}
render(`[${chalk.green('✓')}] Changes applied`);
}
}
};
+
+export const libSQLPush = async (
+ schemaPath: string | string[],
+ verbose: boolean,
+ strict: boolean,
+ credentials: LibSQLCredentials,
+ tablesFilter: string[],
+ force: boolean,
+ casing: CasingType | undefined,
+) => {
+ const { connectToLibSQL } = await import('../connections');
+ const { sqlitePushIntrospect } = await import('./sqliteIntrospect');
+
+ const db = await connectToLibSQL(credentials);
+ const { schema } = await sqlitePushIntrospect(db, tablesFilter);
+
+ const { prepareLibSQLPush } = await import('./migrate');
+
+ const statements = await prepareLibSQLPush(schemaPath, schema, casing);
+
+ if (statements.sqlStatements.length === 0) {
+ render(`\n[${chalk.blue('i')}] No changes detected`);
+ } else {
+ const {
+ shouldAskForApprove,
+ statementsToExecute,
+ columnsToRemove,
+ tablesToRemove,
+ tablesToTruncate,
+ infoToPrint,
+ } = await libSqlLogSuggestionsAndReturn(
+ db,
+ statements.statements,
+ statements.squashedPrev,
+ statements.squashedCur,
+ statements.meta!,
+ );
+
+ if (verbose && statementsToExecute.length > 0) {
+ console.log();
+ console.log(
+ withStyle.warning('You are about to execute current statements:'),
+ );
+ console.log();
+ console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n'));
+ console.log();
+ }
+
+ if (!force && strict) {
+ if (!shouldAskForApprove) {
+ const { status, data } = await render(
+ new Select(['No, abort', `Yes, I want to execute all statements`]),
+ );
+ if (data?.index === 0) {
+ render(`[${chalk.red('x')}] All changes were aborted`);
+ process.exit(0);
+ }
+ }
+ }
+
+ if (!force && shouldAskForApprove) {
+ console.log(withStyle.warning('Found data-loss statements:'));
+ console.log(infoToPrint.join('\n'));
+ console.log();
+ console.log(
+ chalk.red.bold(
+ 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n',
+ ),
+ );
+
+ console.log(chalk.white('Do you still want to push changes?'));
+
+ const { status, data } = await render(
+ new Select([
+ 'No, abort',
+ `Yes, I want to${
+ tablesToRemove.length > 0
+ ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},`
+ : ' '
+ }${
+ columnsToRemove.length > 0
+ ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},`
+ : ' '
+ }${
+ tablesToTruncate.length > 0
+ ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}`
+ : ''
+ }`
+ .trimEnd()
+ .replace(/(^,)|(,$)/g, '')
+ .replace(/ +(?= )/g, ''),
+ ]),
+ );
+ if (data?.index === 0) {
+ render(`[${chalk.red('x')}] All changes were aborted`);
+ process.exit(0);
+ }
+ }
+
+ if (statementsToExecute.length === 0) {
+ render(`\n[${chalk.blue('i')}] No changes detected`);
+ } else {
+ await db.batchWithPragma!(statementsToExecute);
+ render(`[${chalk.green('✓')}] Changes applied`);
+ }
+ }
+};
diff --git a/drizzle-kit/src/cli/commands/sqlitePushUtils.ts b/drizzle-kit/src/cli/commands/sqlitePushUtils.ts
index d11a4ce62..bcc2d19db 100644
--- a/drizzle-kit/src/cli/commands/sqlitePushUtils.ts
+++ b/drizzle-kit/src/cli/commands/sqlitePushUtils.ts
@@ -10,7 +10,7 @@ import {
} from '../../sqlgenerator';
import type { JsonStatement } from '../../jsonStatements';
-import type { SQLiteDB } from '../../utils';
+import { findAddedAndRemoved, type SQLiteDB } from '../../utils';
export const _moveDataStatements = (
tableName: string,
@@ -19,16 +19,7 @@ export const _moveDataStatements = (
) => {
const statements: string[] = [];
- // rename table to __old_${tablename}
- statements.push(
- new SqliteRenameTableConvertor().convert({
- type: 'rename_table',
- tableNameFrom: tableName,
- tableNameTo: `__old_push_${tableName}`,
- fromSchema: '',
- toSchema: '',
- }),
- );
+ const newTableName = `__new_${tableName}`;
// create table statement from a new json2 with proper name
const tableColumns = Object.values(json.tables[tableName].columns);
@@ -39,10 +30,11 @@ export const _moveDataStatements = (
const fks = referenceData.map((it) => SQLiteSquasher.unsquashPushFK(it));
+ // create new table
statements.push(
new SQLiteCreateTableConvertor().convert({
type: 'sqlite_create_table',
- tableName: tableName,
+ tableName: newTableName,
columns: tableColumns,
referenceData: fks,
compositePKs,
@@ -51,19 +43,38 @@ export const _moveDataStatements = (
// move data
if (!dataLoss) {
+ const columns = Object.keys(json.tables[tableName].columns).map(
+ (c) => `"${c}"`,
+ );
+
statements.push(
- `INSERT INTO "${tableName}" SELECT * FROM "__old_push_${tableName}";`,
+ `INSERT INTO \`${newTableName}\`(${
+ columns.join(
+ ', ',
+ )
+ }) SELECT ${columns.join(', ')} FROM \`${tableName}\`;`,
);
}
- // drop table with name __old_${tablename}
+
statements.push(
new SQLiteDropTableConvertor().convert({
type: 'drop_table',
- tableName: `__old_push_${tableName}`,
+ tableName: tableName,
schema: '',
}),
);
+ // rename table
+ statements.push(
+ new SqliteRenameTableConvertor().convert({
+ fromSchema: '',
+ tableNameFrom: newTableName,
+ tableNameTo: tableName,
+ toSchema: '',
+ type: 'rename_table',
+ }),
+ );
+
for (const idx of Object.values(json.tables[tableName].indexes)) {
statements.push(
new CreateSqliteIndexConvertor().convert({
@@ -120,8 +131,6 @@ export const logSuggestionsAndReturn = async (
const schemasToRemove: string[] = [];
const tablesToTruncate: string[] = [];
- const tablesContext: Record = {};
-
for (const statement of statements) {
if (statement.type === 'drop_table') {
const res = await connection.query<{ count: string }>(
@@ -139,248 +148,159 @@ export const logSuggestionsAndReturn = async (
tablesToRemove.push(statement.tableName);
shouldAskForApprove = true;
}
- const stmnt = fromJson([statement], 'sqlite')[0];
- statementsToExecute.push(stmnt);
- } else if (statement.type === 'alter_table_drop_column') {
- const newTableName = getOldTableName(statement.tableName, meta);
- const columnIsPartOfPk = Object.values(
- json1.tables[newTableName].compositePrimaryKeys,
- ).find((c) => SQLiteSquasher.unsquashPK(c).includes(statement.columnName));
-
- const columnIsPartOfIndex = Object.values(
- json1.tables[newTableName].indexes,
- ).find((c) => SQLiteSquasher.unsquashIdx(c).columns.includes(statement.columnName));
-
- const columnIsPk = json1.tables[newTableName].columns[statement.columnName].primaryKey;
-
- const columnIsPartOfFk = Object.values(
- json1.tables[newTableName].foreignKeys,
- ).find((t) =>
- SQLiteSquasher.unsquashPushFK(t).columnsFrom.includes(
- statement.columnName,
- )
+ const fromJsonStatement = fromJson([statement], 'sqlite', 'push');
+ statementsToExecute.push(
+ ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]),
);
+ } else if (statement.type === 'alter_table_drop_column') {
+ const tableName = statement.tableName;
+ const columnName = statement.columnName;
const res = await connection.query<{ count: string }>(
- `select count(*) as count from \`${newTableName}\``,
+ `select count(\`${tableName}\`.\`${columnName}\`) as count from \`${tableName}\``,
);
const count = Number(res[0].count);
if (count > 0) {
infoToPrint.push(
`· You're about to delete ${
chalk.underline(
- statement.columnName,
+ columnName,
)
- } column in ${newTableName} table with ${count} items`,
+ } column in ${tableName} table with ${count} items`,
);
- columnsToRemove.push(`${newTableName}_${statement.columnName}`);
+ columnsToRemove.push(`${tableName}_${statement.columnName}`);
shouldAskForApprove = true;
}
- if (
- columnIsPk
- || columnIsPartOfPk
- || columnIsPartOfIndex
- || columnIsPartOfFk
- ) {
- tablesContext[newTableName] = [
- ..._moveDataStatements(statement.tableName, json2, true),
- ];
- // check table that have fk to this table
-
- const tablesReferncingCurrent: string[] = [];
-
- for (const table of Object.values(json1.tables)) {
- const tablesRefs = Object.values(json1.tables[table.name].foreignKeys)
- .filter(
- (t) => SQLiteSquasher.unsquashPushFK(t).tableTo === newTableName,
+ const fromJsonStatement = fromJson([statement], 'sqlite', 'push');
+ statementsToExecute.push(
+ ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]),
+ );
+ } else if (
+ statement.type === 'sqlite_alter_table_add_column'
+ && (statement.column.notNull && !statement.column.default)
+ ) {
+ const tableName = statement.tableName;
+ const columnName = statement.column.name;
+ const res = await connection.query<{ count: string }>(
+ `select count(*) as count from \`${tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to add not-null ${
+ chalk.underline(
+ columnName,
)
- .map((t) => SQLiteSquasher.unsquashPushFK(t).tableFrom);
-
- tablesReferncingCurrent.push(...tablesRefs);
- }
-
- const uniqueTableRefs = [...new Set(tablesReferncingCurrent)];
-
- for (const table of uniqueTableRefs) {
- if (typeof tablesContext[table] === 'undefined') {
- tablesContext[table] = [..._moveDataStatements(table, json2)];
- }
- }
- } else {
- if (typeof tablesContext[newTableName] === 'undefined') {
- const stmnt = fromJson([statement], 'sqlite')[0];
- statementsToExecute.push(stmnt);
- }
- }
- } else if (statement.type === 'sqlite_alter_table_add_column') {
- const newTableName = getOldTableName(statement.tableName, meta);
- if (statement.column.notNull && !statement.column.default) {
- const res = await connection.query<{ count: string }>(
- `select count(*) as count from \`${newTableName}\``,
+ } column without default value, which contains ${count} items`,
);
- const count = Number(res[0].count);
- if (count > 0) {
- infoToPrint.push(
- `· You're about to add not-null ${
- chalk.underline(
- statement.column.name,
- )
- } column without default value, which contains ${count} items`,
- );
- tablesToTruncate.push(newTableName);
- statementsToExecute.push(`delete from ${newTableName};`);
+ tablesToTruncate.push(tableName);
+ statementsToExecute.push(`delete from \`${tableName}\`;`);
- shouldAskForApprove = true;
- }
+ shouldAskForApprove = true;
}
- if (statement.column.primaryKey) {
- tablesContext[newTableName] = [
- ..._moveDataStatements(statement.tableName, json2, true),
- ];
- const tablesReferncingCurrent: string[] = [];
-
- for (const table of Object.values(json1.tables)) {
- const tablesRefs = Object.values(json1.tables[table.name].foreignKeys)
- .filter(
- (t) => SQLiteSquasher.unsquashPushFK(t).tableTo === newTableName,
- )
- .map((t) => SQLiteSquasher.unsquashPushFK(t).tableFrom);
- tablesReferncingCurrent.push(...tablesRefs);
- }
+ const fromJsonStatement = fromJson([statement], 'sqlite', 'push');
+ statementsToExecute.push(
+ ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]),
+ );
+ } else if (statement.type === 'recreate_table') {
+ const tableName = statement.tableName;
+ const oldTableName = getOldTableName(tableName, meta);
- const uniqueTableRefs = [...new Set(tablesReferncingCurrent)];
+ let dataLoss = false;
- for (const table of uniqueTableRefs) {
- if (typeof tablesContext[table] === 'undefined') {
- tablesContext[table] = [..._moveDataStatements(table, json2)];
- }
- }
- } else {
- if (typeof tablesContext[newTableName] === 'undefined') {
- const stmnt = fromJson([statement], 'sqlite')[0];
- statementsToExecute.push(stmnt);
- }
- }
- } else if (
- statement.type === 'alter_table_alter_column_set_type'
- || statement.type === 'alter_table_alter_column_set_default'
- || statement.type === 'alter_table_alter_column_drop_default'
- || statement.type === 'alter_table_alter_column_set_notnull'
- || statement.type === 'alter_table_alter_column_drop_notnull'
- || statement.type === 'alter_table_alter_column_drop_autoincrement'
- || statement.type === 'alter_table_alter_column_set_autoincrement'
- || statement.type === 'alter_table_alter_column_drop_pk'
- || statement.type === 'alter_table_alter_column_set_pk'
- ) {
- if (
- !(
- statement.type === 'alter_table_alter_column_set_notnull'
- && statement.columnPk
- )
- ) {
- const newTableName = getOldTableName(statement.tableName, meta);
- if (
- statement.type === 'alter_table_alter_column_set_notnull'
- && typeof statement.columnDefault === 'undefined'
- ) {
+ const prevColumnNames = Object.keys(json1.tables[oldTableName].columns);
+ const currentColumnNames = Object.keys(json2.tables[tableName].columns);
+ const { removedColumns, addedColumns } = findAddedAndRemoved(
+ prevColumnNames,
+ currentColumnNames,
+ );
+
+ if (removedColumns.length) {
+ for (const removedColumn of removedColumns) {
const res = await connection.query<{ count: string }>(
- `select count(*) as count from \`${newTableName}\``,
+ `select count(\`${tableName}\`.\`${removedColumn}\`) as count from \`${tableName}\``,
);
+
const count = Number(res[0].count);
if (count > 0) {
infoToPrint.push(
- `· You're about to add not-null constraint to ${
+ `· You're about to delete ${
chalk.underline(
- statement.columnName,
+ removedColumn,
)
- } column without default value, which contains ${count} items`,
+ } column in ${tableName} table with ${count} items`,
);
-
- tablesToTruncate.push(newTableName);
+ columnsToRemove.push(removedColumn);
shouldAskForApprove = true;
}
- tablesContext[newTableName] = _moveDataStatements(
- statement.tableName,
- json1,
- true,
+ }
+ }
+
+ if (addedColumns.length) {
+ for (const addedColumn of addedColumns) {
+ const [res] = await connection.query<{ count: string }>(
+ `select count(*) as count from \`${tableName}\``,
);
- } else {
- if (typeof tablesContext[newTableName] === 'undefined') {
- tablesContext[newTableName] = _moveDataStatements(
- statement.tableName,
- json1,
+
+ const columnConf = json2.tables[tableName].columns[addedColumn];
+
+ const count = Number(res.count);
+ if (count > 0 && columnConf.notNull && !columnConf.default) {
+ dataLoss = true;
+ infoToPrint.push(
+ `· You're about to add not-null ${
+ chalk.underline(
+ addedColumn,
+ )
+ } column without default value to ${tableName} table, which contains ${count} items`,
);
+ shouldAskForApprove = true;
+ tablesToTruncate.push(tableName);
+
+ statementsToExecute.push(`DELETE FROM \`${tableName}\`;`);
}
}
+ }
- const tablesReferncingCurrent: string[] = [];
+ // check if some tables referencing current for pragma
+ const tablesReferencingCurrent: string[] = [];
- for (const table of Object.values(json1.tables)) {
- const tablesRefs = Object.values(json1.tables[table.name].foreignKeys)
- .filter(
- (t) => SQLiteSquasher.unsquashPushFK(t).tableTo === newTableName,
- )
- .map((t) => {
- return getNewTableName(
- SQLiteSquasher.unsquashPushFK(t).tableFrom,
- meta,
- );
- });
-
- tablesReferncingCurrent.push(...tablesRefs);
- }
+ for (const table of Object.values(json2.tables)) {
+ const tablesRefs = Object.values(json2.tables[table.name].foreignKeys)
+ .filter((t) => SQLiteSquasher.unsquashPushFK(t).tableTo === tableName)
+ .map((it) => SQLiteSquasher.unsquashPushFK(it).tableFrom);
- const uniqueTableRefs = [...new Set(tablesReferncingCurrent)];
+ tablesReferencingCurrent.push(...tablesRefs);
+ }
- for (const table of uniqueTableRefs) {
- if (typeof tablesContext[table] === 'undefined') {
- tablesContext[table] = [..._moveDataStatements(table, json1)];
- }
- }
+ if (!tablesReferencingCurrent.length) {
+ statementsToExecute.push(..._moveDataStatements(tableName, json2, dataLoss));
+ continue;
}
- } else if (
- statement.type === 'create_reference'
- || statement.type === 'delete_reference'
- || statement.type === 'alter_reference'
- ) {
- const fk = SQLiteSquasher.unsquashPushFK(statement.data);
- if (typeof tablesContext[statement.tableName] === 'undefined') {
- tablesContext[statement.tableName] = _moveDataStatements(
- statement.tableName,
- json2,
- );
+ const [{ foreign_keys: pragmaState }] = await connection.query<{
+ foreign_keys: number;
+ }>(`PRAGMA foreign_keys;`);
+
+ if (pragmaState) {
+ statementsToExecute.push(`PRAGMA foreign_keys=OFF;`);
}
- } else if (
- statement.type === 'create_composite_pk'
- || statement.type === 'alter_composite_pk'
- || statement.type === 'delete_composite_pk'
- || statement.type === 'create_unique_constraint'
- || statement.type === 'delete_unique_constraint'
- ) {
- const newTableName = getOldTableName(statement.tableName, meta);
- if (typeof tablesContext[newTableName] === 'undefined') {
- tablesContext[newTableName] = _moveDataStatements(
- statement.tableName,
- json2,
- );
+ statementsToExecute.push(..._moveDataStatements(tableName, json2, dataLoss));
+ if (pragmaState) {
+ statementsToExecute.push(`PRAGMA foreign_keys=ON;`);
}
} else {
- const stmnt = fromJson([statement], 'sqlite');
- if (typeof stmnt !== 'undefined') {
- statementsToExecute.push(...stmnt);
- }
+ const fromJsonStatement = fromJson([statement], 'sqlite', 'push');
+ statementsToExecute.push(
+ ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]),
+ );
}
}
- for (const context of Object.values(tablesContext)) {
- statementsToExecute.push(...context);
- }
-
return {
statementsToExecute,
shouldAskForApprove,
diff --git a/drizzle-kit/src/cli/commands/utils.ts b/drizzle-kit/src/cli/commands/utils.ts
index e8a8f2b95..1375dd9df 100644
--- a/drizzle-kit/src/cli/commands/utils.ts
+++ b/drizzle-kit/src/cli/commands/utils.ts
@@ -9,6 +9,7 @@ import { prepareFilenames } from '../../serializer';
import { pullParams, pushParams } from '../validations/cli';
import {
Casing,
+ CasingType,
CliConfig,
configCommonSchema,
configMigrations,
@@ -16,6 +17,8 @@ import {
Prefix,
wrapParam,
} from '../validations/common';
+import { LibSQLCredentials, libSQLCredentials } from '../validations/libsql';
+import { printConfigConnectionIssues as printIssuesLibSql } from '../validations/libsql';
import {
MysqlCredentials,
mysqlCredentials,
@@ -127,6 +130,7 @@ export type GenerateConfig = {
prefix: Prefix;
custom: boolean;
bundle: boolean;
+ casing?: CasingType;
};
export const prepareGenerateConfig = async (
@@ -140,12 +144,13 @@ export const prepareGenerateConfig = async (
dialect?: Dialect;
driver?: Driver;
prefix?: Prefix;
+ casing?: CasingType;
},
from: 'config' | 'cli',
): Promise => {
const config = from === 'config' ? await drizzleConfigFromFile(options.config) : options;
- const { schema, out, breakpoints, dialect, driver } = config;
+ const { schema, out, breakpoints, dialect, driver, casing } = config;
if (!schema || !dialect) {
console.log(error('Please provide required params:'));
@@ -173,6 +178,7 @@ export const prepareGenerateConfig = async (
schema: schema,
out: out || 'drizzle',
bundle: driver === 'expo',
+ casing,
};
};
@@ -220,6 +226,10 @@ export const preparePushConfig = async (
dialect: 'singlestore';
credentials: SingleStoreCredentials;
}
+ | {
+ dialect: 'turso';
+ credentials: LibSQLCredentials;
+ }
) & {
schemaPath: string | string[];
verbose: boolean;
@@ -227,6 +237,7 @@ export const preparePushConfig = async (
force: boolean;
tablesFilter: string[];
schemasFilter: string[];
+ casing?: CasingType;
}
> => {
const raw = flattenDatabaseCredentials(
@@ -295,6 +306,7 @@ export const preparePushConfig = async (
verbose: config.verbose ?? false,
force: (options.force as boolean) ?? false,
credentials: parsed.data,
+ casing: config.casing,
tablesFilter,
schemasFilter,
};
@@ -313,6 +325,7 @@ export const preparePushConfig = async (
verbose: config.verbose ?? false,
force: (options.force as boolean) ?? false,
credentials: parsed.data,
+ casing: config.casing,
tablesFilter,
schemasFilter,
};
@@ -350,6 +363,26 @@ export const preparePushConfig = async (
verbose: config.verbose ?? false,
force: (options.force as boolean) ?? false,
credentials: parsed.data,
+ casing: config.casing,
+ tablesFilter,
+ schemasFilter,
+ };
+ }
+
+ if (config.dialect === 'turso') {
+ const parsed = libSQLCredentials.safeParse(config);
+ if (!parsed.success) {
+ printIssuesLibSql(config, 'push');
+ process.exit(1);
+ }
+ return {
+ dialect: 'turso',
+ schemaPath: config.schema,
+ strict: config.strict ?? false,
+ verbose: config.verbose ?? false,
+ force: (options.force as boolean) ?? false,
+ credentials: parsed.data,
+ casing: config.casing,
tablesFilter,
schemasFilter,
};
@@ -379,6 +412,10 @@ export const preparePullConfig = async (
dialect: 'singlestore';
credentials: SingleStoreCredentials;
}
+ | {
+ dialect: 'turso';
+ credentials: LibSQLCredentials;
+ }
) & {
out: string;
breakpoints: boolean;
@@ -503,6 +540,24 @@ export const preparePullConfig = async (
};
}
+ if (dialect === 'turso') {
+ const parsed = libSQLCredentials.safeParse(config);
+ if (!parsed.success) {
+ printIssuesLibSql(config, 'pull');
+ process.exit(1);
+ }
+ return {
+ dialect,
+ out: config.out,
+ breakpoints: config.breakpoints,
+ casing: config.casing,
+ credentials: parsed.data,
+ tablesFilter,
+ schemasFilter,
+ prefix: config.migrations?.prefix || 'index',
+ };
+ }
+
assertUnreachable(dialect);
};
@@ -589,6 +644,22 @@ export const prepareStudioConfig = async (options: Record) => {
};
}
+ if (dialect === 'turso') {
+ const parsed = libSQLCredentials.safeParse(flattened);
+ if (!parsed.success) {
+ printIssuesLibSql(flattened as Record<string, unknown>, 'studio');
+ process.exit(1);
+ }
+ const credentials = parsed.data;
+ return {
+ dialect,
+ schema,
+ host,
+ port,
+ credentials,
+ };
+ }
+
assertUnreachable(dialect);
};
@@ -674,6 +745,21 @@ export const prepareMigrateConfig = async (configPath: string | undefined) => {
table,
};
}
+ if (dialect === 'turso') {
+ const parsed = libSQLCredentials.safeParse(flattened);
+ if (!parsed.success) {
+ printIssuesLibSql(flattened as Record<string, unknown>, 'migrate');
+ process.exit(1);
+ }
+ const credentials = parsed.data;
+ return {
+ dialect,
+ out,
+ credentials,
+ schema,
+ table,
+ };
+ }
assertUnreachable(dialect);
};
@@ -719,6 +805,7 @@ export const drizzleConfigFromFile = async (
// --- get response and then check by each dialect independently
const res = configCommonSchema.safeParse(content);
if (!res.success) {
+ console.log(res.error);
if (!('dialect' in content)) {
console.log(error("Please specify 'dialect' param in config file"));
}
diff --git a/drizzle-kit/src/cli/connections.ts b/drizzle-kit/src/cli/connections.ts
index ba741bfed..0ff780bf1 100644
--- a/drizzle-kit/src/cli/connections.ts
+++ b/drizzle-kit/src/cli/connections.ts
@@ -5,12 +5,22 @@ import fetch from 'node-fetch';
import ws from 'ws';
import { assertUnreachable } from '../global';
import type { ProxyParams } from '../serializer/studio';
-import { type DB, normalisePGliteUrl, normaliseSQLiteUrl, type Proxy, type SQLiteDB, type SqliteProxy } from '../utils';
+import {
+ type DB,
+ LibSQLDB,
+ normalisePGliteUrl,
+ normaliseSQLiteUrl,
+ type Proxy,
+ type SQLiteDB,
+ type SqliteProxy,
+} from '../utils';
import { assertPackages, checkPackage } from './utils';
+import { LibSQLCredentials } from './validations/libsql';
import type { MysqlCredentials } from './validations/mysql';
import { withStyle } from './validations/outputs';
import type { PostgresCredentials } from './validations/postgres';
import type { SqliteCredentials } from './validations/sqlite';
+import { SingleStoreCredentials } from './validations/singlestore';
export const preparePostgresDB = async (
credentials: PostgresCredentials,
@@ -431,6 +441,85 @@ export const connectToMySQL = async (
process.exit(1);
};
+const parseSingleStoreCredentials = (credentials: SingleStoreCredentials) => {
+ if ('url' in credentials) {
+ const url = credentials.url;
+
+ const connectionUrl = new URL(url);
+ const pathname = connectionUrl.pathname;
+
+ const database = pathname.split('/')[pathname.split('/').length - 1];
+ if (!database) {
+ console.error(
+ 'You should specify a database name in connection string (singlestore://USER:PASSWORD@HOST:PORT/DATABASE)',
+ );
+ process.exit(1);
+ }
+ return { database, url };
+ } else {
+ return {
+ database: credentials.database,
+ credentials,
+ };
+ }
+};
+
+export const connectToSingleStore = async (
+ it: SingleStoreCredentials,
+): Promise<{
+ db: DB;
+ proxy: Proxy;
+ database: string;
+ migrate: (config: MigrationConfig) => Promise<void>;
+}> => {
+ const result = parseSingleStoreCredentials(it);
+
+ if (await checkPackage('mysql2')) {
+ const { createConnection } = await import('mysql2/promise');
+ const { drizzle } = await import('drizzle-orm/singlestore');
+ const { migrate } = await import('drizzle-orm/singlestore/migrator');
+
+ const connection = result.url
+ ? await createConnection(result.url)
+ : await createConnection(result.credentials!); // mysql2 requires the config-object overload when no URL is provided
+
+ const db = drizzle(connection);
+ const migrateFn = async (config: MigrationConfig) => {
+ return migrate(db, config);
+ };
+
+ await connection.connect();
+ const query: DB['query'] = async (
+ sql: string,
+ params?: any[],
+ ): Promise<any[]> => {
+ const res = await connection.execute(sql, params);
+ return res[0] as any;
+ };
+
+ const proxy: Proxy = async (params: ProxyParams) => {
+ const result = await connection.query({
+ sql: params.sql,
+ values: params.params,
+ rowsAsArray: params.mode === 'array',
+ });
+ return result[0] as any[];
+ };
+
+ return {
+ db: { query },
+ proxy,
+ database: result.database,
+ migrate: migrateFn,
+ };
+ }
+
+ console.error(
+ "To connect to SingleStore database - please install 'singlestore' driver",
+ );
+ process.exit(1);
+};
+
const prepareSqliteParams = (params: any[], driver?: string) => {
return params.map((param) => {
if (
@@ -482,56 +571,7 @@ export const connectToSQLite = async (
> => {
if ('driver' in credentials) {
const { driver } = credentials;
- if (driver === 'turso') {
- assertPackages('@libsql/client');
- const { createClient } = await import('@libsql/client');
- const { drizzle } = await import('drizzle-orm/libsql');
- const { migrate } = await import('drizzle-orm/libsql/migrator');
-
- const client = createClient({
- url: credentials.url,
- authToken: credentials.authToken,
- });
-
- const drzl = drizzle(client);
- const migrateFn = async (config: MigrationConfig) => {
- return migrate(drzl, config);
- };
-
- const db: SQLiteDB = {
- query: async (sql: string, params?: any[]) => {
- const res = await client.execute({ sql, args: params || [] });
- return res.rows as T[];
- },
- run: async (query: string) => {
- await client.execute(query);
- },
- batch: async (
- queries: { query: string; values?: any[] | undefined }[],
- ) => {
- await client.batch(
- queries.map((it) => ({ sql: it.query, args: it.values ?? [] })),
- );
- },
- };
- const proxy: SqliteProxy = {
- proxy: async (params: ProxyParams) => {
- const preparedParams = prepareSqliteParams(params.params);
- const result = await client.execute({
- sql: params.sql,
- args: preparedParams,
- });
-
- if (params.mode === 'array') {
- return result.rows.map((row) => Object.values(row));
- } else {
- return result.rows;
- }
- },
- };
-
- return { ...db, ...proxy, migrate: migrateFn };
- } else if (driver === 'd1-http') {
+ if (driver === 'd1-http') {
const { drizzle } = await import('drizzle-orm/sqlite-proxy');
const { migrate } = await import('drizzle-orm/sqlite-proxy/migrator');
@@ -708,8 +748,66 @@ export const connectToSQLite = async (
};
return { ...db, ...proxy, migrate: migrateFn };
}
+
console.log(
"Please install either 'better-sqlite3' or '@libsql/client' for Drizzle Kit to connect to SQLite databases",
);
process.exit(1);
};
+
+export const connectToLibSQL = async (credentials: LibSQLCredentials): Promise<
+ & LibSQLDB
+ & SqliteProxy
+ & { migrate: (config: MigrationConfig) => Promise<void> }
+> => {
+ if (await checkPackage('@libsql/client')) {
+ const { createClient } = await import('@libsql/client');
+ const { drizzle } = await import('drizzle-orm/libsql');
+ const { migrate } = await import('drizzle-orm/libsql/migrator');
+
+ const client = createClient({
+ url: normaliseSQLiteUrl(credentials.url, 'libsql'),
+ authToken: credentials.authToken,
+ });
+ const drzl = drizzle(client);
+ const migrateFn = async (config: MigrationConfig) => {
+ return migrate(drzl, config);
+ };
+
+ const db: LibSQLDB = {
+ query: async <T>(sql: string, params?: any[]) => {
+ const res = await client.execute({ sql, args: params || [] });
+ return res.rows as T[];
+ },
+ run: async (query: string) => {
+ await client.execute(query);
+ },
+ batchWithPragma: async (queries: string[]) => {
+ await client.migrate(queries);
+ },
+ };
+
+ const proxy: SqliteProxy = {
+ proxy: async (params: ProxyParams) => {
+ const preparedParams = prepareSqliteParams(params.params);
+ const result = await client.execute({
+ sql: params.sql,
+ args: preparedParams,
+ });
+
+ if (params.mode === 'array') {
+ return result.rows.map((row) => Object.values(row));
+ } else {
+ return result.rows;
+ }
+ },
+ };
+
+ return { ...db, ...proxy, migrate: migrateFn };
+ }
+
+ console.log(
+ "Please install '@libsql/client' for Drizzle Kit to connect to LibSQL databases",
+ );
+ process.exit(1);
+};
diff --git a/drizzle-kit/src/cli/schema.ts b/drizzle-kit/src/cli/schema.ts
index 256ee6ffd..8b4e7af42 100644
--- a/drizzle-kit/src/cli/schema.ts
+++ b/drizzle-kit/src/cli/schema.ts
@@ -6,7 +6,7 @@ import { renderWithTask } from 'hanji';
import { dialects } from 'src/schemaValidator';
import '../@types/utils';
import { assertUnreachable } from '../global';
-import { type Setup } from '../serializer/studio';
+import { drizzleForLibSQL, type Setup } from '../serializer/studio';
import { assertV1OutFolder } from '../utils';
import { certs } from '../utils/certs';
import { checkHandler } from './commands/check';
@@ -31,7 +31,7 @@ import { grey, MigrateProgress } from './views';
const optionDialect = string('dialect')
.enum(...dialects)
- .desc(`Database dialect: 'postgresql', 'mysql', 'sqlite' or 'singlestore'`);
+ .desc(`Database dialect: 'postgresql', 'mysql', 'sqlite', 'turso' or 'singlestore'`);
const optionOut = string().desc("Output folder, 'drizzle' by default");
const optionConfig = string().desc('Path to drizzle config file');
const optionBreakpoints = boolean().desc(
@@ -42,12 +42,15 @@ const optionDriver = string()
.enum(...drivers)
.desc('Database driver');
+const optionCasing = string().enum('camelCase', 'snake_case').desc('Casing for serialization');
+
export const generate = command({
name: 'generate',
options: {
config: optionConfig,
dialect: optionDialect,
driver: optionDriver,
+ casing: optionCasing,
schema: string().desc('Path to a schema file or folder'),
out: optionOut,
name: string().desc('Migration file name'),
@@ -64,7 +67,7 @@ export const generate = command({
'generate',
opts,
['prefix', 'name', 'custom'],
- ['driver', 'breakpoints', 'schema', 'out', 'dialect'],
+ ['driver', 'breakpoints', 'schema', 'out', 'dialect', 'casing'],
);
return prepareGenerateConfig(opts, from);
},
@@ -79,6 +82,7 @@ export const generate = command({
prepareAndMigrateMysql,
prepareAndMigrateSqlite,
prepareAndMigrateSingleStore,
+ prepareAndMigrateLibSQL,
} = await import('./commands/migrate');
const dialect = opts.dialect;
@@ -90,6 +94,8 @@ export const generate = command({
await prepareAndMigrateSqlite(opts);
} else if (dialect === 'singlestore') {
await prepareAndMigrateSqlite(opts);
+ } else if (dialect === 'turso') {
+ await prepareAndMigrateLibSQL(opts);
} else {
assertUnreachable(dialect);
}
@@ -174,6 +180,17 @@ export const migrate = command({
migrationsSchema: schema,
}),
);
+ } else if (dialect === 'turso') {
+ const { connectToLibSQL } = await import('./connections');
+ const { migrate } = await connectToLibSQL(credentials);
+ await renderWithTask(
+ new MigrateProgress(),
+ migrate({
+ migrationsFolder: opts.out,
+ migrationsTable: table,
+ migrationsSchema: schema,
+ }),
+ );
} else {
assertUnreachable(dialect);
}
@@ -213,6 +230,7 @@ export const push = command({
options: {
config: optionConfig,
dialect: optionDialect,
+ casing: optionCasing,
schema: string().desc('Path to a schema file or folder'),
...optionsFilters,
...optionsDatabaseCredentials,
@@ -246,6 +264,7 @@ export const push = command({
'schemaFilters',
'extensionsFilters',
'tablesFilter',
+ 'casing',
],
);
@@ -264,6 +283,7 @@ export const push = command({
tablesFilter,
schemasFilter,
force,
+ casing,
} = config;
try {
@@ -276,6 +296,7 @@ export const push = command({
strict,
verbose,
force,
+ casing,
);
} else if (dialect === 'postgresql') {
if ('driver' in credentials) {
@@ -308,6 +329,7 @@ export const push = command({
tablesFilter,
schemasFilter,
force,
+ casing,
);
} else if (dialect === 'sqlite') {
const { sqlitePush } = await import('./commands/push');
@@ -318,6 +340,18 @@ export const push = command({
credentials,
tablesFilter,
force,
+ casing,
+ );
+ } else if (dialect === 'turso') {
+ const { libSQLPush } = await import('./commands/push');
+ await libSQLPush(
+ schemaPath,
+ verbose,
+ strict,
+ credentials,
+ tablesFilter,
+ force,
+ casing,
);
} else if (dialect === 'singlestore') {
const { singlestorePush } = await import('./commands/push');
@@ -384,7 +418,7 @@ export const up = command({
upMysqlHandler(out);
}
- if (dialect === 'sqlite') {
+ if (dialect === 'sqlite' || dialect === 'turso') {
upSqliteHandler(out);
}
@@ -522,6 +556,16 @@ export const pull = command({
tablesFilter,
prefix,
);
+ } else if (dialect === 'turso') {
+ const { introspectLibSQL } = await import('./commands/introspect');
+ await introspectLibSQL(
+ casing,
+ out,
+ breakpoints,
+ credentials,
+ tablesFilter,
+ prefix,
+ );
} else {
assertUnreachable(dialect);
}
@@ -629,6 +673,11 @@ export const studio = command({
? await prepareSingleStoreSchema(schemaPath)
: { schema: {}, relations: {}, files: [] };
setup = await drizzleForSingleStore(credentials, schema, relations, files);
+ } else if (dialect === 'turso') {
+ const { schema, relations, files } = schemaPath
+ ? await prepareSQLiteSchema(schemaPath)
+ : { schema: {}, relations: {}, files: [] };
+ setup = await drizzleForLibSQL(credentials, schema, relations, files);
} else {
assertUnreachable(dialect);
}
diff --git a/drizzle-kit/src/cli/utils.ts b/drizzle-kit/src/cli/utils.ts
index f7e7a2ae9..0a5d7862e 100644
--- a/drizzle-kit/src/cli/utils.ts
+++ b/drizzle-kit/src/cli/utils.ts
@@ -74,7 +74,7 @@ export const assertEitherPackage = async (
process.exit(1);
};
-const requiredApiVersion = 7;
+const requiredApiVersion = 8;
export const assertOrmCoreVersion = async () => {
try {
const { compatibilityVersion } = await import('drizzle-orm/version');
diff --git a/drizzle-kit/src/cli/validations/cli.ts b/drizzle-kit/src/cli/validations/cli.ts
index c4bbbe530..aa92f7c6d 100644
--- a/drizzle-kit/src/cli/validations/cli.ts
+++ b/drizzle-kit/src/cli/validations/cli.ts
@@ -1,6 +1,6 @@
import { boolean, intersection, literal, object, string, TypeOf, union } from 'zod';
import { dialect } from '../../schemaValidator';
-import { casing, prefix } from './common';
+import { casing, casingType, prefix } from './common';
export const cliConfigGenerate = object({
dialect: dialect.optional(),
@@ -17,6 +17,7 @@ export type CliConfigGenerate = TypeOf;
export const pushParams = object({
dialect: dialect,
+ casing: casingType.optional(),
schema: union([string(), string().array()]),
tablesFilter: union([string(), string().array()]).optional(),
schemaFilter: union([string(), string().array()])
diff --git a/drizzle-kit/src/cli/validations/common.ts b/drizzle-kit/src/cli/validations/common.ts
index a7307f4d6..1662e87bb 100644
--- a/drizzle-kit/src/cli/validations/common.ts
+++ b/drizzle-kit/src/cli/validations/common.ts
@@ -61,7 +61,6 @@ export const assertCollisions = <
};
export const sqliteDriversLiterals = [
- literal('turso'),
literal('d1-http'),
literal('expo'),
] as const;
@@ -85,6 +84,10 @@ export type Prefix = (typeof prefixes)[number];
const _: Prefix = '' as TypeOf;
}
+export const casingTypes = ['snake_case', 'camelCase'] as const;
+export const casingType = enum_(casingTypes);
+export type CasingType = (typeof casingTypes)[number];
+
export const sqliteDriver = union(sqliteDriversLiterals);
export const postgresDriver = union(postgresqlDriversLiterals);
export const driver = union([sqliteDriver, postgresDriver]);
@@ -106,6 +109,7 @@ export const configCommonSchema = object({
schemaFilter: union([string(), string().array()]).default(['public']),
migrations: configMigrations,
dbCredentials: any().optional(),
+ casing: casingType.optional(),
}).passthrough();
export const casing = union([literal('camel'), literal('preserve')]).default(
@@ -156,7 +160,7 @@ export const configPushSchema = object({
});
export type CliConfig = TypeOf;
-export const drivers = ['turso', 'd1-http', 'expo', 'aws-data-api', 'pglite'] as const;
+export const drivers = ['d1-http', 'expo', 'aws-data-api', 'pglite'] as const;
export type Driver = (typeof drivers)[number];
const _: Driver = '' as TypeOf;
diff --git a/drizzle-kit/src/cli/validations/libsql.ts b/drizzle-kit/src/cli/validations/libsql.ts
new file mode 100644
index 000000000..a9b03c168
--- /dev/null
+++ b/drizzle-kit/src/cli/validations/libsql.ts
@@ -0,0 +1,27 @@
+import { softAssertUnreachable } from 'src/global';
+import { object, string, TypeOf } from 'zod';
+import { error } from '../views';
+import { wrapParam } from './common';
+
+export const libSQLCredentials = object({
+ url: string().min(1),
+ authToken: string().min(1).optional(),
+});
+
+export type LibSQLCredentials = {
+ url: string;
+ authToken?: string;
+};
+
+ const _: LibSQLCredentials = {} as TypeOf<typeof libSQLCredentials>;
+
+export const printConfigConnectionIssues = (
+ options: Record<string, unknown>,
+ command: 'generate' | 'migrate' | 'push' | 'pull' | 'studio',
+) => {
+ let text = `Please provide required params for 'turso' dialect:\n`;
+ console.log(error(text));
+ console.log(wrapParam('url', options.url));
+ console.log(wrapParam('authToken', options.authToken, true, 'secret'));
+ process.exit(1);
+};
diff --git a/drizzle-kit/src/cli/validations/sqlite.ts b/drizzle-kit/src/cli/validations/sqlite.ts
index b6ad062d5..54178fd4a 100644
--- a/drizzle-kit/src/cli/validations/sqlite.ts
+++ b/drizzle-kit/src/cli/validations/sqlite.ts
@@ -25,11 +25,6 @@ export const sqliteCredentials = union([
]);
export type SqliteCredentials =
- | {
- driver: 'turso';
- url: string;
- authToken: string;
- }
| {
driver: 'd1-http';
accountId: string;
diff --git a/drizzle-kit/src/index.ts b/drizzle-kit/src/index.ts
index 9fab4bcb8..d5b73123d 100644
--- a/drizzle-kit/src/index.ts
+++ b/drizzle-kit/src/index.ts
@@ -117,6 +117,7 @@ export type Config =
schema?: string | string[];
verbose?: boolean;
strict?: boolean;
+ casing?: 'camelCase' | 'snake_case';
migrations?: {
table?: string;
schema?: string;
@@ -128,8 +129,7 @@ export type Config =
}
& (
| {
- dialect: Verify;
- driver: Verify;
+ dialect: Verify;
dbCredentials: {
url: string;
authToken?: string;
diff --git a/drizzle-kit/src/introspect-mysql.ts b/drizzle-kit/src/introspect-mysql.ts
index f206935a3..8c1aa3a76 100644
--- a/drizzle-kit/src/introspect-mysql.ts
+++ b/drizzle-kit/src/introspect-mysql.ts
@@ -1,6 +1,8 @@
/* eslint-disable @typescript-eslint/no-unsafe-argument */
+import { toCamelCase } from 'drizzle-orm/casing';
import './@types/utils';
import type { Casing } from './cli/validations/common';
+import { assertUnreachable } from './global';
import {
Column,
ForeignKey,
@@ -116,7 +118,18 @@ const prepareCasing = (casing?: Casing) => (value: string) => {
return escapeColumnKey(value.camelCase());
}
- return escapeColumnKey(value);
+ assertUnreachable(casing);
+};
+
+const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => {
+ if (casing === 'preserve') {
+ return '';
+ }
+ if (casing === 'camel') {
+ return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`;
+ }
+
+ assertUnreachable(casing);
};
export const schemaToTypeScript = (
@@ -188,6 +201,7 @@ export const schemaToTypeScript = (
Object.values(table.columns),
Object.values(table.foreignKeys),
withCasing,
+ casing,
table.name,
schema,
);
@@ -298,6 +312,7 @@ const column = (
type: string,
name: string,
casing: (value: string) => string,
+ rawCasing: Casing,
defaultValue?: any,
autoincrement?: boolean,
onUpdate?: boolean,
@@ -309,12 +324,14 @@ const column = (
}
if (lowered === 'serial') {
- return `${casing(name)}: serial("${name}")`;
+ return `${casing(name)}: serial(${dbColumnName({ name, casing: rawCasing })})`;
}
if (lowered.startsWith('int')) {
const isUnsigned = lowered.startsWith('int unsigned');
- let out = `${casing(name)}: int("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`;
+ let out = `${casing(name)}: int(${dbColumnName({ name, casing: rawCasing, withMode: isUnsigned })}${
+ isUnsigned ? '{ unsigned: true }' : ''
+ })`;
out += autoincrement ? `.autoincrement()` : '';
out += typeof defaultValue !== 'undefined'
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
@@ -325,7 +342,9 @@ const column = (
if (lowered.startsWith('tinyint')) {
const isUnsigned = lowered.startsWith('tinyint unsigned');
// let out = `${name.camelCase()}: tinyint("${name}")`;
- let out: string = `${casing(name)}: tinyint("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`;
+ let out: string = `${casing(name)}: tinyint(${dbColumnName({ name, casing: rawCasing, withMode: isUnsigned })}${
+ isUnsigned ? ', { unsigned: true }' : ''
+ })`;
out += autoincrement ? `.autoincrement()` : '';
out += typeof defaultValue !== 'undefined'
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
@@ -335,7 +354,9 @@ const column = (
if (lowered.startsWith('smallint')) {
const isUnsigned = lowered.startsWith('smallint unsigned');
- let out = `${casing(name)}: smallint("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`;
+ let out = `${casing(name)}: smallint(${dbColumnName({ name, casing: rawCasing, withMode: isUnsigned })}${
+ isUnsigned ? ', { unsigned: true }' : ''
+ })`;
out += autoincrement ? `.autoincrement()` : '';
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
@@ -345,7 +366,9 @@ const column = (
if (lowered.startsWith('mediumint')) {
const isUnsigned = lowered.startsWith('mediumint unsigned');
- let out = `${casing(name)}: mediumint("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`;
+ let out = `${casing(name)}: mediumint(${dbColumnName({ name, casing: rawCasing, withMode: isUnsigned })}${
+ isUnsigned ? ', { unsigned: true }' : ''
+ })`;
out += autoincrement ? `.autoincrement()` : '';
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
@@ -355,7 +378,9 @@ const column = (
if (lowered.startsWith('bigint')) {
const isUnsigned = lowered.startsWith('bigint unsigned');
- let out = `${casing(name)}: bigint("${name}", { mode: "number"${isUnsigned ? ', unsigned: true' : ''} })`;
+ let out = `${casing(name)}: bigint(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: "number"${
+ isUnsigned ? ', unsigned: true' : ''
+ } })`;
out += autoincrement ? `.autoincrement()` : '';
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
@@ -364,7 +389,7 @@ const column = (
}
if (lowered === 'boolean') {
- let out = `${casing(name)}: boolean("${name}")`;
+ let out = `${casing(name)}: boolean(${dbColumnName({ name, casing: rawCasing })})`;
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
: '';
@@ -383,9 +408,13 @@ const column = (
params = { precision, scale };
}
+ const timeConfigParams = params ? timeConfig(params) : undefined;
+
let out = params
- ? `${casing(name)}: double("${name}", ${timeConfig(params)})`
- : `${casing(name)}: double("${name}")`;
+ ? `${casing(name)}: double(${
+ dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined })
+ }${timeConfig(params)})`
+ : `${casing(name)}: double(${dbColumnName({ name, casing: rawCasing })})`;
// let out = `${name.camelCase()}: double("${name}")`;
out += defaultValue
@@ -395,7 +424,7 @@ const column = (
}
if (lowered === 'float') {
- let out = `${casing(name)}: float("${name}")`;
+ let out = `${casing(name)}: float(${dbColumnName({ name, casing: rawCasing })})`;
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
: '';
@@ -403,7 +432,7 @@ const column = (
}
if (lowered === 'real') {
- let out = `${casing(name)}: real("${name}")`;
+ let out = `${casing(name)}: real(${dbColumnName({ name, casing: rawCasing })})`;
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
: '';
@@ -420,8 +449,10 @@ const column = (
const params = timeConfig({ fsp, mode: "'string'" });
let out = params
- ? `${casing(name)}: timestamp("${name}", ${params})`
- : `${casing(name)}: timestamp("${name}")`;
+ ? `${casing(name)}: timestamp(${
+ dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })
+ }${params})`
+ : `${casing(name)}: timestamp(${dbColumnName({ name, casing: rawCasing })})`;
// mysql has only CURRENT_TIMESTAMP, as I found from docs. But will leave now() for just a case
defaultValue = defaultValue === 'now()' || defaultValue === '(CURRENT_TIMESTAMP)'
@@ -448,8 +479,8 @@ const column = (
const params = timeConfig({ fsp });
let out = params
- ? `${casing(name)}: time("${name}", ${params})`
- : `${casing(name)}: time("${name}")`;
+ ? `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})`
+ : `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing })})`;
defaultValue = defaultValue === 'now()'
? '.defaultNow()'
@@ -466,7 +497,7 @@ const column = (
casing(
name,
)
- }: date("${name}", { mode: 'string' })`;
+ }: date(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string' })`;
defaultValue = defaultValue === 'now()'
? '.defaultNow()'
@@ -480,7 +511,7 @@ const column = (
// in mysql text can't have default value. Will leave it in case smth ;)
if (lowered === 'text') {
- let out = `${casing(name)}: text("${name}")`;
+ let out = `${casing(name)}: text(${dbColumnName({ name, casing: rawCasing })})`;
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
: '';
@@ -489,7 +520,7 @@ const column = (
// in mysql text can't have default value. Will leave it in case smth ;)
if (lowered === 'tinytext') {
- let out = `${casing(name)}: tinytext("${name}")`;
+ let out = `${casing(name)}: tinytext(${dbColumnName({ name, casing: rawCasing })})`;
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
: '';
@@ -498,7 +529,7 @@ const column = (
// in mysql text can't have default value. Will leave it in case smth ;)
if (lowered === 'mediumtext') {
- let out = `${casing(name)}: mediumtext("${name}")`;
+ let out = `${casing(name)}: mediumtext(${dbColumnName({ name, casing: rawCasing })})`;
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
: '';
@@ -507,7 +538,7 @@ const column = (
// in mysql text can't have default value. Will leave it in case smth ;)
if (lowered === 'longtext') {
- let out = `${casing(name)}: longtext("${name}")`;
+ let out = `${casing(name)}: longtext(${dbColumnName({ name, casing: rawCasing })})`;
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
: '';
@@ -515,7 +546,7 @@ const column = (
}
if (lowered === 'year') {
- let out = `${casing(name)}: year("${name}")`;
+ let out = `${casing(name)}: year(${dbColumnName({ name, casing: rawCasing })})`;
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
: '';
@@ -524,7 +555,7 @@ const column = (
// in mysql json can't have default value. Will leave it in case smth ;)
if (lowered === 'json') {
- let out = `${casing(name)}: json("${name}")`;
+ let out = `${casing(name)}: json(${dbColumnName({ name, casing: rawCasing })})`;
out += defaultValue
? `.default(${mapColumnDefaultForJson(defaultValue)})`
@@ -538,7 +569,7 @@ const column = (
casing(
name,
)
- }: varchar("${name}", { length: ${
+ }: varchar(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${
lowered.substring(
'varchar'.length + 1,
lowered.length - 1,
@@ -556,7 +587,7 @@ const column = (
casing(
name,
)
- }: char("${name}", { length: ${
+ }: char(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${
lowered.substring(
'char'.length + 1,
lowered.length - 1,
@@ -581,13 +612,13 @@ const column = (
casing(
name,
)
- }: datetime("${name}", { mode: 'string', fsp: ${
+ }: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string', fsp: ${
lowered.substring(
'datetime'.length + 1,
lowered.length - 1,
)
} })`
- : `${casing(name)}: datetime("${name}", { mode: 'string'})`;
+ : `${casing(name)}: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string'})`;
defaultValue = defaultValue === 'now()'
? '.defaultNow()'
@@ -611,9 +642,13 @@ const column = (
params = { precision, scale };
}
+ const timeConfigParams = params ? timeConfig(params) : undefined;
+
let out = params
- ? `${casing(name)}: decimal("${name}", ${timeConfig(params)})`
- : `${casing(name)}: decimal("${name}")`;
+ ? `${casing(name)}: decimal(${
+ dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined })
+ }${timeConfigParams})`
+ : `${casing(name)}: decimal(${dbColumnName({ name, casing: rawCasing })})`;
defaultValue = typeof defaultValue !== 'undefined'
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
@@ -633,8 +668,8 @@ const column = (
const params = binaryConfig({ length });
let out = params
- ? `${casing(name)}: binary("${name}", ${params})`
- : `${casing(name)}: binary("${name}")`;
+ ? `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})`
+ : `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing })})`;
defaultValue = defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
@@ -646,7 +681,7 @@ const column = (
if (lowered.startsWith('enum')) {
const values = lowered.substring('enum'.length + 1, lowered.length - 1);
- let out = `${casing(name)}: mysqlEnum("${name}", [${values}])`;
+ let out = `${casing(name)}: mysqlEnum(${dbColumnName({ name, casing: rawCasing, withMode: true })}[${values}])`;
out += defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
: '';
@@ -663,8 +698,10 @@ const column = (
const params = binaryConfig({ length });
let out = params
- ? `${casing(name)}: varbinary("${name}", ${params})`
- : `${casing(name)}: varbinary("${name}")`;
+ ? `${casing(name)}: varbinary(${
+ dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })
+ }${params})`
+ : `${casing(name)}: varbinary(${dbColumnName({ name, casing: rawCasing })})`;
defaultValue = defaultValue
? `.default(${mapColumnDefault(defaultValue, isExpression)})`
@@ -682,6 +719,7 @@ const createTableColumns = (
columns: Column[],
fks: ForeignKey[],
casing: (val: string) => string,
+ rawCasing: Casing,
tableName: string,
schema: MySqlSchemaInternal,
): string => {
@@ -707,6 +745,7 @@ const createTableColumns = (
it.type,
it.name,
casing,
+ rawCasing,
it.default,
it.autoincrement,
it.onUpdate,
diff --git a/drizzle-kit/src/introspect-pg.ts b/drizzle-kit/src/introspect-pg.ts
index b7a52b735..8eed3d35f 100644
--- a/drizzle-kit/src/introspect-pg.ts
+++ b/drizzle-kit/src/introspect-pg.ts
@@ -8,8 +8,8 @@ import {
Relation,
Relations,
} from 'drizzle-orm/relations';
-import { plural, singular } from 'pluralize';
import './@types/utils';
+import { toCamelCase } from 'drizzle-orm/casing';
import { Casing } from './cli/validations/common';
import { vectorOps } from './extensions/vector';
import { assertUnreachable } from './global';
@@ -176,6 +176,17 @@ const withCasing = (value: string, casing: Casing) => {
assertUnreachable(casing);
};
+const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => {
+ if (casing === 'preserve') {
+ return '';
+ }
+ if (casing === 'camel') {
+ return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`;
+ }
+
+ assertUnreachable(casing);
+};
+
export const relationsToTypeScriptForStudio = (
schema: Record>>,
relations: Record>>>,
@@ -760,16 +771,16 @@ const column = (
paramNameFor(type.replace('[]', ''), typeSchema),
casing,
)
- }("${name}")`;
+ }(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('serial')) {
- return `${withCasing(name, casing)}: serial("${name}")`;
+ return `${withCasing(name, casing)}: serial(${dbColumnName({ name, casing })})`;
}
if (lowered.startsWith('smallserial')) {
- return `${withCasing(name, casing)}: smallserial("${name}")`;
+ return `${withCasing(name, casing)}: smallserial(${dbColumnName({ name, casing })})`;
}
if (lowered.startsWith('bigserial')) {
@@ -778,42 +789,42 @@ const column = (
name,
casing,
)
- }: bigserial("${name}", { mode: "bigint" })`;
+ }: bigserial(${dbColumnName({ name, casing, withMode: true })}{ mode: "bigint" })`;
}
if (lowered.startsWith('integer')) {
- let out = `${withCasing(name, casing)}: integer("${name}")`;
+ let out = `${withCasing(name, casing)}: integer(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('smallint')) {
- let out = `${withCasing(name, casing)}: smallint("${name}")`;
+ let out = `${withCasing(name, casing)}: smallint(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('bigint')) {
let out = `// You can use { mode: "bigint" } if numbers are exceeding js number limitations\n\t`;
- out += `${withCasing(name, casing)}: bigint("${name}", { mode: "number" })`;
+ out += `${withCasing(name, casing)}: bigint(${dbColumnName({ name, casing, withMode: true })}{ mode: "number" })`;
return out;
}
if (lowered.startsWith('boolean')) {
- let out = `${withCasing(name, casing)}: boolean("${name}")`;
+ let out = `${withCasing(name, casing)}: boolean(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('double precision')) {
- let out = `${withCasing(name, casing)}: doublePrecision("${name}")`;
+ let out = `${withCasing(name, casing)}: doublePrecision(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('real')) {
- let out = `${withCasing(name, casing)}: real("${name}")`;
+ let out = `${withCasing(name, casing)}: real(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('uuid')) {
- let out = `${withCasing(name, casing)}: uuid("${name}")`;
+ let out = `${withCasing(name, casing)}: uuid(${dbColumnName({ name, casing })})`;
return out;
}
@@ -831,8 +842,8 @@ const column = (
}
let out = params
- ? `${withCasing(name, casing)}: numeric("${name}", ${timeConfig(params)})`
- : `${withCasing(name, casing)}: numeric("${name}")`;
+ ? `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing, withMode: true })}${timeConfig(params)})`
+ : `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing })})`;
return out;
}
@@ -856,8 +867,8 @@ const column = (
});
let out = params
- ? `${withCasing(name, casing)}: timestamp("${name}", ${params})`
- : `${withCasing(name, casing)}: timestamp("${name}")`;
+ ? `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing, withMode: true })}${params})`
+ : `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing })})`;
return out;
}
@@ -877,8 +888,8 @@ const column = (
const params = timeConfig({ precision, withTimezone });
let out = params
- ? `${withCasing(name, casing)}: time("${name}", ${params})`
- : `${withCasing(name, casing)}: time("${name}")`;
+ ? `${withCasing(name, casing)}: time(${dbColumnName({ name, casing, withMode: true })}${params})`
+ : `${withCasing(name, casing)}: time(${dbColumnName({ name, casing })})`;
return out;
}
@@ -892,50 +903,50 @@ const column = (
const params = intervalConfig(lowered);
let out = params
- ? `${withCasing(name, casing)}: interval("${name}", ${params})`
- : `${withCasing(name, casing)}: interval("${name}")`;
+ ? `${withCasing(name, casing)}: interval(${dbColumnName({ name, casing, withMode: true })}${params})`
+ : `${withCasing(name, casing)}: interval(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered === 'date') {
- let out = `${withCasing(name, casing)}: date("${name}")`;
+ let out = `${withCasing(name, casing)}: date(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('text')) {
- let out = `${withCasing(name, casing)}: text("${name}")`;
+ let out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('jsonb')) {
- let out = `${withCasing(name, casing)}: jsonb("${name}")`;
+ let out = `${withCasing(name, casing)}: jsonb(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('json')) {
- let out = `${withCasing(name, casing)}: json("${name}")`;
+ let out = `${withCasing(name, casing)}: json(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('inet')) {
- let out = `${withCasing(name, casing)}: inet("${name}")`;
+ let out = `${withCasing(name, casing)}: inet(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('cidr')) {
- let out = `${withCasing(name, casing)}: cidr("${name}")`;
+ let out = `${withCasing(name, casing)}: cidr(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('macaddr8')) {
- let out = `${withCasing(name, casing)}: macaddr8("${name}")`;
+ let out = `${withCasing(name, casing)}: macaddr8(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('macaddr')) {
- let out = `${withCasing(name, casing)}: macaddr("${name}")`;
+ let out = `${withCasing(name, casing)}: macaddr(${dbColumnName({ name, casing })})`;
return out;
}
@@ -947,26 +958,26 @@ const column = (
name,
casing,
)
- }: varchar("${name}", { length: ${
+ }: varchar(${dbColumnName({ name, casing, withMode: true })}{ length: ${
lowered.substring(
8,
lowered.length - 1,
)
} })`;
} else {
- out = `${withCasing(name, casing)}: varchar("${name}")`;
+ out = `${withCasing(name, casing)}: varchar(${dbColumnName({ name, casing })})`;
}
return out;
}
if (lowered.startsWith('point')) {
- let out: string = `${withCasing(name, casing)}: point("${name}")`;
+ let out: string = `${withCasing(name, casing)}: point(${dbColumnName({ name, casing })})`;
return out;
}
if (lowered.startsWith('line')) {
- let out: string = `${withCasing(name, casing)}: point("${name}")`;
+ let out: string = `${withCasing(name, casing)}: point(${dbColumnName({ name, casing })})`;
return out;
}
@@ -978,16 +989,18 @@ const column = (
if (lowered.length !== 8) {
const geometryOptions = lowered.slice(9, -1).split(',');
if (geometryOptions.length === 1 && geometryOptions[0] !== '') {
- out = `${withCasing(name, casing)}: geometry("${name}", { type: "${geometryOptions[0]}" })`;
+ out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing, withMode: true })}{ type: "${
+ geometryOptions[0]
+ }" })`;
} else if (geometryOptions.length === 2) {
- out = `${withCasing(name, casing)}: geometry("${name}", { type: "${geometryOptions[0]}", srid: ${
- geometryOptions[1]
- } })`;
+ out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing, withMode: true })}{ type: "${
+ geometryOptions[0]
+ }", srid: ${geometryOptions[1]} })`;
} else {
isGeoUnknown = true;
}
} else {
- out = `${withCasing(name, casing)}: geometry("${name}")`;
+ out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing })})`;
}
if (isGeoUnknown) {
@@ -1007,14 +1020,14 @@ const column = (
name,
casing,
)
- }: vector("${name}", { dimensions: ${
+ }: vector(${dbColumnName({ name, casing, withMode: true })}{ dimensions: ${
lowered.substring(
7,
lowered.length - 1,
)
} })`;
} else {
- out = `${withCasing(name, casing)}: vector("${name}")`;
+ out = `${withCasing(name, casing)}: vector(${dbColumnName({ name, casing })})`;
}
return out;
@@ -1028,14 +1041,14 @@ const column = (
name,
casing,
)
- }: char("${name}", { length: ${
+ }: char(${dbColumnName({ name, casing, withMode: true })}{ length: ${
lowered.substring(
5,
lowered.length - 1,
)
} })`;
} else {
- out = `${withCasing(name, casing)}: char("${name}")`;
+ out = `${withCasing(name, casing)}: char(${dbColumnName({ name, casing })})`;
}
return out;
diff --git a/drizzle-kit/src/introspect-sqlite.ts b/drizzle-kit/src/introspect-sqlite.ts
index b4a729f4c..422e58f86 100644
--- a/drizzle-kit/src/introspect-sqlite.ts
+++ b/drizzle-kit/src/introspect-sqlite.ts
@@ -1,6 +1,8 @@
/* eslint-disable @typescript-eslint/no-unsafe-argument */
+import { toCamelCase } from 'drizzle-orm/casing';
import './@types/utils';
import type { Casing } from './cli/validations/common';
+import { assertUnreachable } from './global';
import type {
Column,
ForeignKey,
@@ -56,6 +58,17 @@ const withCasing = (value: string, casing?: Casing) => {
return value;
};
+const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => {
+ if (casing === 'preserve') {
+ return '';
+ }
+ if (casing === 'camel') {
+ return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`;
+ }
+
+ assertUnreachable(casing);
+};
+
export const schemaToTypeScript = (
schema: SQLiteSchemaInternal,
casing: Casing,
@@ -226,9 +239,10 @@ const column = (
casing?: Casing,
) => {
let lowered = type;
+ casing = casing!;
if (lowered === 'integer') {
- let out = `${withCasing(name, casing)}: integer("${name}")`;
+ let out = `${withCasing(name, casing)}: integer(${dbColumnName({ name, casing })})`;
// out += autoincrement ? `.autoincrement()` : "";
out += typeof defaultValue !== 'undefined'
? `.default(${mapColumnDefault(defaultValue)})`
@@ -237,7 +251,7 @@ const column = (
}
if (lowered === 'real') {
- let out = `${withCasing(name, casing)}: real("${name}")`;
+ let out = `${withCasing(name, casing)}: real(${dbColumnName({ name, casing })})`;
out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : '';
return out;
}
@@ -247,9 +261,11 @@ const column = (
let out: string;
if (match) {
- out = `${withCasing(name, casing)}: text("${name}", { length: ${match[0]} })`;
+ out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing, withMode: true })}{ length: ${
+ match[0]
+ } })`;
} else {
- out = `${withCasing(name, casing)}: text("${name}")`;
+ out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`;
}
out += defaultValue ? `.default("${mapColumnDefault(defaultValue)}")` : '';
@@ -257,13 +273,13 @@ const column = (
}
if (lowered === 'blob') {
- let out = `${withCasing(name, casing)}: blob("${name}")`;
+ let out = `${withCasing(name, casing)}: blob(${dbColumnName({ name, casing })})`;
out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : '';
return out;
}
if (lowered === 'numeric') {
- let out = `${withCasing(name, casing)}: numeric("${name}")`;
+ let out = `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing })})`;
out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : '';
return out;
}
diff --git a/drizzle-kit/src/jsonStatements.ts b/drizzle-kit/src/jsonStatements.ts
index 090b0cdde..b27785d9a 100644
--- a/drizzle-kit/src/jsonStatements.ts
+++ b/drizzle-kit/src/jsonStatements.ts
@@ -1,10 +1,16 @@
import chalk from 'chalk';
+import { getNewTableName } from './cli/commands/sqlitePushUtils';
import { warning } from './cli/views';
import { CommonSquashedSchema } from './schemaValidator';
import { MySqlKitInternals, MySqlSchema, MySqlSquasher } from './serializer/mysqlSchema';
import { Index, PgSchema, PgSquasher } from './serializer/pgSchema';
import { SingleStoreKitInternals, SingleStoreSchema, SingleStoreSquasher } from './serializer/singlestoreSchema';
-import { SQLiteKitInternals, SQLiteSquasher } from './serializer/sqliteSchema';
+import {
+ SQLiteKitInternals,
+ SQLiteSchemaInternal,
+ SQLiteSchemaSquashed,
+ SQLiteSquasher,
+} from './serializer/sqliteSchema';
import { AlteredColumn, Column, Sequence, Table } from './snapshotsDiffer';
export interface JsonSqliteCreateTableStatement {
@@ -35,6 +41,23 @@ export interface JsonCreateTableStatement {
internals?: MySqlKitInternals | SingleStoreKitInternals;
}
+export interface JsonRecreateTableStatement {
+ type: 'recreate_table';
+ tableName: string;
+ columns: Column[];
+ referenceData: {
+ name: string;
+ tableFrom: string;
+ columnsFrom: string[];
+ tableTo: string;
+ columnsTo: string[];
+ onUpdate?: string | undefined;
+ onDelete?: string | undefined;
+ }[];
+ compositePKs: string[][];
+ uniqueConstraints?: string[];
+}
+
export interface JsonDropTableStatement {
type: 'drop_table';
tableName: string;
@@ -173,6 +196,10 @@ export interface JsonReferenceStatement {
data: string;
schema: string;
tableName: string;
+ isMulticolumn?: boolean;
+ columnNotNull?: boolean;
+ columnDefault?: string;
+ columnType?: string;
// fromTable: string;
// fromColumns: string[];
// toTable: string;
@@ -519,6 +546,7 @@ export type JsonAlterColumnStatement =
| JsonAlterColumnDropIdentityStatement;
export type JsonStatement =
+ | JsonRecreateTableStatement
| JsonAlterColumnStatement
| JsonCreateTableStatement
| JsonDropTableStatement
@@ -2022,6 +2050,55 @@ export const prepareSqliteAlterColumns = (
`${tableName}_${columnName}`
];
+ if (column.autoincrement?.type === 'added') {
+ statements.push({
+ type: 'alter_table_alter_column_set_autoincrement',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+
+ if (column.autoincrement?.type === 'changed') {
+ const type = column.autoincrement.new
+ ? 'alter_table_alter_column_set_autoincrement'
+ : 'alter_table_alter_column_drop_autoincrement';
+
+ statements.push({
+ type,
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+
+ if (column.autoincrement?.type === 'deleted') {
+ statements.push({
+ type: 'alter_table_alter_column_drop_autoincrement',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+
if (typeof column.name !== 'string') {
statements.push({
type: 'alter_table_rename_column',
@@ -2329,6 +2406,54 @@ export const prepareCreateReferencesJson = (
};
});
};
+export const prepareLibSQLCreateReferencesJson = (
+ tableName: string,
+ schema: string,
+ foreignKeys: Record,
+ json2: SQLiteSchemaSquashed,
+ action?: 'push',
+): JsonCreateReferenceStatement[] => {
+ return Object.values(foreignKeys).map((fkData) => {
+ const { columnsFrom, tableFrom, columnsTo } = action === 'push'
+ ? SQLiteSquasher.unsquashPushFK(fkData)
+ : SQLiteSquasher.unsquashFK(fkData);
+
+ // When altering a table in libSQL it is necessary to pass the full column config ("NOT NULL", "DEFAULT", etc.)
+ // If it is a multicolumn reference, it is not possible to pass this data for all columns
+ // Pass the multicolumn flag so the SQL statement generation skips producing a migration for it
+ let isMulticolumn = false;
+
+ if (columnsFrom.length > 1 || columnsTo.length > 1) {
+ isMulticolumn = true;
+
+ return {
+ type: 'create_reference',
+ tableName,
+ data: fkData,
+ schema,
+ isMulticolumn,
+ };
+ }
+
+ const columnFrom = columnsFrom[0];
+
+ const {
+ notNull: columnNotNull,
+ default: columnDefault,
+ type: columnType,
+ } = json2.tables[tableFrom].columns[columnFrom];
+
+ return {
+ type: 'create_reference',
+ tableName,
+ data: fkData,
+ schema,
+ columnNotNull,
+ columnDefault,
+ columnType,
+ };
+ });
+};
export const prepareDropReferencesJson = (
tableName: string,
@@ -2344,6 +2469,77 @@ export const prepareDropReferencesJson = (
};
});
};
+export const prepareLibSQLDropReferencesJson = (
+ tableName: string,
+ schema: string,
+ foreignKeys: Record,
+ json2: SQLiteSchemaSquashed,
+ meta: SQLiteSchemaInternal['_meta'],
+ action?: 'push',
+): JsonDeleteReferenceStatement[] => {
+ const statements = Object.values(foreignKeys).map((fkData) => {
+ const { columnsFrom, tableFrom, columnsTo, name, tableTo, onDelete, onUpdate } = action === 'push'
+ ? SQLiteSquasher.unsquashPushFK(fkData)
+ : SQLiteSquasher.unsquashFK(fkData);
+
+ // If all the columns the reference pointed from were deleted -> skip this logic;
+ // the drop-column statements will cover this scenario
+ const keys = Object.keys(json2.tables[tableName].columns);
+ const filtered = columnsFrom.filter((it) => keys.includes(it));
+ const fullDrop = filtered.length === 0;
+ if (fullDrop) return;
+
+ // When altering a table in libSQL it is necessary to pass the full column config ("NOT NULL", "DEFAULT", etc.)
+ // If it is a multicolumn reference, it is not possible to pass this data for all columns
+ // Pass the multicolumn flag so the SQL statement generation skips producing a migration for it
+ let isMulticolumn = false;
+
+ if (columnsFrom.length > 1 || columnsTo.length > 1) {
+ isMulticolumn = true;
+
+ return {
+ type: 'delete_reference',
+ tableName,
+ data: fkData,
+ schema,
+ isMulticolumn,
+ };
+ }
+
+ const columnFrom = columnsFrom[0];
+ const newTableName = getNewTableName(tableFrom, meta);
+
+ const {
+ notNull: columnNotNull,
+ default: columnDefault,
+ type: columnType,
+ } = json2.tables[newTableName].columns[columnFrom];
+
+ const fkToSquash = {
+ columnsFrom,
+ columnsTo,
+ name,
+ tableFrom: newTableName,
+ tableTo,
+ onDelete,
+ onUpdate,
+ };
+ const foreignKey = action === 'push'
+ ? SQLiteSquasher.squashPushFK(fkToSquash)
+ : SQLiteSquasher.squashFK(fkToSquash);
+ return {
+ type: 'delete_reference',
+ tableName,
+ data: foreignKey,
+ schema,
+ columnNotNull,
+ columnDefault,
+ columnType,
+ };
+ });
+
+ return statements.filter((it) => it) as JsonDeleteReferenceStatement[];
+};
// alter should create 2 statements. It's important to make only 1 sql per statement(for breakpoints)
export const prepareAlterReferencesJson = (
diff --git a/drizzle-kit/src/migrationPreparator.ts b/drizzle-kit/src/migrationPreparator.ts
index 4e5664290..262f4dcba 100644
--- a/drizzle-kit/src/migrationPreparator.ts
+++ b/drizzle-kit/src/migrationPreparator.ts
@@ -1,5 +1,6 @@
import { randomUUID } from 'crypto';
import fs from 'fs';
+import { CasingType } from './cli/validations/common';
import { serializeMySql, serializePg, serializeSingleStore, serializeSQLite } from './serializer';
import { dryMySql, MySqlSchema, mysqlSchema } from './serializer/mysqlSchema';
import { dryPg, PgSchema, pgSchema, PgSchemaInternal } from './serializer/pgSchema';
@@ -9,8 +10,9 @@ import { drySQLite, SQLiteSchema, sqliteSchema } from './serializer/sqliteSchema
export const prepareMySqlDbPushSnapshot = async (
prev: MySqlSchema,
schemaPath: string | string[],
+ casing: CasingType | undefined,
): Promise<{ prev: MySqlSchema; cur: MySqlSchema }> => {
- const serialized = await serializeMySql(schemaPath);
+ const serialized = await serializeMySql(schemaPath, casing);
const id = randomUUID();
const idPrev = prev.id;
@@ -39,8 +41,9 @@ export const prepareSingleStoreDbPushSnapshot = async (
export const prepareSQLiteDbPushSnapshot = async (
prev: SQLiteSchema,
schemaPath: string | string[],
+ casing: CasingType | undefined,
): Promise<{ prev: SQLiteSchema; cur: SQLiteSchema }> => {
- const serialized = await serializeSQLite(schemaPath);
+ const serialized = await serializeSQLite(schemaPath, casing);
const id = randomUUID();
const idPrev = prev.id;
@@ -60,9 +63,10 @@ export const prepareSQLiteDbPushSnapshot = async (
export const preparePgDbPushSnapshot = async (
prev: PgSchema,
schemaPath: string | string[],
+ casing: CasingType | undefined,
schemaFilter: string[] = ['public'],
): Promise<{ prev: PgSchema; cur: PgSchema }> => {
- const serialized = await serializePg(schemaPath, schemaFilter);
+ const serialized = await serializePg(schemaPath, casing, schemaFilter);
const id = randomUUID();
const idPrev = prev.id;
@@ -76,11 +80,12 @@ export const preparePgDbPushSnapshot = async (
export const prepareMySqlMigrationSnapshot = async (
migrationFolders: string[],
schemaPath: string | string[],
+ casing: CasingType | undefined,
): Promise<{ prev: MySqlSchema; cur: MySqlSchema; custom: MySqlSchema }> => {
const prevSnapshot = mysqlSchema.parse(
preparePrevSnapshot(migrationFolders, dryMySql),
);
- const serialized = await serializeMySql(schemaPath);
+ const serialized = await serializeMySql(schemaPath, casing);
const id = randomUUID();
const idPrev = prevSnapshot.id;
@@ -130,11 +135,12 @@ export const prepareSingleStoreMigrationSnapshot = async (
export const prepareSqliteMigrationSnapshot = async (
snapshots: string[],
schemaPath: string | string[],
+ casing: CasingType | undefined,
): Promise<{ prev: SQLiteSchema; cur: SQLiteSchema; custom: SQLiteSchema }> => {
const prevSnapshot = sqliteSchema.parse(
preparePrevSnapshot(snapshots, drySQLite),
);
- const serialized = await serializeSQLite(schemaPath);
+ const serialized = await serializeSQLite(schemaPath, casing);
const id = randomUUID();
const idPrev = prevSnapshot.id;
@@ -176,9 +182,10 @@ export const fillPgSnapshot = ({
export const preparePgMigrationSnapshot = async (
snapshots: string[],
schemaPath: string | string[],
+ casing: CasingType | undefined,
): Promise<{ prev: PgSchema; cur: PgSchema; custom: PgSchema }> => {
const prevSnapshot = pgSchema.parse(preparePrevSnapshot(snapshots, dryPg));
- const serialized = await serializePg(schemaPath);
+ const serialized = await serializePg(schemaPath, casing);
const id = randomUUID();
const idPrev = prevSnapshot.id;
diff --git a/drizzle-kit/src/schemaValidator.ts b/drizzle-kit/src/schemaValidator.ts
index 712252f37..e91b5ab11 100644
--- a/drizzle-kit/src/schemaValidator.ts
+++ b/drizzle-kit/src/schemaValidator.ts
@@ -4,7 +4,7 @@ import { pgSchema, pgSchemaSquashed } from './serializer/pgSchema';
import { singlestoreSchema, singlestoreSchemaSquashed } from './serializer/singlestoreSchema';
import { sqliteSchema, SQLiteSchemaSquashed } from './serializer/sqliteSchema';
-export const dialects = ['postgresql', 'mysql', 'sqlite', 'singlestore'] as const;
+export const dialects = ['postgresql', 'mysql', 'sqlite', 'turso', 'singlestore'] as const;
export const dialect = enumType(dialects);
export type Dialect = (typeof dialects)[number];
diff --git a/drizzle-kit/src/serializer/index.ts b/drizzle-kit/src/serializer/index.ts
index 6eb55c6f6..c32395e0d 100644
--- a/drizzle-kit/src/serializer/index.ts
+++ b/drizzle-kit/src/serializer/index.ts
@@ -1,15 +1,17 @@
import chalk from 'chalk';
-import type { SQL } from 'drizzle-orm';
+import { SQL, Table } from 'drizzle-orm';
+import { CasingCache } from 'drizzle-orm/casing';
import fs from 'fs';
import * as glob from 'glob';
import Path from 'path';
+import { CasingType } from 'src/cli/validations/common';
import { error } from '../cli/views';
import type { MySqlSchemaInternal } from './mysqlSchema';
import type { PgSchemaInternal } from './pgSchema';
import { SingleStoreSchemaInternal } from './singlestoreSchema';
import type { SQLiteSchemaInternal } from './sqliteSchema';
-export const sqlToStr = (sql: SQL) => {
+export const sqlToStr = (sql: SQL, casing: CasingType | undefined) => {
return sql.toQuery({
escapeName: () => {
throw new Error("we don't support params for `sql` default values");
@@ -20,10 +22,11 @@ export const sqlToStr = (sql: SQL) => {
escapeString: () => {
throw new Error("we don't support params for `sql` default values");
},
+ casing: new CasingCache(casing),
}).sql;
};
-export const sqlToStrGenerated = (sql: SQL) => {
+export const sqlToStrGenerated = (sql: SQL, casing: CasingType | undefined) => {
return sql.toQuery({
escapeName: () => {
throw new Error("we don't support params for `sql` default values");
@@ -34,11 +37,13 @@ export const sqlToStrGenerated = (sql: SQL) => {
escapeString: () => {
throw new Error("we don't support params for `sql` default values");
},
+ casing: new CasingCache(casing),
}).sql;
};
export const serializeMySql = async (
path: string | string[],
+ casing: CasingType | undefined,
): Promise => {
const filenames = prepareFilenames(path);
@@ -49,11 +54,12 @@ export const serializeMySql = async (
const { tables } = await prepareFromMySqlImports(filenames);
- return generateMySqlSnapshot(tables);
+ return generateMySqlSnapshot(tables, casing);
};
export const serializePg = async (
path: string | string[],
+ casing: CasingType | undefined,
schemaFilter?: string[],
): Promise => {
const filenames = prepareFilenames(path);
@@ -65,18 +71,19 @@ export const serializePg = async (
filenames,
);
- return generatePgSnapshot(tables, enums, schemas, sequences, schemaFilter);
+ return generatePgSnapshot(tables, enums, schemas, sequences, casing, schemaFilter);
};
export const serializeSQLite = async (
path: string | string[],
+ casing: CasingType | undefined,
): Promise => {
const filenames = prepareFilenames(path);
const { prepareFromSqliteImports } = await import('./sqliteImports');
const { generateSqliteSnapshot } = await import('./sqliteSerializer');
const { tables } = await prepareFromSqliteImports(filenames);
- return generateSqliteSnapshot(tables);
+ return generateSqliteSnapshot(tables, casing);
};
export const serializeSingleStore = async (
diff --git a/drizzle-kit/src/serializer/mysqlSerializer.ts b/drizzle-kit/src/serializer/mysqlSerializer.ts
index 14e867128..da52ac2fb 100644
--- a/drizzle-kit/src/serializer/mysqlSerializer.ts
+++ b/drizzle-kit/src/serializer/mysqlSerializer.ts
@@ -1,9 +1,11 @@
import chalk from 'chalk';
import { getTableName, is } from 'drizzle-orm';
import { SQL } from 'drizzle-orm';
+import { toCamelCase, toSnakeCase } from 'drizzle-orm/casing';
import { AnyMySqlTable, MySqlDialect, type PrimaryKey as PrimaryKeyORM, uniqueKeyName } from 'drizzle-orm/mysql-core';
import { getTableConfig } from 'drizzle-orm/mysql-core';
import { RowDataPacket } from 'mysql2/promise';
+import { CasingType } from 'src/cli/validations/common';
import { withStyle } from '../cli/validations/outputs';
import { IntrospectStage, IntrospectStatus } from '../cli/views';
import {
@@ -16,20 +18,20 @@ import {
Table,
UniqueConstraint,
} from '../serializer/mysqlSchema';
-import type { DB } from '../utils';
+import { type DB, getColumnCasing } from '../utils';
import { sqlToStr } from '.';
// import { MySqlColumnWithAutoIncrement } from "drizzle-orm/mysql-core";
// import { MySqlDateBaseColumn } from "drizzle-orm/mysql-core";
-const dialect = new MySqlDialect();
-
export const indexName = (tableName: string, columns: string[]) => {
return `${tableName}_${columns.join('_')}_index`;
};
export const generateMySqlSnapshot = (
tables: AnyMySqlTable[],
+ casing: CasingType | undefined,
): MySqlSchemaInternal => {
+ const dialect = new MySqlDialect({ casing });
const result: Record = {};
const internal: MySqlKitInternals = { tables: {}, indexes: {} };
for (const table of tables) {
@@ -49,6 +51,7 @@ export const generateMySqlSnapshot = (
const uniqueConstraintObject: Record = {};
columns.forEach((column) => {
+ const name = getColumnCasing(column, casing);
const notNull: boolean = column.notNull;
const sqlTypeLowered = column.getSQLType().toLowerCase();
const autoIncrement = typeof (column as any).autoIncrement === 'undefined'
@@ -58,7 +61,7 @@ export const generateMySqlSnapshot = (
const generated = column.generated;
const columnToSet: Column = {
- name: column.name,
+ name,
type: column.getSQLType(),
primaryKey: false,
// If field is autoincrement it's notNull by default
@@ -79,9 +82,9 @@ export const generateMySqlSnapshot = (
};
if (column.primary) {
- primaryKeysObject[`${tableName}_${column.name}`] = {
- name: `${tableName}_${column.name}`,
- columns: [column.name],
+ primaryKeysObject[`${tableName}_${name}`] = {
+ name: `${tableName}_${name}`,
+ columns: [name],
};
}
@@ -101,7 +104,7 @@ export const generateMySqlSnapshot = (
)
} on the ${
chalk.underline.blue(
- column.name,
+ name,
)
} column is confilcting with a unique constraint name already defined for ${
chalk.underline.blue(
@@ -120,7 +123,7 @@ export const generateMySqlSnapshot = (
if (column.default !== undefined) {
if (is(column.default, SQL)) {
- columnToSet.default = sqlToStr(column.default);
+ columnToSet.default = sqlToStr(column.default, casing);
} else {
if (typeof column.default === 'string') {
columnToSet.default = `'${column.default}'`;
@@ -150,24 +153,33 @@ export const generateMySqlSnapshot = (
}
}
}
- columnsObject[column.name] = columnToSet;
+ columnsObject[name] = columnToSet;
});
primaryKeys.map((pk: PrimaryKeyORM) => {
- const columnNames = pk.columns.map((c: any) => c.name);
- primaryKeysObject[pk.getName()] = {
- name: pk.getName(),
+ const originalColumnNames = pk.columns.map((c) => c.name);
+ const columnNames = pk.columns.map((c: any) => getColumnCasing(c, casing));
+
+ let name = pk.getName();
+ if (casing !== undefined) {
+ for (let i = 0; i < originalColumnNames.length; i++) {
+ name = name.replace(originalColumnNames[i], columnNames[i]);
+ }
+ }
+
+ primaryKeysObject[name] = {
+ name,
columns: columnNames,
};
// all composite pk's should be treated as notNull
for (const column of pk.columns) {
- columnsObject[column.name].notNull = true;
+ columnsObject[getColumnCasing(column, casing)].notNull = true;
}
});
uniqueConstraints?.map((unq) => {
- const columnNames = unq.columns.map((c) => c.name);
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing));
const name = unq.name ?? uniqueKeyName(table, columnNames);
@@ -206,7 +218,6 @@ export const generateMySqlSnapshot = (
});
const fks: ForeignKey[] = foreignKeys.map((fk) => {
- const name = fk.getName();
const tableFrom = tableName;
const onDelete = fk.onDelete ?? 'no action';
const onUpdate = fk.onUpdate ?? 'no action';
@@ -216,8 +227,22 @@ export const generateMySqlSnapshot = (
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
const tableTo = getTableName(referenceFT);
- const columnsFrom = reference.columns.map((it) => it.name);
- const columnsTo = reference.foreignColumns.map((it) => it.name);
+
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing));
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing));
+
+ let name = fk.getName();
+ if (casing !== undefined) {
+ for (let i = 0; i < originalColumnsFrom.length; i++) {
+ name = name.replace(originalColumnsFrom[i], columnsFrom[i]);
+ }
+ for (let i = 0; i < originalColumnsTo.length; i++) {
+ name = name.replace(originalColumnsTo[i], columnsTo[i]);
+ }
+ }
+
return {
name,
tableFrom,
@@ -259,7 +284,7 @@ export const generateMySqlSnapshot = (
}
return sql;
} else {
- return `${it.name}`;
+ return `${getColumnCasing(it, casing)}`;
}
});
diff --git a/drizzle-kit/src/serializer/pgSerializer.ts b/drizzle-kit/src/serializer/pgSerializer.ts
index b479e59e2..cc7b18725 100644
--- a/drizzle-kit/src/serializer/pgSerializer.ts
+++ b/drizzle-kit/src/serializer/pgSerializer.ts
@@ -1,5 +1,6 @@
import chalk from 'chalk';
import { getTableName, is, SQL } from 'drizzle-orm';
+import { toCamelCase, toSnakeCase } from 'drizzle-orm/casing';
import {
AnyPgTable,
ExtraConfigColumn,
@@ -14,6 +15,7 @@ import {
uniqueKeyName,
} from 'drizzle-orm/pg-core';
import { getTableConfig } from 'drizzle-orm/pg-core';
+import { CasingType } from 'src/cli/validations/common';
import { vectorOps } from 'src/extensions/vector';
import { withStyle } from '../cli/validations/outputs';
import type { IntrospectStage, IntrospectStatus } from '../cli/views';
@@ -30,11 +32,9 @@ import type {
Table,
UniqueConstraint,
} from '../serializer/pgSchema';
-import { type DB, isPgArrayType } from '../utils';
+import { type DB, getColumnCasing, isPgArrayType } from '../utils';
import { sqlToStr } from '.';
-const dialect = new PgDialect();
-
export const indexName = (tableName: string, columns: string[]) => {
return `${tableName}_${columns.join('_')}_index`;
};
@@ -117,8 +117,10 @@ export const generatePgSnapshot = (
enums: PgEnum[],
schemas: PgSchema[],
sequences: PgSequence[],
+ casing: CasingType | undefined,
schemaFilter?: string[],
): PgSchemaInternal => {
+ const dialect = new PgDialect({ casing });
const result: Record = {};
const sequencesToReturn: Record = {};
@@ -149,6 +151,7 @@ export const generatePgSnapshot = (
const uniqueConstraintObject: Record = {};
columns.forEach((column) => {
+ const name = getColumnCasing(column, casing);
const notNull: boolean = column.notNull;
const primaryKey: boolean = column.primary;
const sqlTypeLowered = column.getSQLType().toLowerCase();
@@ -173,7 +176,7 @@ export const generatePgSnapshot = (
const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1';
const columnToSet: Column = {
- name: column.name,
+ name,
type: column.getSQLType(),
typeSchema: typeSchema,
primaryKey,
@@ -191,7 +194,7 @@ export const generatePgSnapshot = (
identity: identity
? {
type: identity.type,
- name: identity.sequenceName ?? `${tableName}_${column.name}_seq`,
+ name: identity.sequenceName ?? `${tableName}_${name}_seq`,
schema: schema ?? 'public',
increment,
startWith,
@@ -219,7 +222,7 @@ export const generatePgSnapshot = (
)
} on the ${
chalk.underline.blue(
- column.name,
+ name,
)
} column is confilcting with a unique constraint name already defined for ${
chalk.underline.blue(
@@ -239,7 +242,7 @@ export const generatePgSnapshot = (
if (column.default !== undefined) {
if (is(column.default, SQL)) {
- columnToSet.default = sqlToStr(column.default);
+ columnToSet.default = sqlToStr(column.default, casing);
} else {
if (typeof column.default === 'string') {
columnToSet.default = `'${column.default}'`;
@@ -278,19 +281,28 @@ export const generatePgSnapshot = (
}
}
}
- columnsObject[column.name] = columnToSet;
+ columnsObject[name] = columnToSet;
});
primaryKeys.map((pk) => {
- const columnNames = pk.columns.map((c) => c.name);
- primaryKeysObject[pk.getName()] = {
- name: pk.getName(),
+ const originalColumnNames = pk.columns.map((c) => c.name);
+ const columnNames = pk.columns.map((c) => getColumnCasing(c, casing));
+
+ let name = pk.getName();
+ if (casing !== undefined) {
+ for (let i = 0; i < originalColumnNames.length; i++) {
+ name = name.replace(originalColumnNames[i], columnNames[i]);
+ }
+ }
+
+ primaryKeysObject[name] = {
+ name,
columns: columnNames,
};
});
uniqueConstraints?.map((unq) => {
- const columnNames = unq.columns.map((c) => c.name);
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing));
const name = unq.name ?? uniqueKeyName(table, columnNames);
@@ -329,7 +341,6 @@ export const generatePgSnapshot = (
});
const fks: ForeignKey[] = foreignKeys.map((fk) => {
- const name = fk.getName();
const tableFrom = tableName;
const onDelete = fk.onDelete;
const onUpdate = fk.onUpdate;
@@ -340,8 +351,20 @@ export const generatePgSnapshot = (
// getTableConfig(reference.foreignTable).schema || "public";
const schemaTo = getTableConfig(reference.foreignTable).schema;
- const columnsFrom = reference.columns.map((it) => it.name);
- const columnsTo = reference.foreignColumns.map((it) => it.name);
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing));
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing));
+
+ let name = fk.getName();
+ if (casing !== undefined) {
+ for (let i = 0; i < originalColumnsFrom.length; i++) {
+ name = name.replace(originalColumnsFrom[i], columnsFrom[i]);
+ }
+ for (let i = 0; i < originalColumnsTo.length; i++) {
+ name = name.replace(originalColumnsTo[i], columnsTo[i]);
+ }
+ }
return {
name,
@@ -383,6 +406,7 @@ export const generatePgSnapshot = (
}
}
it = it as IndexedColumn;
+ const name = getColumnCasing(it as IndexedColumn, casing);
if (
!is(it, SQL)
&& it.type! === 'PgVector'
@@ -393,7 +417,7 @@ export const generatePgSnapshot = (
withStyle.errorWarning(
`You are specifying an index on the ${
chalk.blueBright(
- it.name,
+ name,
)
} column inside the ${
chalk.blueBright(
@@ -411,7 +435,7 @@ export const generatePgSnapshot = (
)
}].\n\nYou can specify it using current syntax: ${
chalk.underline(
- `index("${value.config.name}").using("${value.config.method}", table.${it.name}.op("${
+ `index("${value.config.name}").using("${value.config.method}", table.${name}.op("${
vectorOps[0]
}"))`,
)
@@ -421,7 +445,7 @@ export const generatePgSnapshot = (
);
process.exit(1);
}
- indexColumnNames.push((it as ExtraConfigColumn).name);
+ indexColumnNames.push(name);
});
const name = value.config.name
@@ -440,7 +464,7 @@ export const generatePgSnapshot = (
} else {
it = it as IndexedColumn;
return {
- expression: it.name!,
+ expression: getColumnCasing(it as IndexedColumn, casing),
isExpression: false,
asc: it.indexConfig?.order === 'asc',
nulls: it.indexConfig?.nulls
@@ -1040,7 +1064,7 @@ export const fromDatabase = async (
: undefined,
};
- if (identityName) {
+ if (identityName && typeof identityName === 'string') {
// remove "" from sequence name
delete sequencesToReturn[
`${tableSchema}.${
diff --git a/drizzle-kit/src/serializer/sqliteSerializer.ts b/drizzle-kit/src/serializer/sqliteSerializer.ts
index ce544235b..f1d28f759 100644
--- a/drizzle-kit/src/serializer/sqliteSerializer.ts
+++ b/drizzle-kit/src/serializer/sqliteSerializer.ts
@@ -1,5 +1,6 @@
import chalk from 'chalk';
import { getTableName, is, SQL } from 'drizzle-orm';
+import { toCamelCase, toSnakeCase } from 'drizzle-orm/casing';
import {
// AnySQLiteColumnBuilder,
AnySQLiteTable,
@@ -8,6 +9,7 @@ import {
SQLiteSyncDialect,
uniqueKeyName,
} from 'drizzle-orm/sqlite-core';
+import { CasingType } from 'src/cli/validations/common';
import { withStyle } from '../cli/validations/outputs';
import type { IntrospectStage, IntrospectStatus } from '../cli/views';
import type {
@@ -20,14 +22,14 @@ import type {
Table,
UniqueConstraint,
} from '../serializer/sqliteSchema';
-import type { SQLiteDB } from '../utils';
+import { getColumnCasing, type SQLiteDB } from '../utils';
import { sqlToStr } from '.';
-const dialect = new SQLiteSyncDialect();
-
export const generateSqliteSnapshot = (
tables: AnySQLiteTable[],
+ casing: CasingType | undefined,
): SQLiteSchemaInternal => {
+ const dialect = new SQLiteSyncDialect({ casing });
const result: Record = {};
const internal: SQLiteKitInternals = { indexes: {} };
for (const table of tables) {
@@ -48,12 +50,13 @@ export const generateSqliteSnapshot = (
} = getTableConfig(table);
columns.forEach((column) => {
+ const name = getColumnCasing(column, casing);
const notNull: boolean = column.notNull;
const primaryKey: boolean = column.primary;
const generated = column.generated;
const columnToSet: Column = {
- name: column.name,
+ name,
type: column.getSQLType(),
primaryKey,
notNull,
@@ -74,7 +77,7 @@ export const generateSqliteSnapshot = (
if (column.default !== undefined) {
if (is(column.default, SQL)) {
- columnToSet.default = sqlToStr(column.default);
+ columnToSet.default = sqlToStr(column.default, casing);
} else {
columnToSet.default = typeof column.default === 'string'
? `'${column.default}'`
@@ -84,7 +87,7 @@ export const generateSqliteSnapshot = (
: column.default;
}
}
- columnsObject[column.name] = columnToSet;
+ columnsObject[name] = columnToSet;
if (column.isUnique) {
const existingUnique = indexesObject[column.uniqueName!];
@@ -102,7 +105,7 @@ export const generateSqliteSnapshot = (
)
} on the ${
chalk.underline.blue(
- column.name,
+ name,
)
} column is confilcting with a unique constraint name already defined for ${
chalk.underline.blue(
@@ -122,7 +125,6 @@ export const generateSqliteSnapshot = (
});
const foreignKeys: ForeignKey[] = tableForeignKeys.map((fk) => {
- const name = fk.getName();
const tableFrom = tableName;
const onDelete = fk.onDelete ?? 'no action';
const onUpdate = fk.onUpdate ?? 'no action';
@@ -132,8 +134,22 @@ export const generateSqliteSnapshot = (
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
const tableTo = getTableName(referenceFT);
- const columnsFrom = reference.columns.map((it) => it.name);
- const columnsTo = reference.foreignColumns.map((it) => it.name);
+
+ const originalColumnsFrom = reference.columns.map((it) => it.name);
+ const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing));
+ const originalColumnsTo = reference.foreignColumns.map((it) => it.name);
+ const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing));
+
+ let name = fk.getName();
+ if (casing !== undefined) {
+ for (let i = 0; i < originalColumnsFrom.length; i++) {
+ name = name.replace(originalColumnsFrom[i], columnsFrom[i]);
+ }
+ for (let i = 0; i < originalColumnsTo.length; i++) {
+ name = name.replace(originalColumnsTo[i], columnsTo[i]);
+ }
+ }
+
return {
name,
tableFrom,
@@ -175,7 +191,7 @@ export const generateSqliteSnapshot = (
}
return sql;
} else {
- return it.name;
+ return getColumnCasing(it, casing);
}
});
@@ -195,7 +211,7 @@ export const generateSqliteSnapshot = (
});
uniqueConstraints?.map((unq) => {
- const columnNames = unq.columns.map((c) => c.name);
+ const columnNames = unq.columns.map((c) => getColumnCasing(c, casing));
const name = unq.name ?? uniqueKeyName(table, columnNames);
@@ -236,12 +252,22 @@ export const generateSqliteSnapshot = (
primaryKeys.forEach((it) => {
if (it.columns.length > 1) {
- primaryKeysObject[it.getName()] = {
- columns: it.columns.map((it) => it.name),
- name: it.getName(),
+ const originalColumnNames = it.columns.map((c) => c.name);
+ const columnNames = it.columns.map((c) => getColumnCasing(c, casing));
+
+ let name = it.getName();
+ if (casing !== undefined) {
+ for (let i = 0; i < originalColumnNames.length; i++) {
+ name = name.replace(originalColumnNames[i], columnNames[i]);
+ }
+ }
+
+ primaryKeysObject[name] = {
+ columns: columnNames,
+ name,
};
} else {
- columnsObject[it.columns[0].name].primaryKey = true;
+ columnsObject[getColumnCasing(it.columns[0], casing)].primaryKey = true;
}
});
@@ -363,7 +389,6 @@ export const fromDatabase = async (
) => void,
): Promise => {
const result: Record = {};
-
const columns = await db.query<{
tableName: string;
columnName: string;
diff --git a/drizzle-kit/src/serializer/studio.ts b/drizzle-kit/src/serializer/studio.ts
index 5515e6f59..12ea8207c 100644
--- a/drizzle-kit/src/serializer/studio.ts
+++ b/drizzle-kit/src/serializer/studio.ts
@@ -25,6 +25,7 @@ import fs from 'fs';
import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { createServer } from 'node:https';
+import { LibSQLCredentials } from 'src/cli/validations/libsql';
import { assertUnreachable } from 'src/global';
import superjson from 'superjson';
import { z } from 'zod';
@@ -342,8 +343,6 @@ export const drizzleForSQLite = async (
const { driver } = credentials;
if (driver === 'd1-http') {
dbUrl = `d1-http://${credentials.accountId}/${credentials.databaseId}/${credentials.token}`;
- } else if (driver === 'turso') {
- dbUrl = `turso://${credentials.url}/${credentials.authToken}`;
} else {
assertUnreachable(driver);
}
@@ -364,6 +363,32 @@ export const drizzleForSQLite = async (
schemaFiles,
};
};
+export const drizzleForLibSQL = async (
+ credentials: LibSQLCredentials,
+ sqliteSchema: Record>,
+ relations: Record,
+ schemaFiles?: SchemaFile[],
+): Promise => {
+ const { connectToLibSQL } = await import('../cli/connections');
+
+ const sqliteDB = await connectToLibSQL(credentials);
+ const customDefaults = getCustomDefaults(sqliteSchema);
+
+ let dbUrl: string = `turso://${credentials.url}/${credentials.authToken}`;
+
+ const dbHash = createHash('sha256').update(dbUrl).digest('hex');
+
+ return {
+ dbHash,
+ dialect: 'sqlite',
+ driver: undefined,
+ proxy: sqliteDB.proxy,
+ customDefaults,
+ schema: sqliteSchema,
+ relations,
+ schemaFiles,
+ };
+};
export const drizzleForSingleStore = async (
credentials: SingleStoreCredentials,
diff --git a/drizzle-kit/src/snapshotsDiffer.ts b/drizzle-kit/src/snapshotsDiffer.ts
index 11d126013..6f27a2505 100644
--- a/drizzle-kit/src/snapshotsDiffer.ts
+++ b/drizzle-kit/src/snapshotsDiffer.ts
@@ -63,6 +63,8 @@ import {
prepareDropReferencesJson,
prepareDropSequenceJson,
prepareDropTableJson,
+ prepareLibSQLCreateReferencesJson,
+ prepareLibSQLDropReferencesJson,
prepareMoveEnumJson,
prepareMoveSequenceJson,
prepareMySqlCreateTableJson,
@@ -85,6 +87,7 @@ import { MySqlSchema, MySqlSchemaSquashed, MySqlSquasher } from './serializer/my
import { PgSchema, PgSchemaSquashed, sequenceSquashed } from './serializer/pgSchema';
import { SingleStoreSchema, SingleStoreSchemaSquashed, SingleStoreSquasher } from './serializer/singlestoreSchema';
import { SQLiteSchema, SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema';
+import { libSQLCombineStatements, sqliteCombineStatements } from './statementCombiner';
import { copy, prepareMigrationMeta } from './utils';
const makeChanged = (schema: T) => {
@@ -2468,7 +2471,8 @@ export const applySqliteSnapshotsDiff = async (
jsonStatements.push(...jsonAlteredUniqueConstraints);
- const sqlStatements = fromJson(jsonStatements, 'sqlite');
+ const combinedJsonStatements = sqliteCombineStatements(jsonStatements, json2, action);
+ const sqlStatements = fromJson(combinedJsonStatements, 'sqlite');
const uniqueSqlStatements: string[] = [];
sqlStatements.forEach((ss) => {
@@ -2484,7 +2488,428 @@ export const applySqliteSnapshotsDiff = async (
const _meta = prepareMigrationMeta([], rTables, rColumns);
return {
- statements: jsonStatements,
+ statements: combinedJsonStatements,
+ sqlStatements: uniqueSqlStatements,
+ _meta,
+ };
+};
+
+export const applyLibSQLSnapshotsDiff = async (
+ json1: SQLiteSchemaSquashed,
+ json2: SQLiteSchemaSquashed,
+ tablesResolver: (
+ input: ResolverInput,
+ ) => Promise>,
+ columnsResolver: (
+ input: ColumnsResolverInput,
+ ) => Promise>,
+ prevFull: SQLiteSchema,
+ curFull: SQLiteSchema,
+ action?: 'push',
+): Promise<{
+ statements: JsonStatement[];
+ sqlStatements: string[];
+ _meta:
+ | {
+ schemas: {};
+ tables: {};
+ columns: {};
+ }
+ | undefined;
+}> => {
+ const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables);
+ const {
+ created: createdTables,
+ deleted: deletedTables,
+ renamed: renamedTables,
+ } = await tablesResolver({
+ created: tablesDiff.added,
+ deleted: tablesDiff.deleted,
+ });
+
+ const tablesPatchedSnap1 = copy(json1);
+ tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => {
+ const { name } = nameChangeFor(it, renamedTables);
+ it.name = name;
+ return [name, it];
+ });
+
+ const res = diffColumns(tablesPatchedSnap1.tables, json2.tables);
+
+ const columnRenames = [] as {
+ table: string;
+ renames: { from: Column; to: Column }[];
+ }[];
+
+ const columnCreates = [] as {
+ table: string;
+ columns: Column[];
+ }[];
+
+ const columnDeletes = [] as {
+ table: string;
+ columns: Column[];
+ }[];
+
+ for (let entry of Object.values(res)) {
+ const { renamed, created, deleted } = await columnsResolver({
+ tableName: entry.name,
+ schema: entry.schema,
+ deleted: entry.columns.deleted,
+ created: entry.columns.added,
+ });
+
+ if (created.length > 0) {
+ columnCreates.push({
+ table: entry.name,
+ columns: created,
+ });
+ }
+
+ if (deleted.length > 0) {
+ columnDeletes.push({
+ table: entry.name,
+ columns: deleted,
+ });
+ }
+
+ if (renamed.length > 0) {
+ columnRenames.push({
+ table: entry.name,
+ renames: renamed,
+ });
+ }
+ }
+
+ const columnRenamesDict = columnRenames.reduce(
+ (acc, it) => {
+ acc[it.table] = it.renames;
+ return acc;
+ },
+ {} as Record<
+ string,
+ {
+ from: Named;
+ to: Named;
+ }[]
+ >,
+ );
+
+ const columnsPatchedSnap1 = copy(tablesPatchedSnap1);
+ columnsPatchedSnap1.tables = mapEntries(
+ columnsPatchedSnap1.tables,
+ (tableKey, tableValue) => {
+ const patchedColumns = mapKeys(
+ tableValue.columns,
+ (columnKey, column) => {
+ const rens = columnRenamesDict[tableValue.name] || [];
+ const newName = columnChangeFor(columnKey, rens);
+ column.name = newName;
+ return newName;
+ },
+ );
+
+ tableValue.columns = patchedColumns;
+ return [tableKey, tableValue];
+ },
+ );
+
+ const diffResult = applyJsonDiff(columnsPatchedSnap1, json2);
+
+ const typedResult = diffResultSchemeSQLite.parse(diffResult);
+
+ // Map array of objects to map
+ const tablesMap: {
+ [key: string]: (typeof typedResult.alteredTablesWithColumns)[number];
+ } = {};
+
+ typedResult.alteredTablesWithColumns.forEach((obj) => {
+ tablesMap[obj.name] = obj;
+ });
+
+ const jsonCreateTables = createdTables.map((it) => {
+ return prepareSQLiteCreateTable(it, action);
+ });
+
+ const jsonCreateIndexesForCreatedTables = createdTables
+ .map((it) => {
+ return prepareCreateIndexesJson(
+ it.name,
+ it.schema,
+ it.indexes,
+ curFull.internal,
+ );
+ })
+ .flat();
+
+ const jsonDropTables = deletedTables.map((it) => {
+ return prepareDropTableJson(it);
+ });
+
+ const jsonRenameTables = renamedTables.map((it) => {
+ return prepareRenameTableJson(it.from, it.to);
+ });
+
+ const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames
+ .map((it) => prepareRenameColumns(it.table, '', it.renames))
+ .flat();
+
+ const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes
+ .map((it) => _prepareDropColumns(it.table, '', it.columns))
+ .flat();
+
+ const jsonAddColumnsStatemets: JsonSqliteAddColumnStatement[] = columnCreates
+ .map((it) => {
+ return _prepareSqliteAddColumns(
+ it.table,
+ it.columns,
+ tablesMap[it.table] && tablesMap[it.table].addedForeignKeys
+ ? Object.values(tablesMap[it.table].addedForeignKeys)
+ : [],
+ );
+ })
+ .flat();
+
+ const rColumns = jsonRenameColumnsStatements.map((it) => {
+ const tableName = it.tableName;
+ const schema = it.schema;
+ return {
+ from: { schema, table: tableName, column: it.oldColumnName },
+ to: { schema, table: tableName, column: it.newColumnName },
+ };
+ });
+
+ const rTables = renamedTables.map((it) => {
+ return { from: it.from, to: it.to };
+ });
+
+ const _meta = prepareMigrationMeta([], rTables, rColumns);
+
+ const allAltered = typedResult.alteredTablesWithColumns;
+
+ const jsonAddedCompositePKs: JsonCreateCompositePK[] = [];
+ const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = [];
+ const jsonAlteredCompositePKs: JsonAlterCompositePK[] = [];
+
+ const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = [];
+ const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = [];
+ const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = [];
+
+ allAltered.forEach((it) => {
+ // This part is needed to make sure that same columns in a table are not triggered for change
+ // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name
+ // We double-check that pk with same set of columns are both in added and deleted diffs
+ let addedColumns: string[] = [];
+ for (const addedPkName of Object.keys(it.addedCompositePKs)) {
+ const addedPkColumns = it.addedCompositePKs[addedPkName];
+ addedColumns = SQLiteSquasher.unsquashPK(addedPkColumns);
+ }
+
+ let deletedColumns: string[] = [];
+ for (const deletedPkName of Object.keys(it.deletedCompositePKs)) {
+ const deletedPkColumns = it.deletedCompositePKs[deletedPkName];
+ deletedColumns = SQLiteSquasher.unsquashPK(deletedPkColumns);
+ }
+
+ // Don't need to sort, but need to add tests for it
+ // addedColumns.sort();
+ // deletedColumns.sort();
+
+ const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns);
+
+ let addedCompositePKs: JsonCreateCompositePK[] = [];
+ let deletedCompositePKs: JsonDeleteCompositePK[] = [];
+ let alteredCompositePKs: JsonAlterCompositePK[] = [];
+ if (doPerformDeleteAndCreate) {
+ addedCompositePKs = prepareAddCompositePrimaryKeySqlite(
+ it.name,
+ it.addedCompositePKs,
+ );
+ deletedCompositePKs = prepareDeleteCompositePrimaryKeySqlite(
+ it.name,
+ it.deletedCompositePKs,
+ );
+ }
+ alteredCompositePKs = prepareAlterCompositePrimaryKeySqlite(
+ it.name,
+ it.alteredCompositePKs,
+ );
+
+ // add logic for unique constraints
+ let addedUniqueConstraints: JsonCreateUniqueConstraint[] = [];
+ let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = [];
+ let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = [];
+
+ addedUniqueConstraints = prepareAddUniqueConstraint(
+ it.name,
+ it.schema,
+ it.addedUniqueConstraints,
+ );
+
+ deletedUniqueConstraints = prepareDeleteUniqueConstraint(
+ it.name,
+ it.schema,
+ it.deletedUniqueConstraints,
+ );
+ if (it.alteredUniqueConstraints) {
+ const added: Record = {};
+ const deleted: Record = {};
+ for (const k of Object.keys(it.alteredUniqueConstraints)) {
+ added[k] = it.alteredUniqueConstraints[k].__new;
+ deleted[k] = it.alteredUniqueConstraints[k].__old;
+ }
+ addedUniqueConstraints.push(
+ ...prepareAddUniqueConstraint(it.name, it.schema, added),
+ );
+ deletedUniqueConstraints.push(
+ ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted),
+ );
+ }
+
+ jsonAddedCompositePKs.push(...addedCompositePKs);
+ jsonDeletedCompositePKs.push(...deletedCompositePKs);
+ jsonAlteredCompositePKs.push(...alteredCompositePKs);
+
+ jsonAddedUniqueConstraints.push(...addedUniqueConstraints);
+ jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints);
+ jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints);
+ });
+
+ const jsonTableAlternations = allAltered
+ .map((it) => {
+ return prepareSqliteAlterColumns(it.name, it.schema, it.altered, json2);
+ })
+ .flat();
+
+ const jsonCreateIndexesForAllAlteredTables = allAltered
+ .map((it) => {
+ return prepareCreateIndexesJson(
+ it.name,
+ it.schema,
+ it.addedIndexes || {},
+ curFull.internal,
+ );
+ })
+ .flat();
+
+ const jsonDropIndexesForAllAlteredTables = allAltered
+ .map((it) => {
+ return prepareDropIndexesJson(
+ it.name,
+ it.schema,
+ it.deletedIndexes || {},
+ );
+ })
+ .flat();
+
+ allAltered.forEach((it) => {
+ const droppedIndexes = Object.keys(it.alteredIndexes).reduce(
+ (current, item: string) => {
+ current[item] = it.alteredIndexes[item].__old;
+ return current;
+ },
+ {} as Record,
+ );
+ const createdIndexes = Object.keys(it.alteredIndexes).reduce(
+ (current, item: string) => {
+ current[item] = it.alteredIndexes[item].__new;
+ return current;
+ },
+ {} as Record,
+ );
+
+ jsonCreateIndexesForAllAlteredTables.push(
+ ...prepareCreateIndexesJson(
+ it.name,
+ it.schema,
+ createdIndexes || {},
+ curFull.internal,
+ ),
+ );
+ jsonDropIndexesForAllAlteredTables.push(
+ ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}),
+ );
+ });
+
+ const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = allAltered
+ .map((it) => {
+ const forAdded = prepareLibSQLCreateReferencesJson(
+ it.name,
+ it.schema,
+ it.addedForeignKeys,
+ json2,
+ action,
+ );
+
+ const forAltered = prepareLibSQLDropReferencesJson(
+ it.name,
+ it.schema,
+ it.deletedForeignKeys,
+ json2,
+ _meta,
+ action,
+ );
+
+ const alteredFKs = prepareAlterReferencesJson(it.name, it.schema, it.alteredForeignKeys);
+
+ return [...forAdded, ...forAltered, ...alteredFKs];
+ })
+ .flat();
+
+ const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter(
+ (t) => t.type === 'create_reference',
+ );
+ const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter(
+ (t) => t.type === 'delete_reference',
+ );
+
+ const jsonStatements: JsonStatement[] = [];
+ jsonStatements.push(...jsonCreateTables);
+
+ jsonStatements.push(...jsonDropTables);
+ jsonStatements.push(...jsonRenameTables);
+ jsonStatements.push(...jsonRenameColumnsStatements);
+
+ jsonStatements.push(...jsonDroppedReferencesForAlteredTables);
+
+ // Will need to drop indexes before changing any columns in table
+ // Then should go column alternations and then index creation
+ jsonStatements.push(...jsonDropIndexesForAllAlteredTables);
+
+ jsonStatements.push(...jsonDeletedCompositePKs);
+ jsonStatements.push(...jsonTableAlternations);
+ jsonStatements.push(...jsonAddedCompositePKs);
+ jsonStatements.push(...jsonAddColumnsStatemets);
+
+ jsonStatements.push(...jsonCreateIndexesForCreatedTables);
+ jsonStatements.push(...jsonCreateIndexesForAllAlteredTables);
+
+ jsonStatements.push(...jsonCreatedReferencesForAlteredTables);
+
+ jsonStatements.push(...jsonDropColumnsStatemets);
+
+ jsonStatements.push(...jsonAlteredCompositePKs);
+
+ jsonStatements.push(...jsonAlteredUniqueConstraints);
+
+ const combinedJsonStatements = libSQLCombineStatements(jsonStatements, json2, action);
+
+ const sqlStatements = fromJson(
+ combinedJsonStatements,
+ 'turso',
+ action,
+ json2,
+ );
+
+ const uniqueSqlStatements: string[] = [];
+ sqlStatements.forEach((ss) => {
+ if (!uniqueSqlStatements.includes(ss)) {
+ uniqueSqlStatements.push(ss);
+ }
+ });
+
+ return {
+ statements: combinedJsonStatements,
sqlStatements: uniqueSqlStatements,
_meta,
};
diff --git a/drizzle-kit/src/sqlgenerator.ts b/drizzle-kit/src/sqlgenerator.ts
index 07b24b6c9..24457cb23 100644
--- a/drizzle-kit/src/sqlgenerator.ts
+++ b/drizzle-kit/src/sqlgenerator.ts
@@ -42,6 +42,7 @@ import {
JsonDropTableStatement,
JsonMoveSequenceStatement,
JsonPgCreateIndexStatement,
+ JsonRecreateTableStatement,
JsonRenameColumnStatement,
JsonRenameSchema,
JsonRenameSequenceStatement,
@@ -54,7 +55,7 @@ import { Dialect } from './schemaValidator';
import { MySqlSquasher } from './serializer/mysqlSchema';
import { PgSquasher } from './serializer/pgSchema';
import { SingleStoreSquasher } from './serializer/singlestoreSchema';
-import { SQLiteSquasher } from './serializer/sqliteSchema';
+import { SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema';
export const pgNativeTypes = new Set([
'uuid',
@@ -127,8 +128,15 @@ const isPgNativeType = (it: string) => {
};
abstract class Convertor {
- abstract can(statement: JsonStatement, dialect: Dialect): boolean;
- abstract convert(statement: JsonStatement): string | string[];
+ abstract can(
+ statement: JsonStatement,
+ dialect: Dialect,
+ ): boolean;
+ abstract convert(
+ statement: JsonStatement,
+ json2?: SQLiteSchemaSquashed,
+ action?: 'push',
+ ): string | string[];
}
class PgCreateTableConvertor extends Convertor {
@@ -382,7 +390,7 @@ class SingleStoreCreateTableConvertor extends Convertor {
export class SQLiteCreateTableConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'sqlite_create_table' && dialect === 'sqlite';
+ return statement.type === 'sqlite_create_table' && (dialect === 'sqlite' || dialect === 'turso');
}
convert(st: JsonSqliteCreateTableStatement) {
@@ -888,7 +896,7 @@ class SingleStoreDropTableConvertor extends Convertor {
export class SQLiteDropTableConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'drop_table' && dialect === 'sqlite';
+ return statement.type === 'drop_table' && (dialect === 'sqlite' || dialect === 'turso');
}
convert(statement: JsonDropTableStatement) {
@@ -914,7 +922,7 @@ class PgRenameTableConvertor extends Convertor {
export class SqliteRenameTableConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'rename_table' && dialect === 'sqlite';
+ return statement.type === 'rename_table' && (dialect === 'sqlite' || dialect === 'turso');
}
convert(statement: JsonRenameTableStatement) {
@@ -992,13 +1000,13 @@ class SingleStoreAlterTableRenameColumnConvertor extends Convertor {
class SQLiteAlterTableRenameColumnConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
- statement.type === 'alter_table_rename_column' && dialect === 'sqlite'
+ statement.type === 'alter_table_rename_column' && (dialect === 'sqlite' || dialect === 'turso')
);
}
convert(statement: JsonRenameColumnStatement) {
const { tableName, oldColumnName, newColumnName } = statement;
- return `ALTER TABLE \`${tableName}\` RENAME COLUMN \`${oldColumnName}\` TO \`${newColumnName}\`;`;
+ return `ALTER TABLE \`${tableName}\` RENAME COLUMN "${oldColumnName}" TO "${newColumnName}";`;
}
}
@@ -1044,7 +1052,7 @@ class SingleStoreAlterTableDropColumnConvertor extends Convertor {
class SQLiteAlterTableDropColumnConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'alter_table_drop_column' && dialect === 'sqlite';
+ return statement.type === 'alter_table_drop_column' && (dialect === 'sqlite' || dialect === 'turso');
}
convert(statement: JsonDropColumnStatement) {
@@ -1185,7 +1193,7 @@ class SingleStoreAlterTableAddColumnConvertor extends Convertor {
export class SQLiteAlterTableAddColumnConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
- statement.type === 'sqlite_alter_table_add_column' && dialect === 'sqlite'
+ statement.type === 'sqlite_alter_table_add_column' && (dialect === 'sqlite' || dialect === 'turso')
);
}
@@ -1232,26 +1240,6 @@ class PgAlterTableAlterColumnSetTypeConvertor extends Convertor {
}
}
-class SQLiteAlterTableAlterColumnSetTypeConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return (
- statement.type === 'alter_table_alter_column_set_type'
- && dialect === 'sqlite'
- );
- }
-
- convert(statement: JsonAlterColumnTypeStatement) {
- return (
- '/*\n SQLite does not support "Changing existing column type" out of the box, we do not generate automatic migration for that, so it has to be done manually'
- + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
- + '\n https://www.sqlite.org/lang_altertable.html'
- + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
- + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
- + '\n*/'
- );
- }
-}
-
class PgAlterTableAlterColumnSetDefaultConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
@@ -1271,26 +1259,6 @@ class PgAlterTableAlterColumnSetDefaultConvertor extends Convertor {
}
}
-class SqliteAlterTableAlterColumnSetDefaultConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return (
- statement.type === 'alter_table_alter_column_set_default'
- && dialect === 'sqlite'
- );
- }
-
- convert(statement: JsonAlterColumnSetDefaultStatement) {
- return (
- '/*\n SQLite does not support "Set default to column" out of the box, we do not generate automatic migration for that, so it has to be done manually'
- + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
- + '\n https://www.sqlite.org/lang_altertable.html'
- + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
- + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
- + '\n*/'
- );
- }
-}
-
class PgAlterTableAlterColumnDropDefaultConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
@@ -1430,7 +1398,7 @@ class SqliteAlterTableAlterColumnDropGeneratedConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
statement.type === 'alter_table_alter_column_drop_generated'
- && dialect === 'sqlite'
+ && (dialect === 'sqlite' || dialect === 'turso')
);
}
@@ -1479,7 +1447,7 @@ class SqliteAlterTableAlterColumnSetExpressionConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
statement.type === 'alter_table_alter_column_set_generated'
- && dialect === 'sqlite'
+ && (dialect === 'sqlite' || dialect === 'turso')
);
}
@@ -1528,7 +1496,7 @@ class SqliteAlterTableAlterColumnAlterGeneratedConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
statement.type === 'alter_table_alter_column_alter_generated'
- && dialect === 'sqlite'
+ && (dialect === 'sqlite' || dialect === 'turso')
);
}
@@ -1675,6 +1643,119 @@ class MySqlAlterTableDropPk extends Convertor {
}
}
+type LibSQLModifyColumnStatement =
+ | JsonAlterColumnTypeStatement
+ | JsonAlterColumnDropNotNullStatement
+ | JsonAlterColumnSetNotNullStatement
+ | JsonAlterColumnSetDefaultStatement
+ | JsonAlterColumnDropDefaultStatement;
+
+export class LibSQLModifyColumn extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return (
+ (statement.type === 'alter_table_alter_column_set_type'
+ || statement.type === 'alter_table_alter_column_drop_notnull'
+ || statement.type === 'alter_table_alter_column_set_notnull'
+ || statement.type === 'alter_table_alter_column_set_default'
+ || statement.type === 'alter_table_alter_column_drop_default')
+ && dialect === 'turso'
+ );
+ }
+
+ convert(statement: LibSQLModifyColumnStatement, json2: SQLiteSchemaSquashed) {
+ const { tableName, columnName } = statement;
+
+ let columnType = ``;
+ let columnDefault: any = '';
+ let columnNotNull = '';
+
+ const sqlStatements: string[] = [];
+
+ // collect index info
+ const indexes: {
+ name: string;
+ tableName: string;
+ columns: string[];
+ isUnique: boolean;
+ where?: string | undefined;
+ }[] = [];
+ for (const table of Object.values(json2.tables)) {
+ for (const index of Object.values(table.indexes)) {
+ const unsquashed = SQLiteSquasher.unsquashIdx(index);
+ sqlStatements.push(`DROP INDEX IF EXISTS "${unsquashed.name}";`);
+ indexes.push({ ...unsquashed, tableName: table.name });
+ }
+ }
+
+ switch (statement.type) {
+ case 'alter_table_alter_column_set_type':
+ columnType = ` ${statement.newDataType}`;
+
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+
+ break;
+ case 'alter_table_alter_column_drop_notnull':
+ columnType = ` ${statement.newDataType}`;
+
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+
+ columnNotNull = '';
+ break;
+ case 'alter_table_alter_column_set_notnull':
+ columnType = ` ${statement.newDataType}`;
+
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+
+ columnNotNull = ` NOT NULL`;
+ break;
+ case 'alter_table_alter_column_set_default':
+ columnType = ` ${statement.newDataType}`;
+
+ columnDefault = ` DEFAULT ${statement.newDefaultValue}`;
+
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ break;
+ case 'alter_table_alter_column_drop_default':
+ columnType = ` ${statement.newDataType}`;
+
+ columnDefault = '';
+
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ break;
+ }
+
+		// The value read from the plain json2 snapshot may still be a Date object, so normalize it to an ISO string
+ columnDefault = columnDefault instanceof Date
+ ? columnDefault.toISOString()
+ : columnDefault;
+
+ sqlStatements.push(
+ `ALTER TABLE \`${tableName}\` ALTER COLUMN "${columnName}" TO "${columnName}"${columnType}${columnNotNull}${columnDefault};`,
+ );
+
+ for (const index of indexes) {
+ const indexPart = index.isUnique ? 'UNIQUE INDEX' : 'INDEX';
+ const whereStatement = index.where ? ` WHERE ${index.where}` : '';
+ const uniqueString = index.columns.map((it) => `\`${it}\``).join(',');
+ const tableName = index.tableName;
+
+ sqlStatements.push(
+ `CREATE ${indexPart} \`${index.name}\` ON \`${tableName}\` (${uniqueString})${whereStatement};`,
+ );
+ }
+
+ return sqlStatements;
+ }
+}
+
type MySqlModifyColumnStatement =
| JsonAlterColumnDropNotNullStatement
| JsonAlterColumnSetNotNullStatement
@@ -2281,7 +2362,6 @@ class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor {
}");`;
}
}
-
class PgAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return statement.type === 'delete_composite_pk' && dialect === 'postgresql';
@@ -2541,66 +2621,6 @@ class PgAlterTableAlterColumnSetNotNullConvertor extends Convertor {
}
}
-class SqliteAlterTableAlterColumnSetNotNullConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return (
- statement.type === 'alter_table_alter_column_set_notnull'
- && dialect === 'sqlite'
- );
- }
-
- convert(statement: JsonAlterColumnSetNotNullStatement) {
- return (
- '/*\n SQLite does not support "Set not null to column" out of the box, we do not generate automatic migration for that, so it has to be done manually'
- + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
- + '\n https://www.sqlite.org/lang_altertable.html'
- + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
- + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
- + '\n*/'
- );
- }
-}
-
-class SqliteAlterTableAlterColumnSetAutoincrementConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return (
- statement.type === 'alter_table_alter_column_set_autoincrement'
- && dialect === 'sqlite'
- );
- }
-
- convert(statement: JsonAlterColumnSetAutoincrementStatement) {
- return (
- '/*\n SQLite does not support "Set autoincrement to a column" out of the box, we do not generate automatic migration for that, so it has to be done manually'
- + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
- + '\n https://www.sqlite.org/lang_altertable.html'
- + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
- + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
- + '\n*/'
- );
- }
-}
-
-class SqliteAlterTableAlterColumnDropAutoincrementConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return (
- statement.type === 'alter_table_alter_column_drop_autoincrement'
- && dialect === 'sqlite'
- );
- }
-
- convert(statement: JsonAlterColumnDropAutoincrementStatement) {
- return (
- '/*\n SQLite does not support "Drop autoincrement from a column" out of the box, we do not generate automatic migration for that, so it has to be done manually'
- + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
- + '\n https://www.sqlite.org/lang_altertable.html'
- + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
- + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
- + '\n*/'
- );
- }
-}
-
class PgAlterTableAlterColumnDropNotNullConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
@@ -2620,26 +2640,6 @@ class PgAlterTableAlterColumnDropNotNullConvertor extends Convertor {
}
}
-class SqliteAlterTableAlterColumnDropNotNullConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return (
- statement.type === 'alter_table_alter_column_drop_notnull'
- && dialect === 'sqlite'
- );
- }
-
- convert(statement: JsonAlterColumnDropNotNullStatement) {
- return (
- '/*\n SQLite does not support "Drop not null from column" out of the box, we do not generate automatic migration for that, so it has to be done manually'
- + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
- + '\n https://www.sqlite.org/lang_altertable.html'
- + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
- + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
- + '\n*/'
- );
- }
-}
-
// FK
class PgCreateForeignKeyConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
@@ -2682,20 +2682,37 @@ class PgCreateForeignKeyConvertor extends Convertor {
}
}
-class SqliteCreateForeignKeyConvertor extends Convertor {
+class LibSQLCreateForeignKeyConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'create_reference' && dialect === 'sqlite';
- }
-
- convert(statement: JsonCreateReferenceStatement): string {
return (
- '/*\n SQLite does not support "Creating foreign key on existing column" out of the box, we do not generate automatic migration for that, so it has to be done manually'
- + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
- + '\n https://www.sqlite.org/lang_altertable.html'
- + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
- + '\n*/'
+ statement.type === 'create_reference'
+ && dialect === 'turso'
);
}
+
+ convert(
+ statement: JsonCreateReferenceStatement,
+ json2?: SQLiteSchemaSquashed,
+ action?: 'push',
+ ): string {
+ const { columnsFrom, columnsTo, tableFrom, onDelete, onUpdate, tableTo } = action === 'push'
+ ? SQLiteSquasher.unsquashPushFK(statement.data)
+ : SQLiteSquasher.unsquashFK(statement.data);
+ const { columnDefault, columnNotNull, columnType } = statement;
+
+ const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : '';
+ const onUpdateStatement = onUpdate ? ` ON UPDATE ${onUpdate}` : '';
+ const columnsDefaultValue = columnDefault
+ ? ` DEFAULT ${columnDefault}`
+ : '';
+ const columnNotNullValue = columnNotNull ? ` NOT NULL` : '';
+ const columnTypeValue = columnType ? ` ${columnType}` : '';
+
+ const columnFrom = columnsFrom[0];
+ const columnTo = columnsTo[0];
+
+ return `ALTER TABLE \`${tableFrom}\` ALTER COLUMN "${columnFrom}" TO "${columnFrom}"${columnTypeValue}${columnNotNullValue}${columnsDefaultValue} REFERENCES ${tableTo}(${columnTo})${onDeleteStatement}${onUpdateStatement};`;
+ }
}
class MySqlCreateForeignKeyConvertor extends Convertor {
@@ -2769,22 +2786,6 @@ class PgAlterForeignKeyConvertor extends Convertor {
}
}
-class SqliteAlterForeignKeyConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'alter_reference' && dialect === 'sqlite';
- }
-
- convert(statement: JsonAlterReferenceStatement): string {
- return (
- '/*\n SQLite does not support "Changing existing foreign key" out of the box, we do not generate automatic migration for that, so it has to be done manually'
- + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
- + '\n https://www.sqlite.org/lang_altertable.html'
- + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
- + '\n*/'
- );
- }
-}
-
class PgDeleteForeignKeyConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return statement.type === 'delete_reference' && dialect === 'postgresql';
@@ -2802,22 +2803,6 @@ class PgDeleteForeignKeyConvertor extends Convertor {
}
}
-class SqliteDeleteForeignKeyConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'delete_reference' && dialect === 'sqlite';
- }
-
- convert(statement: JsonDeleteReferenceStatement): string {
- return (
- '/*\n SQLite does not support "Dropping foreign key" out of the box, we do not generate automatic migration for that, so it has to be done manually'
- + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
- + '\n https://www.sqlite.org/lang_altertable.html'
- + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
- + '\n*/'
- );
- }
-}
-
class MySqlDeleteForeignKeyConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return statement.type === 'delete_reference' && dialect === 'mysql';
@@ -2939,7 +2924,7 @@ class CreateSingleStoreIndexConvertor extends Convertor {
export class CreateSqliteIndexConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'create_index' && dialect === 'sqlite';
+ return statement.type === 'create_index' && (dialect === 'sqlite' || dialect === 'turso');
}
convert(statement: JsonCreateIndexStatement): string {
@@ -3061,7 +3046,7 @@ class PgAlterTableRemoveFromSchemaConvertor extends Convertor {
export class SqliteDropIndexConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'drop_index' && dialect === 'sqlite';
+ return statement.type === 'drop_index' && (dialect === 'sqlite' || dialect === 'turso');
}
convert(statement: JsonDropIndexStatement): string {
@@ -3091,12 +3076,132 @@ class SingleStoreDropIndexConvertor extends Convertor {
return `DROP INDEX \`${name}\` ON \`${statement.tableName}\`;`;
}
}
+class SQLiteRecreateTableConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return (
+ statement.type === 'recreate_table' && dialect === 'sqlite'
+ );
+ }
+
+ convert(statement: JsonRecreateTableStatement): string | string[] {
+ const { tableName, columns, compositePKs, referenceData } = statement;
+
+ const columnNames = columns.map((it) => `"${it.name}"`).join(', ');
+ const newTableName = `__new_${tableName}`;
+
+ const sqlStatements: string[] = [];
+
+ sqlStatements.push(`PRAGMA foreign_keys=OFF;`);
+
+ // create new table
+ sqlStatements.push(
+ new SQLiteCreateTableConvertor().convert({
+ type: 'sqlite_create_table',
+ tableName: newTableName,
+ columns,
+ referenceData,
+ compositePKs,
+ }),
+ );
+
+ // migrate data
+ sqlStatements.push(
+ `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${tableName}\`;`,
+ );
+
+ // drop table
+ sqlStatements.push(
+ new SQLiteDropTableConvertor().convert({
+ type: 'drop_table',
+ tableName: tableName,
+ schema: '',
+ }),
+ );
+
+ // rename table
+ sqlStatements.push(
+ new SqliteRenameTableConvertor().convert({
+ fromSchema: '',
+ tableNameFrom: newTableName,
+ tableNameTo: tableName,
+ toSchema: '',
+ type: 'rename_table',
+ }),
+ );
+
+ sqlStatements.push(`PRAGMA foreign_keys=ON;`);
+
+ return sqlStatements;
+ }
+}
+
+class LibSQLRecreateTableConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return (
+ statement.type === 'recreate_table'
+ && dialect === 'turso'
+ );
+ }
+
+ convert(statement: JsonRecreateTableStatement): string[] {
+ const { tableName, columns, compositePKs, referenceData } = statement;
+
+ const columnNames = columns.map((it) => `"${it.name}"`).join(', ');
+ const newTableName = `__new_${tableName}`;
+
+ const sqlStatements: string[] = [];
+
+ sqlStatements.push(`PRAGMA foreign_keys=OFF;`);
+
+ // create new table
+ sqlStatements.push(
+ new SQLiteCreateTableConvertor().convert({
+ type: 'sqlite_create_table',
+ tableName: newTableName,
+ columns,
+ referenceData,
+ compositePKs,
+ }),
+ );
+
+ // migrate data
+ sqlStatements.push(
+ `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${tableName}\`;`,
+ );
+
+ // drop table
+ sqlStatements.push(
+ new SQLiteDropTableConvertor().convert({
+ type: 'drop_table',
+ tableName: tableName,
+ schema: '',
+ }),
+ );
+
+ // rename table
+ sqlStatements.push(
+ new SqliteRenameTableConvertor().convert({
+ fromSchema: '',
+ tableNameFrom: newTableName,
+ tableNameTo: tableName,
+ toSchema: '',
+ type: 'rename_table',
+ }),
+ );
+
+ sqlStatements.push(`PRAGMA foreign_keys=ON;`);
+
+ return sqlStatements;
+ }
+}
const convertors: Convertor[] = [];
convertors.push(new PgCreateTableConvertor());
convertors.push(new MySqlCreateTableConvertor());
convertors.push(new SingleStoreCreateTableConvertor());
convertors.push(new SQLiteCreateTableConvertor());
+convertors.push(new SQLiteRecreateTableConvertor());
+convertors.push(new LibSQLRecreateTableConvertor());
convertors.push(new CreateTypeEnumConvertor());
@@ -3175,6 +3280,7 @@ convertors.push(new SqliteAlterTableAlterColumnAlterGeneratedConvertor());
convertors.push(new SqliteAlterTableAlterColumnSetExpressionConvertor());
convertors.push(new MySqlModifyColumn());
+convertors.push(new LibSQLModifyColumn());
// convertors.push(new MySqlAlterTableAlterColumnSetDefaultConvertor());
// convertors.push(new MySqlAlterTableAlterColumnDropDefaultConvertor());
@@ -3195,31 +3301,12 @@ convertors.push(new PgAlterTableSetSchemaConvertor());
convertors.push(new PgAlterTableSetNewSchemaConvertor());
convertors.push(new PgAlterTableRemoveFromSchemaConvertor());
-// Unhandled sqlite queries, so they will appear last
-convertors.push(new SQLiteAlterTableAlterColumnSetTypeConvertor());
-convertors.push(new SqliteAlterForeignKeyConvertor());
-convertors.push(new SqliteDeleteForeignKeyConvertor());
-convertors.push(new SqliteCreateForeignKeyConvertor());
-
-convertors.push(new SQLiteAlterTableAddUniqueConstraintConvertor());
-convertors.push(new SQLiteAlterTableDropUniqueConstraintConvertor());
+convertors.push(new LibSQLCreateForeignKeyConvertor());
convertors.push(new PgAlterTableAlterColumnDropGenerated());
convertors.push(new PgAlterTableAlterColumnSetGenerated());
convertors.push(new PgAlterTableAlterColumnAlterGenerated());
-convertors.push(new SqliteAlterTableAlterColumnSetNotNullConvertor());
-convertors.push(new SqliteAlterTableAlterColumnDropNotNullConvertor());
-convertors.push(new SqliteAlterTableAlterColumnSetDefaultConvertor());
-convertors.push(new SqliteAlterTableAlterColumnDropDefaultConvertor());
-
-convertors.push(new SqliteAlterTableAlterColumnSetAutoincrementConvertor());
-convertors.push(new SqliteAlterTableAlterColumnDropAutoincrementConvertor());
-
-convertors.push(new SqliteAlterTableCreateCompositePrimaryKeyConvertor());
-convertors.push(new SqliteAlterTableDeleteCompositePrimaryKeyConvertor());
-convertors.push(new SqliteAlterTableAlterCompositePrimaryKeyConvertor());
-
convertors.push(new PgAlterTableCreateCompositePrimaryKeyConvertor());
convertors.push(new PgAlterTableDeleteCompositePrimaryKeyConvertor());
convertors.push(new PgAlterTableAlterCompositePrimaryKeyConvertor());
@@ -3236,26 +3323,40 @@ convertors.push(new SingleStoreAlterTableCreateCompositePrimaryKeyConvertor());
convertors.push(new SingleStoreAlterTableAddPk());
convertors.push(new SingleStoreAlterTableAlterCompositePrimaryKeyConvertor());
-export const fromJson = (statements: JsonStatement[], dialect: Dialect) => {
+export function fromJson(
+ statements: JsonStatement[],
+	dialect: Exclude<Dialect, 'sqlite' | 'turso'>,
+): string[];
+export function fromJson(
+ statements: JsonStatement[],
+ dialect: 'sqlite' | 'turso',
+ action?: 'push',
+ json2?: SQLiteSchemaSquashed,
+): string[];
+
+export function fromJson(
+ statements: JsonStatement[],
+ dialect: Dialect,
+ action?: 'push',
+ json2?: SQLiteSchemaSquashed,
+) {
const result = statements
.flatMap((statement) => {
const filtered = convertors.filter((it) => {
- // console.log(statement, dialect)
return it.can(statement, dialect);
});
const convertor = filtered.length === 1 ? filtered[0] : undefined;
if (!convertor) {
- // console.log("no convertor:", statement.type, dialect);
return '';
}
- return convertor.convert(statement);
+ return convertor.convert(statement, json2, action);
})
.filter((it) => it !== '');
return result;
-};
+}
// blog.yo1.dog/updating-enum-values-in-postgresql-the-safe-and-easy-way/
// test case for enum altering
diff --git a/drizzle-kit/src/statementCombiner.ts b/drizzle-kit/src/statementCombiner.ts
new file mode 100644
index 000000000..2f7b6ddbe
--- /dev/null
+++ b/drizzle-kit/src/statementCombiner.ts
@@ -0,0 +1,450 @@
+import {
+ JsonCreateIndexStatement,
+ JsonRecreateTableStatement,
+ JsonStatement,
+ prepareCreateIndexesJson,
+} from './jsonStatements';
+import { SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema';
+
+export const prepareLibSQLRecreateTable = (
+ table: SQLiteSchemaSquashed['tables'][keyof SQLiteSchemaSquashed['tables']],
+ action?: 'push',
+): (JsonRecreateTableStatement | JsonCreateIndexStatement)[] => {
+ const { name, columns, uniqueConstraints, indexes } = table;
+
+ const composites: string[][] = Object.values(table.compositePrimaryKeys).map(
+ (it) => SQLiteSquasher.unsquashPK(it),
+ );
+
+ const references: string[] = Object.values(table.foreignKeys);
+ const fks = references.map((it) =>
+ action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it)
+ );
+
+ const statements: (JsonRecreateTableStatement | JsonCreateIndexStatement)[] = [
+ {
+ type: 'recreate_table',
+ tableName: name,
+ columns: Object.values(columns),
+ compositePKs: composites,
+ referenceData: fks,
+ uniqueConstraints: Object.values(uniqueConstraints),
+ },
+ ];
+
+ if (Object.keys(indexes).length) {
+ statements.push(...prepareCreateIndexesJson(name, '', indexes));
+ }
+ return statements;
+};
+
+export const prepareSQLiteRecreateTable = (
+ table: SQLiteSchemaSquashed['tables'][keyof SQLiteSchemaSquashed['tables']],
+ action?: 'push',
+): JsonStatement[] => {
+ const { name, columns, uniqueConstraints, indexes } = table;
+
+ const composites: string[][] = Object.values(table.compositePrimaryKeys).map(
+ (it) => SQLiteSquasher.unsquashPK(it),
+ );
+
+ const references: string[] = Object.values(table.foreignKeys);
+ const fks = references.map((it) =>
+ action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it)
+ );
+
+ const statements: JsonStatement[] = [
+ {
+ type: 'recreate_table',
+ tableName: name,
+ columns: Object.values(columns),
+ compositePKs: composites,
+ referenceData: fks,
+ uniqueConstraints: Object.values(uniqueConstraints),
+ },
+ ];
+
+ if (Object.keys(indexes).length) {
+ statements.push(...prepareCreateIndexesJson(name, '', indexes));
+ }
+ return statements;
+};
+
+export const libSQLCombineStatements = (
+ statements: JsonStatement[],
+ json2: SQLiteSchemaSquashed,
+ action?: 'push',
+) => {
+ // const tablesContext: Record<string, string[]> = {};
+ const newStatements: Record<string, JsonStatement[]> = {};
+ for (const statement of statements) {
+ if (
+ statement.type === 'alter_table_alter_column_drop_autoincrement'
+ || statement.type === 'alter_table_alter_column_set_autoincrement'
+ || statement.type === 'alter_table_alter_column_drop_pk'
+ || statement.type === 'alter_table_alter_column_set_pk'
+ || statement.type === 'create_composite_pk'
+ || statement.type === 'alter_composite_pk'
+ || statement.type === 'delete_composite_pk'
+ ) {
+ const tableName = statement.tableName;
+
+ const statementsForTable = newStatements[tableName];
+
+ if (!statementsForTable) {
+ newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+
+ continue;
+ }
+
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ const wasRename = statementsForTable.some(({ type }) => type === 'rename_table');
+ const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+
+ if (wasRename) {
+ newStatements[tableName].push(...preparedStatements);
+ } else {
+ newStatements[tableName] = preparedStatements;
+ }
+
+ continue;
+ }
+
+ continue;
+ }
+
+ if (
+ statement.type === 'alter_table_alter_column_set_type'
+ || statement.type === 'alter_table_alter_column_drop_notnull'
+ || statement.type === 'alter_table_alter_column_set_notnull'
+ || statement.type === 'alter_table_alter_column_set_default'
+ || statement.type === 'alter_table_alter_column_drop_default'
+ ) {
+ const { tableName, columnName, columnPk } = statement;
+
+ // const columnIsPartOfUniqueIndex = Object.values(
+ // json2.tables[tableName].indexes,
+ // ).some((it) => {
+ // const unsquashIndex = SQLiteSquasher.unsquashIdx(it);
+
+ // return (
+ // unsquashIndex.columns.includes(columnName) && unsquashIndex.isUnique
+ // );
+ // });
+
+ const columnIsPartOfForeignKey = Object.values(
+ json2.tables[tableName].foreignKeys,
+ ).some((it) => {
+ const unsquashFk = action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it);
+
+ return (
+ unsquashFk.columnsFrom.includes(columnName)
+ );
+ });
+
+ const statementsForTable = newStatements[tableName];
+
+ if (
+ !statementsForTable && (columnIsPartOfForeignKey || columnPk)
+ ) {
+ newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+ continue;
+ }
+
+ if (
+ statementsForTable && (columnIsPartOfForeignKey || columnPk)
+ ) {
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ const wasRename = statementsForTable.some(({ type }) => type === 'rename_table');
+ const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+
+ if (wasRename) {
+ newStatements[tableName].push(...preparedStatements);
+ } else {
+ newStatements[tableName] = preparedStatements;
+ }
+ }
+ continue;
+ }
+ if (
+ statementsForTable && !(columnIsPartOfForeignKey || columnPk)
+ ) {
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ newStatements[tableName].push(statement);
+ }
+ continue;
+ }
+
+ newStatements[tableName] = [statement];
+
+ continue;
+ }
+
+ if (statement.type === 'create_reference') {
+ const tableName = statement.tableName;
+
+ const data = action === 'push'
+ ? SQLiteSquasher.unsquashPushFK(statement.data)
+ : SQLiteSquasher.unsquashFK(statement.data);
+
+ const statementsForTable = newStatements[tableName];
+
+ if (!statementsForTable) {
+ newStatements[tableName] = statement.isMulticolumn
+ ? prepareLibSQLRecreateTable(json2.tables[tableName], action)
+ : [statement];
+
+ continue;
+ }
+
+ // if add column with reference -> skip create_reference statement
+ if (
+ !statement.isMulticolumn
+ && statementsForTable.some((st) =>
+ st.type === 'sqlite_alter_table_add_column' && st.column.name === data.columnsFrom[0]
+ )
+ ) {
+ continue;
+ }
+
+ if (statement.isMulticolumn) {
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ const wasRename = statementsForTable.some(({ type }) => type === 'rename_table');
+ const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+
+ if (wasRename) {
+ newStatements[tableName].push(...preparedStatements);
+ } else {
+ newStatements[tableName] = preparedStatements;
+ }
+
+ continue;
+ }
+
+ continue;
+ }
+
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ newStatements[tableName].push(statement);
+ }
+
+ continue;
+ }
+
+ if (statement.type === 'delete_reference') {
+ const tableName = statement.tableName;
+
+ const statementsForTable = newStatements[tableName];
+
+ if (!statementsForTable) {
+ newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+ continue;
+ }
+
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ const wasRename = statementsForTable.some(({ type }) => type === 'rename_table');
+ const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+
+ if (wasRename) {
+ newStatements[tableName].push(...preparedStatements);
+ } else {
+ newStatements[tableName] = preparedStatements;
+ }
+
+ continue;
+ }
+
+ continue;
+ }
+
+ if (statement.type === 'sqlite_alter_table_add_column' && statement.column.primaryKey) {
+ const tableName = statement.tableName;
+
+ const statementsForTable = newStatements[tableName];
+
+ if (!statementsForTable) {
+ newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+ continue;
+ }
+
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ const wasRename = statementsForTable.some(({ type }) => type === 'rename_table');
+ const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+
+ if (wasRename) {
+ newStatements[tableName].push(...preparedStatements);
+ } else {
+ newStatements[tableName] = preparedStatements;
+ }
+
+ continue;
+ }
+
+ continue;
+ }
+
+ const tableName = statement.type === 'rename_table'
+ ? statement.tableNameTo
+ : (statement as { tableName: string }).tableName;
+ const statementsForTable = newStatements[tableName];
+
+ if (!statementsForTable) {
+ newStatements[tableName] = [statement];
+ continue;
+ }
+
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ newStatements[tableName].push(statement);
+ }
+ }
+
+ const combinedStatements = Object.values(newStatements).flat();
+ const renamedTables = combinedStatements.filter((it) => it.type === 'rename_table');
+ const renamedColumns = combinedStatements.filter((it) => it.type === 'alter_table_rename_column');
+
+ const rest = combinedStatements.filter((it) => it.type !== 'rename_table' && it.type !== 'alter_table_rename_column');
+
+ return [...renamedTables, ...renamedColumns, ...rest];
+};
+
+export const sqliteCombineStatements = (
+ statements: JsonStatement[],
+ json2: SQLiteSchemaSquashed,
+ action?: 'push',
+) => {
+ // const tablesContext: Record<string, string[]> = {};
+ const newStatements: Record<string, JsonStatement[]> = {};
+ for (const statement of statements) {
+ if (
+ statement.type === 'alter_table_alter_column_set_type'
+ || statement.type === 'alter_table_alter_column_set_default'
+ || statement.type === 'alter_table_alter_column_drop_default'
+ || statement.type === 'alter_table_alter_column_set_notnull'
+ || statement.type === 'alter_table_alter_column_drop_notnull'
+ || statement.type === 'alter_table_alter_column_drop_autoincrement'
+ || statement.type === 'alter_table_alter_column_set_autoincrement'
+ || statement.type === 'alter_table_alter_column_drop_pk'
+ || statement.type === 'alter_table_alter_column_set_pk'
+ || statement.type === 'delete_reference'
+ || statement.type === 'alter_reference'
+ || statement.type === 'create_composite_pk'
+ || statement.type === 'alter_composite_pk'
+ || statement.type === 'delete_composite_pk'
+ || statement.type === 'create_unique_constraint'
+ || statement.type === 'delete_unique_constraint'
+ ) {
+ const tableName = statement.tableName;
+
+ const statementsForTable = newStatements[tableName];
+
+ if (!statementsForTable) {
+ newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+ continue;
+ }
+
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ const wasRename = statementsForTable.some(({ type }) => type === 'rename_table');
+ const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+
+ if (wasRename) {
+ newStatements[tableName].push(...preparedStatements);
+ } else {
+ newStatements[tableName] = preparedStatements;
+ }
+
+ continue;
+ }
+
+ continue;
+ }
+
+ if (statement.type === 'sqlite_alter_table_add_column' && statement.column.primaryKey) {
+ const tableName = statement.tableName;
+
+ const statementsForTable = newStatements[tableName];
+
+ if (!statementsForTable) {
+ newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+ continue;
+ }
+
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ const wasRename = statementsForTable.some(({ type }) => type === 'rename_table');
+ const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+
+ if (wasRename) {
+ newStatements[tableName].push(...preparedStatements);
+ } else {
+ newStatements[tableName] = preparedStatements;
+ }
+
+ continue;
+ }
+
+ continue;
+ }
+
+ if (statement.type === 'create_reference') {
+ const tableName = statement.tableName;
+
+ const data = action === 'push'
+ ? SQLiteSquasher.unsquashPushFK(statement.data)
+ : SQLiteSquasher.unsquashFK(statement.data);
+ const statementsForTable = newStatements[tableName];
+
+ if (!statementsForTable) {
+ newStatements[tableName] = prepareSQLiteRecreateTable(json2.tables[tableName], action);
+ continue;
+ }
+
+ // if add column with reference -> skip create_reference statement
+ if (
+ data.columnsFrom.length === 1
+ && statementsForTable.some((st) =>
+ st.type === 'sqlite_alter_table_add_column' && st.column.name === data.columnsFrom[0]
+ )
+ ) {
+ continue;
+ }
+
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ const wasRename = statementsForTable.some(({ type }) => type === 'rename_table');
+ const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action);
+
+ if (wasRename) {
+ newStatements[tableName].push(...preparedStatements);
+ } else {
+ newStatements[tableName] = preparedStatements;
+ }
+
+ continue;
+ }
+
+ continue;
+ }
+
+ const tableName = statement.type === 'rename_table'
+ ? statement.tableNameTo
+ : (statement as { tableName: string }).tableName;
+
+ const statementsForTable = newStatements[tableName];
+
+ if (!statementsForTable) {
+ newStatements[tableName] = [statement];
+ continue;
+ }
+
+ if (!statementsForTable.some(({ type }) => type === 'recreate_table')) {
+ newStatements[tableName].push(statement);
+ }
+ }
+
+ const combinedStatements = Object.values(newStatements).flat();
+
+ const renamedTables = combinedStatements.filter((it) => it.type === 'rename_table');
+ const renamedColumns = combinedStatements.filter((it) => it.type === 'alter_table_rename_column');
+
+ const rest = combinedStatements.filter((it) => it.type !== 'rename_table' && it.type !== 'alter_table_rename_column');
+
+ return [...renamedTables, ...renamedColumns, ...rest];
+};
diff --git a/drizzle-kit/src/utils.ts b/drizzle-kit/src/utils.ts
index b14bad5b2..927e0ff51 100644
--- a/drizzle-kit/src/utils.ts
+++ b/drizzle-kit/src/utils.ts
@@ -1,9 +1,11 @@
import type { RunResult } from 'better-sqlite3';
import chalk from 'chalk';
+import { toCamelCase, toSnakeCase } from 'drizzle-orm/casing';
import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'fs';
import { join } from 'path';
import { parse } from 'url';
import type { NamedWithSchema } from './cli/commands/migrate';
+import { CasingType } from './cli/validations/common';
import { info } from './cli/views';
import { assertUnreachable, snapshotVersion } from './global';
import type { Dialect } from './schemaValidator';
@@ -26,9 +28,12 @@ export type DB = {
export type SQLiteDB = {
query: <T>(sql: string, params?: any[]) => Promise<T[]>;
run(query: string): Promise<void>;
- batch?(
- queries: { query: string; values?: any[] | undefined }[],
- ): Promise<any>;
+};
+
+export type LibSQLDB = {
+ query: <T>(sql: string, params?: any[]) => Promise<T[]>;
+ run(query: string): Promise<void>;
+ batchWithPragma?(queries: string[]): Promise<void>;
};
export const copy = <T>(it: T): T => {
@@ -116,6 +121,8 @@ const validatorForDialect = (dialect: Dialect) => {
return { validator: backwardCompatiblePgSchema, version: 7 };
case 'sqlite':
return { validator: backwardCompatibleSqliteSchema, version: 6 };
+ case 'turso':
+ return { validator: backwardCompatibleSqliteSchema, version: 6 };
case 'mysql':
return { validator: backwardCompatibleMysqlSchema, version: 5 };
case 'singlestore':
@@ -344,3 +351,25 @@ export const normalisePGliteUrl = (
export function isPgArrayType(sqlType: string) {
return sqlType.match(/.*\[\d*\].*|.*\[\].*/g) !== null;
}
+
+export function findAddedAndRemoved(columnNames1: string[], columnNames2: string[]) {
+ const set1 = new Set(columnNames1);
+ const set2 = new Set(columnNames2);
+
+ const addedColumns = columnNames2.filter((it) => !set1.has(it));
+ const removedColumns = columnNames1.filter((it) => !set2.has(it));
+
+ return { addedColumns, removedColumns };
+}
+
+export function getColumnCasing(
+ column: { keyAsName: boolean; name: string | undefined },
+ casing: CasingType | undefined,
+) {
+ if (!column.name) return '';
+ return !column.keyAsName || casing === undefined
+ ? column.name
+ : casing === 'camelCase'
+ ? toCamelCase(column.name)
+ : toSnakeCase(column.name);
+}
diff --git a/drizzle-kit/tests/cli-generate.test.ts b/drizzle-kit/tests/cli-generate.test.ts
index 3e5c0fc22..6c8cae09e 100644
--- a/drizzle-kit/tests/cli-generate.test.ts
+++ b/drizzle-kit/tests/cli-generate.test.ts
@@ -38,6 +38,7 @@ test('generate #1', async (t) => {
schema: 'schema.ts',
out: 'drizzle',
bundle: false,
+ casing: undefined,
});
});
@@ -57,11 +58,13 @@ test('generate #2', async (t) => {
schema: 'schema.ts',
out: 'out',
bundle: false,
+ casing: undefined,
});
});
test('generate #3', async (t) => {
const res = await brotest(generate, '');
+
if (res.type !== 'handler') assert.fail(res.type, 'handler');
expect(res.options).toStrictEqual({
dialect: 'postgresql',
@@ -72,6 +75,7 @@ test('generate #3', async (t) => {
schema: './schema.ts',
out: 'drizzle',
bundle: false,
+ casing: undefined,
});
});
@@ -89,6 +93,7 @@ test('generate #4', async (t) => {
schema: './schema.ts',
out: 'drizzle',
bundle: false,
+ casing: undefined,
});
});
@@ -105,6 +110,7 @@ test('generate #5', async (t) => {
schema: './schema.ts',
out: 'drizzle',
bundle: false,
+ casing: undefined,
});
});
@@ -121,6 +127,7 @@ test('generate #6', async (t) => {
schema: './schema.ts',
out: 'drizzle',
bundle: false,
+ casing: undefined,
});
});
@@ -140,6 +147,7 @@ test('generate #7', async (t) => {
schema: './schema.ts',
out: 'drizzle',
bundle: false,
+ casing: undefined,
});
});
@@ -157,6 +165,7 @@ test('generate #8', async (t) => {
schema: './schema.ts',
out: 'drizzle',
bundle: true, // expo driver
+ casing: undefined,
});
});
@@ -177,6 +186,7 @@ test('generate #9', async (t) => {
schema: 'schema.ts',
out: 'out',
bundle: false,
+ casing: undefined,
});
});
diff --git a/drizzle-kit/tests/cli-migrate.test.ts b/drizzle-kit/tests/cli-migrate.test.ts
index a4ffec2f0..1425691f0 100644
--- a/drizzle-kit/tests/cli-migrate.test.ts
+++ b/drizzle-kit/tests/cli-migrate.test.ts
@@ -31,11 +31,10 @@ test('migrate #2', async (t) => {
const res = await brotest(migrate, '--config=turso.config.ts');
if (res.type !== 'handler') assert.fail(res.type, 'handler');
expect(res.options).toStrictEqual({
- dialect: 'sqlite',
+ dialect: 'turso',
out: 'drizzle',
credentials: {
authToken: 'token',
- driver: 'turso',
url: 'turso.dev',
},
schema: undefined, // drizzle migrations table schema
diff --git a/drizzle-kit/tests/cli-push.test.ts b/drizzle-kit/tests/cli-push.test.ts
index 1a4bde66d..e6f26eeb5 100644
--- a/drizzle-kit/tests/cli-push.test.ts
+++ b/drizzle-kit/tests/cli-push.test.ts
@@ -27,6 +27,7 @@ test('push #1', async (t) => {
tablesFilter: [],
strict: false,
verbose: false,
+ casing: undefined,
});
});
@@ -34,10 +35,9 @@ test('push #2', async (t) => {
const res = await brotest(push, '--config=turso.config.ts');
if (res.type !== 'handler') assert.fail(res.type, 'handler');
expect(res.options).toStrictEqual({
- dialect: 'sqlite',
+ dialect: 'turso',
credentials: {
authToken: 'token',
- driver: 'turso',
url: 'turso.dev',
},
force: false,
@@ -46,6 +46,7 @@ test('push #2', async (t) => {
tablesFilter: [],
strict: false,
verbose: false,
+ casing: undefined,
});
});
@@ -66,6 +67,7 @@ test('push #3', async (t) => {
tablesFilter: [],
strict: false,
verbose: false,
+ casing: undefined,
});
});
@@ -87,6 +89,7 @@ test('push #4', async (t) => {
tablesFilter: [],
strict: false,
verbose: false,
+ casing: undefined,
});
});
@@ -109,6 +112,7 @@ test('push #5', async (t) => {
strict: false,
force: false,
verbose: false,
+ casing: undefined,
});
});
diff --git a/drizzle-kit/tests/cli/turso.config.ts b/drizzle-kit/tests/cli/turso.config.ts
index 089e4d216..85efe5934 100644
--- a/drizzle-kit/tests/cli/turso.config.ts
+++ b/drizzle-kit/tests/cli/turso.config.ts
@@ -2,8 +2,7 @@ import { defineConfig } from '../../src';
export default defineConfig({
schema: './schema.ts',
- dialect: 'sqlite',
- driver: 'turso',
+ dialect: 'turso',
dbCredentials: {
url: 'turso.dev',
authToken: 'token',
diff --git a/drizzle-kit/tests/libsql-statements.test.ts b/drizzle-kit/tests/libsql-statements.test.ts
new file mode 100644
index 000000000..8221e52e0
--- /dev/null
+++ b/drizzle-kit/tests/libsql-statements.test.ts
@@ -0,0 +1,982 @@
+import { foreignKey, index, int, integer, sqliteTable, text, uniqueIndex } from 'drizzle-orm/sqlite-core';
+import { JsonRecreateTableStatement } from 'src/jsonStatements';
+import { expect, test } from 'vitest';
+import { diffTestSchemasLibSQL } from './schemaDiffer';
+
+test('drop autoincrement', async (t) => {
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: false }),
+ }),
+ };
+
+ const { statements } = await diffTestSchemasLibSQL(schema1, schema2, []);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ columns: [{
+ autoincrement: false,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ }],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+});
+
+test('set autoincrement', async (t) => {
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: false }),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ }),
+ };
+
+ const { statements } = await diffTestSchemasLibSQL(schema1, schema2, []);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ columns: [{
+ autoincrement: true,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ }],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+});
+
+test('set not null', async (t) => {
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'alter_table_alter_column_set_notnull',
+ tableName: 'users',
+ columnName: 'name',
+ schema: '',
+ newDataType: 'text',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: true,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL;`,
+ );
+});
+
+test('drop not null', async (t) => {
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'alter_table_alter_column_drop_notnull',
+ tableName: 'users',
+ columnName: 'name',
+ schema: '',
+ newDataType: 'text',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`,
+ );
+});
+
+test('set default. set not null. add column', async (t) => {
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull().default('name'),
+ age: int('age').notNull(),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(3);
+ expect(statements[0]).toStrictEqual({
+ type: 'alter_table_alter_column_set_default',
+ tableName: 'users',
+ columnName: 'name',
+ newDefaultValue: "'name'",
+ schema: '',
+ newDataType: 'text',
+ columnOnUpdate: undefined,
+ columnNotNull: true,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+ expect(statements[1]).toStrictEqual({
+ type: 'alter_table_alter_column_set_notnull',
+ tableName: 'users',
+ columnName: 'name',
+ schema: '',
+ newDataType: 'text',
+ columnDefault: "'name'",
+ columnOnUpdate: undefined,
+ columnNotNull: true,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+ expect(statements[2]).toStrictEqual({
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'users',
+ referenceData: undefined,
+ column: {
+ name: 'age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ });
+
+ expect(sqlStatements.length).toBe(2);
+ expect(sqlStatements[0]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL DEFAULT 'name';`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `ALTER TABLE \`users\` ADD \`age\` integer NOT NULL;`,
+ );
+});
+
+test('drop default. drop not null', async (t) => {
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull().default('name'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(2);
+ expect(statements[0]).toStrictEqual({
+ type: 'alter_table_alter_column_drop_default',
+ tableName: 'users',
+ columnName: 'name',
+ schema: '',
+ newDataType: 'text',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+ expect(statements[1]).toStrictEqual({
+ type: 'alter_table_alter_column_drop_notnull',
+ tableName: 'users',
+ columnName: 'name',
+ schema: '',
+ newDataType: 'text',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`,
+ );
+});
+
+test('set data type. set default', async (t) => {
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: int('name').default(123),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(2);
+ expect(statements[0]).toStrictEqual({
+ type: 'alter_table_alter_column_set_type',
+ tableName: 'users',
+ columnName: 'name',
+ newDataType: 'integer',
+ oldDataType: 'text',
+ schema: '',
+ columnDefault: 123,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+ expect(statements[1]).toStrictEqual({
+ type: 'alter_table_alter_column_set_default',
+ tableName: 'users',
+ columnName: 'name',
+ schema: '',
+ newDataType: 'integer',
+ newDefaultValue: 123,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" integer DEFAULT 123;`,
+ );
+});
+
+test('add foriegn key', async (t) => {
+ const schema = {
+ table: sqliteTable('table', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ tableId: int('table_id'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ tableId: int('table_id').references(() => schema.table.id),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'create_reference',
+ tableName: 'users',
+ data: 'users_table_id_table_id_fk;users;table_id;table;id;no action;no action',
+ schema: '',
+ columnNotNull: false,
+ columnDefault: undefined,
+ columnType: 'integer',
+ });
+
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "table_id" TO "table_id" integer REFERENCES table(id) ON DELETE no action ON UPDATE no action;`,
+ );
+});
+
+test('drop foriegn key', async (t) => {
+ const schema = {
+ table: sqliteTable('table', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ tableId: int('table_id').references(() => schema.table.id, {
+ onDelete: 'cascade',
+ }),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ tableId: int('table_id'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: true,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'table_id',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(6);
+ expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`);
+ expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+\t\`table_id\` integer
+);\n`);
+ expect(sqlStatements[2]).toBe(
+ `INSERT INTO \`__new_users\`("id", "table_id") SELECT "id", "table_id" FROM \`users\`;`,
+ );
+ expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements[4]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+ expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
+});
+
+test('alter foriegn key', async (t) => {
+ const tableRef = sqliteTable('table', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ });
+ const tableRef2 = sqliteTable('table2', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ });
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ tableId: int('table_id').references(() => tableRef.id, {
+ onDelete: 'cascade',
+ }),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ tableId: int('table_id').references(() => tableRef2.id),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: true,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'table_id',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [
+ {
+ columnsFrom: ['table_id'],
+ columnsTo: ['id'],
+ name: 'users_table_id_table2_id_fk',
+ onDelete: 'no action',
+ onUpdate: 'no action',
+ tableFrom: 'users',
+ tableTo: 'table2',
+ },
+ ],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(6);
+ expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`);
+ expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+\t\`table_id\` integer,
+\tFOREIGN KEY (\`table_id\`) REFERENCES \`table2\`(\`id\`) ON UPDATE no action ON DELETE no action
+);\n`);
+ expect(sqlStatements[2]).toBe(
+ `INSERT INTO \`__new_users\`("id", "table_id") SELECT "id", "table_id" FROM \`users\`;`,
+ );
+ expect(sqlStatements[3]).toBe(
+ 'DROP TABLE `users`;',
+ );
+ expect(sqlStatements[4]).toBe(
+ 'ALTER TABLE `__new_users` RENAME TO `users`;',
+ );
+ expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
+});
+
+test('add foreign key for multiple columns', async (t) => {
+ const tableRef = sqliteTable('table', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ age: int('age'),
+ age1: int('age_1'),
+ });
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ column: int('column'),
+ column1: int('column_1'),
+ }),
+ tableRef,
+ };
+
+ const schema2 = {
+ tableRef,
+ users: sqliteTable(
+ 'users',
+ {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ column: int('column'),
+ column1: int('column_1'),
+ },
+ (table) => ({
+ foreignKey: foreignKey({
+ columns: [table.column, table.column1],
+ foreignColumns: [tableRef.age, tableRef.age1],
+ }),
+ }),
+ ),
+ };
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: true,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'column',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'column_1',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [
+ {
+ columnsFrom: ['column', 'column_1'],
+ columnsTo: ['age', 'age_1'],
+ name: 'users_column_column_1_table_age_age_1_fk',
+ onDelete: 'no action',
+ onUpdate: 'no action',
+ tableFrom: 'users',
+ tableTo: 'table',
+ },
+ ],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ } as JsonRecreateTableStatement);
+
+ expect(sqlStatements.length).toBe(6);
+ expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`);
+ expect(sqlStatements[1]).toBe(
+ `CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+\t\`column\` integer,
+\t\`column_1\` integer,
+\tFOREIGN KEY (\`column\`,\`column_1\`) REFERENCES \`table\`(\`age\`,\`age_1\`) ON UPDATE no action ON DELETE no action
+);\n`,
+ );
+ expect(sqlStatements[2]).toBe(
+ `INSERT INTO \`__new_users\`("id", "column", "column_1") SELECT "id", "column", "column_1" FROM \`users\`;`,
+ );
+ expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements[4]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+ expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
+});
+
+test('drop foreign key for multiple columns', async (t) => {
+ const tableRef = sqliteTable('table', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ age: int('age'),
+ age1: int('age_1'),
+ });
+
+ const schema1 = {
+ users: sqliteTable(
+ 'users',
+ {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ column: int('column'),
+ column1: int('column_1'),
+ },
+ (table) => ({
+ foreignKey: foreignKey({
+ columns: [table.column, table.column1],
+ foreignColumns: [tableRef.age, tableRef.age1],
+ }),
+ }),
+ ),
+ tableRef,
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ column: int('column'),
+ column1: int('column_1'),
+ }),
+ tableRef,
+ };
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: true,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'column',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'column_1',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(6);
+ expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`);
+ expect(sqlStatements[1]).toBe(
+ `CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+\t\`column\` integer,
+\t\`column_1\` integer
+);\n`,
+ );
+ expect(sqlStatements[2]).toBe(
+ `INSERT INTO \`__new_users\`("id", "column", "column_1") SELECT "id", "column", "column_1" FROM \`users\`;`,
+ );
+ expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements[4]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+ expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
+});
+
+test('alter column drop generated', async (t) => {
+ const from = {
+ users: sqliteTable('table', {
+ id: int('id').primaryKey().notNull(),
+ name: text('name').generatedAlwaysAs('drizzle is the best').notNull(),
+ }),
+ };
+
+ const to = {
+ users: sqliteTable('table', {
+ id: int('id').primaryKey().notNull(),
+ name: text('name').notNull(),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: undefined,
+ columnName: 'name',
+ columnNotNull: true,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'table',
+ type: 'alter_table_alter_column_drop_generated',
+ });
+
+ expect(sqlStatements.length).toBe(2);
+ expect(sqlStatements[0]).toBe(`ALTER TABLE \`table\` DROP COLUMN \`name\`;`);
+ expect(sqlStatements[1]).toBe(
+ `ALTER TABLE \`table\` ADD \`name\` text NOT NULL;`,
+ );
+});
+
+test('recreate table with nested references', async (t) => {
+ let users = sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ age: integer('age'),
+ });
+ let subscriptions = sqliteTable('subscriptions', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ userId: integer('user_id').references(() => users.id),
+ customerId: text('customer_id'),
+ });
+ const schema1 = {
+ users: users,
+ subscriptions: subscriptions,
+ subscriptionMetadata: sqliteTable('subscriptions_metadata', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ subscriptionId: text('subscription_id').references(
+ () => subscriptions.id,
+ ),
+ }),
+ };
+
+ users = sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: false }),
+ name: text('name'),
+ age: integer('age'),
+ });
+ const schema2 = {
+ users: users,
+ subscriptions: subscriptions,
+ subscriptionMetadata: sqliteTable('subscriptions_metadata', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ subscriptionId: text('subscription_id').references(
+ () => subscriptions.id,
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'age',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(6);
+ expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`);
+ expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY NOT NULL,
+\t\`name\` text,
+\t\`age\` integer
+);\n`);
+ expect(sqlStatements[2]).toBe(
+ `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`,
+ );
+ expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements[4]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+ expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
+});
+
+test('set not null with index', async (t) => {
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }, (table) => ({
+ someIndex: index('users_name_index').on(table.name),
+ })),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ }, (table) => ({
+ someIndex: index('users_name_index').on(table.name),
+ })),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'alter_table_alter_column_set_notnull',
+ tableName: 'users',
+ columnName: 'name',
+ schema: '',
+ newDataType: 'text',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: true,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+
+ expect(sqlStatements.length).toBe(3);
+ expect(sqlStatements[0]).toBe(
+ `DROP INDEX IF EXISTS "users_name_index";`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL;`,
+ );
+ expect(sqlStatements[2]).toBe(
+ `CREATE INDEX \`users_name_index\` ON \`users\` (\`name\`);`,
+ );
+});
+
+test('drop not null with two indexes', async (t) => {
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ age: int('age').notNull(),
+ }, (table) => ({
+ someUniqeIndex: uniqueIndex('users_name_unique').on(table.name),
+ someIndex: index('users_age_index').on(table.age),
+ })),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ age: int('age').notNull(),
+ }, (table) => ({
+ someUniqeIndex: uniqueIndex('users_name_unique').on(table.name),
+ someIndex: index('users_age_index').on(table.age),
+ })),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasLibSQL(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'alter_table_alter_column_drop_notnull',
+ tableName: 'users',
+ columnName: 'name',
+ schema: '',
+ newDataType: 'text',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+
+ expect(sqlStatements.length).toBe(5);
+ expect(sqlStatements[0]).toBe(
+ `DROP INDEX IF EXISTS "users_name_unique";`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `DROP INDEX IF EXISTS "users_age_index";`,
+ );
+ expect(sqlStatements[2]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`,
+ );
+ expect(sqlStatements[3]).toBe(
+ `CREATE UNIQUE INDEX \`users_name_unique\` ON \`users\` (\`name\`);`,
+ );
+ expect(sqlStatements[4]).toBe(
+ `CREATE INDEX \`users_age_index\` ON \`users\` (\`age\`);`,
+ );
+});
diff --git a/drizzle-kit/tests/migrate/libsq-schema.ts b/drizzle-kit/tests/migrate/libsq-schema.ts
new file mode 100644
index 000000000..5cb344d51
--- /dev/null
+++ b/drizzle-kit/tests/migrate/libsq-schema.ts
@@ -0,0 +1,6 @@
+import { integer, sqliteTable, text } from 'drizzle-orm/sqlite-core';
+
+export const users = sqliteTable('users', {
+ id: integer('id').primaryKey().notNull(),
+ name: text('name').notNull(),
+});
diff --git a/drizzle-kit/tests/migrate/libsql-migrate.test.ts b/drizzle-kit/tests/migrate/libsql-migrate.test.ts
new file mode 100644
index 000000000..b937b644f
--- /dev/null
+++ b/drizzle-kit/tests/migrate/libsql-migrate.test.ts
@@ -0,0 +1,58 @@
+import { createClient } from '@libsql/client';
+import { connectToLibSQL } from 'src/cli/connections';
+import { expect, test } from 'vitest';
+
+test('validate migrate function', async () => {
+ const credentials = {
+ url: ':memory:',
+ };
+ const { migrate, query } = await connectToLibSQL(credentials);
+
+ await migrate({ migrationsFolder: 'tests/migrate/migrations' });
+
+ const res = await query(`PRAGMA table_info("users");`);
+
+ expect(res).toStrictEqual([{
+ cid: 0,
+ name: 'id',
+ type: 'INTEGER',
+ notnull: 0,
+ dflt_value: null,
+ pk: 0,
+ }, {
+ cid: 1,
+ name: 'name',
+ type: 'INTEGER',
+ notnull: 1,
+ dflt_value: null,
+ pk: 0,
+ }]);
+});
+
+// test('validate migrate function with remote url and authToken', async () => {
+// const credentials = {
+// url: '',
+// authToken: '',
+// };
+// const { migrate, query } = await connectToLibSQL(credentials);
+
+// await migrate({ migrationsFolder: 'tests/migrate/migrations' });
+
+// const res = await query(`PRAGMA table_info("users");`);
+
+// expect(res).toStrictEqual([{
+// cid: 0,
+// name: 'id',
+// type: 'INTEGER',
+// notnull: 0,
+// dflt_value: null,
+// pk: 0,
+// }, {
+// cid: 1,
+// name: 'name',
+// type: 'INTEGER',
+// notnull: 1,
+// dflt_value: null,
+// pk: 0,
+// }]);
+// });
diff --git a/drizzle-kit/tests/migrate/migrations/0000_little_blizzard.sql b/drizzle-kit/tests/migrate/migrations/0000_little_blizzard.sql
new file mode 100644
index 000000000..9de0a139d
--- /dev/null
+++ b/drizzle-kit/tests/migrate/migrations/0000_little_blizzard.sql
@@ -0,0 +1,4 @@
+CREATE TABLE `users` (
+ `id` integer PRIMARY KEY NOT NULL,
+ `name` text NOT NULL
+);
diff --git a/drizzle-kit/tests/migrate/migrations/0001_nebulous_storm.sql b/drizzle-kit/tests/migrate/migrations/0001_nebulous_storm.sql
new file mode 100644
index 000000000..4309a05c2
--- /dev/null
+++ b/drizzle-kit/tests/migrate/migrations/0001_nebulous_storm.sql
@@ -0,0 +1,10 @@
+PRAGMA foreign_keys=OFF;--> statement-breakpoint
+CREATE TABLE `__new_users` (
+ `id` integer,
+ `name` integer NOT NULL
+);
+--> statement-breakpoint
+INSERT INTO `__new_users`("id", "name") SELECT "id", "name" FROM `users`;--> statement-breakpoint
+DROP TABLE `users`;--> statement-breakpoint
+ALTER TABLE `__new_users` RENAME TO `users`;--> statement-breakpoint
+PRAGMA foreign_keys=ON;
\ No newline at end of file
diff --git a/drizzle-kit/tests/migrate/migrations/meta/0000_snapshot.json b/drizzle-kit/tests/migrate/migrations/meta/0000_snapshot.json
new file mode 100644
index 000000000..599d02b91
--- /dev/null
+++ b/drizzle-kit/tests/migrate/migrations/meta/0000_snapshot.json
@@ -0,0 +1,40 @@
+{
+ "version": "6",
+ "dialect": "sqlite",
+ "id": "2bd46776-9e41-4a6c-b617-5c600bb176f2",
+ "prevId": "00000000-0000-0000-0000-000000000000",
+ "tables": {
+ "users": {
+ "name": "users",
+ "columns": {
+ "id": {
+ "name": "id",
+ "type": "integer",
+ "primaryKey": true,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "name": {
+ "name": "name",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ }
+ },
+ "indexes": {},
+ "foreignKeys": {},
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {}
+ }
+ },
+ "enums": {},
+ "_meta": {
+ "schemas": {},
+ "tables": {},
+ "columns": {}
+ },
+ "internal": {
+ "indexes": {}
+ }
+}
\ No newline at end of file
diff --git a/drizzle-kit/tests/migrate/migrations/meta/0001_snapshot.json b/drizzle-kit/tests/migrate/migrations/meta/0001_snapshot.json
new file mode 100644
index 000000000..e3b26ba14
--- /dev/null
+++ b/drizzle-kit/tests/migrate/migrations/meta/0001_snapshot.json
@@ -0,0 +1,40 @@
+{
+ "version": "6",
+ "dialect": "sqlite",
+ "id": "6c0ec455-42fd-47fd-a22c-4bb4551e1358",
+ "prevId": "2bd46776-9e41-4a6c-b617-5c600bb176f2",
+ "tables": {
+ "users": {
+ "name": "users",
+ "columns": {
+ "id": {
+ "name": "id",
+ "type": "integer",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "name": {
+ "name": "name",
+ "type": "integer",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ }
+ },
+ "indexes": {},
+ "foreignKeys": {},
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {}
+ }
+ },
+ "enums": {},
+ "_meta": {
+ "schemas": {},
+ "tables": {},
+ "columns": {}
+ },
+ "internal": {
+ "indexes": {}
+ }
+}
\ No newline at end of file
diff --git a/drizzle-kit/tests/migrate/migrations/meta/_journal.json b/drizzle-kit/tests/migrate/migrations/meta/_journal.json
new file mode 100644
index 000000000..c836eb194
--- /dev/null
+++ b/drizzle-kit/tests/migrate/migrations/meta/_journal.json
@@ -0,0 +1,20 @@
+{
+ "version": "7",
+ "dialect": "sqlite",
+ "entries": [
+ {
+ "idx": 0,
+ "version": "6",
+ "when": 1725358702427,
+ "tag": "0000_little_blizzard",
+ "breakpoints": true
+ },
+ {
+ "idx": 1,
+ "version": "6",
+ "when": 1725358713033,
+ "tag": "0001_nebulous_storm",
+ "breakpoints": true
+ }
+ ]
+}
\ No newline at end of file
diff --git a/drizzle-kit/tests/mysql.test.ts b/drizzle-kit/tests/mysql.test.ts
index e7b0b32a5..b7e8cc1cf 100644
--- a/drizzle-kit/tests/mysql.test.ts
+++ b/drizzle-kit/tests/mysql.test.ts
@@ -1,5 +1,17 @@
import { sql } from 'drizzle-orm';
-import { index, json, mysqlSchema, mysqlTable, primaryKey, serial, text, uniqueIndex } from 'drizzle-orm/mysql-core';
+import {
+ foreignKey,
+ index,
+ int,
+ json,
+ mysqlSchema,
+ mysqlTable,
+ primaryKey,
+ serial,
+ text,
+ unique,
+ uniqueIndex,
+} from 'drizzle-orm/mysql-core';
import { expect, test } from 'vitest';
import { diffTestSchemasMysql } from './schemaDiffer';
@@ -555,3 +567,185 @@ test('add table with indexes', async () => {
'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);',
]);
});
+
+test('optional db aliases (snake case)', async () => {
+ const from = {};
+
+ const t1 = mysqlTable(
+ 't1',
+ {
+ t1Id1: int().notNull().primaryKey(),
+ t1Col2: int().notNull(),
+ t1Col3: int().notNull(),
+ t2Ref: int().notNull().references(() => t2.t2Id),
+ t1Uni: int().notNull(),
+ t1UniIdx: int().notNull(),
+ t1Idx: int().notNull(),
+ },
+ (table) => ({
+ uni: unique('t1_uni').on(table.t1Uni),
+ uniIdx: uniqueIndex('t1_uni_idx').on(table.t1UniIdx),
+ idx: index('t1_idx').on(table.t1Idx),
+ fk: foreignKey({
+ columns: [table.t1Col2, table.t1Col3],
+ foreignColumns: [t3.t3Id1, t3.t3Id2],
+ }),
+ }),
+ );
+
+ const t2 = mysqlTable(
+ 't2',
+ {
+ t2Id: serial().primaryKey(),
+ },
+ );
+
+ const t3 = mysqlTable(
+ 't3',
+ {
+ t3Id1: int(),
+ t3Id2: int(),
+ },
+ (table) => ({
+ pk: primaryKey({
+ columns: [table.t3Id1, table.t3Id2],
+ }),
+ }),
+ );
+
+ const to = {
+ t1,
+ t2,
+ t3,
+ };
+
+ const { sqlStatements } = await diffTestSchemasMysql(from, to, [], false, 'snake_case');
+
+ const st1 = `CREATE TABLE \`t1\` (
+ \`t1_id1\` int NOT NULL,
+ \`t1_col2\` int NOT NULL,
+ \`t1_col3\` int NOT NULL,
+ \`t2_ref\` int NOT NULL,
+ \`t1_uni\` int NOT NULL,
+ \`t1_uni_idx\` int NOT NULL,
+ \`t1_idx\` int NOT NULL,
+ CONSTRAINT \`t1_t1_id1\` PRIMARY KEY(\`t1_id1\`),
+ CONSTRAINT \`t1_uni\` UNIQUE(\`t1_uni\`),
+ CONSTRAINT \`t1_uni_idx\` UNIQUE(\`t1_uni_idx\`)
+);
+`;
+
+ const st2 = `CREATE TABLE \`t2\` (
+ \`t2_id\` serial AUTO_INCREMENT NOT NULL,
+ CONSTRAINT \`t2_t2_id\` PRIMARY KEY(\`t2_id\`)
+);
+`;
+
+ const st3 = `CREATE TABLE \`t3\` (
+ \`t3_id1\` int NOT NULL,
+ \`t3_id2\` int NOT NULL,
+ CONSTRAINT \`t3_t3_id1_t3_id2_pk\` PRIMARY KEY(\`t3_id1\`,\`t3_id2\`)
+);
+`;
+
+ const st4 =
+ `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t2_ref_t2_t2_id_fk\` FOREIGN KEY (\`t2_ref\`) REFERENCES \`t2\`(\`t2_id\`) ON DELETE no action ON UPDATE no action;`;
+
+ const st5 =
+ `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fk\` FOREIGN KEY (\`t1_col2\`,\`t1_col3\`) REFERENCES \`t3\`(\`t3_id1\`,\`t3_id2\`) ON DELETE no action ON UPDATE no action;`;
+
+ const st6 = `CREATE INDEX \`t1_idx\` ON \`t1\` (\`t1_idx\`);`;
+
+ expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]);
+});
+
+test('optional db aliases (camel case)', async () => {
+ const from = {};
+
+ const t1 = mysqlTable(
+ 't1',
+ {
+ t1_id1: int().notNull().primaryKey(),
+ t1_col2: int().notNull(),
+ t1_col3: int().notNull(),
+ t2_ref: int().notNull().references(() => t2.t2_id),
+ t1_uni: int().notNull(),
+ t1_uni_idx: int().notNull(),
+ t1_idx: int().notNull(),
+ },
+ (table) => ({
+ uni: unique('t1Uni').on(table.t1_uni),
+ uni_idx: uniqueIndex('t1UniIdx').on(table.t1_uni_idx),
+ idx: index('t1Idx').on(table.t1_idx),
+ fk: foreignKey({
+ columns: [table.t1_col2, table.t1_col3],
+ foreignColumns: [t3.t3_id1, t3.t3_id2],
+ }),
+ }),
+ );
+
+ const t2 = mysqlTable(
+ 't2',
+ {
+ t2_id: serial().primaryKey(),
+ },
+ );
+
+ const t3 = mysqlTable(
+ 't3',
+ {
+ t3_id1: int(),
+ t3_id2: int(),
+ },
+ (table) => ({
+ pk: primaryKey({
+ columns: [table.t3_id1, table.t3_id2],
+ }),
+ }),
+ );
+
+ const to = {
+ t1,
+ t2,
+ t3,
+ };
+
+ const { sqlStatements } = await diffTestSchemasMysql(from, to, [], false, 'camelCase');
+
+ const st1 = `CREATE TABLE \`t1\` (
+ \`t1Id1\` int NOT NULL,
+ \`t1Col2\` int NOT NULL,
+ \`t1Col3\` int NOT NULL,
+ \`t2Ref\` int NOT NULL,
+ \`t1Uni\` int NOT NULL,
+ \`t1UniIdx\` int NOT NULL,
+ \`t1Idx\` int NOT NULL,
+ CONSTRAINT \`t1_t1Id1\` PRIMARY KEY(\`t1Id1\`),
+ CONSTRAINT \`t1Uni\` UNIQUE(\`t1Uni\`),
+ CONSTRAINT \`t1UniIdx\` UNIQUE(\`t1UniIdx\`)
+);
+`;
+
+ const st2 = `CREATE TABLE \`t2\` (
+ \`t2Id\` serial AUTO_INCREMENT NOT NULL,
+ CONSTRAINT \`t2_t2Id\` PRIMARY KEY(\`t2Id\`)
+);
+`;
+
+ const st3 = `CREATE TABLE \`t3\` (
+ \`t3Id1\` int NOT NULL,
+ \`t3Id2\` int NOT NULL,
+ CONSTRAINT \`t3_t3Id1_t3Id2_pk\` PRIMARY KEY(\`t3Id1\`,\`t3Id2\`)
+);
+`;
+
+ const st4 =
+ `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t2Ref_t2_t2Id_fk\` FOREIGN KEY (\`t2Ref\`) REFERENCES \`t2\`(\`t2Id\`) ON DELETE no action ON UPDATE no action;`;
+
+ const st5 =
+ `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fk\` FOREIGN KEY (\`t1Col2\`,\`t1Col3\`) REFERENCES \`t3\`(\`t3Id1\`,\`t3Id2\`) ON DELETE no action ON UPDATE no action;`;
+
+ const st6 = `CREATE INDEX \`t1Idx\` ON \`t1\` (\`t1Idx\`);`;
+
+ expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]);
+});
diff --git a/drizzle-kit/tests/pg-tables.test.ts b/drizzle-kit/tests/pg-tables.test.ts
index 4171af333..79a21a695 100644
--- a/drizzle-kit/tests/pg-tables.test.ts
+++ b/drizzle-kit/tests/pg-tables.test.ts
@@ -1,6 +1,7 @@
import { sql } from 'drizzle-orm';
import {
AnyPgColumn,
+ foreignKey,
geometry,
index,
integer,
@@ -12,6 +13,8 @@ import {
primaryKey,
serial,
text,
+ unique,
+ uniqueIndex,
vector,
} from 'drizzle-orm/pg-core';
import { expect, test } from 'vitest';
@@ -639,3 +642,199 @@ test('create table with tsvector', async () => {
`CREATE INDEX IF NOT EXISTS "title_search_index" ON "posts" USING gin (to_tsvector('english', "title"));`,
]);
});
+
+test('optional db aliases (snake case)', async () => {
+ const from = {};
+
+ const t1 = pgTable(
+ 't1',
+ {
+ t1Id1: integer().notNull().primaryKey(),
+ t1Col2: integer().notNull(),
+ t1Col3: integer().notNull(),
+ t2Ref: integer().notNull().references(() => t2.t2Id),
+ t1Uni: integer().notNull(),
+ t1UniIdx: integer().notNull(),
+ t1Idx: integer().notNull(),
+ },
+ (table) => ({
+ uni: unique('t1_uni').on(table.t1Uni),
+ uniIdx: uniqueIndex('t1_uni_idx').on(table.t1UniIdx),
+ idx: index('t1_idx').on(table.t1Idx).where(sql`${table.t1Idx} > 0`),
+ fk: foreignKey({
+ columns: [table.t1Col2, table.t1Col3],
+ foreignColumns: [t3.t3Id1, t3.t3Id2],
+ }),
+ }),
+ );
+
+ const t2 = pgTable(
+ 't2',
+ {
+ t2Id: serial().primaryKey(),
+ },
+ );
+
+ const t3 = pgTable(
+ 't3',
+ {
+ t3Id1: integer(),
+ t3Id2: integer(),
+ },
+ (table) => ({
+ pk: primaryKey({
+ columns: [table.t3Id1, table.t3Id2],
+ }),
+ }),
+ );
+
+ const to = {
+ t1,
+ t2,
+ t3,
+ };
+
+ const { sqlStatements } = await diffTestSchemas(from, to, [], false, 'snake_case');
+
+ const st1 = `CREATE TABLE IF NOT EXISTS "t1" (
+ "t1_id1" integer PRIMARY KEY NOT NULL,
+ "t1_col2" integer NOT NULL,
+ "t1_col3" integer NOT NULL,
+ "t2_ref" integer NOT NULL,
+ "t1_uni" integer NOT NULL,
+ "t1_uni_idx" integer NOT NULL,
+ "t1_idx" integer NOT NULL,
+ CONSTRAINT "t1_uni" UNIQUE("t1_uni")
+);
+`;
+
+ const st2 = `CREATE TABLE IF NOT EXISTS "t2" (
+ "t2_id" serial PRIMARY KEY NOT NULL
+);
+`;
+
+ const st3 = `CREATE TABLE IF NOT EXISTS "t3" (
+ "t3_id1" integer,
+ "t3_id2" integer,
+ CONSTRAINT "t3_t3_id1_t3_id2_pk" PRIMARY KEY("t3_id1","t3_id2")
+);
+`;
+
+ const st4 = `DO $$ BEGIN
+ ALTER TABLE "t1" ADD CONSTRAINT "t1_t2_ref_t2_t2_id_fk" FOREIGN KEY ("t2_ref") REFERENCES "public"."t2"("t2_id") ON DELETE no action ON UPDATE no action;
+EXCEPTION
+ WHEN duplicate_object THEN null;
+END $$;
+`;
+
+ const st5 = `DO $$ BEGIN
+ ALTER TABLE "t1" ADD CONSTRAINT "t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fk" FOREIGN KEY ("t1_col2","t1_col3") REFERENCES "public"."t3"("t3_id1","t3_id2") ON DELETE no action ON UPDATE no action;
+EXCEPTION
+ WHEN duplicate_object THEN null;
+END $$;
+`;
+
+ const st6 = `CREATE UNIQUE INDEX IF NOT EXISTS "t1_uni_idx" ON "t1" USING btree ("t1_uni_idx");`;
+
+ const st7 = `CREATE INDEX IF NOT EXISTS "t1_idx" ON "t1" USING btree ("t1_idx") WHERE "t1"."t1_idx" > 0;`;
+
+ expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]);
+});
+
+test('optional db aliases (camel case)', async () => {
+ const from = {};
+
+ const t1 = pgTable(
+ 't1',
+ {
+ t1_id1: integer().notNull().primaryKey(),
+ t1_col2: integer().notNull(),
+ t1_col3: integer().notNull(),
+ t2_ref: integer().notNull().references(() => t2.t2_id),
+ t1_uni: integer().notNull(),
+ t1_uni_idx: integer().notNull(),
+ t1_idx: integer().notNull(),
+ },
+ (table) => ({
+ uni: unique('t1Uni').on(table.t1_uni),
+ uni_idx: uniqueIndex('t1UniIdx').on(table.t1_uni_idx),
+ idx: index('t1Idx').on(table.t1_idx).where(sql`${table.t1_idx} > 0`),
+ fk: foreignKey({
+ columns: [table.t1_col2, table.t1_col3],
+ foreignColumns: [t3.t3_id1, t3.t3_id2],
+ }),
+ }),
+ );
+
+ const t2 = pgTable(
+ 't2',
+ {
+ t2_id: serial().primaryKey(),
+ },
+ );
+
+ const t3 = pgTable(
+ 't3',
+ {
+ t3_id1: integer(),
+ t3_id2: integer(),
+ },
+ (table) => ({
+ pk: primaryKey({
+ columns: [table.t3_id1, table.t3_id2],
+ }),
+ }),
+ );
+
+ const to = {
+ t1,
+ t2,
+ t3,
+ };
+
+ const { sqlStatements } = await diffTestSchemas(from, to, [], false, 'camelCase');
+
+ const st1 = `CREATE TABLE IF NOT EXISTS "t1" (
+ "t1Id1" integer PRIMARY KEY NOT NULL,
+ "t1Col2" integer NOT NULL,
+ "t1Col3" integer NOT NULL,
+ "t2Ref" integer NOT NULL,
+ "t1Uni" integer NOT NULL,
+ "t1UniIdx" integer NOT NULL,
+ "t1Idx" integer NOT NULL,
+ CONSTRAINT "t1Uni" UNIQUE("t1Uni")
+);
+`;
+
+ const st2 = `CREATE TABLE IF NOT EXISTS "t2" (
+ "t2Id" serial PRIMARY KEY NOT NULL
+);
+`;
+
+ const st3 = `CREATE TABLE IF NOT EXISTS "t3" (
+ "t3Id1" integer,
+ "t3Id2" integer,
+ CONSTRAINT "t3_t3Id1_t3Id2_pk" PRIMARY KEY("t3Id1","t3Id2")
+);
+`;
+
+ const st4 = `DO $$ BEGIN
+ ALTER TABLE "t1" ADD CONSTRAINT "t1_t2Ref_t2_t2Id_fk" FOREIGN KEY ("t2Ref") REFERENCES "public"."t2"("t2Id") ON DELETE no action ON UPDATE no action;
+EXCEPTION
+ WHEN duplicate_object THEN null;
+END $$;
+`;
+
+ const st5 = `DO $$ BEGIN
+ ALTER TABLE "t1" ADD CONSTRAINT "t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fk" FOREIGN KEY ("t1Col2","t1Col3") REFERENCES "public"."t3"("t3Id1","t3Id2") ON DELETE no action ON UPDATE no action;
+EXCEPTION
+ WHEN duplicate_object THEN null;
+END $$;
+`;
+
+ const st6 = `CREATE UNIQUE INDEX IF NOT EXISTS "t1UniIdx" ON "t1" USING btree ("t1UniIdx");`;
+
+ const st7 = `CREATE INDEX IF NOT EXISTS "t1Idx" ON "t1" USING btree ("t1Idx") WHERE "t1"."t1Idx" > 0;`;
+
+ expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]);
+});
diff --git a/drizzle-kit/tests/push/libsql.test.ts b/drizzle-kit/tests/push/libsql.test.ts
new file mode 100644
index 000000000..89ec008ca
--- /dev/null
+++ b/drizzle-kit/tests/push/libsql.test.ts
@@ -0,0 +1,1049 @@
+import { createClient } from '@libsql/client';
+import chalk from 'chalk';
+import { sql } from 'drizzle-orm';
+import {
+ blob,
+ foreignKey,
+ getTableConfig,
+ index,
+ int,
+ integer,
+ numeric,
+ real,
+ sqliteTable,
+ text,
+ uniqueIndex,
+} from 'drizzle-orm/sqlite-core';
+import { diffTestSchemasPushLibSQL } from 'tests/schemaDiffer';
+import { expect, test } from 'vitest';
+
+// Pushing an identical schema must be a complete no-op: no SQL emitted, no
+// diff statements, nothing flagged as destructive and no approval prompt.
+test('nothing changed in schema', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ // One table exercising every supported sqlite column flavor, so the
+ // introspection round-trip is checked against all of them at once.
+ const users = sqliteTable('users', {
+ id: integer('id').primaryKey().notNull(),
+ name: text('name').notNull(),
+ email: text('email'),
+ textJson: text('text_json', { mode: 'json' }),
+ blobJon: blob('blob_json', { mode: 'json' }),
+ blobBigInt: blob('blob_bigint', { mode: 'bigint' }),
+ numeric: numeric('numeric'),
+ createdAt: integer('created_at', { mode: 'timestamp' }),
+ createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }),
+ real: real('real'),
+ text: text('text', { length: 255 }),
+ role: text('role', { enum: ['admin', 'user'] }).default('user'),
+ isConfirmed: integer('is_confirmed', {
+ mode: 'boolean',
+ }),
+ });
+
+ const schema1 = {
+ users,
+
+ customers: sqliteTable('customers', {
+ id: integer('id').primaryKey(),
+ address: text('address').notNull(),
+ isConfirmed: integer('is_confirmed', { mode: 'boolean' }),
+ registrationDate: integer('registration_date', { mode: 'timestamp_ms' })
+ .notNull()
+ .$defaultFn(() => new Date()),
+ userId: integer('user_id')
+ .references(() => users.id)
+ .notNull(),
+ }),
+
+ posts: sqliteTable('posts', {
+ id: integer('id').primaryKey(),
+ content: text('content'),
+ authorId: integer('author_id'),
+ }),
+ };
+
+ const {
+ sqlStatements,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(turso, schema1, schema1, [], false);
+ expect(sqlStatements.length).toBe(0);
+ expect(statements.length).toBe(0);
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+// Replacing a unique index on one column with a unique index on another
+// column must diff to exactly: drop the old index, create the new one —
+// with no destructive data actions and no approval required.
+test('added, dropped index', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const users = sqliteTable('users', {
+ id: integer('id').primaryKey().notNull(),
+ name: text('name').notNull(),
+ email: text('email'),
+ textJson: text('text_json', { mode: 'json' }),
+ blobJon: blob('blob_json', { mode: 'json' }),
+ blobBigInt: blob('blob_bigint', { mode: 'bigint' }),
+ numeric: numeric('numeric'),
+ createdAt: integer('created_at', { mode: 'timestamp' }),
+ createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }),
+ real: real('real'),
+ text: text('text', { length: 255 }),
+ role: text('role', { enum: ['admin', 'user'] }).default('user'),
+ isConfirmed: integer('is_confirmed', {
+ mode: 'boolean',
+ }),
+ });
+
+ // Before: unique index on customers.address.
+ const schema1 = {
+ users,
+ customers: sqliteTable(
+ 'customers',
+ {
+ id: integer('id').primaryKey(),
+ address: text('address').notNull(),
+ isConfirmed: integer('is_confirmed', { mode: 'boolean' }),
+ registrationDate: integer('registration_date', { mode: 'timestamp_ms' })
+ .notNull()
+ .$defaultFn(() => new Date()),
+ userId: integer('user_id').notNull(),
+ },
+ (table) => ({
+ uniqueIndex: uniqueIndex('customers_address_unique').on(table.address),
+ }),
+ ),
+
+ posts: sqliteTable('posts', {
+ id: integer('id').primaryKey(),
+ content: text('content'),
+ authorId: integer('author_id'),
+ }),
+ };
+
+ // After: the address index is gone, a unique index on is_confirmed appears.
+ const schema2 = {
+ users,
+ customers: sqliteTable(
+ 'customers',
+ {
+ id: integer('id').primaryKey(),
+ address: text('address').notNull(),
+ isConfirmed: integer('is_confirmed', { mode: 'boolean' }),
+ registrationDate: integer('registration_date', { mode: 'timestamp_ms' })
+ .notNull()
+ .$defaultFn(() => new Date()),
+ userId: integer('user_id').notNull(),
+ },
+ (table) => ({
+ uniqueIndex: uniqueIndex('customers_is_confirmed_unique').on(
+ table.isConfirmed,
+ ),
+ }),
+ ),
+
+ posts: sqliteTable('posts', {
+ id: integer('id').primaryKey(),
+ content: text('content'),
+ authorId: integer('author_id'),
+ }),
+ };
+
+ const {
+ sqlStatements,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(turso, schema1, schema2, [], false);
+
+ expect(statements.length).toBe(2);
+ expect(statements[0]).toStrictEqual({
+ type: 'drop_index',
+ tableName: 'customers',
+ data: 'customers_address_unique;address;true;',
+ schema: '',
+ });
+ expect(statements[1]).toStrictEqual({
+ type: 'create_index',
+ tableName: 'customers',
+ data: 'customers_is_confirmed_unique;is_confirmed;true;',
+ schema: '',
+ internal: { indexes: {} },
+ });
+
+ expect(sqlStatements.length).toBe(2);
+ expect(sqlStatements[0]).toBe(
+ `DROP INDEX IF EXISTS \`customers_address_unique\`;`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `CREATE UNIQUE INDEX \`customers_is_confirmed_unique\` ON \`customers\` (\`is_confirmed\`);`,
+ );
+
+ // Pure index swap: nothing destructive, so no approval flow is triggered.
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+// Adding a NOT NULL column with no default to a table that already holds
+// rows cannot succeed as-is, so the push flow must warn, require approval,
+// and schedule a truncate (`delete from companies;`) before the ALTER.
+test('added column not null and without default to table with data', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const schema1 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey(),
+ name: text('name').notNull(),
+ }),
+ };
+
+ const schema2 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey(),
+ name: text('name').notNull(),
+ age: integer('age').notNull(),
+ }),
+ };
+
+ const table = getTableConfig(schema1.companies);
+
+ // Seed two rows so the diff sees a non-empty table.
+ const seedStatements = [
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('drizzle');`,
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('turso');`,
+ ];
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(
+ turso,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'companies',
+ column: {
+ name: 'age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ });
+
+ // Truncate first, then the plain ADD COLUMN.
+ expect(sqlStatements.length).toBe(2);
+ expect(sqlStatements[0]).toBe(`delete from companies;`);
+ expect(sqlStatements[1]).toBe(
+ `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(1);
+ expect(infoToPrint![0]).toBe(
+ `· You're about to add not-null ${
+ chalk.underline(
+ 'age',
+ )
+ } column without default value, which contains 2 items`,
+ );
+ expect(shouldAskForApprove).toBe(true);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(1);
+ expect(tablesToTruncate![0]).toBe('companies');
+});
+
+// Same NOT NULL-without-default addition as above, but on an empty table:
+// a single ALTER is enough — no warnings, no truncation, no approval.
+test('added column not null and without default to table without data', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const schema1 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey(),
+ name: text('name').notNull(),
+ }),
+ };
+
+ const schema2 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey(),
+ name: text('name').notNull(),
+ age: integer('age').notNull(),
+ }),
+ };
+
+ const {
+ sqlStatements,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(turso, schema1, schema2, [], false);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'companies',
+ column: {
+ name: 'age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ });
+
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`,
+ );
+
+ expect(infoToPrint!.length).toBe(0);
+ expect(columnsToRemove!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+// Removing AUTOINCREMENT and dropping a populated column cannot be done
+// with ALTER in sqlite, so the diff must fall back to the recreate-table
+// strategy (create __new table, copy rows, drop old, rename) and warn
+// about the data in the dropped `name` column.
+test('drop autoincrement. drop column with data', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const schema1 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const schema2 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: false }),
+ }),
+ };
+
+ // Seed rows so dropping `name` counts as data loss.
+ const table = getTableConfig(schema1.companies);
+ const seedStatements = [
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`,
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`,
+ ];
+
+ const {
+ sqlStatements,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(
+ turso,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'recreate_table',
+ tableName: 'companies',
+ columns: [
+ {
+ name: 'id',
+ type: 'integer',
+ autoincrement: false,
+ notNull: true,
+ primaryKey: true,
+ generated: undefined,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ });
+
+ // The four-step recreate sequence: create, copy, drop, rename.
+ expect(sqlStatements.length).toBe(4);
+ expect(sqlStatements[0]).toBe(
+ `CREATE TABLE \`__new_companies\` (
+\t\`id\` integer PRIMARY KEY NOT NULL
+);\n`,
+ );
+ expect(sqlStatements[1]).toBe(`INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`);
+ expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`);
+ expect(sqlStatements[3]).toBe(
+ `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(1);
+ expect(infoToPrint!.length).toBe(1);
+ expect(infoToPrint![0]).toBe(
+ `· You're about to delete ${
+ chalk.underline(
+ 'name',
+ )
+ } column in companies table with 2 items`,
+ );
+ expect(shouldAskForApprove).toBe(true);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+// Toggling AUTOINCREMENT on a table referenced by a foreign key still uses
+// the recreate-table strategy, and because no columns are lost the flow
+// must not warn or ask for approval even though the table holds data.
+test('change autoincrement. table is part of foreign key', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const companies1 = sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ });
+ const users1 = sqliteTable('users', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ name: text('name').unique(),
+ companyId: integer('company_id').references(() => companies1.id),
+ });
+ const schema1 = {
+ companies: companies1,
+ users: users1,
+ };
+
+ // Only companies.id changes: autoIncrement true -> false.
+ const companies2 = sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: false }),
+ });
+ const users2 = sqliteTable('users', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ name: text('name').unique(),
+ companyId: integer('company_id').references(() => companies2.id),
+ });
+ const schema2 = {
+ companies: companies2,
+ users: users2,
+ };
+
+ // Seed both tables so the recreate path runs against real rows.
+ const { name: usersTableName } = getTableConfig(users1);
+ const { name: companiesTableName } = getTableConfig(companies1);
+ const seedStatements = [
+ `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('drizzle');`,
+ `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('turso');`,
+ `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES (1);`,
+ `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES (2);`,
+ ];
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(
+ turso,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'recreate_table',
+ tableName: 'companies',
+ columns: [
+ {
+ name: 'id',
+ type: 'integer',
+ autoincrement: false,
+ notNull: true,
+ primaryKey: true,
+ generated: undefined,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(4);
+ expect(sqlStatements[0]).toBe(
+ `CREATE TABLE \`__new_companies\` (
+\t\`id\` integer PRIMARY KEY NOT NULL
+);\n`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`,
+ );
+ expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`);
+ expect(sqlStatements[3]).toBe(
+ `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+// libsql supports `ALTER TABLE ... ALTER COLUMN ... TO ...`, so toggling
+// NOT NULL in either direction should produce plain column alters, not a
+// table recreate: drop NOT NULL on users.name, add it on posts.name.
+test('drop not null, add not null', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ }),
+ posts: sqliteTable(
+ 'posts',
+ {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ userId: int('user_id'),
+ },
+ ),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ posts: sqliteTable(
+ 'posts',
+ {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ userId: int('user_id'),
+ },
+ ),
+ };
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(
+ turso,
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements!.length).toBe(2);
+ expect(statements![0]).toStrictEqual({
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnName: 'name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_drop_notnull',
+ });
+ expect(statements![1]).toStrictEqual({
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnName: 'name',
+ columnNotNull: true,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'posts',
+ type: 'alter_table_alter_column_set_notnull',
+ });
+ expect(sqlStatements!.length).toBe(2);
+ expect(sqlStatements![0]).toBe(`ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`);
+ expect(sqlStatements![1]).toBe(`ALTER TABLE \`posts\` ALTER COLUMN "name" TO "name" text NOT NULL;`);
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+// Dropping a table that contains rows must emit a single DROP TABLE, warn
+// about the row count, and gate the push behind user approval.
+test('drop table with data', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ }),
+ posts: sqliteTable(
+ 'posts',
+ {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ userId: int('user_id'),
+ },
+ ),
+ };
+
+ // users is removed from the target schema.
+ const schema2 = {
+ posts: sqliteTable(
+ 'posts',
+ {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ userId: int('user_id'),
+ },
+ ),
+ };
+
+ // One seeded row makes the drop count as data loss.
+ const seedStatements = [
+ `INSERT INTO \`users\` ("name") VALUES ('drizzle')`,
+ ];
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(
+ turso,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements!.length).toBe(1);
+ expect(statements![0]).toStrictEqual({
+ schema: undefined,
+ tableName: 'users',
+ type: 'drop_table',
+ });
+
+ expect(sqlStatements!.length).toBe(1);
+ expect(sqlStatements![0]).toBe(`DROP TABLE \`users\`;`);
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(1);
+ expect(infoToPrint![0]).toBe(`· You're about to delete ${chalk.underline('users')} table with 1 items`);
+ expect(shouldAskForApprove).toBe(true);
+ expect(tablesToRemove!.length).toBe(1);
+ expect(tablesToRemove![0]).toBe('users');
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+// Recreating a table that other tables reference (users <- subscriptions
+// <- subscriptions_metadata): only `users` changes (autoIncrement off), so
+// only `users` should be recreated, with all rows copied and no warnings.
+test('recreate table with nested references', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ // `let` because the users table is redefined below for the target schema.
+ let users = sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ age: integer('age'),
+ });
+ let subscriptions = sqliteTable('subscriptions', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ userId: integer('user_id').references(() => users.id),
+ customerId: text('customer_id'),
+ });
+ const schema1 = {
+ users: users,
+ subscriptions: subscriptions,
+ subscriptionMetadata: sqliteTable('subscriptions_metadata', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ subscriptionId: text('subscription_id').references(
+ () => subscriptions.id,
+ ),
+ }),
+ };
+
+ users = sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: false }),
+ name: text('name'),
+ age: integer('age'),
+ });
+ const schema2 = {
+ users: users,
+ subscriptions: subscriptions,
+ subscriptionMetadata: sqliteTable('subscriptions_metadata', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ subscriptionId: text('subscription_id').references(
+ () => subscriptions.id,
+ ),
+ }),
+ };
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(turso, schema1, schema2, []);
+
+ expect(statements!.length).toBe(1);
+ expect(statements![0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: false,
+ name: 'id',
+ notNull: true,
+ generated: undefined,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'name',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ {
+ autoincrement: false,
+ name: 'age',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ // Standard create/copy/drop/rename recreate sequence.
+ expect(sqlStatements!.length).toBe(4);
+ expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY NOT NULL,
+\t\`name\` text,
+\t\`age\` integer
+);\n`);
+ expect(sqlStatements![1]).toBe(
+ `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`,
+ );
+ expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements![3]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+// Recreate path combined with a new NOT NULL column lacking a default on a
+// populated table: the flow truncates first (DELETE FROM), recreates the
+// table without a data-copy INSERT, warns, and requires approval.
+test('recreate table with added column not null and without default', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ age: integer('age'),
+ }),
+ };
+
+ // autoIncrement removed AND a NOT NULL column with no default added.
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: false }),
+ name: text('name'),
+ age: integer('age'),
+ newColumn: text('new_column').notNull(),
+ }),
+ };
+
+ const seedStatements = [
+ `INSERT INTO \`users\` ("name", "age") VALUES ('drizzle', 12)`,
+ `INSERT INTO \`users\` ("name", "age") VALUES ('turso', 12)`,
+ ];
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(
+ turso,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements!.length).toBe(1);
+ expect(statements![0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: false,
+ name: 'id',
+ notNull: true,
+ generated: undefined,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'name',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ {
+ autoincrement: false,
+ name: 'age',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'new_column',
+ notNull: true,
+ generated: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ // Truncated table, so the recreate skips the INSERT...SELECT copy step.
+ // NOTE(review): uppercase 'DELETE FROM' here vs lowercase 'delete from'
+ // in the plain add-column path above — confirm generator casing is intentional.
+ expect(sqlStatements!.length).toBe(4);
+ expect(sqlStatements[0]).toBe('DELETE FROM \`users\`;');
+ expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY NOT NULL,
+\t\`name\` text,
+\t\`age\` integer,
+\t\`new_column\` text NOT NULL
+);\n`);
+ expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements![3]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(1);
+ expect(infoToPrint![0]).toBe(
+ `· You're about to add not-null ${
+ chalk.underline('new_column')
+ } column without default value to table, which contains 2 items`,
+ );
+ expect(shouldAskForApprove).toBe(true);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(1);
+ expect(tablesToTruncate![0]).toBe('users');
+});
+
+// Setting NOT NULL on an indexed column must wrap the column alter with an
+// index rebuild: drop the index, ALTER COLUMN, recreate the index.
+test('set not null with index', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }, (table) => ({
+ someIndex: index('users_name_index').on(table.name),
+ })),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ }, (table) => ({
+ someIndex: index('users_name_index').on(table.name),
+ })),
+ };
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(
+ turso,
+ schema1,
+ schema2,
+ [],
+ );
+
+ // A single set-notnull statement describes the change.
+ expect(statements!.length).toBe(1);
+ expect(statements![0]).toStrictEqual({
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnName: 'name',
+ columnNotNull: true,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_set_notnull',
+ });
+
+ // ...but it expands to three SQL steps so the index is rebuilt.
+ expect(sqlStatements.length).toBe(3);
+ expect(sqlStatements[0]).toBe(
+ `DROP INDEX IF EXISTS "users_name_index";`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL;`,
+ );
+ expect(sqlStatements[2]).toBe(
+ `CREATE INDEX \`users_name_index\` ON \`users\` (\`name\`);`,
+ );
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+// Dropping NOT NULL on a table carrying two indexes: both indexes must be
+// dropped before the column alter and recreated afterwards, in order.
+test('drop not null with two indexes', async (t) => {
+ const turso = createClient({
+ url: ':memory:',
+ });
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ age: int('age').notNull(),
+ }, (table) => ({
+ someUniqeIndex: uniqueIndex('users_name_unique').on(table.name),
+ someIndex: index('users_age_index').on(table.age),
+ })),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ age: int('age').notNull(),
+ }, (table) => ({
+ someUniqeIndex: uniqueIndex('users_name_unique').on(table.name),
+ someIndex: index('users_age_index').on(table.age),
+ })),
+ };
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushLibSQL(
+ turso,
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'alter_table_alter_column_drop_notnull',
+ tableName: 'users',
+ columnName: 'name',
+ schema: '',
+ newDataType: 'text',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ });
+
+ // Five SQL steps: drop both indexes, alter the column, recreate both.
+ expect(sqlStatements.length).toBe(5);
+ expect(sqlStatements[0]).toBe(
+ `DROP INDEX IF EXISTS "users_name_unique";`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `DROP INDEX IF EXISTS "users_age_index";`,
+ );
+ expect(sqlStatements[2]).toBe(
+ `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`,
+ );
+ expect(sqlStatements[3]).toBe(
+ `CREATE UNIQUE INDEX \`users_name_unique\` ON \`users\` (\`name\`);`,
+ );
+ expect(sqlStatements[4]).toBe(
+ `CREATE INDEX \`users_age_index\` ON \`users\` (\`age\`);`,
+ );
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
diff --git a/drizzle-kit/tests/push/sqlite.test.ts b/drizzle-kit/tests/push/sqlite.test.ts
index cf468d3ec..aea5cd379 100644
--- a/drizzle-kit/tests/push/sqlite.test.ts
+++ b/drizzle-kit/tests/push/sqlite.test.ts
@@ -1,384 +1,630 @@
import Database from 'better-sqlite3';
-import { SQL, sql } from 'drizzle-orm';
-import { blob, foreignKey, int, integer, numeric, real, sqliteTable, text } from 'drizzle-orm/sqlite-core';
+import chalk from 'chalk';
+import {
+ blob,
+ foreignKey,
+ getTableConfig,
+ int,
+ integer,
+ numeric,
+ real,
+ sqliteTable,
+ text,
+ uniqueIndex,
+} from 'drizzle-orm/sqlite-core';
import { diffTestSchemasPushSqlite } from 'tests/schemaDiffer';
import { expect, test } from 'vitest';
-import { DialectSuite, run } from './common';
-
-const sqliteSuite: DialectSuite = {
- addBasicIndexes: function(context?: any): Promise {
- return {} as any;
- },
- changeIndexFields: function(context?: any): Promise {
- return {} as any;
- },
- dropIndex: function(context?: any): Promise {
- return {} as any;
- },
-
- async allTypes() {
- const sqlite = new Database(':memory:');
-
- const Users = sqliteTable('users', {
- id: integer('id').primaryKey().notNull(),
- name: text('name').notNull(),
- email: text('email'),
- textJson: text('text_json', { mode: 'json' }),
- blobJon: blob('blob_json', { mode: 'json' }),
- blobBigInt: blob('blob_bigint', { mode: 'bigint' }),
- numeric: numeric('numeric'),
- createdAt: integer('created_at', { mode: 'timestamp' }),
- createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }),
- real: real('real'),
- text: text('text', { length: 255 }),
- role: text('role', { enum: ['admin', 'user'] }).default('user'),
- isConfirmed: integer('is_confirmed', {
- mode: 'boolean',
- }),
- });
- const schema1 = {
- Users,
+test('nothing changed in schema', async (t) => {
+ const client = new Database(':memory:');
+
+ const users = sqliteTable('users', {
+ id: integer('id').primaryKey().notNull(),
+ name: text('name').notNull(),
+ email: text('email'),
+ textJson: text('text_json', { mode: 'json' }),
+ blobJon: blob('blob_json', { mode: 'json' }),
+ blobBigInt: blob('blob_bigint', { mode: 'bigint' }),
+ numeric: numeric('numeric'),
+ createdAt: integer('created_at', { mode: 'timestamp' }),
+ createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }),
+ real: real('real'),
+ text: text('text', { length: 255 }),
+ role: text('role', { enum: ['admin', 'user'] }).default('user'),
+ isConfirmed: integer('is_confirmed', {
+ mode: 'boolean',
+ }),
+ });
- Customers: sqliteTable('customers', {
+ const schema1 = {
+ users,
+
+ customers: sqliteTable('customers', {
+ id: integer('id').primaryKey(),
+ address: text('address').notNull(),
+ isConfirmed: integer('is_confirmed', { mode: 'boolean' }),
+ registrationDate: integer('registration_date', { mode: 'timestamp_ms' })
+ .notNull()
+ .$defaultFn(() => new Date()),
+ userId: integer('user_id')
+ .references(() => users.id)
+ .notNull(),
+ }),
+
+ posts: sqliteTable('posts', {
+ id: integer('id').primaryKey(),
+ content: text('content'),
+ authorId: integer('author_id'),
+ }),
+ };
+
+ const {
+ sqlStatements,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(client, schema1, schema1, [], false);
+ expect(sqlStatements.length).toBe(0);
+ expect(statements.length).toBe(0);
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+});
+
+test('dropped, added unique index', async (t) => {
+ const client = new Database(':memory:');
+
+ const users = sqliteTable('users', {
+ id: integer('id').primaryKey().notNull(),
+ name: text('name').notNull(),
+ email: text('email'),
+ textJson: text('text_json', { mode: 'json' }),
+ blobJon: blob('blob_json', { mode: 'json' }),
+ blobBigInt: blob('blob_bigint', { mode: 'bigint' }),
+ numeric: numeric('numeric'),
+ createdAt: integer('created_at', { mode: 'timestamp' }),
+ createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }),
+ real: real('real'),
+ text: text('text', { length: 255 }),
+ role: text('role', { enum: ['admin', 'user'] }).default('user'),
+ isConfirmed: integer('is_confirmed', {
+ mode: 'boolean',
+ }),
+ });
+
+ const schema1 = {
+ users,
+
+ customers: sqliteTable(
+ 'customers',
+ {
id: integer('id').primaryKey(),
- address: text('address').notNull(),
+ address: text('address').notNull().unique(),
isConfirmed: integer('is_confirmed', { mode: 'boolean' }),
registrationDate: integer('registration_date', { mode: 'timestamp_ms' })
.notNull()
.$defaultFn(() => new Date()),
- userId: integer('user_id')
- .references(() => Users.id)
- .notNull(),
+ userId: integer('user_id').notNull(),
+ },
+ (table) => ({
+ uniqueIndex: uniqueIndex('customers_address_unique').on(table.address),
}),
+ ),
+
+ posts: sqliteTable('posts', {
+ id: integer('id').primaryKey(),
+ content: text('content'),
+ authorId: integer('author_id'),
+ }),
+ };
+
+ const schema2 = {
+ users,
- Posts: sqliteTable('posts', {
+ customers: sqliteTable(
+ 'customers',
+ {
id: integer('id').primaryKey(),
- content: text('content'),
- authorId: integer('author_id'),
- }),
- };
-
- const { statements } = await diffTestSchemasPushSqlite(
- sqlite,
- schema1,
- schema1,
- [],
- false,
- );
- expect(statements.length).toBe(0);
- },
- indexesToBeNotTriggered: function(context?: any): Promise {
- return {} as any;
- },
- indexesTestCase1: function(context?: any): Promise {
- return {} as any;
- },
- async case1(): Promise {
- const sqlite = new Database(':memory:');
-
- const schema1 = {
- users: sqliteTable('users', {
- id: text('id').notNull().primaryKey(),
- firstName: text('first_name').notNull(),
- lastName: text('last_name').notNull(),
- username: text('username').notNull().unique(),
- email: text('email').notNull().unique(),
- password: text('password').notNull(),
- avatarUrl: text('avatar_url').notNull(),
- postsCount: integer('posts_count').notNull().default(0),
- followersCount: integer('followers_count').notNull().default(0),
- followingsCount: integer('followings_count').notNull().default(0),
- createdAt: integer('created_at').notNull(),
- }),
- };
-
- const schema2 = {
- users: sqliteTable('users', {
- id: text('id').notNull().primaryKey(),
- firstName: text('first_name').notNull(),
- lastName: text('last_name').notNull(),
- username: text('username').notNull().unique(),
- email: text('email').notNull().unique(),
- password: text('password').notNull(),
- avatarUrl: text('avatar_url').notNull(),
- followersCount: integer('followers_count').notNull().default(0),
- followingsCount: integer('followings_count').notNull().default(0),
- createdAt: integer('created_at').notNull(),
- }),
- };
-
- const { statements } = await diffTestSchemasPushSqlite(
- sqlite,
- schema1,
- schema2,
- [],
- false,
- );
- expect(statements.length).toBe(1);
- expect(statements[0]).toStrictEqual({
- type: 'alter_table_drop_column',
- tableName: 'users',
- columnName: 'posts_count',
- schema: '',
- });
- },
- addNotNull: function(context?: any): Promise {
- return {} as any;
- },
- addNotNullWithDataNoRollback: function(context?: any): Promise {
- return {} as any;
- },
- addBasicSequences: function(context?: any): Promise {
- return {} as any;
- },
- // ---
- addGeneratedColumn: async function(context?: any): Promise {
- const sqlite = new Database(':memory:');
-
- const from = {
- users: sqliteTable('users', {
- id: int('id'),
- id2: int('id2'),
- name: text('name'),
- }),
- };
- const to = {
- users: sqliteTable('users', {
- id: int('id'),
- id2: int('id2'),
- name: text('name'),
- generatedName: text('gen_name').generatedAlwaysAs(
- (): SQL => sql`${to.users.name} || 'hello'`,
- { mode: 'stored' },
- ),
- }),
- };
-
- const { statements, sqlStatements } = await diffTestSchemasPushSqlite(
- sqlite,
- from,
- to,
- [],
- );
-
- expect(statements).toStrictEqual([]);
- expect(sqlStatements).toStrictEqual([]);
- },
- addGeneratedToColumn: async function(context?: any): Promise {
- const sqlite = new Database(':memory:');
-
- const from = {
- users: sqliteTable('users', {
- id: int('id'),
- id2: int('id2'),
- name: text('name'),
- generatedName: text('gen_name').notNull(),
- generatedName1: text('gen_name1'),
- }),
- };
- const to = {
- users: sqliteTable('users', {
- id: int('id'),
- id2: int('id2'),
- name: text('name'),
- generatedName: text('gen_name')
+ address: text('address').notNull(),
+ isConfirmed: integer('is_confirmed', { mode: 'boolean' }),
+ registrationDate: integer('registration_date', { mode: 'timestamp_ms' })
.notNull()
- .generatedAlwaysAs((): SQL => sql`${to.users.name} || 'hello'`, {
- mode: 'stored',
- }),
- generatedName1: text('gen_name1').generatedAlwaysAs(
- (): SQL => sql`${to.users.name} || 'hello'`,
- { mode: 'virtual' },
+ .$defaultFn(() => new Date()),
+ userId: integer('user_id').notNull(),
+ },
+ (table) => ({
+ uniqueIndex: uniqueIndex('customers_is_confirmed_unique').on(
+ table.isConfirmed,
),
}),
- };
+ ),
+
+ posts: sqliteTable('posts', {
+ id: integer('id').primaryKey(),
+ content: text('content'),
+ authorId: integer('author_id'),
+ }),
+ };
+
+ const {
+ sqlStatements,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(client, schema1, schema2, [], false);
+ expect(statements.length).toBe(2);
+ expect(statements[0]).toStrictEqual({
+ type: 'drop_index',
+ tableName: 'customers',
+ data: 'customers_address_unique;address;true;',
+ schema: '',
+ });
+ expect(statements[1]).toStrictEqual({
+ type: 'create_index',
+ tableName: 'customers',
+ data: 'customers_is_confirmed_unique;is_confirmed;true;',
+ schema: '',
+ internal: {
+ indexes: {},
+ },
+ });
+
+ expect(sqlStatements.length).toBe(2);
+ expect(sqlStatements[0]).toBe(
+ `DROP INDEX IF EXISTS \`customers_address_unique\`;`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `CREATE UNIQUE INDEX \`customers_is_confirmed_unique\` ON \`customers\` (\`is_confirmed\`);`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+test('added column not null and without default to table with data', async (t) => {
+ const client = new Database(':memory:');
+
+ const schema1 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey(),
+ name: text('name').notNull(),
+ }),
+ };
+
+ const schema2 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey(),
+ name: text('name').notNull(),
+ age: integer('age').notNull(),
+ }),
+ };
+
+ const table = getTableConfig(schema1.companies);
+ const seedStatements = [
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('drizzle');`,
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('turso');`,
+ ];
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(
+ client,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'companies',
+ column: {
+ name: 'age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ });
+ expect(sqlStatements.length).toBe(2);
+ expect(sqlStatements[0]).toBe(`delete from companies;`);
+ expect(sqlStatements[1]).toBe(
+ `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(1);
+ expect(infoToPrint![0]).toBe(
+ `· You're about to add not-null ${
+ chalk.underline(
+ 'age',
+ )
+ } column without default value, which contains 2 items`,
+ );
+ expect(shouldAskForApprove).toBe(true);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(1);
+ expect(tablesToTruncate![0]).toBe('companies');
+});
+
+test('added column not null and without default to table without data', async (t) => {
+ const turso = new Database(':memory:');
+
+ const schema1 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey(),
+ name: text('name').notNull(),
+ }),
+ };
+
+ const schema2 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey(),
+ name: text('name').notNull(),
+ age: integer('age').notNull(),
+ }),
+ };
+
+ const {
+ sqlStatements,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(turso, schema1, schema2, [], false);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'companies',
+ column: {
+ name: 'age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ });
+
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`,
+ );
+
+ expect(infoToPrint!.length).toBe(0);
+ expect(columnsToRemove!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+test('drop autoincrement. drop column with data', async (t) => {
+ const turso = new Database(':memory:');
+
+ const schema1 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const schema2 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: false }),
+ }),
+ };
- const { statements, sqlStatements } = await diffTestSchemasPushSqlite(
- sqlite,
- from,
- to,
- [],
- );
+ const table = getTableConfig(schema1.companies);
+ const seedStatements = [
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`,
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`,
+ ];
- expect(statements).toStrictEqual([
+ const {
+ sqlStatements,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(
+ turso,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'recreate_table',
+ tableName: 'companies',
+ columns: [
{
- columnAutoIncrement: false,
- columnDefault: undefined,
- columnGenerated: {
- as: '("name" || \'hello\')',
- type: 'virtual',
- },
- columnName: 'gen_name1',
- columnNotNull: false,
- columnOnUpdate: undefined,
- columnPk: false,
- newDataType: 'text',
- schema: '',
- tableName: 'users',
- type: 'alter_table_alter_column_set_generated',
+ name: 'id',
+ type: 'integer',
+ autoincrement: false,
+ notNull: true,
+ primaryKey: true,
+ generated: undefined,
},
- ]);
- expect(sqlStatements).toStrictEqual([
- 'ALTER TABLE `users` DROP COLUMN `gen_name1`;',
- 'ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;',
- ]);
-
- for (const st of sqlStatements) {
- sqlite.exec(st);
- }
- },
- dropGeneratedConstraint: async function(context?: any): Promise {
- const sqlite = new Database(':memory:');
-
- const from = {
- users: sqliteTable('users', {
- id: int('id'),
- id2: int('id2'),
- name: text('name'),
- generatedName: text('gen_name').generatedAlwaysAs(
- (): SQL => sql`${to.users.name} || 'hello'`,
- { mode: 'stored' },
- ),
- generatedName1: text('gen_name1').generatedAlwaysAs(
- (): SQL => sql`${to.users.name} || 'hello'`,
- { mode: 'virtual' },
- ),
- }),
- };
- const to = {
- users: sqliteTable('users', {
- id: int('id'),
- id2: int('id2'),
- name: text('name'),
- generatedName: text('gen_name'),
- generatedName1: text('gen_name1'),
- }),
- };
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(4);
+ expect(sqlStatements[0]).toBe(
+ `CREATE TABLE \`__new_companies\` (
+\t\`id\` integer PRIMARY KEY NOT NULL
+);\n`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`,
+ );
+ expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`);
+ expect(sqlStatements[3]).toBe(
+ `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(1);
+ expect(columnsToRemove![0]).toBe('name');
+ expect(infoToPrint!.length).toBe(1);
+ expect(infoToPrint![0]).toBe(
+ `· You're about to delete ${
+ chalk.underline(
+ 'name',
+ )
+ } column in companies table with 2 items`,
+ );
+ expect(shouldAskForApprove).toBe(true);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+test('drop autoincrement. drop column with data with pragma off', async (t) => {
+ const client = new Database(':memory:');
- const { statements, sqlStatements } = await diffTestSchemasPushSqlite(
- sqlite,
- from,
- to,
- [],
- );
+ client.exec('PRAGMA foreign_keys=OFF;');
- expect(statements).toStrictEqual([
+ const users = sqliteTable('users', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ });
+ const schema1 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ user_id: integer('user_id').references(() => users.id),
+ }),
+ };
+
+ const schema2 = {
+ companies: sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: false }),
+ user_id: integer('user_id').references(() => users.id),
+ }),
+ };
+
+ const table = getTableConfig(schema1.companies);
+ const seedStatements = [
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`,
+ `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`,
+ ];
+
+ const {
+ sqlStatements,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(
+ client,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'recreate_table',
+ tableName: 'companies',
+ columns: [
{
- columnAutoIncrement: false,
- columnDefault: undefined,
- columnGenerated: undefined,
- columnName: 'gen_name',
- columnNotNull: false,
- columnOnUpdate: undefined,
- columnPk: false,
- newDataType: 'text',
- schema: '',
- tableName: 'users',
- type: 'alter_table_alter_column_drop_generated',
+ name: 'id',
+ type: 'integer',
+ autoincrement: false,
+ notNull: true,
+ primaryKey: true,
+ generated: undefined,
},
{
- columnAutoIncrement: false,
- columnDefault: undefined,
- columnGenerated: undefined,
- columnName: 'gen_name1',
- columnNotNull: false,
- columnOnUpdate: undefined,
- columnPk: false,
- newDataType: 'text',
- schema: '',
- tableName: 'users',
- type: 'alter_table_alter_column_drop_generated',
+ name: 'user_id',
+ type: 'integer',
+ autoincrement: false,
+ notNull: false,
+ primaryKey: false,
+ generated: undefined,
},
- ]);
- expect(sqlStatements).toStrictEqual([
- 'ALTER TABLE `users` DROP COLUMN `gen_name`;',
- 'ALTER TABLE `users` ADD `gen_name` text;',
- 'ALTER TABLE `users` DROP COLUMN `gen_name1`;',
- 'ALTER TABLE `users` ADD `gen_name1` text;',
- ]);
-
- for (const st of sqlStatements) {
- sqlite.exec(st);
- }
- },
- alterGeneratedConstraint: async function(context?: any): Promise {
- const sqlite = new Database(':memory:');
-
- const from = {
- users: sqliteTable('users', {
- id: int('id'),
- id2: int('id2'),
- name: text('name'),
- generatedName: text('gen_name').generatedAlwaysAs(
- (): SQL => sql`${to.users.name} || 'hello'`,
- { mode: 'stored' },
- ),
- generatedName1: text('gen_name1').generatedAlwaysAs(
- (): SQL => sql`${to.users.name} || 'hello'`,
- { mode: 'virtual' },
- ),
- }),
- };
- const to = {
- users: sqliteTable('users', {
- id: int('id'),
- id2: int('id2'),
- name: text('name'),
- generatedName: text('gen_name').generatedAlwaysAs(
- (): SQL => sql`${to.users.name}`,
- { mode: 'stored' },
- ),
- generatedName1: text('gen_name1').generatedAlwaysAs(
- (): SQL => sql`${to.users.name}`,
- { mode: 'virtual' },
- ),
- }),
- };
+ ],
+ compositePKs: [],
+ referenceData: [
+ {
+ columnsFrom: [
+ 'user_id',
+ ],
+ columnsTo: [
+ 'id',
+ ],
+ name: '',
+ onDelete: 'no action',
+ onUpdate: 'no action',
+ tableFrom: 'companies',
+ tableTo: 'users',
+ },
+ ],
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(4);
+ expect(sqlStatements[0]).toBe(
+ `CREATE TABLE \`__new_companies\` (
+\t\`id\` integer PRIMARY KEY NOT NULL,
+\t\`user_id\` integer,
+\tFOREIGN KEY (\`user_id\`) REFERENCES \`users\`(\`id\`) ON UPDATE no action ON DELETE no action
+);\n`,
+ );
+ expect(sqlStatements[1]).toBe(
+ `INSERT INTO \`__new_companies\`("id", "user_id") SELECT "id", "user_id" FROM \`companies\`;`,
+ );
+ expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`);
+ expect(sqlStatements[3]).toBe(
+ `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(1);
+ expect(infoToPrint!.length).toBe(1);
+ expect(infoToPrint![0]).toBe(
+ `· You're about to delete ${
+ chalk.underline(
+ 'name',
+ )
+ } column in companies table with 2 items`,
+ );
+ expect(shouldAskForApprove).toBe(true);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+test('change autoincrement. other table references current', async (t) => {
+ const client = new Database(':memory:');
- const { statements, sqlStatements } = await diffTestSchemasPushSqlite(
- sqlite,
- from,
- to,
- [],
- );
+ const companies1 = sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ });
+ const users1 = sqliteTable('users', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ name: text('name').unique(),
+ companyId: text('company_id').references(() => companies1.id),
+ });
+ const schema1 = {
+ companies: companies1,
+ users: users1,
+ };
- expect(statements).toStrictEqual([
+ const companies2 = sqliteTable('companies', {
+ id: integer('id').primaryKey({ autoIncrement: false }),
+ });
+ const users2 = sqliteTable('users', {
+ id: integer('id').primaryKey({ autoIncrement: true }),
+ name: text('name').unique(),
+ companyId: text('company_id').references(() => companies1.id),
+ });
+ const schema2 = {
+ companies: companies2,
+ users: users2,
+ };
+
+ const { name: usersTableName } = getTableConfig(users1);
+ const { name: companiesTableName } = getTableConfig(companies1);
+ const seedStatements = [
+ `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('drizzle');`,
+ `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('turso');`,
+ `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES ('1');`,
+ `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES ('2');`,
+ ];
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(
+ client,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'recreate_table',
+ tableName: 'companies',
+ columns: [
{
- columnAutoIncrement: false,
- columnDefault: undefined,
- columnGenerated: {
- as: '("name")',
- type: 'virtual',
- },
- columnName: 'gen_name1',
- columnNotNull: false,
- columnOnUpdate: undefined,
- columnPk: false,
- newDataType: 'text',
- schema: '',
- tableName: 'users',
- type: 'alter_table_alter_column_alter_generated',
+ name: 'id',
+ type: 'integer',
+ autoincrement: false,
+ notNull: true,
+ primaryKey: true,
+ generated: undefined,
},
- ]);
- expect(sqlStatements).toStrictEqual([
- 'ALTER TABLE `users` DROP COLUMN `gen_name1`;',
- 'ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS ("name") VIRTUAL;',
- ]);
-
- for (const st of sqlStatements) {
- sqlite.exec(st);
- }
- },
- createTableWithGeneratedConstraint: function(context?: any): Promise {
- return {} as any;
- },
-};
-
-run(sqliteSuite);
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(6);
+ expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`);
+ expect(sqlStatements[1]).toBe(
+ `CREATE TABLE \`__new_companies\` (
+\t\`id\` integer PRIMARY KEY NOT NULL
+);\n`,
+ );
+ expect(sqlStatements[2]).toBe(
+ `INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`,
+ );
+ expect(sqlStatements[3]).toBe(`DROP TABLE \`companies\`;`);
+ expect(sqlStatements[4]).toBe(
+ `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`,
+ );
+ expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
test('create table with custom name references', async (t) => {
- const sqlite = new Database(':memory:');
+ const client = new Database(':memory:');
const users = sqliteTable('users', {
id: int('id').primaryKey({ autoIncrement: true }),
@@ -424,7 +670,7 @@ test('create table with custom name references', async (t) => {
};
const { sqlStatements } = await diffTestSchemasPushSqlite(
- sqlite,
+ client,
schema1,
schema2,
[],
@@ -432,3 +678,613 @@ test('create table with custom name references', async (t) => {
expect(sqlStatements!.length).toBe(0);
});
+
+test('drop not null, add not null', async (t) => {
+ const client = new Database(':memory:');
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ }),
+ posts: sqliteTable('posts', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ userId: int('user_id'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ posts: sqliteTable('posts', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name').notNull(),
+ userId: int('user_id'),
+ }),
+ };
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(client, schema1, schema2, []);
+
+ expect(statements!.length).toBe(2);
+ expect(statements![0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: true,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+ expect(statements![1]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: true,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: true,
+ primaryKey: false,
+ type: 'text',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'user_id',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'posts',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(8);
+ expect(sqlStatements[0]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+\t\`name\` text
+);\n`);
+ expect(sqlStatements[1]).toBe(
+ `INSERT INTO \`__new_users\`("id", "name") SELECT "id", "name" FROM \`users\`;`,
+ );
+ expect(sqlStatements[2]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements[3]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+
+ expect(sqlStatements![4]).toBe(`CREATE TABLE \`__new_posts\` (
+\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+\t\`name\` text NOT NULL,
+\t\`user_id\` integer
+);\n`);
+ expect(sqlStatements![5]).toBe(
+ `INSERT INTO \`__new_posts\`("id", "name", "user_id") SELECT "id", "name", "user_id" FROM \`posts\`;`,
+ );
+ expect(sqlStatements![6]).toBe(`DROP TABLE \`posts\`;`);
+ expect(sqlStatements![7]).toBe(
+ `ALTER TABLE \`__new_posts\` RENAME TO \`posts\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+test('rename table and change data type', async (t) => {
+ const client = new Database(':memory:');
+
+ const schema1 = {
+ users: sqliteTable('old_users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ age: text('age'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('new_users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ age: integer('age'),
+ }),
+ };
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(client, schema1, schema2, [
+ 'public.old_users->public.new_users',
+ ]);
+
+ expect(statements!.length).toBe(2);
+ expect(statements![0]).toStrictEqual({
+ fromSchema: undefined,
+ tableNameFrom: 'old_users',
+ tableNameTo: 'new_users',
+ toSchema: undefined,
+ type: 'rename_table',
+ });
+ expect(statements![1]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: true,
+ name: 'id',
+ notNull: true,
+ generated: undefined,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'age',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'new_users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements!.length).toBe(5);
+ expect(sqlStatements![0]).toBe(
+ `ALTER TABLE \`old_users\` RENAME TO \`new_users\`;`,
+ );
+ expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_new_users\` (
+\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+\t\`age\` integer
+);\n`);
+ expect(sqlStatements![2]).toBe(
+ `INSERT INTO \`__new_new_users\`("id", "age") SELECT "id", "age" FROM \`new_users\`;`,
+ );
+ expect(sqlStatements![3]).toBe(`DROP TABLE \`new_users\`;`);
+ expect(sqlStatements![4]).toBe(
+ `ALTER TABLE \`__new_new_users\` RENAME TO \`new_users\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+test('rename column and change data type', async (t) => {
+ const client = new Database(':memory:');
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ age: integer('age'),
+ }),
+ };
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(client, schema1, schema2, [
+ 'public.users.name->public.users.age',
+ ]);
+
+ expect(statements!.length).toBe(1);
+ expect(statements![0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: true,
+ name: 'id',
+ notNull: true,
+ generated: undefined,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'age',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements!.length).toBe(4);
+ expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+\t\`age\` integer
+);\n`);
+ expect(sqlStatements![1]).toBe(
+ `INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`,
+ );
+ expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements![3]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+test('recreate table with nested references', async (t) => {
+ const client = new Database(':memory:');
+
+ let users = sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ age: integer('age'),
+ });
+ let subscriptions = sqliteTable('subscriptions', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ userId: integer('user_id').references(() => users.id),
+ customerId: text('customer_id'),
+ });
+ const schema1 = {
+ users: users,
+ subscriptions: subscriptions,
+ subscriptionMetadata: sqliteTable('subscriptions_metadata', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ subscriptionId: text('subscription_id').references(
+ () => subscriptions.id,
+ ),
+ }),
+ };
+
+ users = sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: false }),
+ name: text('name'),
+ age: integer('age'),
+ });
+ const schema2 = {
+ users: users,
+ subscriptions: subscriptions,
+ subscriptionMetadata: sqliteTable('subscriptions_metadata', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ subscriptionId: text('subscription_id').references(
+ () => subscriptions.id,
+ ),
+ }),
+ };
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(client, schema1, schema2, [
+ 'public.users.name->public.users.age',
+ ]);
+
+ expect(statements!.length).toBe(1);
+ expect(statements![0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: false,
+ name: 'id',
+ notNull: true,
+ generated: undefined,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'name',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ {
+ autoincrement: false,
+ name: 'age',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements!.length).toBe(6);
+ expect(sqlStatements![0]).toBe('PRAGMA foreign_keys=OFF;');
+ expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY NOT NULL,
+\t\`name\` text,
+\t\`age\` integer
+);\n`);
+ expect(sqlStatements![2]).toBe(
+ `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`,
+ );
+ expect(sqlStatements![3]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements![4]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+ expect(sqlStatements![5]).toBe('PRAGMA foreign_keys=ON;');
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
+
+test('recreate table with added column not null and without default with data', async (t) => {
+ const client = new Database(':memory:');
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ age: integer('age'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: false }),
+ name: text('name'),
+ age: integer('age'),
+ newColumn: text('new_column').notNull(),
+ }),
+ };
+
+ const seedStatements = [
+ `INSERT INTO \`users\` ("name", "age") VALUES ('drizzle', 12)`,
+ `INSERT INTO \`users\` ("name", "age") VALUES ('turso', 12)`,
+ ];
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(
+ client,
+ schema1,
+ schema2,
+ [],
+ false,
+ seedStatements,
+ );
+
+ expect(statements!.length).toBe(1);
+ expect(statements![0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: false,
+ name: 'id',
+ notNull: true,
+ generated: undefined,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'name',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ {
+ autoincrement: false,
+ name: 'age',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'new_column',
+ notNull: true,
+ generated: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements!.length).toBe(4);
+ expect(sqlStatements[0]).toBe('DELETE FROM \`users\`;');
+ expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY NOT NULL,
+\t\`name\` text,
+\t\`age\` integer,
+\t\`new_column\` text NOT NULL
+);\n`);
+ expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements![3]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(1);
+ expect(infoToPrint![0]).toBe(
+ `· You're about to add not-null ${
+ chalk.underline('new_column')
+ } column without default value to table, which contains 2 items`,
+ );
+ expect(shouldAskForApprove).toBe(true);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(1);
+ expect(tablesToTruncate![0]).toBe('users');
+});
+
+test('recreate table with added column not null and without default without data', async (t) => {
+ const client = new Database(':memory:');
+
+ const schema1 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ age: integer('age'),
+ }),
+ };
+
+ const schema2 = {
+ users: sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: false }),
+ name: text('name'),
+ age: integer('age'),
+ newColumn: text('new_column').notNull(),
+ }),
+ };
+
+ const {
+ statements,
+ sqlStatements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await diffTestSchemasPushSqlite(
+ client,
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements!.length).toBe(1);
+ expect(statements![0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: false,
+ name: 'id',
+ notNull: true,
+ generated: undefined,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'name',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ {
+ autoincrement: false,
+ name: 'age',
+ notNull: false,
+ generated: undefined,
+ primaryKey: false,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'new_column',
+ notNull: true,
+ generated: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements!.length).toBe(4);
+ expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY NOT NULL,
+\t\`name\` text,
+\t\`age\` integer,
+\t\`new_column\` text NOT NULL
+);\n`);
+ expect(sqlStatements![1]).toBe(
+ 'INSERT INTO `__new_users`("id", "name", "age", "new_column") SELECT "id", "name", "age", "new_column" FROM `users`;',
+ );
+ expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements![3]).toBe(
+ `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`,
+ );
+
+ expect(columnsToRemove!.length).toBe(0);
+ expect(infoToPrint!.length).toBe(0);
+ expect(shouldAskForApprove).toBe(false);
+ expect(tablesToRemove!.length).toBe(0);
+ expect(tablesToTruncate!.length).toBe(0);
+});
diff --git a/drizzle-kit/tests/schemaDiffer.ts b/drizzle-kit/tests/schemaDiffer.ts
index f300fc68c..3567c4f8c 100644
--- a/drizzle-kit/tests/schemaDiffer.ts
+++ b/drizzle-kit/tests/schemaDiffer.ts
@@ -1,4 +1,5 @@
import { PGlite } from '@electric-sql/pglite';
+import { Client } from '@libsql/client/.';
import { Database } from 'better-sqlite3';
import { is } from 'drizzle-orm';
import { MySqlSchema, MySqlTable } from 'drizzle-orm/mysql-core';
@@ -7,6 +8,7 @@ import { SingleStoreSchema, SingleStoreTable } from 'drizzle-orm/singlestore-cor
import { SQLiteTable } from 'drizzle-orm/sqlite-core';
import * as fs from 'fs';
import { Connection } from 'mysql2/promise';
+import { libSqlLogSuggestionsAndReturn } from 'src/cli/commands/libSqlPushUtils';
import {
columnsResolver,
enumsResolver,
@@ -16,6 +18,7 @@ import {
tablesResolver,
} from 'src/cli/commands/migrate';
import { logSuggestionsAndReturn } from 'src/cli/commands/sqlitePushUtils';
+import { CasingType } from 'src/cli/validations/common';
import { schemaToTypeScript as schemaToTypeScriptMySQL } from 'src/introspect-mysql';
import { schemaToTypeScript } from 'src/introspect-pg';
import { schemaToTypeScript as schemaToTypeScriptSingleStore } from 'src/introspect-singlestore';
@@ -36,6 +39,7 @@ import { prepareFromSqliteImports } from 'src/serializer/sqliteImports';
import { sqliteSchema, squashSqliteScheme } from 'src/serializer/sqliteSchema';
import { fromDatabase as fromSqliteDatabase, generateSqliteSnapshot } from 'src/serializer/sqliteSerializer';
import {
+ applyLibSQLSnapshotsDiff,
applyMysqlSnapshotsDiff,
applyPgSnapshotsDiff,
applySingleStoreSnapshotsDiff,
@@ -417,8 +421,9 @@ export const diffTestSchemasPush = async (
renamesArr: string[],
cli: boolean = false,
schemas: string[] = ['public'],
+ casing?: CasingType | undefined,
) => {
- const { sqlStatements } = await applyPgDiffs(left);
+ const { sqlStatements } = await applyPgDiffs(left, casing);
for (const st of sqlStatements) {
await client.query(st);
}
@@ -448,6 +453,7 @@ export const diffTestSchemasPush = async (
leftEnums,
leftSchemas,
leftSequences,
+ casing,
);
const { version: v1, dialect: d1, ...rest1 } = introspectedSchema;
@@ -508,7 +514,7 @@ export const diffTestSchemasPush = async (
}
};
-export const applyPgDiffs = async (sn: PostgresSchema) => {
+export const applyPgDiffs = async (sn: PostgresSchema, casing: CasingType | undefined) => {
const dryRun = {
version: '7',
dialect: 'postgresql',
@@ -533,7 +539,7 @@ export const applyPgDiffs = async (sn: PostgresSchema) => {
const sequences = Object.values(sn).filter((it) => isPgSequence(it)) as PgSequence[];
- const serialized1 = generatePgSnapshot(tables, enums, schemas, sequences);
+ const serialized1 = generatePgSnapshot(tables, enums, schemas, sequences, casing);
const { version: v1, dialect: d1, ...rest1 } = serialized1;
@@ -569,6 +575,7 @@ export const diffTestSchemas = async (
right: PostgresSchema,
renamesArr: string[],
cli: boolean = false,
+ casing?: CasingType | undefined,
) => {
const leftTables = Object.values(left).filter((it) => is(it, PgTable)) as PgTable[];
@@ -591,12 +598,14 @@ export const diffTestSchemas = async (
leftEnums,
leftSchemas,
leftSequences,
+ casing,
);
const serialized2 = generatePgSnapshot(
rightTables,
rightEnums,
rightSchemas,
rightSequences,
+ casing,
);
const { version: v1, dialect: d1, ...rest1 } = serialized1;
@@ -662,8 +671,9 @@ export const diffTestSchemasPushMysql = async (
renamesArr: string[],
schema: string,
cli: boolean = false,
+ casing?: CasingType | undefined,
) => {
- const { sqlStatements } = await applyMySqlDiffs(left);
+ const { sqlStatements } = await applyMySqlDiffs(left, casing);
for (const st of sqlStatements) {
await client.query(st);
}
@@ -680,7 +690,7 @@ export const diffTestSchemasPushMysql = async (
const leftTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[];
- const serialized2 = generateMySqlSnapshot(leftTables);
+ const serialized2 = generateMySqlSnapshot(leftTables, casing);
const { version: v1, dialect: d1, ...rest1 } = introspectedSchema;
const { version: v2, dialect: d2, ...rest2 } = serialized2;
@@ -734,7 +744,7 @@ export const diffTestSchemasPushMysql = async (
}
};
-export const applyMySqlDiffs = async (sn: MysqlSchema) => {
+export const applyMySqlDiffs = async (sn: MysqlSchema, casing: CasingType | undefined) => {
const dryRun = {
version: '5',
dialect: 'mysql',
@@ -752,7 +762,7 @@ export const applyMySqlDiffs = async (sn: MysqlSchema) => {
const tables = Object.values(sn).filter((it) => is(it, MySqlTable)) as MySqlTable[];
- const serialized1 = generateMySqlSnapshot(tables);
+ const serialized1 = generateMySqlSnapshot(tables, casing);
const { version: v1, dialect: d1, ...rest1 } = serialized1;
@@ -785,13 +795,14 @@ export const diffTestSchemasMysql = async (
right: MysqlSchema,
renamesArr: string[],
cli: boolean = false,
+ casing?: CasingType | undefined,
) => {
const leftTables = Object.values(left).filter((it) => is(it, MySqlTable)) as MySqlTable[];
const rightTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[];
- const serialized1 = generateMySqlSnapshot(leftTables);
- const serialized2 = generateMySqlSnapshot(rightTables);
+ const serialized1 = generateMySqlSnapshot(leftTables, casing);
+ const serialized2 = generateMySqlSnapshot(rightTables, casing);
const { version: v1, dialect: d1, ...rest1 } = serialized1;
const { version: v2, dialect: d2, ...rest2 } = serialized2;
@@ -958,11 +969,19 @@ export const diffTestSchemasPushSqlite = async (
right: SqliteSchema,
renamesArr: string[],
cli: boolean = false,
+ seedStatements: string[] = [],
+ casing?: CasingType | undefined,
) => {
const { sqlStatements } = await applySqliteDiffs(left, 'push');
+
for (const st of sqlStatements) {
client.exec(st);
}
+
+ for (const st of seedStatements) {
+ client.exec(st);
+ }
+
// do introspect into PgSchemaInternal
const introspectedSchema = await fromSqliteDatabase(
{
@@ -976,9 +995,9 @@ export const diffTestSchemasPushSqlite = async (
undefined,
);
- const leftTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
+ const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
- const serialized2 = generateSqliteSnapshot(leftTables);
+ const serialized2 = generateSqliteSnapshot(rightTables, casing);
const { version: v1, dialect: d1, ...rest1 } = introspectedSchema;
const { version: v2, dialect: d2, ...rest2 } = serialized2;
@@ -1015,7 +1034,15 @@ export const diffTestSchemasPushSqlite = async (
'push',
);
- const { statementsToExecute } = await logSuggestionsAndReturn(
+ const {
+ statementsToExecute,
+ columnsToRemove,
+ infoToPrint,
+ schemasToRemove,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await logSuggestionsAndReturn(
{
query: async (sql: string, params: any[] = []) => {
return client.prepare(sql).bind(params).all() as T[];
@@ -1030,7 +1057,16 @@ export const diffTestSchemasPushSqlite = async (
_meta!,
);
- return { sqlStatements: statementsToExecute, statements };
+ return {
+ sqlStatements: statementsToExecute,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ schemasToRemove,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ };
} else {
const { sqlStatements, statements } = await applySqliteSnapshotsDiff(
sn1,
@@ -1045,9 +1081,141 @@ export const diffTestSchemasPushSqlite = async (
}
};
+export async function diffTestSchemasPushLibSQL(
+ client: Client,
+ left: SqliteSchema,
+ right: SqliteSchema,
+ renamesArr: string[],
+ cli: boolean = false,
+ seedStatements: string[] = [],
+ casing?: CasingType | undefined,
+) {
+ const { sqlStatements } = await applyLibSQLDiffs(left, 'push', casing);
+
+ for (const st of sqlStatements) {
+ await client.execute(st);
+ }
+
+ for (const st of seedStatements) {
+ await client.execute(st);
+ }
+
+ const introspectedSchema = await fromSqliteDatabase(
+ {
+ query: async (sql: string, params?: any[]) => {
+ const res = await client.execute({ sql, args: params || [] });
+ return res.rows as T[];
+ },
+ run: async (query: string) => {
+ await client.execute(query);
+ },
+ batch: async (
+ queries: { query: string; values?: any[] | undefined }[],
+ ) => {
+ await client.batch(
+ queries.map((it) => ({ sql: it.query, args: it.values ?? [] })),
+ );
+ },
+ },
+ undefined,
+ );
+
+ const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
+
+ const serialized2 = generateSqliteSnapshot(rightTables, casing);
+
+ const { version: v1, dialect: d1, ...rest1 } = introspectedSchema;
+ const { version: v2, dialect: d2, ...rest2 } = serialized2;
+
+ const sch1 = {
+ version: '6',
+ dialect: 'sqlite',
+ id: '0',
+ prevId: '0',
+ ...rest1,
+ } as const;
+
+ const sch2 = {
+ version: '6',
+ dialect: 'sqlite',
+ id: '0',
+ prevId: '0',
+ ...rest2,
+ } as const;
+
+ const sn1 = squashSqliteScheme(sch1, 'push');
+ const sn2 = squashSqliteScheme(sch2, 'push');
+
+ const renames = new Set(renamesArr);
+
+ if (!cli) {
+ const { sqlStatements, statements, _meta } = await applyLibSQLSnapshotsDiff(
+ sn1,
+ sn2,
+ testTablesResolver(renames),
+ testColumnsResolver(renames),
+ sch1,
+ sch2,
+ 'push',
+ );
+
+ const {
+ statementsToExecute,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await libSqlLogSuggestionsAndReturn(
+ {
+ query: async (sql: string, params?: any[]) => {
+ const res = await client.execute({ sql, args: params || [] });
+ return res.rows as T[];
+ },
+ run: async (query: string) => {
+ await client.execute(query);
+ },
+ batch: async (
+ queries: { query: string; values?: any[] | undefined }[],
+ ) => {
+ await client.batch(
+ queries.map((it) => ({ sql: it.query, args: it.values ?? [] })),
+ );
+ },
+ },
+ statements,
+ sn1,
+ sn2,
+ _meta!,
+ );
+
+ return {
+ sqlStatements: statementsToExecute,
+ statements,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ };
+ } else {
+ const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff(
+ sn1,
+ sn2,
+ tablesResolver,
+ columnsResolver,
+ sch1,
+ sch2,
+ 'push',
+ );
+ return { sqlStatements, statements };
+ }
+}
+
export const applySqliteDiffs = async (
sn: SqliteSchema,
action?: 'push' | undefined,
+ casing?: CasingType | undefined,
) => {
const dryRun = {
version: '6',
@@ -1066,7 +1234,7 @@ export const applySqliteDiffs = async (
const tables = Object.values(sn).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
- const serialized1 = generateSqliteSnapshot(tables);
+ const serialized1 = generateSqliteSnapshot(tables, casing);
const { version: v1, dialect: d1, ...rest1 } = serialized1;
@@ -1093,18 +1261,68 @@ export const applySqliteDiffs = async (
return { sqlStatements, statements };
};
+export const applyLibSQLDiffs = async (
+ sn: SqliteSchema,
+ action?: 'push' | undefined,
+ casing?: CasingType | undefined,
+) => {
+ const dryRun = {
+ version: '6',
+ dialect: 'sqlite',
+ id: '0',
+ prevId: '0',
+ tables: {},
+ enums: {},
+ schemas: {},
+ _meta: {
+ schemas: {},
+ tables: {},
+ columns: {},
+ },
+ } as const;
+
+ const tables = Object.values(sn).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
+
+ const serialized1 = generateSqliteSnapshot(tables, casing);
+
+ const { version: v1, dialect: d1, ...rest1 } = serialized1;
+
+ const sch1 = {
+ version: '6',
+ dialect: 'sqlite',
+ id: '0',
+ prevId: '0',
+ ...rest1,
+ } as const;
+
+ const sn1 = squashSqliteScheme(sch1, action);
+
+ const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff(
+ dryRun,
+ sn1,
+ testTablesResolver(new Set()),
+ testColumnsResolver(new Set()),
+ dryRun,
+ sch1,
+ action,
+ );
+
+ return { sqlStatements, statements };
+};
+
export const diffTestSchemasSqlite = async (
left: SqliteSchema,
right: SqliteSchema,
renamesArr: string[],
cli: boolean = false,
+ casing?: CasingType | undefined,
) => {
const leftTables = Object.values(left).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
- const serialized1 = generateSqliteSnapshot(leftTables);
- const serialized2 = generateSqliteSnapshot(rightTables);
+ const serialized1 = generateSqliteSnapshot(leftTables, casing);
+ const serialized2 = generateSqliteSnapshot(rightTables, casing);
const { version: v1, dialect: d1, ...rest1 } = serialized1;
const { version: v2, dialect: d2, ...rest2 } = serialized2;
@@ -1153,6 +1371,67 @@ export const diffTestSchemasSqlite = async (
return { sqlStatements, statements };
};
+export const diffTestSchemasLibSQL = async (
+ left: SqliteSchema,
+ right: SqliteSchema,
+ renamesArr: string[],
+ cli: boolean = false,
+ casing?: CasingType | undefined,
+) => {
+ const leftTables = Object.values(left).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
+
+ const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
+
+ const serialized1 = generateSqliteSnapshot(leftTables, casing);
+ const serialized2 = generateSqliteSnapshot(rightTables, casing);
+
+ const { version: v1, dialect: d1, ...rest1 } = serialized1;
+ const { version: v2, dialect: d2, ...rest2 } = serialized2;
+
+ const sch1 = {
+ version: '6',
+ dialect: 'sqlite',
+ id: '0',
+ prevId: '0',
+ ...rest1,
+ } as const;
+
+ const sch2 = {
+ version: '6',
+ dialect: 'sqlite',
+ id: '0',
+ prevId: '0',
+ ...rest2,
+ } as const;
+
+ const sn1 = squashSqliteScheme(sch1);
+ const sn2 = squashSqliteScheme(sch2);
+
+ const renames = new Set(renamesArr);
+
+ if (!cli) {
+ const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff(
+ sn1,
+ sn2,
+ testTablesResolver(renames),
+ testColumnsResolver(renames),
+ sch1,
+ sch2,
+ );
+ return { sqlStatements, statements };
+ }
+
+ const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff(
+ sn1,
+ sn2,
+ tablesResolver,
+ columnsResolver,
+ sch1,
+ sch2,
+ );
+ return { sqlStatements, statements };
+};
+
// --- Introspect to file helpers ---
export const introspectPgToFile = async (
@@ -1160,9 +1439,10 @@ export const introspectPgToFile = async (
initSchema: PostgresSchema,
testName: string,
schemas: string[] = ['public'],
+ casing?: CasingType | undefined,
) => {
// put in db
- const { sqlStatements } = await applyPgDiffs(initSchema);
+ const { sqlStatements } = await applyPgDiffs(initSchema, casing);
for (const st of sqlStatements) {
await client.query(st);
}
@@ -1192,6 +1472,7 @@ export const introspectPgToFile = async (
response.enums,
response.schemas,
response.sequences,
+ casing,
);
const { version: v2, dialect: d2, ...rest2 } = afterFileImports;
@@ -1220,6 +1501,7 @@ export const introspectPgToFile = async (
leftEnums,
leftSchemas,
leftSequences,
+ casing,
);
const { version: initV, dialect: initD, ...initRest } = initSnapshot;
@@ -1263,9 +1545,10 @@ export const introspectMySQLToFile = async (
initSchema: MysqlSchema,
testName: string,
schema: string,
+ casing?: CasingType | undefined,
) => {
// put in db
- const { sqlStatements } = await applyMySqlDiffs(initSchema);
+ const { sqlStatements } = await applyMySqlDiffs(initSchema, casing);
for (const st of sqlStatements) {
await client.query(st);
}
@@ -1289,7 +1572,7 @@ export const introspectMySQLToFile = async (
`tests/introspect/mysql/${testName}.ts`,
]);
- const afterFileImports = generateMySqlSnapshot(response.tables);
+ const afterFileImports = generateMySqlSnapshot(response.tables, casing);
const { version: v2, dialect: d2, ...rest2 } = afterFileImports;
@@ -1306,7 +1589,7 @@ export const introspectMySQLToFile = async (
const leftTables = Object.values(initSchema).filter((it) => is(it, MySqlTable)) as MySqlTable[];
- const initSnapshot = generateMySqlSnapshot(leftTables);
+ const initSnapshot = generateMySqlSnapshot(leftTables, casing);
const { version: initV, dialect: initD, ...initRest } = initSnapshot;
@@ -1428,6 +1711,7 @@ export const introspectSQLiteToFile = async (
client: Database,
initSchema: SqliteSchema,
testName: string,
+ casing?: CasingType | undefined,
) => {
// put in db
const { sqlStatements } = await applySqliteDiffs(initSchema);
@@ -1456,7 +1740,7 @@ export const introspectSQLiteToFile = async (
`tests/introspect/sqlite/${testName}.ts`,
]);
- const afterFileImports = generateSqliteSnapshot(response.tables);
+ const afterFileImports = generateSqliteSnapshot(response.tables, casing);
const { version: v2, dialect: d2, ...rest2 } = afterFileImports;
@@ -1473,7 +1757,7 @@ export const introspectSQLiteToFile = async (
const leftTables = Object.values(initSchema).filter((it) => is(it, SQLiteTable)) as SQLiteTable[];
- const initSnapshot = generateSqliteSnapshot(leftTables);
+ const initSnapshot = generateSqliteSnapshot(leftTables, casing);
const { version: initV, dialect: initD, ...initRest } = initSnapshot;
diff --git a/drizzle-kit/tests/sqlite-columns.test.ts b/drizzle-kit/tests/sqlite-columns.test.ts
index 8a258072a..04dbb940c 100644
--- a/drizzle-kit/tests/sqlite-columns.test.ts
+++ b/drizzle-kit/tests/sqlite-columns.test.ts
@@ -8,6 +8,7 @@ import {
sqliteTable,
text,
} from 'drizzle-orm/sqlite-core';
+import { JsonCreateIndexStatement, JsonRecreateTableStatement } from 'src/jsonStatements';
import { expect, test } from 'vitest';
import { diffTestSchemasSqlite } from './schemaDiffer';
@@ -223,7 +224,7 @@ test('add columns #5', async (t) => {
const { statements } = await diffTestSchemasSqlite(schema1, schema2, []);
// TODO: Fix here
- expect(statements.length).toBe(2);
+ expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
type: 'sqlite_alter_table_add_column',
tableName: 'users',
@@ -332,12 +333,38 @@ test('add foreign key #1', async (t) => {
const { statements } = await diffTestSchemasSqlite(schema1, schema2, []);
expect(statements.length).toBe(1);
- expect(statements[0]).toStrictEqual({
- type: 'create_reference',
- tableName: 'users',
- schema: '',
- data: 'users_report_to_users_id_fk;users;report_to;users;id;no action;no action',
- });
+ expect(statements[0]).toStrictEqual(
+ {
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: true,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ }, {
+ autoincrement: false,
+ generated: undefined,
+ name: 'report_to',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ }],
+ compositePKs: [],
+ referenceData: [{
+ columnsFrom: ['report_to'],
+ columnsTo: ['id'],
+ name: 'users_report_to_users_id_fk',
+ tableFrom: 'users',
+ tableTo: 'users',
+ onDelete: 'no action',
+ onUpdate: 'no action',
+ }],
+ tableName: 'users',
+ uniqueConstraints: [],
+ } as JsonRecreateTableStatement,
+ );
});
test('add foreign key #2', async (t) => {
@@ -371,11 +398,35 @@ test('add foreign key #2', async (t) => {
expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
- type: 'create_reference',
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: true,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ }, {
+ autoincrement: false,
+ generated: undefined,
+ name: 'report_to',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ }],
+ compositePKs: [],
+ referenceData: [{
+ columnsFrom: ['report_to'],
+ columnsTo: ['id'],
+ name: 'reportee_fk',
+ tableFrom: 'users',
+ tableTo: 'users',
+ onDelete: 'no action',
+ onUpdate: 'no action',
+ }],
tableName: 'users',
- schema: '',
- data: 'reportee_fk;users;report_to;users;id;no action;no action',
- });
+ uniqueConstraints: [],
+ } as JsonRecreateTableStatement);
});
test('alter column change name #1', async (t) => {
@@ -513,9 +564,26 @@ test('alter table add composite pk', async (t) => {
expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
- type: 'create_composite_pk',
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: false,
+ generated: undefined,
+ name: 'id1',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ }, {
+ autoincrement: false,
+ generated: undefined,
+ name: 'id2',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ }],
+ compositePKs: [['id1', 'id2']],
+ referenceData: [],
tableName: 'table',
- data: 'id1,id2',
+ uniqueConstraints: [],
});
});
@@ -540,16 +608,19 @@ test('alter column drop not null', async (t) => {
expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
- type: 'alter_table_alter_column_drop_notnull',
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ }],
+ compositePKs: [],
+ referenceData: [],
tableName: 'table',
- columnName: 'name',
- schema: '',
- newDataType: 'text',
- columnDefault: undefined,
- columnOnUpdate: undefined,
- columnNotNull: false,
- columnAutoIncrement: false,
- columnPk: false,
+ uniqueConstraints: [],
});
});
@@ -574,16 +645,19 @@ test('alter column add not null', async (t) => {
expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
- type: 'alter_table_alter_column_set_notnull',
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: true,
+ primaryKey: false,
+ type: 'text',
+ }],
+ compositePKs: [],
+ referenceData: [],
tableName: 'table',
- columnName: 'name',
- schema: '',
- newDataType: 'text',
- columnDefault: undefined,
- columnOnUpdate: undefined,
- columnNotNull: true,
- columnAutoIncrement: false,
- columnPk: false,
+ uniqueConstraints: [],
});
});
@@ -608,16 +682,20 @@ test('alter column add default', async (t) => {
expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
- type: 'alter_table_alter_column_set_default',
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ default: "'dan'",
+ }],
+ compositePKs: [],
+ referenceData: [],
tableName: 'table',
- columnName: 'name',
- schema: '',
- newDataType: 'text',
- columnNotNull: false,
- columnOnUpdate: undefined,
- columnAutoIncrement: false,
- newDefaultValue: "'dan'",
- columnPk: false,
+ uniqueConstraints: [],
});
});
@@ -642,16 +720,19 @@ test('alter column drop default', async (t) => {
expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
- type: 'alter_table_alter_column_drop_default',
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ }],
+ compositePKs: [],
+ referenceData: [],
tableName: 'table',
- columnName: 'name',
- schema: '',
- newDataType: 'text',
- columnNotNull: false,
- columnOnUpdate: undefined,
- columnDefault: undefined,
- columnAutoIncrement: false,
- columnPk: false,
+ uniqueConstraints: [],
});
});
@@ -674,32 +755,84 @@ test('alter column add default not null', async (t) => {
[],
);
- expect(statements.length).toBe(2);
+ expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
- columnAutoIncrement: false,
- columnName: 'name',
- columnNotNull: true,
- columnOnUpdate: undefined,
- columnPk: false,
- newDataType: 'text',
- newDefaultValue: "'dan'",
- schema: '',
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: true,
+ primaryKey: false,
+ type: 'text',
+ default: "'dan'",
+ }],
+ compositePKs: [],
+ referenceData: [],
tableName: 'table',
- type: 'alter_table_alter_column_set_default',
+ uniqueConstraints: [],
});
+});
+test('alter column add default not null with indexes', async (t) => {
+ const from = {
+ users: sqliteTable('table', {
+ name: text('name'),
+ }, (table) => ({
+ someIndex: index('index_name').on(table.name),
+ })),
+ };
+
+ const to = {
+ users: sqliteTable('table', {
+ name: text('name').notNull().default('dan'),
+ }, (table) => ({
+ someIndex: index('index_name').on(table.name),
+ })),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSqlite(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements.length).toBe(2);
expect(statements[0]).toStrictEqual({
- columnAutoIncrement: false,
- columnName: 'name',
- columnNotNull: true,
- columnOnUpdate: undefined,
- columnPk: false,
- newDataType: 'text',
- newDefaultValue: "'dan'",
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: true,
+ primaryKey: false,
+ type: 'text',
+ default: "'dan'",
+ }],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'table',
+ uniqueConstraints: [],
+ });
+ expect(statements[1]).toStrictEqual({
+ data: 'index_name;name;false;',
schema: '',
tableName: 'table',
- type: 'alter_table_alter_column_set_default',
+ type: 'create_index',
+ internal: undefined,
});
+ expect(sqlStatements.length).toBe(7);
+ expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`);
+ expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_table\` (
+\t\`name\` text DEFAULT 'dan' NOT NULL
+);\n`);
+ expect(sqlStatements[2]).toBe(
+ `INSERT INTO \`__new_table\`("name") SELECT "name" FROM \`table\`;`,
+ );
+ expect(sqlStatements[3]).toBe(`DROP TABLE \`table\`;`);
+ expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_table\` RENAME TO \`table\`;`);
+ expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
+ expect(sqlStatements[6]).toBe(`CREATE INDEX \`index_name\` ON \`table\` (\`name\`);`);
});
test('alter column drop default not null', async (t) => {
@@ -721,30 +854,162 @@ test('alter column drop default not null', async (t) => {
[],
);
- expect(statements.length).toBe(2);
+ expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
- columnAutoIncrement: false,
- columnDefault: undefined,
- columnName: 'name',
- columnNotNull: false,
- columnOnUpdate: undefined,
- columnPk: false,
- newDataType: 'text',
- schema: '',
+ type: 'recreate_table',
+ columns: [{
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ }],
+ compositePKs: [],
+ referenceData: [],
tableName: 'table',
- type: 'alter_table_alter_column_drop_default',
+ uniqueConstraints: [],
});
+ expect(sqlStatements.length).toBe(6);
+ expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`);
+ expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_table\` (
+\t\`name\` text
+);\n`);
+ expect(sqlStatements[2]).toBe(
+ `INSERT INTO \`__new_table\`("name") SELECT "name" FROM \`table\`;`,
+ );
+ expect(sqlStatements[3]).toBe(`DROP TABLE \`table\`;`);
+ expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_table\` RENAME TO \`table\`;`);
+ expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
+});
+test('alter column drop generated', async (t) => {
+ const from = {
+ users: sqliteTable('table', {
+ id: int('id').primaryKey().notNull(),
+ name: text('name').generatedAlwaysAs('drizzle is the best').notNull(),
+ }),
+ };
+
+ const to = {
+ users: sqliteTable('table', {
+ id: int('id').primaryKey().notNull(),
+ name: text('name').notNull(),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSqlite(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
expect(statements[0]).toStrictEqual({
columnAutoIncrement: false,
columnDefault: undefined,
+ columnGenerated: undefined,
columnName: 'name',
- columnNotNull: false,
+ columnNotNull: true,
columnOnUpdate: undefined,
columnPk: false,
newDataType: 'text',
schema: '',
tableName: 'table',
- type: 'alter_table_alter_column_drop_default',
+ type: 'alter_table_alter_column_drop_generated',
});
+
+ expect(sqlStatements.length).toBe(2);
+ expect(sqlStatements[0]).toBe(`ALTER TABLE \`table\` DROP COLUMN \`name\`;`);
+ expect(sqlStatements[1]).toBe(`ALTER TABLE \`table\` ADD \`name\` text NOT NULL;`);
+});
+
+test('recreate table with nested references', async (t) => {
+ let users = sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ name: text('name'),
+ age: integer('age'),
+ });
+ let subscriptions = sqliteTable('subscriptions', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ userId: integer('user_id').references(() => users.id),
+ customerId: text('customer_id'),
+ });
+ const schema1 = {
+ users: users,
+ subscriptions: subscriptions,
+ subscriptionMetadata: sqliteTable('subscriptions_metadata', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ subscriptionId: text('subscription_id').references(() => subscriptions.id),
+ }),
+ };
+
+ users = sqliteTable('users', {
+ id: int('id').primaryKey({ autoIncrement: false }),
+ name: text('name'),
+ age: integer('age'),
+ });
+ const schema2 = {
+ users: users,
+ subscriptions: subscriptions,
+ subscriptionMetadata: sqliteTable('subscriptions_metadata', {
+ id: int('id').primaryKey({ autoIncrement: true }),
+ subscriptionId: text('subscription_id').references(() => subscriptions.id),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSqlite(
+ schema1,
+ schema2,
+ [],
+ );
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ columns: [
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'id',
+ notNull: true,
+ primaryKey: true,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ },
+ {
+ autoincrement: false,
+ generated: undefined,
+ name: 'age',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ tableName: 'users',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ });
+
+ expect(sqlStatements.length).toBe(6);
+ expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`);
+ expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY NOT NULL,
+\t\`name\` text,
+\t\`age\` integer
+);\n`);
+ expect(sqlStatements[2]).toBe(
+ `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`,
+ );
+ expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`);
+ expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`);
+ expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
});
diff --git a/drizzle-kit/tests/sqlite-tables.test.ts b/drizzle-kit/tests/sqlite-tables.test.ts
index d7781f150..81ac7f100 100644
--- a/drizzle-kit/tests/sqlite-tables.test.ts
+++ b/drizzle-kit/tests/sqlite-tables.test.ts
@@ -1,5 +1,15 @@
import { sql } from 'drizzle-orm';
-import { AnySQLiteColumn, index, int, primaryKey, sqliteTable, text, uniqueIndex } from 'drizzle-orm/sqlite-core';
+import {
+ AnySQLiteColumn,
+ foreignKey,
+ index,
+ int,
+ primaryKey,
+ sqliteTable,
+ text,
+ unique,
+ uniqueIndex,
+} from 'drizzle-orm/sqlite-core';
import { expect, test } from 'vitest';
import { diffTestSchemasSqlite } from './schemaDiffer';
@@ -162,6 +172,13 @@ test('add table #7', async () => {
expect(statements.length).toBe(2);
expect(statements[0]).toStrictEqual({
+ type: 'rename_table',
+ tableNameFrom: 'users1',
+ tableNameTo: 'users2',
+ fromSchema: undefined,
+ toSchema: undefined,
+ });
+ expect(statements[1]).toStrictEqual({
type: 'sqlite_create_table',
tableName: 'users',
columns: [],
@@ -169,13 +186,6 @@ test('add table #7', async () => {
uniqueConstraints: [],
referenceData: [],
});
- expect(statements[1]).toStrictEqual({
- type: 'rename_table',
- tableNameFrom: 'users1',
- tableNameTo: 'users2',
- fromSchema: undefined,
- toSchema: undefined,
- });
});
test('add table #8', async () => {
@@ -397,3 +407,177 @@ test('add table with indexes', async () => {
'CREATE INDEX `indexColExpr` ON `users` ((lower("email")),`email`);',
]);
});
+
+test('optional db aliases (snake case)', async () => {
+ const from = {};
+
+ const t1 = sqliteTable(
+ 't1',
+ {
+ t1Id1: int().notNull().primaryKey(),
+ t1Col2: int().notNull(),
+ t1Col3: int().notNull(),
+ t2Ref: int().notNull().references(() => t2.t2Id),
+ t1Uni: int().notNull(),
+ t1UniIdx: int().notNull(),
+ t1Idx: int().notNull(),
+ },
+ (table) => ({
+ uni: unique('t1_uni').on(table.t1Uni),
+ uniIdx: uniqueIndex('t1_uni_idx').on(table.t1UniIdx),
+ idx: index('t1_idx').on(table.t1Idx),
+ fk: foreignKey({
+ columns: [table.t1Col2, table.t1Col3],
+ foreignColumns: [t3.t3Id1, t3.t3Id2],
+ }),
+ }),
+ );
+
+ const t2 = sqliteTable(
+ 't2',
+ {
+ t2Id: int().primaryKey({ autoIncrement: true }),
+ },
+ );
+
+ const t3 = sqliteTable(
+ 't3',
+ {
+ t3Id1: int(),
+ t3Id2: int(),
+ },
+ (table) => ({
+ pk: primaryKey({
+ columns: [table.t3Id1, table.t3Id2],
+ }),
+ }),
+ );
+
+ const to = {
+ t1,
+ t2,
+ t3,
+ };
+
+ const { sqlStatements } = await diffTestSchemasSqlite(from, to, [], false, 'snake_case');
+
+ const st1 = `CREATE TABLE \`t1\` (
+ \`t1_id1\` integer PRIMARY KEY NOT NULL,
+ \`t1_col2\` integer NOT NULL,
+ \`t1_col3\` integer NOT NULL,
+ \`t2_ref\` integer NOT NULL,
+ \`t1_uni\` integer NOT NULL,
+ \`t1_uni_idx\` integer NOT NULL,
+ \`t1_idx\` integer NOT NULL,
+ FOREIGN KEY (\`t2_ref\`) REFERENCES \`t2\`(\`t2_id\`) ON UPDATE no action ON DELETE no action,
+ FOREIGN KEY (\`t1_col2\`,\`t1_col3\`) REFERENCES \`t3\`(\`t3_id1\`,\`t3_id2\`) ON UPDATE no action ON DELETE no action
+);
+`;
+
+ const st2 = `CREATE UNIQUE INDEX \`t1_uni_idx\` ON \`t1\` (\`t1_uni_idx\`);`;
+
+ const st3 = `CREATE INDEX \`t1_idx\` ON \`t1\` (\`t1_idx\`);`;
+
+ const st4 = `CREATE UNIQUE INDEX \`t1_uni\` ON \`t1\` (\`t1_uni\`);`;
+
+ const st5 = `CREATE TABLE \`t2\` (
+ \`t2_id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL
+);
+`;
+
+ const st6 = `CREATE TABLE \`t3\` (
+ \`t3_id1\` integer,
+ \`t3_id2\` integer,
+ PRIMARY KEY(\`t3_id1\`, \`t3_id2\`)
+);
+`;
+
+ expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]);
+});
+
+test('optional db aliases (camel case)', async () => {
+ const from = {};
+
+ const t1 = sqliteTable(
+ 't1',
+ {
+ t1_id1: int().notNull().primaryKey(),
+ t1_col2: int().notNull(),
+ t1_col3: int().notNull(),
+ t2_ref: int().notNull().references(() => t2.t2_id),
+ t1_uni: int().notNull(),
+ t1_uni_idx: int().notNull(),
+ t1_idx: int().notNull(),
+ },
+ (table) => ({
+ uni: unique('t1Uni').on(table.t1_uni),
+ uni_idx: uniqueIndex('t1UniIdx').on(table.t1_uni_idx),
+ idx: index('t1Idx').on(table.t1_idx),
+ fk: foreignKey({
+ columns: [table.t1_col2, table.t1_col3],
+ foreignColumns: [t3.t3_id1, t3.t3_id2],
+ }),
+ }),
+ );
+
+ const t2 = sqliteTable(
+ 't2',
+ {
+ t2_id: int().primaryKey({ autoIncrement: true }),
+ },
+ );
+
+ const t3 = sqliteTable(
+ 't3',
+ {
+ t3_id1: int(),
+ t3_id2: int(),
+ },
+ (table) => ({
+ pk: primaryKey({
+ columns: [table.t3_id1, table.t3_id2],
+ }),
+ }),
+ );
+
+ const to = {
+ t1,
+ t2,
+ t3,
+ };
+
+ const { sqlStatements } = await diffTestSchemasSqlite(from, to, [], false, 'camelCase');
+
+ const st1 = `CREATE TABLE \`t1\` (
+ \`t1Id1\` integer PRIMARY KEY NOT NULL,
+ \`t1Col2\` integer NOT NULL,
+ \`t1Col3\` integer NOT NULL,
+ \`t2Ref\` integer NOT NULL,
+ \`t1Uni\` integer NOT NULL,
+ \`t1UniIdx\` integer NOT NULL,
+ \`t1Idx\` integer NOT NULL,
+ FOREIGN KEY (\`t2Ref\`) REFERENCES \`t2\`(\`t2Id\`) ON UPDATE no action ON DELETE no action,
+ FOREIGN KEY (\`t1Col2\`,\`t1Col3\`) REFERENCES \`t3\`(\`t3Id1\`,\`t3Id2\`) ON UPDATE no action ON DELETE no action
+);
+`;
+
+ const st2 = `CREATE UNIQUE INDEX \`t1UniIdx\` ON \`t1\` (\`t1UniIdx\`);`;
+
+ const st3 = `CREATE INDEX \`t1Idx\` ON \`t1\` (\`t1Idx\`);`;
+
+ const st4 = `CREATE UNIQUE INDEX \`t1Uni\` ON \`t1\` (\`t1Uni\`);`;
+
+ const st5 = `CREATE TABLE \`t2\` (
+ \`t2Id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL
+);
+`;
+
+ const st6 = `CREATE TABLE \`t3\` (
+ \`t3Id1\` integer,
+ \`t3Id2\` integer,
+ PRIMARY KEY(\`t3Id1\`, \`t3Id2\`)
+);
+`;
+
+ expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]);
+});
diff --git a/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts b/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts
new file mode 100644
index 000000000..47447decd
--- /dev/null
+++ b/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts
@@ -0,0 +1,1749 @@
+import { JsonAddColumnStatement, JsonSqliteAddColumnStatement, JsonStatement } from 'src/jsonStatements';
+import { SQLiteSchemaSquashed } from 'src/serializer/sqliteSchema';
+import { SQLiteAlterTableAddColumnConvertor } from 'src/sqlgenerator';
+import { libSQLCombineStatements } from 'src/statementCombiner';
+import { expect, test } from 'vitest';
+
+/**
+ * ! before:
+ *
+ * user: {
+ * id INT;
+ * first_name INT;
+ * iq INT;
+ * PRIMARY KEY (id, iq)
+ * INDEXES: {
+ * UNIQUE id;
+ * }
+ * }
+ *
+ * ! after:
+ *
+ * new_user: {
+ * id INT;
+ * first_name INT;
+ * iq INT;
+ * PRIMARY KEY (id, iq)
+ * INDEXES: {}
+ * }
+ *
+ * rename table and drop unique index
+ * expect to get "rename_table" statement and then "recreate_table"
+ */
+test(`rename table and drop index`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'rename_table',
+ fromSchema: '',
+ toSchema: '',
+ tableNameFrom: 'user',
+ tableNameTo: 'new_user',
+ },
+ {
+ type: 'drop_index',
+ tableName: 'new_user',
+ data: 'user_first_name_unique;first_name;true;',
+ schema: '',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ user: {
+ name: 'user',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ first_name: {
+ name: 'first_name',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ iq: {
+ name: 'iq',
+ type: 'int',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {
+ user_first_name_unique: 'user_first_name_unique;first_name;true;',
+ },
+ foreignKeys: {},
+ compositePrimaryKeys: {
+ user_id_iq_pk: 'id,iq',
+ },
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ new_user: {
+ name: 'new_user',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ first_name: {
+ name: 'first_name',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ iq: {
+ name: 'iq',
+ type: 'int',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {
+ new_user_id_iq_pk: 'id,iq',
+ },
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'rename_table',
+ fromSchema: '',
+ toSchema: '',
+ tableNameFrom: 'user',
+ tableNameTo: 'new_user',
+ },
+ {
+ type: 'drop_index',
+ tableName: 'new_user',
+ data: 'user_first_name_unique;first_name;true;',
+ schema: '',
+ },
+ ];
+ expect(libSQLCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+/**
+ * ! before:
+ *
+ * autoincrement1: {
+ * id INT PRIMARY KEY;
+ * }
+ *
+ * autoincrement2: {
+ * id INT PRIMARY KEY AUTOINCREMENT;
+ * }
+ *
+ * dropNotNull: {
+ * id INT NOT NULL;
+ * }
+ *
+ * ! after:
+ *
+ * autoincrement1: {
+ * id INT PRIMARY KEY AUTOINCREMENT;
+ * }
+ *
+ * autoincrement2: {
+ * id INT PRIMARY KEY;
+ * }
+ *
+ * dropNotNull: {
+ * id INT;
+ * }
+ *
+ * set autoincrement for autoincrement1
+ * drop autoincrement for autoincrement2
+ * drop not null for dropNotNull
+ *
+ * expect to get "recreate_table" statements for autoincrement1 and autoincrement2
+ * and an unchanged "alter_table_alter_column_drop_notnull" for dropNotNull
+ */
+test(`set autoincrement. drop autoincrement. drop not null`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'alter_table_alter_column_set_autoincrement',
+ tableName: 'autoincrement1',
+ columnName: 'id',
+ schema: '',
+ newDataType: 'int',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: true,
+ columnAutoIncrement: true,
+ columnPk: true,
+ } as unknown as JsonStatement,
+ {
+ type: 'alter_table_alter_column_drop_autoincrement',
+ tableName: 'autoincrement2',
+ columnName: 'id',
+ schema: '',
+ newDataType: 'int',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: true,
+ columnAutoIncrement: false,
+ columnPk: true,
+ } as unknown as JsonStatement,
+ {
+ type: 'alter_table_alter_column_drop_notnull',
+ tableName: 'dropNotNull',
+ columnName: 'id',
+ schema: '',
+ newDataType: 'int',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ } as unknown as JsonStatement,
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ autoincrement1: {
+ name: 'autoincrement1',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ autoincrement2: {
+ name: 'autoincrement2',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: false,
+ autoincrement: true,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ dropNotNull: {
+ name: 'dropNotNull',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ autoincrement1: {
+ name: 'autoincrement1',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: true,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ autoincrement2: {
+ name: 'autoincrement2',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ dropNotNull: {
+ name: 'dropNotNull',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'recreate_table',
+ tableName: 'autoincrement1',
+ columns: [
+ {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: true,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ },
+ {
+ type: 'recreate_table',
+ tableName: 'autoincrement2',
+ columns: [
+ {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ },
+ {
+ type: 'alter_table_alter_column_drop_notnull',
+ tableName: 'dropNotNull',
+ columnName: 'id',
+ schema: '',
+ newDataType: 'int',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ },
+ ];
+ expect(libSQLCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+/**
+ * ! before:
+ *
+ * pk1: {
+ * id INT;
+ * }
+ *
+ * pk2: {
+ * id INT PRIMARY KEY;
+ * }
+ *
+ * ref_table: {
+ * id INT;
+ * }
+ *
+ * create_reference: {
+ * id INT;
+ * }
+ *
+ * ! after:
+ *
+ * pk1: {
+ * id INT PRIMARY KEY;
+ * }
+ *
+ * pk2: {
+ * id INT;
+ * }
+ *
+ * ref_table: {
+ * id INT;
+ * }
+ *
+ * create_reference: {
+ * id INT -> ref_table INT;
+ * }
+ *
+ * drop primary key for pk2
+ * set primary key for pk1
+ * "create_reference" reference on "ref_table"
+ *
+ * expect to:
+ * - "recreate_table" statement for pk1
+ * - "recreate_table" statement for pk2
+ * - "create_reference" statement for create_reference
+ */
+test(`drop and set primary key. create reference`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'alter_table_alter_column_set_pk',
+ tableName: 'pk1',
+ schema: '',
+ columnName: 'id',
+ },
+ {
+ type: 'alter_table_alter_column_set_notnull',
+ tableName: 'pk1',
+ columnName: 'id',
+ schema: '',
+ newDataType: 'int',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: true,
+ columnAutoIncrement: false,
+ columnPk: true,
+ } as unknown as JsonStatement,
+ {
+ type: 'alter_table_alter_column_drop_pk',
+ tableName: 'pk2',
+ columnName: 'id',
+ schema: '',
+ },
+ {
+ type: 'alter_table_alter_column_drop_notnull',
+ tableName: 'pk2',
+ columnName: 'id',
+ schema: '',
+ newDataType: 'int',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ } as unknown as JsonStatement,
+ {
+ type: 'create_reference',
+ tableName: 'create_reference',
+ data: 'create_reference_id_ref_table_id_fk;create_reference;id;ref_table;id;no action;no action',
+ schema: '',
+ columnNotNull: false,
+ columnDefault: undefined,
+ columnType: 'int',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ create_reference: {
+ name: 'create_reference',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ pk1: {
+ name: 'pk1',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ pk2: {
+ name: 'pk2',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ ref_table: {
+ name: 'ref_table',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ create_reference: {
+ name: 'create_reference',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ create_reference_id_ref_table_id_fk:
+ 'create_reference_id_ref_table_id_fk;create_reference;id;ref_table;id;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ pk1: {
+ name: 'pk1',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ pk2: {
+ name: 'pk2',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ ref_table: {
+ name: 'ref_table',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'recreate_table',
+ tableName: 'pk1',
+ columns: [
+ {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ },
+ {
+ type: 'recreate_table',
+ tableName: 'pk2',
+ columns: [
+ {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ },
+ {
+ type: 'create_reference',
+ tableName: 'create_reference',
+ data: 'create_reference_id_ref_table_id_fk;create_reference;id;ref_table;id;no action;no action',
+ schema: '',
+ columnNotNull: false,
+ columnDefault: undefined,
+ columnType: 'int',
+ },
+ ];
+ expect(libSQLCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+/**
+ * ! before:
+ *
+ * fk1: {
+ * fk_id INT;
+ * fk_id1 INT;
+ * }
+ *
+ * fk2: {
+ * fk2_id INT; -> composite reference on ref_table id INT
+ * fk2_id1 INT; -> composite reference on ref_table id1 INT
+ * }
+ *
+ * ref_table: {
+ * id INT;
+ * id1 INT;
+ * }
+ *
+ * ! after:
+ *
+ * fk1: {
+ * fk_id INT; -> composite reference on ref_table id INT
+ * fk_id1 INT; -> composite reference on ref_table id1 INT
+ * }
+ *
+ * fk2: {
+ * fk2_id INT;
+ * fk2_id1 INT;
+ * }
+ *
+ * ref_table: {
+ * id INT;
+ * id1 INT;
+ * }
+ *
+ * set multi column reference for fk1
+ * drop multi column reference for fk2
+ *
+ * expect to:
+ * - "recreate_table" statement for fk1
+ * - "recreate_table" statement for fk2
+ */
+test(`set and drop multiple columns reference`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'delete_reference',
+ tableName: 'fk1',
+ data: 'fk1_fk_id_fk_id1_ref_table_id_id1_fk;fk1;fk_id,fk_id1;ref_table;id,id1;no action;no action',
+ schema: '',
+ isMulticolumn: true,
+ },
+ {
+ type: 'create_reference',
+ tableName: 'fk2',
+ data: 'fk2_fk2_id_fk2_id1_ref_table_id_id1_fk;fk2;fk2_id,fk2_id1;ref_table;id,id1;no action;no action',
+ schema: '',
+ isMulticolumn: true,
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ fk1: {
+ name: 'fk1',
+ columns: {
+ fk_id: {
+ name: 'fk_id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ fk_id1: {
+ name: 'fk_id1',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ fk1_fk_id_fk_id1_ref_table_id_id1_fk:
+ 'fk1_fk_id_fk_id1_ref_table_id_id1_fk;fk1;fk_id,fk_id1;ref_table;id,id1;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ fk2: {
+ name: 'fk2',
+ columns: {
+ fk2_id: {
+ name: 'fk2_id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ fk2_id1: {
+ name: 'fk2_id1',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ ref_table: {
+ name: 'ref_table',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ id1: {
+ name: 'id1',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ fk1: {
+ name: 'fk1',
+ columns: {
+ fk_id: {
+ name: 'fk_id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ fk_id1: {
+ name: 'fk_id1',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ fk2: {
+ name: 'fk2',
+ columns: {
+ fk2_id: {
+ name: 'fk2_id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ fk2_id1: {
+ name: 'fk2_id1',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ fk2_fk2_id_fk2_id1_ref_table_id_id1_fk:
+ 'fk2_fk2_id_fk2_id1_ref_table_id_id1_fk;fk2;fk2_id,fk2_id1;ref_table;id,id1;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ ref_table: {
+ name: 'ref_table',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ id1: {
+ name: 'id1',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'recreate_table',
+ tableName: 'fk1',
+ columns: [
+ {
+ name: 'fk_id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ {
+ name: 'fk_id1',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ },
+ {
+ type: 'recreate_table',
+ tableName: 'fk2',
+ columns: [
+ {
+ name: 'fk2_id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ {
+ name: 'fk2_id1',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [
+ {
+ name: 'fk2_fk2_id_fk2_id1_ref_table_id_id1_fk',
+ tableFrom: 'fk2',
+ tableTo: 'ref_table',
+ columnsFrom: ['fk2_id', 'fk2_id1'],
+ columnsTo: ['id', 'id1'],
+ onDelete: 'no action',
+ onUpdate: 'no action',
+ },
+ ],
+ uniqueConstraints: [],
+ },
+ ];
+ expect(libSQLCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+/**
+ * ! before:
+ *
+ * pk: {
+ * pk TEXT PRIMARY KEY;
+ * }
+ *
+ * simple: {
+ * simple TEXT;
+ * }
+ *
+ * unique: {
+ * unique INT UNIQUE;
+ * }
+ *
+ * ! after:
+ *
+ * pk: {
+ * pk INT PRIMARY KEY;
+ * }
+ *
+ * simple: {
+ * simple INT;
+ * }
+ *
+ * unique: {
+ * unique TEXT UNIQUE;
+ * }
+ *
+ * set new type for primary key column
+ * set new type for unique column
+ * set new type for column without pk or unique
+ *
+ * expect to:
+ * - "recreate_table" statement for pk
+ * - "recreate_table" statement for unique
+ * - "alter_table_alter_column_set_type" statement for simple
+ * - "create_index" statement for unique
+ */
+test(`set new type for primary key, unique and normal column`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'alter_table_alter_column_set_type',
+ tableName: 'pk',
+ columnName: 'pk',
+ newDataType: 'int',
+ oldDataType: 'text',
+ schema: '',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: true,
+ columnAutoIncrement: false,
+ columnPk: true,
+ } as unknown as JsonStatement,
+ {
+ type: 'alter_table_alter_column_set_type',
+ tableName: 'simple',
+ columnName: 'simple',
+ newDataType: 'int',
+ oldDataType: 'text',
+ schema: '',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ } as unknown as JsonStatement,
+ {
+ type: 'alter_table_alter_column_set_type',
+ tableName: 'unique',
+ columnName: 'unique',
+ newDataType: 'text',
+ oldDataType: 'int',
+ schema: '',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ } as unknown as JsonStatement,
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ pk: {
+ name: 'pk',
+ columns: {
+ pk: {
+ name: 'pk',
+ type: 'text',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ simple: {
+ name: 'simple',
+ columns: {
+ simple: {
+ name: 'simple',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ unique: {
+ name: 'unique',
+ columns: {
+ unique: {
+ name: 'unique',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {
+ unique_unique_unique: 'unique_unique_unique;unique;true;',
+ },
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ pk: {
+ name: 'pk',
+ columns: {
+ pk: {
+ name: 'pk',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ simple: {
+ name: 'simple',
+ columns: {
+ simple: {
+ name: 'simple',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ unique: {
+ name: 'unique',
+ columns: {
+ unique: {
+ name: 'unique',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {
+ unique_unique_unique: 'unique_unique_unique;unique;true;',
+ },
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'recreate_table',
+ tableName: 'pk',
+ columns: [
+ {
+ name: 'pk',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ },
+ {
+ type: 'alter_table_alter_column_set_type',
+ tableName: 'simple',
+ columnName: 'simple',
+ newDataType: 'int',
+ oldDataType: 'text',
+ schema: '',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ },
+ {
+ type: 'alter_table_alter_column_set_type',
+ tableName: 'unique',
+ columnName: 'unique',
+ newDataType: 'text',
+ oldDataType: 'int',
+ schema: '',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ },
+ ];
+ expect(libSQLCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+test(`add columns. set fk`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ },
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ },
+ {
+ type: 'create_reference',
+ tableName: 'ref',
+ data: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action',
+ schema: '',
+ columnNotNull: false,
+ columnDefault: undefined,
+ columnType: 'integer',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test: {
+ name: 'test',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test1: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ },
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ },
+ {
+ type: 'create_reference',
+ tableName: 'ref',
+ data: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action',
+ schema: '',
+ columnNotNull: false,
+ columnDefault: undefined,
+ columnType: 'integer',
+ },
+ ];
+ expect(libSQLCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+test(`add column and fk`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ },
+ {
+ type: 'create_reference',
+ tableName: 'ref',
+ data: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ schema: '',
+ columnNotNull: false,
+ columnDefault: undefined,
+ columnType: 'integer',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test1: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_test1_user_new_age_fk: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test: {
+ name: 'test',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test1: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ },
+ ];
+ expect(libSQLCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+test(`add column and fk`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ },
+ {
+ type: 'create_reference',
+ tableName: 'ref',
+ data: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ schema: '',
+ columnNotNull: false,
+ columnDefault: undefined,
+ columnType: 'integer',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test1: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_test1_user_new_age_fk: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test: {
+ name: 'test',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test1: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ },
+ ];
+ expect(libSQLCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
diff --git a/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts b/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts
new file mode 100644
index 000000000..2fcaf6436
--- /dev/null
+++ b/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts
@@ -0,0 +1,1170 @@
+import { JsonStatement } from 'src/jsonStatements';
+import { SQLiteSchemaSquashed } from 'src/serializer/sqliteSchema';
+import { sqliteCombineStatements } from 'src/statementCombiner';
+import { expect, test } from 'vitest';
+
+test(`renamed column and altered this column type`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'alter_table_rename_column',
+ tableName: 'user',
+ oldColumnName: 'lastName',
+ newColumnName: 'lastName123',
+ schema: '',
+ },
+ {
+ type: 'alter_table_alter_column_set_type',
+ tableName: 'user',
+ columnName: 'lastName123',
+ newDataType: 'int',
+ oldDataType: 'text',
+ schema: '',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ columnIsUnique: false,
+ } as unknown as JsonStatement,
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ user: {
+ name: 'user',
+ columns: {
+ firstName: {
+ name: 'firstName',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ lastName: {
+ name: 'lastName',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test: {
+ name: 'test',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ user: {
+ name: 'user',
+ columns: {
+ firstName: {
+ name: 'firstName',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ lastName: {
+ name: 'lastName123',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test: {
+ name: 'test',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'recreate_table',
+ tableName: 'user',
+ columns: [
+ {
+ name: 'firstName',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ {
+ name: 'lastName123',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ {
+ name: 'test',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ },
+ ];
+ expect(sqliteCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+test(`renamed column and dropped column "test"`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'alter_table_rename_column',
+ tableName: 'user',
+ oldColumnName: 'lastName',
+ newColumnName: 'lastName123',
+ schema: '',
+ },
+ {
+ type: 'alter_table_drop_column',
+ tableName: 'user',
+ columnName: 'test',
+ schema: '',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ user: {
+ name: 'user',
+ columns: {
+ firstName: {
+ name: 'firstName',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ lastName: {
+ name: 'lastName',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test: {
+ name: 'test',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ user: {
+ name: 'user',
+ columns: {
+ firstName: {
+ name: 'firstName',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ lastName: {
+ name: 'lastName123',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test: {
+ name: 'test',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements: JsonStatement[] = [
+ {
+ type: 'alter_table_rename_column',
+ tableName: 'user',
+ oldColumnName: 'lastName',
+ newColumnName: 'lastName123',
+ schema: '',
+ },
+ {
+ type: 'alter_table_drop_column',
+ tableName: 'user',
+ columnName: 'test',
+ schema: '',
+ },
+ ];
+ expect(sqliteCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+test(`dropped column that is part of composite pk`, async (t) => {
+ const statements: JsonStatement[] = [
+ { type: 'delete_composite_pk', tableName: 'user', data: 'id,iq' },
+ {
+ type: 'alter_table_alter_column_set_pk',
+ tableName: 'user',
+ schema: '',
+ columnName: 'id',
+ },
+ {
+ type: 'alter_table_drop_column',
+ tableName: 'user',
+ columnName: 'iq',
+ schema: '',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ user: {
+ name: 'user',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ first_nam: {
+ name: 'first_nam',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ iq: {
+ name: 'iq',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {
+ user_id_iq_pk: 'id,iq',
+ },
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ user: {
+ name: 'user',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: false,
+ autoincrement: false,
+ },
+ first_nam: {
+ name: 'first_nam',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements: JsonStatement[] = [
+ {
+ type: 'recreate_table',
+ tableName: 'user',
+ columns: [
+ {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: false,
+ autoincrement: false,
+ },
+ {
+ name: 'first_nam',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ },
+ ];
+ expect(sqliteCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+test(`drop column "ref"."name", rename column "ref"."age". dropped primary key "user"."id". Set not null to "user"."iq"`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'alter_table_rename_column',
+ tableName: 'ref',
+ oldColumnName: 'age',
+ newColumnName: 'age1',
+ schema: '',
+ },
+ {
+ type: 'alter_table_alter_column_drop_pk',
+ tableName: 'user',
+ columnName: 'id',
+ schema: '',
+ },
+ {
+ type: 'alter_table_alter_column_drop_autoincrement',
+ tableName: 'user',
+ columnName: 'id',
+ schema: '',
+ newDataType: 'int',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ } as unknown as JsonStatement,
+ {
+ type: 'alter_table_alter_column_drop_notnull',
+ tableName: 'user',
+ columnName: 'id',
+ schema: '',
+ newDataType: 'int',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: false,
+ columnAutoIncrement: false,
+ columnPk: false,
+ } as unknown as JsonStatement,
+ {
+ type: 'alter_table_alter_column_set_notnull',
+ tableName: 'user',
+ columnName: 'iq',
+ schema: '',
+ newDataType: 'int',
+ columnDefault: undefined,
+ columnOnUpdate: undefined,
+ columnNotNull: true,
+ columnAutoIncrement: false,
+ columnPk: false,
+ } as unknown as JsonStatement,
+ {
+ type: 'alter_table_drop_column',
+ tableName: 'ref',
+ columnName: 'text',
+ schema: '',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: true,
+ },
+ user_iq: {
+ name: 'user_iq',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ name: {
+ name: 'name',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ age: {
+ name: 'age',
+ type: 'int',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_user_iq_user_iq_fk: 'ref_user_iq_user_iq_fk;ref;user_iq;user;iq;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: true,
+ },
+ first_name: {
+ name: 'first_name',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ iq: {
+ name: 'iq',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ user_iq: {
+ name: 'user_iq',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ age1: {
+ name: 'age1',
+ type: 'int',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_user_iq_user_iq_fk: 'ref_user_iq_user_iq_fk;ref;user_iq;user;iq;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id: {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ first_name: {
+ name: 'first_name',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ iq: {
+ name: 'iq',
+ type: 'int',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements: JsonStatement[] = [
+ {
+ type: 'alter_table_rename_column',
+ tableName: 'ref',
+ oldColumnName: 'age',
+ newColumnName: 'age1',
+ schema: '',
+ },
+ {
+ type: 'alter_table_drop_column',
+ tableName: 'ref',
+ columnName: 'text',
+ schema: '',
+ },
+ {
+ type: 'recreate_table',
+ tableName: 'user',
+ columns: [
+ {
+ name: 'id',
+ type: 'int',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ {
+ name: 'first_name',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ {
+ name: 'iq',
+ type: 'int',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [],
+ uniqueConstraints: [],
+ },
+ ];
+
+ expect(sqliteCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+test(`create reference on existing column (table includes unique index). expect to recreate column and recreate index`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'create_reference',
+ tableName: 'unique',
+ data: 'unique_ref_pk_pk_pk_fk;unique;ref_pk;pk;pk;no action;no action',
+ schema: '',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ pk: {
+ name: 'pk',
+ columns: {
+ pk: {
+ name: 'pk',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ unique: {
+ name: 'unique',
+ columns: {
+ unique: {
+ name: 'unique',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ ref_pk: {
+ name: 'ref_pk',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {
+ unique_unique_unique: 'unique_unique_unique;unique;true;',
+ },
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ pk: {
+ name: 'pk',
+ columns: {
+ pk: {
+ name: 'pk',
+ type: 'int',
+ primaryKey: true,
+ notNull: true,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ unique: {
+ name: 'unique',
+ columns: {
+ unique: {
+ name: 'unique',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ ref_pk: {
+ name: 'ref_pk',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {
+ unique_unique_unique: 'unique_unique_unique;unique;true;',
+ },
+ foreignKeys: {
+ unique_ref_pk_pk_pk_fk: 'unique_ref_pk_pk_pk_fk;unique;ref_pk;pk;pk;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements: JsonStatement[] = [
+ {
+ type: 'recreate_table',
+ tableName: 'unique',
+ columns: [
+ {
+ name: 'unique',
+ type: 'text',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ {
+ name: 'ref_pk',
+ type: 'int',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ ],
+ compositePKs: [],
+ referenceData: [
+ {
+ name: 'unique_ref_pk_pk_pk_fk',
+ tableFrom: 'unique',
+ tableTo: 'pk',
+ columnsFrom: ['ref_pk'],
+ columnsTo: ['pk'],
+ onDelete: 'no action',
+ onUpdate: 'no action',
+ },
+ ],
+ uniqueConstraints: [],
+ },
+ {
+ data: 'unique_unique_unique;unique;true;',
+ internal: undefined,
+ schema: '',
+ tableName: 'unique',
+ type: 'create_index',
+ },
+ ];
+
+ expect(sqliteCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+test(`add columns. set fk`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ },
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: undefined,
+ },
+ {
+ type: 'create_reference',
+ tableName: 'ref',
+ data: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action',
+ schema: '',
+ columnNotNull: false,
+ columnDefault: undefined,
+ columnType: 'integer',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test: {
+ name: 'test',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test1: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ columns: [
+ {
+ autoincrement: false,
+ name: 'id1',
+ notNull: true,
+ primaryKey: false,
+ type: 'text',
+ },
+ {
+ autoincrement: false,
+ name: 'new_age',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'test',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ {
+ autoincrement: false,
+ name: 'test1',
+ notNull: false,
+ primaryKey: false,
+ type: 'integer',
+ },
+ ],
+ compositePKs: [],
+ referenceData: [
+ {
+ columnsFrom: [
+ 'new_age',
+ ],
+ columnsTo: [
+ 'new_age',
+ ],
+ name: 'ref_new_age_user_new_age_fk',
+ onDelete: 'no action',
+ onUpdate: 'no action',
+ tableFrom: 'ref',
+ tableTo: 'user',
+ },
+ ],
+ tableName: 'ref',
+ type: 'recreate_table',
+ uniqueConstraints: [],
+ },
+ ];
+ expect(sqliteCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
+
+test(`add column and fk`, async (t) => {
+ const statements: JsonStatement[] = [
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ },
+ {
+ type: 'create_reference',
+ tableName: 'ref',
+ data: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ schema: '',
+ columnNotNull: false,
+ columnDefault: undefined,
+ columnType: 'integer',
+ },
+ ];
+ const json1: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test1: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_test1_user_new_age_fk: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+ const json2: SQLiteSchemaSquashed = {
+ version: '6',
+ dialect: 'sqlite',
+ tables: {
+ ref: {
+ name: 'ref',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test: {
+ name: 'test',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ test1: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {
+ ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action',
+ },
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ user: {
+ name: 'user',
+ columns: {
+ id1: {
+ name: 'id1',
+ type: 'text',
+ primaryKey: false,
+ notNull: true,
+ autoincrement: false,
+ },
+ new_age: {
+ name: 'new_age',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ },
+ indexes: {},
+ foreignKeys: {},
+ compositePrimaryKeys: {},
+ uniqueConstraints: {},
+ },
+ },
+ enums: {},
+ };
+
+ const newJsonStatements = [
+ {
+ type: 'sqlite_alter_table_add_column',
+ tableName: 'ref',
+ column: {
+ name: 'test1',
+ type: 'integer',
+ primaryKey: false,
+ notNull: false,
+ autoincrement: false,
+ },
+ referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action',
+ },
+ ];
+ expect(sqliteCombineStatements(statements, json2)).toStrictEqual(
+ newJsonStatements,
+ );
+});
diff --git a/drizzle-orm/package.json b/drizzle-orm/package.json
index 888f7efcb..829441886 100644
--- a/drizzle-orm/package.json
+++ b/drizzle-orm/package.json
@@ -1,6 +1,6 @@
{
"name": "drizzle-orm",
- "version": "0.33.0",
+ "version": "0.34.1",
"description": "Drizzle ORM package for SQL databases",
"type": "module",
"scripts": {
@@ -46,7 +46,7 @@
"@aws-sdk/client-rds-data": ">=3",
"@cloudflare/workers-types": ">=3",
"@electric-sql/pglite": ">=0.1.1",
- "@libsql/client": "*",
+ "@libsql/client": ">=0.10.0",
"@neondatabase/serverless": ">=0.1",
"@op-engineering/op-sqlite": ">=2",
"@opentelemetry/api": "^1.4.1",
@@ -161,7 +161,8 @@
"@aws-sdk/client-rds-data": "^3.549.0",
"@cloudflare/workers-types": "^4.20230904.0",
"@electric-sql/pglite": "^0.1.1",
- "@libsql/client": "^0.5.6",
+ "@libsql/client": "^0.10.0",
+ "@miniflare/d1": "^2.14.2",
"@neondatabase/serverless": "^0.9.0",
"@op-engineering/op-sqlite": "^2.0.16",
"@opentelemetry/api": "^1.4.1",
diff --git a/drizzle-orm/scripts/fix-imports.ts b/drizzle-orm/scripts/fix-imports.ts
index f2035eeda..6fc63a48f 100755
--- a/drizzle-orm/scripts/fix-imports.ts
+++ b/drizzle-orm/scripts/fix-imports.ts
@@ -54,6 +54,12 @@ await Promise.all(cjsFiles.map(async (file) => {
path.value.argument.value = resolvePathAlias(path.value.argument.value, file);
this.traverse(path);
},
+ visitAwaitExpression(path) {
+ if (print(path.value).code.startsWith(`await import("./`)) {
+ path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.cjs');
+ }
+ this.traverse(path);
+ },
});
await fs.writeFile(file, print(code).code);
@@ -83,6 +89,12 @@ await Promise.all(esmFiles.map(async (file) => {
path.value.argument.value = fixImportPath(path.value.argument.value, file, '.js');
this.traverse(path);
},
+ visitAwaitExpression(path) {
+ if (print(path.value).code.startsWith(`await import("./`)) {
+ path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.js');
+ }
+ this.traverse(path);
+ },
});
await fs.writeFile(file, print(code).code);
diff --git a/drizzle-orm/src/aws-data-api/pg/driver.ts b/drizzle-orm/src/aws-data-api/pg/driver.ts
index 5174c24d0..479cc32fe 100644
--- a/drizzle-orm/src/aws-data-api/pg/driver.ts
+++ b/drizzle-orm/src/aws-data-api/pg/driver.ts
@@ -1,6 +1,4 @@
import { entityKind, is } from '~/entity.ts';
-import type { SQL, SQLWrapper } from '~/index.ts';
-import { Param, sql, Table } from '~/index.ts';
import type { Logger } from '~/logger.ts';
import { DefaultLogger } from '~/logger.ts';
import { PgDatabase } from '~/pg-core/db.ts';
@@ -14,6 +12,8 @@ import {
type RelationalSchemaConfig,
type TablesRelationalConfig,
} from '~/relations.ts';
+import { Param, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts';
+import { Table } from '~/table.ts';
import type { DrizzleConfig, UpdateSet } from '~/utils.ts';
import type { AwsDataApiClient, AwsDataApiPgQueryResult, AwsDataApiPgQueryResultHKT } from './session.ts';
import { AwsDataApiSession } from './session.ts';
@@ -40,7 +40,7 @@ export class AwsDataApiPgDatabase<
override execute<
TRow extends Record<string, unknown> = Record<string, unknown>,
- >(query: SQLWrapper): PgRaw<AwsDataApiPgQueryResult<TRow>> {
+ >(query: SQLWrapper | string): PgRaw<AwsDataApiPgQueryResult<TRow>> {
return super.execute(query);
}
}
@@ -90,8 +90,10 @@ export class AwsPgDialect extends PgDialect {
export function drizzle<TSchema extends Record<string, unknown> = Record<string, never>>(
 client: AwsDataApiClient,
 config: DrizzleAwsDataApiPgConfig<TSchema>,
-): AwsDataApiPgDatabase<TSchema> {
- const dialect = new AwsPgDialect();
+): AwsDataApiPgDatabase<TSchema> & {
+ $client: AwsDataApiClient;
+} {
+ const dialect = new AwsPgDialect({ casing: config.casing });
let logger;
if (config.logger === true) {
logger = new DefaultLogger();
@@ -113,5 +115,8 @@ export function drizzle = Record;
+ const db = new AwsDataApiPgDatabase(dialect, session, schema as any);
+ (<any> db).$client = client;
+
+ return db as any;
}
diff --git a/drizzle-orm/src/aws-data-api/pg/migrator.ts b/drizzle-orm/src/aws-data-api/pg/migrator.ts
index 2afa79412..c58ba7ab1 100644
--- a/drizzle-orm/src/aws-data-api/pg/migrator.ts
+++ b/drizzle-orm/src/aws-data-api/pg/migrator.ts
@@ -4,7 +4,7 @@ import type { AwsDataApiPgDatabase } from './driver.ts';
export async function migrate>(
db: AwsDataApiPgDatabase,
- config: string | MigrationConfig,
+ config: MigrationConfig,
) {
const migrations = readMigrationFiles(config);
await db.dialect.migrate(migrations, db.session, config);
diff --git a/drizzle-orm/src/better-sqlite3/driver.ts b/drizzle-orm/src/better-sqlite3/driver.ts
index 728586e57..50660e4d6 100644
--- a/drizzle-orm/src/better-sqlite3/driver.ts
+++ b/drizzle-orm/src/better-sqlite3/driver.ts
@@ -1,4 +1,5 @@
import type { Database, RunResult } from 'better-sqlite3';
+import { entityKind } from '~/entity.ts';
import { DefaultLogger } from '~/logger.ts';
import {
createTableRelationsHelpers,
@@ -11,15 +12,19 @@ import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts';
import type { DrizzleConfig } from '~/utils.ts';
import { BetterSQLiteSession } from './session.ts';
-export type BetterSQLite3Database<
- TSchema extends Record = Record,
-> = BaseSQLiteDatabase<'sync', RunResult, TSchema>;
+export class BetterSQLite3Database<TSchema extends Record<string, unknown> = Record<string, never>>
+ extends BaseSQLiteDatabase<'sync', RunResult, TSchema>
+{
+ static readonly [entityKind]: string = 'BetterSQLite3Database';
+}
export function drizzle<TSchema extends Record<string, unknown> = Record<string, never>>(
 client: Database,
 config: DrizzleConfig<TSchema> = {},
-): BetterSQLite3Database<TSchema> {
- const dialect = new SQLiteSyncDialect();
+): BetterSQLite3Database<TSchema> & {
+ $client: Database;
+} {
+ const dialect = new SQLiteSyncDialect({ casing: config.casing });
let logger;
if (config.logger === true) {
logger = new DefaultLogger();
@@ -41,5 +46,8 @@ export function drizzle = Record;
+ const db = new BetterSQLite3Database('sync', dialect, session, schema);
+ (<any> db).$client = client;
+
+ return db as any;
}
diff --git a/drizzle-orm/src/better-sqlite3/migrator.ts b/drizzle-orm/src/better-sqlite3/migrator.ts
index 1cbd2fe56..cea198257 100644
--- a/drizzle-orm/src/better-sqlite3/migrator.ts
+++ b/drizzle-orm/src/better-sqlite3/migrator.ts
@@ -4,7 +4,7 @@ import type { BetterSQLite3Database } from './driver.ts';
export function migrate>(
db: BetterSQLite3Database,
- config: string | MigrationConfig,
+ config: MigrationConfig,
) {
const migrations = readMigrationFiles(config);
db.dialect.migrate(migrations, db.session, config);
diff --git a/drizzle-orm/src/bun-sqlite/driver.ts b/drizzle-orm/src/bun-sqlite/driver.ts
index 0d196ff03..abcc09224 100644
--- a/drizzle-orm/src/bun-sqlite/driver.ts
+++ b/drizzle-orm/src/bun-sqlite/driver.ts
@@ -1,6 +1,7 @@
///
import type { Database } from 'bun:sqlite';
+import { entityKind } from '~/entity.ts';
import { DefaultLogger } from '~/logger.ts';
import {
createTableRelationsHelpers,
@@ -13,15 +14,19 @@ import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts';
import type { DrizzleConfig } from '~/utils.ts';
import { SQLiteBunSession } from './session.ts';
-export type BunSQLiteDatabase<
+export class BunSQLiteDatabase<
TSchema extends Record<string, unknown> = Record<string, never>,
-> = BaseSQLiteDatabase<'sync', void, TSchema>;
+> extends BaseSQLiteDatabase<'sync', void, TSchema> {
+ static readonly [entityKind]: string = 'BunSQLiteDatabase';
+}
export function drizzle = Record>(
client: Database,
config: DrizzleConfig = {},
-): BunSQLiteDatabase {
- const dialect = new SQLiteSyncDialect();
+): BunSQLiteDatabase & {
+ $client: Database;
+} {
+ const dialect = new SQLiteSyncDialect({ casing: config.casing });
let logger;
if (config.logger === true) {
logger = new DefaultLogger();
@@ -43,5 +48,8 @@ export function drizzle = Record;
+ const db = new BunSQLiteDatabase('sync', dialect, session, schema) as BunSQLiteDatabase;
+ (<any> db).$client = client;
+
+ return db as any;
}
diff --git a/drizzle-orm/src/bun-sqlite/migrator.ts b/drizzle-orm/src/bun-sqlite/migrator.ts
index ee248fd3e..785dabae9 100644
--- a/drizzle-orm/src/bun-sqlite/migrator.ts
+++ b/drizzle-orm/src/bun-sqlite/migrator.ts
@@ -4,7 +4,7 @@ import type { BunSQLiteDatabase } from './driver.ts';
export function migrate>(
db: BunSQLiteDatabase,
- config: string | MigrationConfig,
+ config: MigrationConfig,
) {
const migrations = readMigrationFiles(config);
db.dialect.migrate(migrations, db.session, config);
diff --git a/drizzle-orm/src/casing.ts b/drizzle-orm/src/casing.ts
new file mode 100644
index 000000000..8372227b3
--- /dev/null
+++ b/drizzle-orm/src/casing.ts
@@ -0,0 +1,76 @@
+import type { Column } from '~/column.ts';
+import { entityKind } from './entity.ts';
+import { Table } from './table.ts';
+import type { Casing } from './utils.ts';
+
+export function toSnakeCase(input: string) {
+ const words = input
+ .replace(/['\u2019]/g, '')
+ .match(/[\da-z]+|[A-Z]+(?![a-z])|[A-Z][\da-z]+/g) ?? [];
+
+ return words.map((word) => word.toLowerCase()).join('_');
+}
+
+export function toCamelCase(input: string) {
+ const words = input
+ .replace(/['\u2019]/g, '')
+ .match(/[\da-z]+|[A-Z]+(?![a-z])|[A-Z][\da-z]+/g) ?? [];
+
+ return words.reduce((acc, word, i) => {
+ const formattedWord = i === 0 ? word.toLowerCase() : `${word[0]!.toUpperCase()}${word.slice(1)}`;
+ return acc + formattedWord;
+ }, '');
+}
+
+function noopCase(input: string) {
+ return input;
+}
+
+export class CasingCache {
+ static readonly [entityKind]: string = 'CasingCache';
+
+ /** @internal */
+ cache: Record<string, string> = {};
+ private cachedTables: Record<string, true> = {};
+ private convert: (input: string) => string;
+
+ constructor(casing?: Casing) {
+ this.convert = casing === 'snake_case'
+ ? toSnakeCase
+ : casing === 'camelCase'
+ ? toCamelCase
+ : noopCase;
+ }
+
+ getColumnCasing(column: Column): string {
+ if (!column.keyAsName) return column.name;
+
+ const schema = column.table[Table.Symbol.Schema] ?? 'public';
+ const tableName = column.table[Table.Symbol.OriginalName];
+ const key = `${schema}.${tableName}.${column.name}`;
+
+ if (!this.cache[key]) {
+ this.cacheTable(column.table);
+ }
+ return this.cache[key]!;
+ }
+
+ private cacheTable(table: Table) {
+ const schema = table[Table.Symbol.Schema] ?? 'public';
+ const tableName = table[Table.Symbol.OriginalName];
+ const tableKey = `${schema}.${tableName}`;
+
+ if (!this.cachedTables[tableKey]) {
+ for (const column of Object.values(table[Table.Symbol.Columns])) {
+ const columnKey = `${tableKey}.${column.name}`;
+ this.cache[columnKey] = this.convert(column.name);
+ }
+ this.cachedTables[tableKey] = true;
+ }
+ }
+
+ clearCache() {
+ this.cache = {};
+ this.cachedTables = {};
+ }
+}
diff --git a/drizzle-orm/src/column-builder.ts b/drizzle-orm/src/column-builder.ts
index ad278e29d..13d9d363f 100644
--- a/drizzle-orm/src/column-builder.ts
+++ b/drizzle-orm/src/column-builder.ts
@@ -5,7 +5,7 @@ import type { ExtraConfigColumn, PgColumn, PgSequenceOptions } from './pg-core/i
import type { SingleStoreColumn } from './singlestore-core/index.ts';
import type { SQL } from './sql/sql.ts';
import type { SQLiteColumn } from './sqlite-core/index.ts';
-import type { Simplify } from './utils.ts';
+import type { Assume, Simplify } from './utils.ts';
export type ColumnDataType =
| 'string'
@@ -90,6 +90,7 @@ export type ColumnBuilderTypeConfig<
export type ColumnBuilderRuntimeConfig = {
name: string;
+ keyAsName: boolean;
notNull: boolean;
default: TData | SQL | undefined;
defaultFn: (() => TData | SQL) | undefined;
@@ -186,6 +187,7 @@ export abstract class ColumnBuilder<
constructor(name: T['name'], dataType: T['dataType'], columnType: T['columnType']) {
this.config = {
name,
+ keyAsName: name === '',
notNull: false,
default: undefined,
hasDefault: false,
@@ -294,6 +296,12 @@ export abstract class ColumnBuilder<
as: SQL | T['data'] | (() => SQL),
config?: Partial>,
): HasGenerated;
+
+ /** @internal Sets the name of the column to the key within the table definition if a name was not given. */
+ setName(name: string) {
+ if (this.config.name !== '') return;
+ this.config.name = name;
+ }
}
export type BuildColumn<
@@ -324,7 +332,11 @@ export type BuildColumns<
TDialect extends Dialect,
> =
& {
- [Key in keyof TConfigMap]: BuildColumn;
+ [Key in keyof TConfigMap]: BuildColumn
+ & { name: TConfigMap[Key]['_']['name'] extends '' ? Assume : TConfigMap[Key]['_']['name'] };
+ }, TDialect>;
}
& {};
diff --git a/drizzle-orm/src/column.ts b/drizzle-orm/src/column.ts
index e740acaa0..79ba17f12 100644
--- a/drizzle-orm/src/column.ts
+++ b/drizzle-orm/src/column.ts
@@ -69,6 +69,7 @@ export abstract class Column<
declare readonly _: ColumnTypeConfig;
readonly name: string;
+ readonly keyAsName: boolean;
readonly primary: boolean;
readonly notNull: boolean;
readonly default: T['data'] | SQL | undefined;
@@ -92,6 +93,7 @@ export abstract class Column<
) {
this.config = config;
this.name = config.name;
+ this.keyAsName = config.keyAsName;
this.notNull = config.notNull;
this.default = config.default;
this.defaultFn = config.defaultFn;
diff --git a/drizzle-orm/src/connect.ts b/drizzle-orm/src/connect.ts
new file mode 100644
index 000000000..6e26b2922
--- /dev/null
+++ b/drizzle-orm/src/connect.ts
@@ -0,0 +1,2 @@
+export * from './monodriver.ts';
+export * from './monomigrator.ts';
diff --git a/drizzle-orm/src/d1/driver.ts b/drizzle-orm/src/d1/driver.ts
index 46fc8ec8e..6ec8a5294 100644
--- a/drizzle-orm/src/d1/driver.ts
+++ b/drizzle-orm/src/d1/driver.ts
@@ -1,4 +1,5 @@
///
+import type { D1Database as MiniflareD1Database } from '@miniflare/d1';
import type { BatchItem, BatchResponse } from '~/batch.ts';
import { entityKind } from '~/entity.ts';
import { DefaultLogger } from '~/logger.ts';
@@ -11,9 +12,15 @@ import {
} from '~/relations.ts';
import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts';
import { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts';
-import type { DrizzleConfig } from '~/utils.ts';
+import type { DrizzleConfig, IfNotImported } from '~/utils.ts';
import { SQLiteD1Session } from './session.ts';
+export type AnyD1Database = IfNotImported<
+ D1Database,
+ MiniflareD1Database,
+ D1Database | IfNotImported
+>;
+
export class DrizzleD1Database<
TSchema extends Record = Record,
> extends BaseSQLiteDatabase<'async', D1Result, TSchema> {
@@ -29,11 +36,16 @@ export class DrizzleD1Database<
}
}
-export function drizzle = Record>(
- client: D1Database,
+export function drizzle<
+ TSchema extends Record = Record,
+ TClient extends AnyD1Database = AnyD1Database,
+>(
+ client: TClient,
config: DrizzleConfig = {},
-): DrizzleD1Database {
- const dialect = new SQLiteAsyncDialect();
+): DrizzleD1Database & {
+ $client: TClient;
+} {
+ const dialect = new SQLiteAsyncDialect({ casing: config.casing });
let logger;
if (config.logger === true) {
logger = new DefaultLogger();
@@ -54,6 +66,9 @@ export function drizzle = Record;
+ const session = new SQLiteD1Session(client as D1Database, dialect, schema, { logger });
+ const db = new DrizzleD1Database('async', dialect, session, schema) as DrizzleD1Database;
+ (<any> db).$client = client;
+
+ return db as any;
}
diff --git a/drizzle-orm/src/d1/migrator.ts b/drizzle-orm/src/d1/migrator.ts
index 9a137136d..2259516bf 100644
--- a/drizzle-orm/src/d1/migrator.ts
+++ b/drizzle-orm/src/d1/migrator.ts
@@ -5,14 +5,10 @@ import type { DrizzleD1Database } from './driver.ts';
export async function migrate>(
db: DrizzleD1Database,
- config: string | MigrationConfig,
+ config: MigrationConfig,
) {
const migrations = readMigrationFiles(config);
- const migrationsTable = config === undefined
- ? '__drizzle_migrations'
- : typeof config === 'string'
- ? '__drizzle_migrations'
- : config.migrationsTable ?? '__drizzle_migrations';
+ const migrationsTable = config.migrationsTable ?? '__drizzle_migrations';
const migrationTableCreate = sql`
CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} (
diff --git a/drizzle-orm/src/expo-sqlite/driver.ts b/drizzle-orm/src/expo-sqlite/driver.ts
index ae8ce6577..d9cf47b01 100644
--- a/drizzle-orm/src/expo-sqlite/driver.ts
+++ b/drizzle-orm/src/expo-sqlite/driver.ts
@@ -1,4 +1,5 @@
import type { SQLiteDatabase, SQLiteRunResult } from 'expo-sqlite/next';
+import { entityKind } from '~/entity.ts';
import { DefaultLogger } from '~/logger.ts';
import {
createTableRelationsHelpers,
@@ -11,15 +12,19 @@ import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts';
import type { DrizzleConfig } from '~/utils.ts';
import { ExpoSQLiteSession } from './session.ts';
-export type ExpoSQLiteDatabase<
- TSchema extends Record = Record,
-> = BaseSQLiteDatabase<'sync', SQLiteRunResult, TSchema>;
+export class ExpoSQLiteDatabase<TSchema extends Record<string, unknown> = Record<string, never>>
+ extends BaseSQLiteDatabase<'sync', SQLiteRunResult, TSchema>
+{
+ static readonly [entityKind]: string = 'ExpoSQLiteDatabase';
+}
export function drizzle = Record>(
client: SQLiteDatabase,
config: DrizzleConfig = {},
-): ExpoSQLiteDatabase {
- const dialect = new SQLiteSyncDialect();
+): ExpoSQLiteDatabase & {
+ $client: SQLiteDatabase;
+} {
+ const dialect = new SQLiteSyncDialect({ casing: config.casing });
let logger;
if (config.logger === true) {
logger = new DefaultLogger();
@@ -41,5 +46,8 @@ export function drizzle = Record;
+ const db = new ExpoSQLiteDatabase('sync', dialect, session, schema) as ExpoSQLiteDatabase;
+ (<any> db).$client = client;
+
+ return db as any;
}
diff --git a/drizzle-orm/src/expo-sqlite/query.ts b/drizzle-orm/src/expo-sqlite/query.ts
index db467ce2c..8d9c5e4d9 100644
--- a/drizzle-orm/src/expo-sqlite/query.ts
+++ b/drizzle-orm/src/expo-sqlite/query.ts
@@ -1,9 +1,11 @@
import { addDatabaseChangeListener } from 'expo-sqlite/next';
import { useEffect, useState } from 'react';
-import { is, SQL, Subquery } from '~/index.ts';
+import { is } from '~/entity.ts';
+import { SQL } from '~/sql/sql.ts';
import type { AnySQLiteSelect } from '~/sqlite-core/index.ts';
import { getTableConfig, getViewConfig, SQLiteTable, SQLiteView } from '~/sqlite-core/index.ts';
import { SQLiteRelationalQuery } from '~/sqlite-core/query-builders/query.ts';
+import { Subquery } from '~/subquery.ts';
export const useLiveQuery = | SQLiteRelationalQuery<'sync', unknown>>(
query: T,
diff --git a/drizzle-orm/src/libsql/driver.ts b/drizzle-orm/src/libsql/driver.ts
index 3acff2893..1e87e7555 100644
--- a/drizzle-orm/src/libsql/driver.ts
+++ b/drizzle-orm/src/libsql/driver.ts
@@ -31,8 +31,10 @@ export class LibSQLDatabase<
export function drizzle<
TSchema extends Record<string, unknown> = Record<string, never>,
->(client: Client, config: DrizzleConfig = {}): LibSQLDatabase {
- const dialect = new SQLiteAsyncDialect();
+>(client: Client, config: DrizzleConfig = {}): LibSQLDatabase & {
+ $client: Client;
+} {
+ const dialect = new SQLiteAsyncDialect({ casing: config.casing });
let logger;
if (config.logger === true) {
logger = new DefaultLogger();
@@ -54,5 +56,8 @@ export function drizzle<
}
const session = new LibSQLSession(client, dialect, schema, { logger }, undefined);
- return new LibSQLDatabase('async', dialect, session, schema) as LibSQLDatabase;
+ const db = new LibSQLDatabase('async', dialect, session, schema) as LibSQLDatabase;
+ (<any> db).$client = client;
+
+ return db as any;
}
diff --git a/drizzle-orm/src/libsql/migrator.ts b/drizzle-orm/src/libsql/migrator.ts
index 58bcc9e05..373a8aab4 100644
--- a/drizzle-orm/src/libsql/migrator.ts
+++ b/drizzle-orm/src/libsql/migrator.ts
@@ -8,11 +8,7 @@ export async function migrate>(
config: MigrationConfig,
) {
const migrations = readMigrationFiles(config);
- const migrationsTable = config === undefined
- ? '__drizzle_migrations'
- : typeof config === 'string'
- ? '__drizzle_migrations'
- : config.migrationsTable ?? '__drizzle_migrations';
+ const migrationsTable = config.migrationsTable ?? '__drizzle_migrations';
const migrationTableCreate = sql`
CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} (
@@ -47,5 +43,5 @@ export async function migrate>(
}
}
- await db.session.batch(statementToBatch);
+ await db.session.migrate(statementToBatch);
}
diff --git a/drizzle-orm/src/libsql/session.ts b/drizzle-orm/src/libsql/session.ts
index 29e4e268f..640977734 100644
--- a/drizzle-orm/src/libsql/session.ts
+++ b/drizzle-orm/src/libsql/session.ts
@@ -76,6 +76,21 @@ export class LibSQLSession<
return batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true));
}
+ async migrate<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T) {
+ const preparedQueries: PreparedQuery[] = [];
+ const builtQueries: InStatement[] = [];
+
+ for (const query of queries) {
+ const preparedQuery = query._prepare();
+ const builtQuery = preparedQuery.getQuery();
+ preparedQueries.push(preparedQuery);
+ builtQueries.push({ sql: builtQuery.sql, args: builtQuery.params as InArgs });
+ }
+
+ const batchResults = await this.client.migrate(builtQueries);
+ return batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true));
+ }
+
override async transaction(
transaction: (db: LibSQLTransaction) => T | Promise,
_config?: SQLiteTransactionConfig,
diff --git a/drizzle-orm/src/migrator.ts b/drizzle-orm/src/migrator.ts
index 946f3269d..8b7636a44 100644
--- a/drizzle-orm/src/migrator.ts
+++ b/drizzle-orm/src/migrator.ts
@@ -1,6 +1,5 @@
import crypto from 'node:crypto';
import fs from 'node:fs';
-import path from 'node:path';
export interface KitConfig {
out: string;
@@ -20,19 +19,8 @@ export interface MigrationMeta {
bps: boolean;
}
-export function readMigrationFiles(config: string | MigrationConfig): MigrationMeta[] {
- let migrationFolderTo: string | undefined;
- if (typeof config === 'string') {
- const configAsString = fs.readFileSync(path.resolve('.', config), 'utf8');
- const jsonConfig = JSON.parse(configAsString) as KitConfig;
- migrationFolderTo = jsonConfig.out;
- } else {
- migrationFolderTo = config.migrationsFolder;
- }
-
- if (!migrationFolderTo) {
- throw new Error('no migration folder defined');
- }
+export function readMigrationFiles(config: MigrationConfig): MigrationMeta[] {
+ const migrationFolderTo = config.migrationsFolder;
const migrationQueries: MigrationMeta[] = [];
diff --git a/drizzle-orm/src/monodriver.ts b/drizzle-orm/src/monodriver.ts
new file mode 100644
index 000000000..9af80db06
--- /dev/null
+++ b/drizzle-orm/src/monodriver.ts
@@ -0,0 +1,659 @@
+/* eslint-disable import/extensions */
+import type { RDSDataClient, RDSDataClientConfig as RDSConfig } from '@aws-sdk/client-rds-data';
+import type { PGlite, PGliteOptions } from '@electric-sql/pglite';
+import type { Client as LibsqlClient, Config as LibsqlConfig } from '@libsql/client';
+import type {
+ HTTPTransactionOptions as NeonHttpConfig,
+ NeonQueryFunction,
+ Pool as NeonServerlessPool,
+ PoolConfig as NeonServerlessConfig,
+ QueryResult,
+ QueryResultRow,
+} from '@neondatabase/serverless';
+import type { Client as PlanetscaleClient, Config as PlanetscaleConfig } from '@planetscale/database';
+import type { Config as TiDBServerlessConfig, Connection as TiDBConnection } from '@tidbcloud/serverless';
+import type { VercelPool } from '@vercel/postgres';
+import type { Database as BetterSQLite3Database, Options as BetterSQLite3Options } from 'better-sqlite3';
+import type { Database as BunDatabase } from 'bun:sqlite';
+import type { Pool as Mysql2Pool, PoolOptions as Mysql2Config } from 'mysql2';
+import type { Pool as NodePgPool, PoolConfig as NodePgPoolConfig } from 'pg';
+import type {
+ Options as PostgresJSOptions,
+ PostgresType as PostgresJSPostgresType,
+ Sql as PostgresJsClient,
+} from 'postgres';
+import type { AwsDataApiPgDatabase, DrizzleAwsDataApiPgConfig } from './aws-data-api/pg/index.ts';
+import type { BetterSQLite3Database as DrizzleBetterSQLite3Database } from './better-sqlite3/index.ts';
+import type { BunSQLiteDatabase } from './bun-sqlite/index.ts';
+import type { AnyD1Database, DrizzleD1Database } from './d1/index.ts';
+import type { LibSQLDatabase } from './libsql/index.ts';
+import type { MySql2Database, MySql2DrizzleConfig } from './mysql2/index.ts';
+import type { NeonHttpDatabase } from './neon-http/index.ts';
+import type { NeonDatabase } from './neon-serverless/index.ts';
+import type { NodePgDatabase } from './node-postgres/driver.ts';
+import type { PgliteDatabase } from './pglite/driver.ts';
+import type { PlanetScaleDatabase } from './planetscale-serverless/index.ts';
+import type { PostgresJsDatabase } from './postgres-js/index.ts';
+import type { TiDBServerlessDatabase } from './tidb-serverless/index.ts';
+import type { DrizzleConfig, IfNotImported } from './utils.ts';
+import type { VercelPgDatabase } from './vercel-postgres/index.ts';
+
+type BunSqliteDatabaseOptions = {
+ /**
+ * Open the database as read-only (no write operations, no create).
+ *
+ * Equivalent to {@link constants.SQLITE_OPEN_READONLY}
+ */
+ readonly?: boolean;
+ /**
+ * Allow creating a new database
+ *
+ * Equivalent to {@link constants.SQLITE_OPEN_CREATE}
+ */
+ create?: boolean;
+ /**
+ * Open the database as read-write
+ *
+ * Equivalent to {@link constants.SQLITE_OPEN_READWRITE}
+ */
+ readwrite?: boolean;
+};
+
+type BunSqliteDatabaseConfig =
+ | ({
+ source?: string;
+ } & BunSqliteDatabaseOptions)
+ | string
+ | undefined;
+
+type BetterSQLite3DatabaseConfig =
+ | ({
+ source?:
+ | string
+ | Buffer;
+ } & BetterSQLite3Options)
+ | string
+ | undefined;
+
+type MonodriverNeonHttpConfig =
+ | ({
+ connectionString: string;
+ } & NeonHttpConfig)
+ | string;
+
+type AwsDataApiConnectionConfig = RDSConfig & Omit;
+
+type DatabaseClient =
+ | 'node-postgres'
+ | 'postgres-js'
+ | 'neon-websocket'
+ | 'neon-http'
+ | 'vercel-postgres'
+ | 'aws-data-api-pg'
+ | 'planetscale'
+ | 'mysql2'
+ | 'tidb-serverless'
+ | 'libsql'
+ | 'turso'
+ | 'd1'
+ | 'bun:sqlite'
+ | 'better-sqlite3'
+ | 'pglite';
+
+type ClientDrizzleInstanceMap<TSchema extends Record<string, any>> = {
+ 'node-postgres': NodePgDatabase;
+ 'postgres-js': PostgresJsDatabase;
+ 'neon-websocket': NeonDatabase;
+ 'neon-http': NeonHttpDatabase;
+ 'vercel-postgres': VercelPgDatabase;
+ 'aws-data-api-pg': AwsDataApiPgDatabase;
+ planetscale: PlanetScaleDatabase;
+ mysql2: MySql2Database;
+ 'tidb-serverless': TiDBServerlessDatabase;
+ libsql: LibSQLDatabase;
+ turso: LibSQLDatabase;
+ d1: DrizzleD1Database;
+ 'bun:sqlite': BunSQLiteDatabase;
+ 'better-sqlite3': DrizzleBetterSQLite3Database;
+ pglite: PgliteDatabase;
+};
+
+type Primitive = string | number | boolean | undefined | null;
+
+type ClientInstanceMap = {
+ 'node-postgres': NodePgPool;
+ 'postgres-js': PostgresJsClient;
+ 'neon-websocket': NeonServerlessPool;
+ 'neon-http': NeonQueryFunction;
+ 'vercel-postgres':
+ & VercelPool
+ & ((strings: TemplateStringsArray, ...values: Primitive[]) => Promise>);
+ 'aws-data-api-pg': RDSDataClient;
+ planetscale: PlanetscaleClient;
+ mysql2: Mysql2Pool;
+ 'tidb-serverless': TiDBConnection;
+ libsql: LibsqlClient;
+ turso: LibsqlClient;
+ d1: AnyD1Database;
+ 'bun:sqlite': BunDatabase;
+ 'better-sqlite3': BetterSQLite3Database;
+ pglite: PGlite;
+};
+
+type ClientTypeImportErrorMap = {
+ 'node-postgres': 'pg`, `@types/pg';
+ 'postgres-js': 'postgres';
+ 'neon-websocket': '@neondatabase/serverless';
+ 'neon-http': '@neondatabase/serverless';
+ 'vercel-postgres': '@vercel/postgres';
+ 'aws-data-api-pg': '@aws-sdk/client-rds-data';
+ planetscale: '@planetscale/database';
+ mysql2: 'mysql2';
+ 'tidb-serverless': '@tidbcloud/serverless';
+ libsql: '@libsql/client';
+ turso: '@libsql/client';
+ d1: '@cloudflare/workers-types` or `@miniflare/d1';
+ 'bun:sqlite': 'bun-types';
+ 'better-sqlite3': 'better-sqlite3';
+ pglite: '@electric-sql/pglite';
+};
+
+type ImportTypeError<TClient extends DatabaseClient> =
+ `Please install \`${ClientTypeImportErrorMap[TClient]}\`to allow Drizzle ORM to connect to the database`;
+
+type InitializerParams = {
+ 'node-postgres': {
+ connection: string | NodePgPoolConfig;
+ };
+ 'postgres-js': {
+ connection: string | ({ url?: string } & PostgresJSOptions>);
+ };
+ 'neon-websocket': {
+ connection: string | NeonServerlessConfig;
+ };
+ 'neon-http': {
+ connection: MonodriverNeonHttpConfig;
+ };
+ 'vercel-postgres': {};
+ 'aws-data-api-pg': {
+ connection: AwsDataApiConnectionConfig;
+ };
+ planetscale: {
+ connection: PlanetscaleConfig | string;
+ };
+ mysql2: {
+ connection: Mysql2Config | string;
+ };
+ 'tidb-serverless': {
+ connection: TiDBServerlessConfig | string;
+ };
+ libsql: {
+ connection: LibsqlConfig | string;
+ };
+ turso: {
+ connection: LibsqlConfig | string;
+ };
+ d1: {
+ connection: AnyD1Database;
+ };
+ 'bun:sqlite': {
+ connection?: BunSqliteDatabaseConfig;
+ };
+ 'better-sqlite3': {
+ connection?: BetterSQLite3DatabaseConfig;
+ };
+ pglite: {
+ connection?: (PGliteOptions & { dataDir?: string }) | string;
+ };
+};
+
+type DetermineClient<
+ TClient extends DatabaseClient,
+ TSchema extends Record,
+> =
+ & ClientDrizzleInstanceMap<
+ TSchema
+ >[TClient]
+ & {
+ $client: ClientInstanceMap[TClient];
+ };
+
+const importError = (libName: string) => {
+ throw new Error(
+ `Please install '${libName}' to allow Drizzle ORM to connect to the database`,
+ );
+};
+
+function assertUnreachable(_: never | undefined): never {
+ throw new Error("Didn't expect to get here");
+}
+
+export async function drizzle<
+ TClient extends DatabaseClient,
+ TSchema extends Record = Record,
+>(
+ client: TClient,
+ ...params: TClient extends 'bun:sqlite' | 'better-sqlite3' | 'pglite' ? (
+ [] | [
+ (
+ & IfNotImported<
+ ClientInstanceMap[TClient],
+ { connection: ImportTypeError },
+ InitializerParams[TClient]
+ >
+ & DrizzleConfig
+ ),
+ ] | [string]
+ )
+ : TClient extends 'vercel-postgres' ? ([] | [
+ (
+ & IfNotImported<
+ ClientInstanceMap[TClient],
+ { connection: ImportTypeError },
+ InitializerParams[TClient]
+ >
+ & DrizzleConfig
+ ),
+ ])
+ : TClient extends
+ 'postgres-js' | 'tidb-serverless' | 'libsql' | 'turso' | 'planetscale' | 'neon-http' | 'node-postgres' ? (
+ [
+ (
+ & IfNotImported<
+ ClientInstanceMap[TClient],
+ { connection: ImportTypeError },
+ InitializerParams[TClient]
+ >
+ & DrizzleConfig
+ ),
+ ] | [string]
+ )
+ : TClient extends 'mysql2' ? (
+ [
+ (
+ & IfNotImported<
+ ClientInstanceMap[TClient],
+ { connection: ImportTypeError },
+ InitializerParams[TClient]
+ >
+ & MySql2DrizzleConfig
+ ),
+ ] | [string]
+ )
+ : TClient extends 'neon-websocket' ? (
+ | [
+ & IfNotImported<
+ ClientInstanceMap[TClient],
+ { connection: ImportTypeError },
+ InitializerParams[TClient]
+ >
+ & DrizzleConfig
+ & {
+ ws?: any;
+ },
+ ]
+ | [string]
+ )
+ : [
+ (
+ & IfNotImported<
+ ClientInstanceMap[TClient],
+ { connection: ImportTypeError },
+ InitializerParams[TClient]
+ >
+ & DrizzleConfig
+ ),
+ ]
+): Promise> {
+ switch (client) {
+ case 'node-postgres': {
+ const defpg = await import('pg').catch(() => importError('pg'));
+ const { drizzle } = await import('./node-postgres/index.ts');
+
+ if (typeof params[0] === 'object') {
+ const { connection, ...drizzleConfig } = params[0] as
+ & { connection: NodePgPoolConfig | string }
+ & DrizzleConfig;
+
+ const instance = typeof connection === 'string'
+ ? new defpg.default.Pool({
+ connectionString: connection,
+ })
+ : new defpg.default.Pool(connection);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = typeof params[0] === 'string'
+ ? new defpg.default.Pool({
+ connectionString: params[0],
+ })
+ : new defpg.default.Pool(params[0]);
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+ case 'aws-data-api-pg': {
+ const { connection, ...drizzleConfig } = params[0] as {
+ connection: AwsDataApiConnectionConfig;
+ } & DrizzleConfig;
+ const { resourceArn, database, secretArn, ...rdsConfig } = connection;
+
+ const { RDSDataClient } = await import('@aws-sdk/client-rds-data').catch(() =>
+ importError('@aws-sdk/client-rds-data')
+ );
+ const { drizzle } = await import('./aws-data-api/pg/index.ts');
+
+ const instance = new RDSDataClient(rdsConfig);
+ const db = drizzle(instance, { resourceArn, database, secretArn, ...drizzleConfig });
+
+ return db as any;
+ }
+ case 'better-sqlite3': {
+ const { default: Client } = await import('better-sqlite3').catch(() => importError('better-sqlite3'));
+ const { drizzle } = await import('./better-sqlite3/index.ts');
+
+ if (typeof params[0] === 'object') {
+ const { connection, ...drizzleConfig } = params[0] as {
+ connection: BetterSQLite3DatabaseConfig;
+ } & DrizzleConfig;
+
+ if (typeof connection === 'object') {
+ const { source, ...options } = connection;
+
+ const instance = new Client(source, options);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = new Client(connection);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = new Client(params[0]);
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+ case 'bun:sqlite': {
+ const { Database: Client } = await import('bun:sqlite').catch(() => {
+ throw new Error(`Please use bun to use 'bun:sqlite' for Drizzle ORM to connect to database`);
+ });
+ const { drizzle } = await import('./bun-sqlite/index.ts');
+
+ if (typeof params[0] === 'object') {
+ const { connection, ...drizzleConfig } = params[0] as {
+ connection: BunSqliteDatabaseConfig | string | undefined;
+ } & DrizzleConfig;
+
+ if (typeof connection === 'object') {
+ const { source, ...opts } = connection;
+
+ const options = Object.values(opts).filter((v) => v !== undefined).length ? opts : undefined;
+
+ const instance = new Client(source, options);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = new Client(connection);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = new Client(params[0]);
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+ case 'd1': {
+ const { connection, ...drizzleConfig } = params[0] as { connection: AnyD1Database } & DrizzleConfig;
+
+ const { drizzle } = await import('./d1/index.ts');
+
+ const db = drizzle(connection, drizzleConfig);
+
+ return db as any;
+ }
+ case 'libsql':
+ case 'turso': {
+ const { createClient } = await import('@libsql/client').catch(() => importError('@libsql/client'));
+ const { drizzle } = await import('./libsql/index.ts');
+
+ if (typeof params[0] === 'string') {
+ const instance = createClient({
+ url: params[0],
+ });
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+
+ const { connection, ...drizzleConfig } = params[0] as any as { connection: LibsqlConfig } & DrizzleConfig;
+
+ const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+ case 'mysql2': {
+ const { createPool } = await import('mysql2/promise').catch(() => importError('mysql2'));
+ const { drizzle } = await import('./mysql2/index.ts');
+
+ if (typeof params[0] === 'object') {
+ const { connection, ...drizzleConfig } = params[0] as
+ & { connection: Mysql2Config | string }
+ & MySql2DrizzleConfig;
+
+ const instance = createPool(connection as Mysql2Config);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const connectionString = params[0]!;
+ const instance = createPool(connectionString);
+
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+ case 'neon-http': {
+ const { neon } = await import('@neondatabase/serverless').catch(() => importError('@neondatabase/serverless'));
+ const { drizzle } = await import('./neon-http/index.ts');
+
+ if (typeof params[0] === 'object') {
+ const { connection, ...drizzleConfig } = params[0] as { connection: MonodriverNeonHttpConfig } & DrizzleConfig;
+
+ if (typeof connection === 'object') {
+ const { connectionString, ...options } = connection;
+
+ const instance = neon(connectionString, options);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = neon(connection);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = neon(params[0]!);
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+ case 'neon-websocket': {
+ const { Pool, neonConfig } = await import('@neondatabase/serverless').catch(() =>
+ importError('@neondatabase/serverless')
+ );
+ const { drizzle } = await import('./neon-serverless/index.ts');
+ if (typeof params[0] === 'string') {
+ const instance = new Pool({
+ connectionString: params[0],
+ });
+
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+
+ if (typeof params[0] === 'object') {
+ const { connection, ws, ...drizzleConfig } = params[0] as {
+ connection?: NeonServerlessConfig | string;
+ ws?: any;
+ } & DrizzleConfig;
+
+ if (ws) {
+ neonConfig.webSocketConstructor = ws;
+ }
+
+ const instance = typeof connection === 'string'
+ ? new Pool({
+ connectionString: connection,
+ })
+ : new Pool(connection);
+
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = new Pool();
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+ case 'planetscale': {
+ const { Client } = await import('@planetscale/database').catch(() => importError('@planetscale/database'));
+ const { drizzle } = await import('./planetscale-serverless/index.ts');
+
+ if (typeof params[0] === 'object') {
+ const { connection, ...drizzleConfig } = params[0] as
+ & { connection: PlanetscaleConfig | string }
+ & DrizzleConfig;
+
+ const instance = typeof connection === 'string'
+ ? new Client({
+ url: connection,
+ })
+ : new Client(
+ connection,
+ );
+ const db = drizzle(instance, drizzleConfig);
+ return db as any;
+ }
+
+ const instance = new Client({
+ url: params[0],
+ });
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+ case 'postgres-js': {
+ const { default: client } = await import('postgres').catch(() => importError('postgres'));
+ const { drizzle } = await import('./postgres-js/index.ts');
+
+ if (typeof params[0] === 'object') {
+ const { connection, ...drizzleConfig } = params[0] as {
+ connection: { url?: string } & PostgresJSOptions<Record<string, any>>;
+ } & DrizzleConfig;
+
+ if (typeof connection === 'object' && connection.url !== undefined) {
+ const { url, ...config } = connection;
+
+ const instance = client(url, config);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = client(connection);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = client(params[0]!);
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+ case 'tidb-serverless': {
+ const { connect } = await import('@tidbcloud/serverless').catch(() => importError('@tidbcloud/serverless'));
+ const { drizzle } = await import('./tidb-serverless/index.ts');
+
+ if (typeof params[0] === 'string') {
+ const instance = connect({
+ url: params[0],
+ });
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+
+ const { connection, ...drizzleConfig } = params[0] as
+ & { connection: TiDBServerlessConfig | string }
+ & DrizzleConfig;
+
+ const instance = typeof connection === 'string'
+ ? connect({
+ url: connection,
+ })
+ : connect(connection);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+ case 'vercel-postgres': {
+ const drizzleConfig = params[0] as DrizzleConfig | undefined;
+ const { sql } = await import('@vercel/postgres').catch(() => importError('@vercel/postgres'));
+ const { drizzle } = await import('./vercel-postgres/index.ts');
+
+ const db = drizzle(sql, drizzleConfig);
+
+ return db as any;
+ }
+
+ case 'pglite': {
+ const { PGlite } = await import('@electric-sql/pglite').catch(() => importError('@electric-sql/pglite'));
+ const { drizzle } = await import('./pglite/index.ts');
+
+ if (typeof params[0] === 'object') {
+ const { connection, ...drizzleConfig } = params[0] as {
+ connection: PGliteOptions & { dataDir: string };
+ } & DrizzleConfig;
+
+ if (typeof connection === 'object') {
+ const { dataDir, ...options } = connection;
+
+ const instance = new PGlite(dataDir, options);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = new PGlite(connection);
+ const db = drizzle(instance, drizzleConfig);
+
+ return db as any;
+ }
+
+ const instance = new PGlite(params[0]);
+ const db = drizzle(instance);
+
+ return db as any;
+ }
+ }
+
+ assertUnreachable(client);
+}
diff --git a/drizzle-orm/src/monomigrator.ts b/drizzle-orm/src/monomigrator.ts
new file mode 100644
index 000000000..9f4a748e0
--- /dev/null
+++ b/drizzle-orm/src/monomigrator.ts
@@ -0,0 +1,109 @@
+/* eslint-disable import/extensions */
+import type { AwsDataApiPgDatabase } from './aws-data-api/pg/index.ts';
+import type { BetterSQLite3Database } from './better-sqlite3/index.ts';
+import type { BunSQLiteDatabase } from './bun-sqlite/index.ts';
+import type { DrizzleD1Database } from './d1/index.ts';
+import { entityKind } from './entity.ts';
+import type { LibSQLDatabase } from './libsql/index.ts';
+import type { MigrationConfig } from './migrator.ts';
+import type { MySql2Database } from './mysql2/index.ts';
+import type { NeonHttpDatabase } from './neon-http/index.ts';
+import type { NeonDatabase } from './neon-serverless/index.ts';
+import type { NodePgDatabase } from './node-postgres/index.ts';
+import type { PgliteDatabase } from './pglite/driver.ts';
+import type { PlanetScaleDatabase } from './planetscale-serverless/index.ts';
+import type { PostgresJsDatabase } from './postgres-js/index.ts';
+import type { TiDBServerlessDatabase } from './tidb-serverless/index.ts';
+import type { VercelPgDatabase } from './vercel-postgres/index.ts';
+
+export async function migrate(
+ db:
+ | AwsDataApiPgDatabase
+ | BetterSQLite3Database
+ | BunSQLiteDatabase
+ | DrizzleD1Database
+ | LibSQLDatabase
+ | MySql2Database
+ | NeonHttpDatabase
+ | NeonDatabase
+ | NodePgDatabase
+ | PlanetScaleDatabase
+ | PostgresJsDatabase
+ | VercelPgDatabase
+ | TiDBServerlessDatabase
+ | PgliteDatabase,
+ config: MigrationConfig,
+) {
+ switch ((<any> db).constructor[entityKind]) {
+ case 'AwsDataApiPgDatabase': {
+ const { migrate } = await import('./aws-data-api/pg/migrator.ts');
+
+ return migrate(db as AwsDataApiPgDatabase, config as MigrationConfig);
+ }
+ case 'BetterSQLite3Database': {
+ const { migrate } = await import('./better-sqlite3/migrator.ts');
+
+ return migrate(db as BetterSQLite3Database, config as MigrationConfig);
+ }
+ case 'BunSQLiteDatabase': {
+ const { migrate } = await import('./bun-sqlite/migrator.ts');
+
+ return migrate(db as BunSQLiteDatabase, config as MigrationConfig);
+ }
+ case 'D1Database': {
+ const { migrate } = await import('./d1/migrator.ts');
+
+ return migrate(db as DrizzleD1Database, config as MigrationConfig);
+ }
+ case 'LibSQLDatabase': {
+ const { migrate } = await import('./libsql/migrator.ts');
+
+ return migrate(db as LibSQLDatabase, config as MigrationConfig);
+ }
+ case 'MySql2Database': {
+ const { migrate } = await import('./mysql2/migrator.ts');
+
+ return migrate(db as MySql2Database, config as MigrationConfig);
+ }
+ case 'NeonHttpDatabase': {
+ const { migrate } = await import('./neon-http/migrator.ts');
+
+ return migrate(db as NeonHttpDatabase, config as MigrationConfig);
+ }
+ case 'NeonServerlessDatabase': {
+ const { migrate } = await import('./neon-serverless/migrator.ts');
+
+ return migrate(db as NeonDatabase, config as MigrationConfig);
+ }
+ case 'NodePgDatabase': {
+ const { migrate } = await import('./node-postgres/migrator.ts');
+
+ return migrate(db as NodePgDatabase, config as MigrationConfig);
+ }
+ case 'PlanetScaleDatabase': {
+ const { migrate } = await import('./planetscale-serverless/migrator.ts');
+
+ return migrate(db as PlanetScaleDatabase, config as MigrationConfig);
+ }
+ case 'PostgresJsDatabase': {
+ const { migrate } = await import('./postgres-js/migrator.ts');
+
+ return migrate(db as PostgresJsDatabase, config as MigrationConfig);
+ }
+ case 'TiDBServerlessDatabase': {
+ const { migrate } = await import('./tidb-serverless/migrator.ts');
+
+ return migrate(db as TiDBServerlessDatabase, config as MigrationConfig);
+ }
+ case 'VercelPgDatabase': {
+ const { migrate } = await import('./vercel-postgres/migrator.ts');
+
+ return migrate(db as VercelPgDatabase, config as MigrationConfig);
+ }
+ case 'PgliteDatabase': {
+ const { migrate } = await import('./pglite/migrator.ts');
+
+ return migrate(db as PgliteDatabase, config as MigrationConfig);
+ }
+ }
+}
diff --git a/drizzle-orm/src/mysql-core/columns/all.ts b/drizzle-orm/src/mysql-core/columns/all.ts
new file mode 100644
index 000000000..428b3c330
--- /dev/null
+++ b/drizzle-orm/src/mysql-core/columns/all.ts
@@ -0,0 +1,55 @@
+import { bigint } from './bigint.ts';
+import { binary } from './binary.ts';
+import { boolean } from './boolean.ts';
+import { char } from './char.ts';
+import { customType } from './custom.ts';
+import { date } from './date.ts';
+import { datetime } from './datetime.ts';
+import { decimal } from './decimal.ts';
+import { double } from './double.ts';
+import { mysqlEnum } from './enum.ts';
+import { float } from './float.ts';
+import { int } from './int.ts';
+import { json } from './json.ts';
+import { mediumint } from './mediumint.ts';
+import { real } from './real.ts';
+import { serial } from './serial.ts';
+import { smallint } from './smallint.ts';
+import { text } from './text.ts';
+import { time } from './time.ts';
+import { timestamp } from './timestamp.ts';
+import { tinyint } from './tinyint.ts';
+import { varbinary } from './varbinary.ts';
+import { varchar } from './varchar.ts';
+import { year } from './year.ts';
+
+export function getMySqlColumnBuilders() {
+ return {
+ bigint,
+ binary,
+ boolean,
+ char,
+ customType,
+ date,
+ datetime,
+ decimal,
+ double,
+ mysqlEnum,
+ float,
+ int,
+ json,
+ mediumint,
+ real,
+ serial,
+ smallint,
+ text,
+ time,
+ timestamp,
+ tinyint,
+ varbinary,
+ varchar,
+ year,
+ };
+}
+
+export type MySqlColumnBuilders = ReturnType<typeof getMySqlColumnBuilders>;
diff --git a/drizzle-orm/src/mysql-core/columns/bigint.ts b/drizzle-orm/src/mysql-core/columns/bigint.ts
index ca1eedb3f..5882b1025 100644
--- a/drizzle-orm/src/mysql-core/columns/bigint.ts
+++ b/drizzle-orm/src/mysql-core/columns/bigint.ts
@@ -2,6 +2,7 @@ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnCon
import type { ColumnBaseConfig } from '~/column.ts';
import { entityKind } from '~/entity.ts';
import type { AnyMySqlTable } from '~/mysql-core/table.ts';
+import { getColumnNameAndConfig } from '~/utils.ts';
import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts';
export type MySqlBigInt53BuilderInitial<TName extends string> = MySqlBigInt53Builder<{
@@ -98,16 +99,20 @@ export class MySqlBigInt64
}
}
-interface MySqlBigIntConfig {
+export interface MySqlBigIntConfig<T extends 'number' | 'bigint' = 'number' | 'bigint'> {
mode: T;
unsigned?: boolean;
}
+export function bigint<TMode extends MySqlBigIntConfig['mode']>(
+ config: MySqlBigIntConfig<TMode>,
+): TMode extends 'number' ? MySqlBigInt53BuilderInitial<''> : MySqlBigInt64BuilderInitial<''>;
export function bigint<TName extends string, TMode extends MySqlBigIntConfig['mode']>(
 name: TName,
 config: MySqlBigIntConfig<TMode>,
): TMode extends 'number' ? MySqlBigInt53BuilderInitial<TName> : MySqlBigInt64BuilderInitial<TName>;
-export function bigint(name: string, config: MySqlBigIntConfig) {
+export function bigint(a?: string | MySqlBigIntConfig, b?: MySqlBigIntConfig) {
+ const { name, config } = getColumnNameAndConfig(a, b);
if (config.mode === 'number') {
return new MySqlBigInt53Builder(name, config.unsigned);
}
diff --git a/drizzle-orm/src/mysql-core/columns/binary.ts b/drizzle-orm/src/mysql-core/columns/binary.ts
index 87a8e0f8c..7297d7b0a 100644
--- a/drizzle-orm/src/mysql-core/columns/binary.ts
+++ b/drizzle-orm/src/mysql-core/columns/binary.ts
@@ -2,6 +2,7 @@ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnCon
import type { ColumnBaseConfig } from '~/column.ts';
import { entityKind } from '~/entity.ts';
import type { AnyMySqlTable } from '~/mysql-core/table.ts';
+import { getColumnNameAndConfig } from '~/utils.ts';
import { MySqlColumn, MySqlColumnBuilder } from './common.ts';
export type MySqlBinaryBuilderInitial<TName extends string> = MySqlBinaryBuilder<{
@@ -50,9 +51,15 @@ export interface MySqlBinaryConfig {
length?: number;
}
+export function binary(): MySqlBinaryBuilderInitial<''>;
+export function binary(
+ config?: MySqlBinaryConfig,
+): MySqlBinaryBuilderInitial<''>;
export function binary<TName extends string>(
 name: TName,
- config: MySqlBinaryConfig = {},
-): MySqlBinaryBuilderInitial<TName> {
+ config?: MySqlBinaryConfig,
+): MySqlBinaryBuilderInitial<TName>;
+export function binary(a?: string | MySqlBinaryConfig, b: MySqlBinaryConfig = {}) {
+ const { name, config } = getColumnNameAndConfig(a, b);
return new MySqlBinaryBuilder(name, config.length);
}
diff --git a/drizzle-orm/src/mysql-core/columns/boolean.ts b/drizzle-orm/src/mysql-core/columns/boolean.ts
index 3a915e673..d1df78570 100644
--- a/drizzle-orm/src/mysql-core/columns/boolean.ts
+++ b/drizzle-orm/src/mysql-core/columns/boolean.ts
@@ -49,6 +49,8 @@ export class MySqlBoolean>
}
}
-export function boolean