diff --git a/changelogs/drizzle-kit/0.29.0.md b/changelogs/drizzle-kit/0.29.0.md
new file mode 100644
index 000000000..b68b33085
--- /dev/null
+++ b/changelogs/drizzle-kit/0.29.0.md
@@ -0,0 +1,40 @@
+# New Dialects
+
+### 🎉 `SingleStore` dialect is now available in Drizzle
+
+Thanks to the SingleStore team for creating a PR with all the necessary changes to support the MySQL-compatible part of SingleStore. You can already start using it with Drizzle. The SingleStore team will also help us iterate through updates and make more SingleStore-specific features available in Drizzle
+
+```ts
+import 'dotenv/config';
+import { defineConfig } from 'drizzle-kit';
+
+export default defineConfig({
+ dialect: 'singlestore',
+ out: './drizzle',
+ schema: './src/db/schema.ts',
+ dbCredentials: {
+ url: process.env.DATABASE_URL!,
+ },
+});
+```
+
+You can check out our [Getting started guides](https://orm.drizzle.team/docs/get-started/singlestore-new) to try SingleStore!
+
+# New Drivers
+
+### 🎉 `SQLite Durable Objects` driver is now available in Drizzle
+
+You can now query SQLite Durable Objects in Drizzle!
+
+For the full example, please check our [Get Started](https://orm.drizzle.team/docs/get-started/do-new) Section
+
+```ts
+import 'dotenv/config';
+import { defineConfig } from 'drizzle-kit';
+export default defineConfig({
+ out: './drizzle',
+ schema: './src/db/schema.ts',
+ dialect: 'sqlite',
+ driver: 'durable-sqlite',
+});
+```
\ No newline at end of file
diff --git a/changelogs/drizzle-orm/0.36.1.md b/changelogs/drizzle-orm/0.36.1.md
index 1c0e96756..74d256a13 100644
--- a/changelogs/drizzle-orm/0.36.1.md
+++ b/changelogs/drizzle-orm/0.36.1.md
@@ -3,4 +3,4 @@
- [[BUG]: Using sql.placeholder with limit and/or offset for a prepared statement produces TS error](https://github.com/drizzle-team/drizzle-orm/issues/2146) - thanks @L-Mario564
- [[BUG] If a query I am trying to modify with a dynamic query (....$dynamic()) contains any placeholders, I'm getting an error that says No value for placeholder.... provided](https://github.com/drizzle-team/drizzle-orm/issues/2272) - thanks @L-Mario564
- [[BUG]: Error thrown when trying to insert an array of new rows using generatedAlwaysAsIdentity() for the id column](https://github.com/drizzle-team/drizzle-orm/issues/2849) - thanks @L-Mario564
-- [[BUG]: Unable to Use BigInt Types with Bun and Drizzle](https://github.com/drizzle-team/drizzle-orm/issues/2603) - thanks @L-Mario564
\ No newline at end of file
+- [[BUG]: Unable to Use BigInt Types with Bun and Drizzle](https://github.com/drizzle-team/drizzle-orm/issues/2603) - thanks @L-Mario564
diff --git a/changelogs/drizzle-orm/0.37.0.md b/changelogs/drizzle-orm/0.37.0.md
new file mode 100644
index 000000000..f54fa4a27
--- /dev/null
+++ b/changelogs/drizzle-orm/0.37.0.md
@@ -0,0 +1,98 @@
+# New Dialects
+
+### 🎉 `SingleStore` dialect is now available in Drizzle
+
+Thanks to the SingleStore team for creating a PR with all the necessary changes to support the MySQL-compatible part of SingleStore. You can already start using it with Drizzle. The SingleStore team will also help us iterate through updates and make more SingleStore-specific features available in Drizzle
+
+```ts
+import { int, singlestoreTable, varchar } from 'drizzle-orm/singlestore-core';
+import { drizzle } from 'drizzle-orm/singlestore';
+
+export const usersTable = singlestoreTable('users_table', {
+ id: int().primaryKey(),
+ name: varchar({ length: 255 }).notNull(),
+ age: int().notNull(),
+ email: varchar({ length: 255 }).notNull().unique(),
+});
+
+...
+
+const db = drizzle(process.env.DATABASE_URL!);
+
+db.select()...
+```
+
+You can check out our [Getting started guides](https://orm.drizzle.team/docs/get-started/singlestore-new) to try SingleStore!
+
+# New Drivers
+
+### 🎉 `SQLite Durable Objects` driver is now available in Drizzle
+
+You can now query SQLite Durable Objects in Drizzle!
+
+For the full example, please check our [Get Started](https://orm.drizzle.team/docs/get-started/do-new) Section
+
+```ts
+///
+import { drizzle, DrizzleSqliteDODatabase } from 'drizzle-orm/durable-sqlite';
+import { DurableObject } from 'cloudflare:workers'
+import { migrate } from 'drizzle-orm/durable-sqlite/migrator';
+import migrations from '../drizzle/migrations';
+import { usersTable } from './db/schema';
+
+export class MyDurableObject1 extends DurableObject {
+ storage: DurableObjectStorage;
+ db: DrizzleSqliteDODatabase;
+
+ constructor(ctx: DurableObjectState, env: Env) {
+ super(ctx, env);
+ this.storage = ctx.storage;
+ this.db = drizzle(this.storage, { logger: false });
+ }
+
+ async migrate() {
+ migrate(this.db, migrations);
+ }
+
+ async insert(user: typeof usersTable.$inferInsert) {
+ await this.db.insert(usersTable).values(user);
+ }
+
+ async select() {
+ return this.db.select().from(usersTable);
+ }
+}
+
+export default {
+ /**
+ * This is the standard fetch handler for a Cloudflare Worker
+ *
+ * @param request - The request submitted to the Worker from the client
+ * @param env - The interface to reference bindings declared in wrangler.toml
+ * @param ctx - The execution context of the Worker
+ * @returns The response to be sent back to the client
+ */
+ async fetch(request: Request, env: Env): Promise {
+ const id: DurableObjectId = env.MY_DURABLE_OBJECT1.idFromName('durable-object');
+ const stub = env.MY_DURABLE_OBJECT1.get(id);
+ await stub.migrate();
+
+ await stub.insert({
+ name: 'John',
+ age: 30,
+ email: 'john@example.com',
+ })
+ console.log('New user created!')
+
+ const users = await stub.select();
+ console.log('Getting all users from the database: ', users)
+
+ return new Response();
+ }
+}
+```
+
+# Bug fixes
+
+- [[BUG]: $with is undefined on withReplicas](https://github.com/drizzle-team/drizzle-orm/issues/1834)
+- [[BUG]: Neon serverless driver accepts authToken as a promise, but the $withAuth does not](https://github.com/drizzle-team/drizzle-orm/issues/3597)
\ No newline at end of file
diff --git a/drizzle-kit/package.json b/drizzle-kit/package.json
index 0b3b2562b..eca469d9b 100644
--- a/drizzle-kit/package.json
+++ b/drizzle-kit/package.json
@@ -1,12 +1,13 @@
{
"name": "drizzle-kit",
- "version": "0.28.1",
+ "version": "0.29.0",
"homepage": "https://orm.drizzle.team",
"keywords": [
"drizzle",
"orm",
"pg",
"mysql",
+ "singlestore",
"postgresql",
"postgres",
"sqlite",
diff --git a/drizzle-kit/src/api.ts b/drizzle-kit/src/api.ts
index b18ed95f4..18107bd34 100644
--- a/drizzle-kit/src/api.ts
+++ b/drizzle-kit/src/api.ts
@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto';
import { LibSQLDatabase } from 'drizzle-orm/libsql';
import type { MySql2Database } from 'drizzle-orm/mysql2';
import { PgDatabase } from 'drizzle-orm/pg-core';
+import { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore';
import {
columnsResolver,
enumsResolver,
@@ -30,12 +31,19 @@ import { generateMySqlSnapshot } from './serializer/mysqlSerializer';
import { prepareFromExports } from './serializer/pgImports';
import { PgSchema as PgSchemaKit, pgSchema, squashPgScheme } from './serializer/pgSchema';
import { generatePgSnapshot } from './serializer/pgSerializer';
+import {
+ SingleStoreSchema as SingleStoreSchemaKit,
+ singlestoreSchema,
+ squashSingleStoreScheme,
+} from './serializer/singlestoreSchema';
+import { generateSingleStoreSnapshot } from './serializer/singlestoreSerializer';
import { SQLiteSchema as SQLiteSchemaKit, sqliteSchema, squashSqliteScheme } from './serializer/sqliteSchema';
import { generateSqliteSnapshot } from './serializer/sqliteSerializer';
import type { DB, SQLiteDB } from './utils';
export type DrizzleSnapshotJSON = PgSchemaKit;
export type DrizzleSQLiteSnapshotJSON = SQLiteSchemaKit;
export type DrizzleMySQLSnapshotJSON = MySQLSchemaKit;
+export type DrizzleSingleStoreSnapshotJSON = SingleStoreSchemaKit;
export const generateDrizzleJson = (
imports: Record,
@@ -374,6 +382,112 @@ export const pushMySQLSchema = async (
};
};
+// SingleStore
+
+export const generateSingleStoreDrizzleJson = async (
+ imports: Record,
+ prevId?: string,
+ casing?: CasingType,
+): Promise => {
+ const { prepareFromExports } = await import('./serializer/singlestoreImports');
+
+ const prepared = prepareFromExports(imports);
+
+ const id = randomUUID();
+
+ const snapshot = generateSingleStoreSnapshot(prepared.tables, /* prepared.views, */ casing);
+
+ return {
+ ...snapshot,
+ id,
+ prevId: prevId ?? originUUID,
+ };
+};
+
+export const generateSingleStoreMigration = async (
+ prev: DrizzleSingleStoreSnapshotJSON,
+ cur: DrizzleSingleStoreSnapshotJSON,
+) => {
+ const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer');
+
+ const validatedPrev = singlestoreSchema.parse(prev);
+ const validatedCur = singlestoreSchema.parse(cur);
+
+ const squashedPrev = squashSingleStoreScheme(validatedPrev);
+ const squashedCur = squashSingleStoreScheme(validatedCur);
+
+ const { sqlStatements } = await applySingleStoreSnapshotsDiff(
+ squashedPrev,
+ squashedCur,
+ tablesResolver,
+ columnsResolver,
+ /* singleStoreViewsResolver, */
+ validatedPrev,
+ validatedCur,
+ 'push',
+ );
+
+ return sqlStatements;
+};
+
+export const pushSingleStoreSchema = async (
+ imports: Record,
+ drizzleInstance: SingleStoreDriverDatabase,
+ databaseName: string,
+) => {
+ const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer');
+ const { logSuggestionsAndReturn } = await import(
+ './cli/commands/singlestorePushUtils'
+ );
+ const { singlestorePushIntrospect } = await import(
+ './cli/commands/singlestoreIntrospect'
+ );
+ const { sql } = await import('drizzle-orm');
+
+ const db: DB = {
+ query: async (query: string) => {
+ const res = await drizzleInstance.execute(sql.raw(query));
+ return res[0] as unknown as any[];
+ },
+ };
+ const cur = await generateSingleStoreDrizzleJson(imports);
+ const { schema: prev } = await singlestorePushIntrospect(db, databaseName, []);
+
+ const validatedPrev = singlestoreSchema.parse(prev);
+ const validatedCur = singlestoreSchema.parse(cur);
+
+ const squashedPrev = squashSingleStoreScheme(validatedPrev);
+ const squashedCur = squashSingleStoreScheme(validatedCur);
+
+ const { statements } = await applySingleStoreSnapshotsDiff(
+ squashedPrev,
+ squashedCur,
+ tablesResolver,
+ columnsResolver,
+ /* singleStoreViewsResolver, */
+ validatedPrev,
+ validatedCur,
+ 'push',
+ );
+
+ const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn(
+ db,
+ statements,
+ validatedCur,
+ );
+
+ return {
+ hasDataLoss: shouldAskForApprove,
+ warnings: infoToPrint,
+ statementsToExecute,
+ apply: async () => {
+ for (const dStmnt of statementsToExecute) {
+ await db.query(dStmnt);
+ }
+ },
+ };
+};
+
export const upPgSnapshot = (snapshot: Record) => {
if (snapshot.version === '5') {
return upPgV7(upPgV6(snapshot));
diff --git a/drizzle-kit/src/cli/commands/introspect.ts b/drizzle-kit/src/cli/commands/introspect.ts
index 2629d000e..101eb617a 100644
--- a/drizzle-kit/src/cli/commands/introspect.ts
+++ b/drizzle-kit/src/cli/commands/introspect.ts
@@ -4,20 +4,24 @@ import { render, renderWithTask } from 'hanji';
import { Minimatch } from 'minimatch';
import { join } from 'path';
import { plural, singular } from 'pluralize';
+import { drySingleStore, SingleStoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema';
import { assertUnreachable, originUUID } from '../../global';
import { schemaToTypeScript as mysqlSchemaToTypeScript } from '../../introspect-mysql';
import { paramNameFor, schemaToTypeScript as postgresSchemaToTypeScript } from '../../introspect-pg';
+import { schemaToTypeScript as singlestoreSchemaToTypeScript } from '../../introspect-singlestore';
import { schemaToTypeScript as sqliteSchemaToTypeScript } from '../../introspect-sqlite';
import { dryMySql, MySqlSchema, squashMysqlScheme } from '../../serializer/mysqlSchema';
import { fromDatabase as fromMysqlDatabase } from '../../serializer/mysqlSerializer';
import { dryPg, type PgSchema, squashPgScheme } from '../../serializer/pgSchema';
import { fromDatabase as fromPostgresDatabase } from '../../serializer/pgSerializer';
+import { fromDatabase as fromSingleStoreDatabase } from '../../serializer/singlestoreSerializer';
import { drySQLite, type SQLiteSchema, squashSqliteScheme } from '../../serializer/sqliteSchema';
import { fromDatabase as fromSqliteDatabase } from '../../serializer/sqliteSerializer';
import {
applyLibSQLSnapshotsDiff,
applyMysqlSnapshotsDiff,
applyPgSnapshotsDiff,
+ applySingleStoreSnapshotsDiff,
applySqliteSnapshotsDiff,
} from '../../snapshotsDiffer';
import { prepareOutFolder } from '../../utils';
@@ -26,6 +30,7 @@ import type { Casing, Prefix } from '../validations/common';
import { LibSQLCredentials } from '../validations/libsql';
import type { MysqlCredentials } from '../validations/mysql';
import type { PostgresCredentials } from '../validations/postgres';
+import { SingleStoreCredentials } from '../validations/singlestore';
import type { SqliteCredentials } from '../validations/sqlite';
import { IntrospectProgress } from '../views';
import {
@@ -280,6 +285,103 @@ export const introspectMysql = async (
process.exit(0);
};
+export const introspectSingleStore = async (
+ casing: Casing,
+ out: string,
+ breakpoints: boolean,
+ credentials: SingleStoreCredentials,
+ tablesFilter: string[],
+ prefix: Prefix,
+) => {
+ const { connectToSingleStore } = await import('../connections');
+ const { db, database } = await connectToSingleStore(credentials);
+
+ const matchers = tablesFilter.map((it) => {
+ return new Minimatch(it);
+ });
+
+ const filter = (tableName: string) => {
+ if (matchers.length === 0) return true;
+
+ let flags: boolean[] = [];
+
+ for (let matcher of matchers) {
+ if (matcher.negate) {
+ if (!matcher.match(tableName)) {
+ flags.push(false);
+ }
+ }
+
+ if (matcher.match(tableName)) {
+ flags.push(true);
+ }
+ }
+
+ if (flags.length > 0) {
+ return flags.every(Boolean);
+ }
+ return false;
+ };
+
+ const progress = new IntrospectProgress();
+ const res = await renderWithTask(
+ progress,
+ fromSingleStoreDatabase(db, database, filter, (stage, count, status) => {
+ progress.update(stage, count, status);
+ }),
+ );
+
+ const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema;
+ const ts = singlestoreSchemaToTypeScript(schema, casing);
+ const { internal, ...schemaWithoutInternals } = schema;
+
+ const schemaFile = join(out, 'schema.ts');
+ writeFileSync(schemaFile, ts.file);
+ console.log();
+
+ const { snapshots, journal } = prepareOutFolder(out, 'postgresql');
+
+ if (snapshots.length === 0) {
+ const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff(
+ squashSingleStoreScheme(drySingleStore),
+ squashSingleStoreScheme(schema),
+ tablesResolver,
+ columnsResolver,
+ /* singleStoreViewsResolver, */
+ drySingleStore,
+ schema,
+ );
+
+ writeResult({
+ cur: schema,
+ sqlStatements,
+ journal,
+ _meta,
+ outFolder: out,
+ breakpoints,
+ type: 'introspect',
+ prefixMode: prefix,
+ });
+ } else {
+ render(
+ `[${
+ chalk.blue(
+ 'i',
+ )
+ }] No SQL generated, you already have migrations in project`,
+ );
+ }
+
+ render(
+ `[${
+ chalk.green(
+ '✓',
+ )
+ }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`,
+ );
+ process.exit(0);
+};
+
export const introspectSqlite = async (
casing: Casing,
out: string,
diff --git a/drizzle-kit/src/cli/commands/migrate.ts b/drizzle-kit/src/cli/commands/migrate.ts
index 92770e99d..96067c165 100644
--- a/drizzle-kit/src/cli/commands/migrate.ts
+++ b/drizzle-kit/src/cli/commands/migrate.ts
@@ -4,6 +4,8 @@ import {
prepareMySqlMigrationSnapshot,
preparePgDbPushSnapshot,
preparePgMigrationSnapshot,
+ prepareSingleStoreDbPushSnapshot,
+ prepareSingleStoreMigrationSnapshot,
prepareSQLiteDbPushSnapshot,
prepareSqliteMigrationSnapshot,
} from '../../migrationPreparator';
@@ -11,6 +13,7 @@ import {
import chalk from 'chalk';
import { render } from 'hanji';
import path, { join } from 'path';
+import { SingleStoreSchema, singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema';
import { TypeOf } from 'zod';
import type { CommonSchema } from '../../schemaValidator';
import { MySqlSchema, mysqlSchema, squashMysqlScheme, ViewSquashed } from '../../serializer/mysqlSchema';
@@ -20,6 +23,7 @@ import {
applyLibSQLSnapshotsDiff,
applyMysqlSnapshotsDiff,
applyPgSnapshotsDiff,
+ applySingleStoreSnapshotsDiff,
applySqliteSnapshotsDiff,
Column,
ColumnsResolverInput,
@@ -39,7 +43,7 @@ import {
} from '../../snapshotsDiffer';
import { assertV1OutFolder, Journal, prepareMigrationFolder } from '../../utils';
import { prepareMigrationMetadata } from '../../utils/words';
-import { CasingType, Prefix } from '../validations/common';
+import { CasingType, Driver, Prefix } from '../validations/common';
import { withStyle } from '../validations/outputs';
import {
isRenamePromptItem,
@@ -143,6 +147,28 @@ export const mySqlViewsResolver = async (
}
};
+/* export const singleStoreViewsResolver = async (
+ input: ResolverInput,
+): Promise> => {
+ try {
+ const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict(
+ input.created,
+ input.deleted,
+ 'view',
+ );
+
+ return {
+ created: created,
+ deleted: deleted,
+ moved: moved,
+ renamed: renamed,
+ };
+ } catch (e) {
+ console.error(e);
+ throw e;
+ }
+}; */
+
export const sqliteViewsResolver = async (
input: ResolverInput,
): Promise> => {
@@ -521,6 +547,156 @@ export const prepareAndMigrateMysql = async (config: GenerateConfig) => {
}
};
+// Not needed for now
+function singleStoreSchemaSuggestions(
+ curSchema: TypeOf,
+ prevSchema: TypeOf,
+) {
+ const suggestions: string[] = [];
+ const usedSuggestions: string[] = [];
+ const suggestionTypes = {
+ // TODO: Check if SingleStore has serial type
+ serial: withStyle.errorWarning(
+ `We deprecated the use of 'serial' for SingleStore starting from version 0.20.0. In SingleStore, 'serial' is simply an alias for 'bigint unsigned not null auto_increment unique,' which creates all constraints and indexes for you. This may make the process less explicit for both users and drizzle-kit push commands`,
+ ),
+ };
+
+ for (const table of Object.values(curSchema.tables)) {
+ for (const column of Object.values(table.columns)) {
+ if (column.type === 'serial') {
+ if (!usedSuggestions.includes('serial')) {
+ suggestions.push(suggestionTypes['serial']);
+ }
+
+ const uniqueForSerial = Object.values(
+ prevSchema.tables[table.name].uniqueConstraints,
+ ).find((it) => it.columns[0] === column.name);
+
+ suggestions.push(
+ `\n`
+ + withStyle.suggestion(
+ `We are suggesting to change ${
+ chalk.blue(
+ column.name,
+ )
+ } column in ${
+ chalk.blueBright(
+ table.name,
+ )
+ } table from serial to bigint unsigned\n\n${
+ chalk.blueBright(
+ `bigint("${column.name}", { mode: "number", unsigned: true }).notNull().autoincrement().unique(${
+ uniqueForSerial?.name ? `"${uniqueForSerial?.name}"` : ''
+ })`,
+ )
+ }`,
+ ),
+ );
+ }
+ }
+ }
+
+ return suggestions;
+}
+
+// Intersect with prepareAnMigrate
+export const prepareSingleStorePush = async (
+ schemaPath: string | string[],
+ snapshot: SingleStoreSchema,
+ casing: CasingType | undefined,
+) => {
+ try {
+ const { prev, cur } = await prepareSingleStoreDbPushSnapshot(
+ snapshot,
+ schemaPath,
+ casing,
+ );
+
+ const validatedPrev = singlestoreSchema.parse(prev);
+ const validatedCur = singlestoreSchema.parse(cur);
+
+ const squashedPrev = squashSingleStoreScheme(validatedPrev);
+ const squashedCur = squashSingleStoreScheme(validatedCur);
+
+ const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff(
+ squashedPrev,
+ squashedCur,
+ tablesResolver,
+ columnsResolver,
+ /* singleStoreViewsResolver, */
+ validatedPrev,
+ validatedCur,
+ 'push',
+ );
+
+ return { sqlStatements, statements, validatedCur, validatedPrev };
+ } catch (e) {
+ console.error(e);
+ process.exit(1);
+ }
+};
+
+export const prepareAndMigrateSingleStore = async (config: GenerateConfig) => {
+ const outFolder = config.out;
+ const schemaPath = config.schema;
+ const casing = config.casing;
+
+ try {
+ // TODO: remove
+ assertV1OutFolder(outFolder);
+
+ const { snapshots, journal } = prepareMigrationFolder(outFolder, 'singlestore');
+ const { prev, cur, custom } = await prepareSingleStoreMigrationSnapshot(
+ snapshots,
+ schemaPath,
+ casing,
+ );
+
+ const validatedPrev = singlestoreSchema.parse(prev);
+ const validatedCur = singlestoreSchema.parse(cur);
+
+ if (config.custom) {
+ writeResult({
+ cur: custom,
+ sqlStatements: [],
+ journal,
+ outFolder,
+ name: config.name,
+ breakpoints: config.breakpoints,
+ type: 'custom',
+ prefixMode: config.prefix,
+ });
+ return;
+ }
+
+ const squashedPrev = squashSingleStoreScheme(validatedPrev);
+ const squashedCur = squashSingleStoreScheme(validatedCur);
+
+ const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff(
+ squashedPrev,
+ squashedCur,
+ tablesResolver,
+ columnsResolver,
+ /* singleStoreViewsResolver, */
+ validatedPrev,
+ validatedCur,
+ );
+
+ writeResult({
+ cur,
+ sqlStatements,
+ journal,
+ _meta,
+ outFolder,
+ name: config.name,
+ breakpoints: config.breakpoints,
+ prefixMode: config.prefix,
+ });
+ } catch (e) {
+ console.error(e);
+ }
+};
+
export const prepareAndMigrateSqlite = async (config: GenerateConfig) => {
const outFolder = config.out;
const schemaPath = config.schema;
@@ -577,6 +753,7 @@ export const prepareAndMigrateSqlite = async (config: GenerateConfig) => {
breakpoints: config.breakpoints,
bundle: config.bundle,
prefixMode: config.prefix,
+ driver: config.driver,
});
} catch (e) {
console.error(e);
@@ -1025,6 +1202,7 @@ export const writeResult = ({
bundle = false,
type = 'none',
prefixMode,
+ driver,
}: {
cur: CommonSchema;
sqlStatements: string[];
@@ -1036,6 +1214,7 @@ export const writeResult = ({
name?: string;
bundle?: boolean;
type?: 'introspect' | 'custom' | 'none';
+ driver?: Driver;
}) => {
if (type === 'none') {
console.log(schema(cur));
@@ -1093,9 +1272,9 @@ export const writeResult = ({
fs.writeFileSync(`${outFolder}/${tag}.sql`, sql);
- // js file with .sql imports for React Native / Expo
+ // js file with .sql imports for React Native / Expo and Durable Sqlite Objects
if (bundle) {
- const js = embeddedMigrations(journal);
+ const js = embeddedMigrations(journal, driver);
fs.writeFileSync(`${outFolder}/migrations.js`, js);
}
@@ -1112,9 +1291,11 @@ export const writeResult = ({
);
};
-export const embeddedMigrations = (journal: Journal) => {
- let content =
- '// This file is required for Expo/React Native SQLite migrations - https://orm.drizzle.team/quick-sqlite/expo\n\n';
+export const embeddedMigrations = (journal: Journal, driver?: Driver) => {
+ let content = driver === 'expo'
+ ? '// This file is required for Expo/React Native SQLite migrations - https://orm.drizzle.team/quick-sqlite/expo\n\n'
+ : '';
+
content += "import journal from './meta/_journal.json';\n";
journal.entries.forEach((entry) => {
content += `import m${entry.idx.toString().padStart(4, '0')} from './${entry.tag}.sql';\n`;
diff --git a/drizzle-kit/src/cli/commands/push.ts b/drizzle-kit/src/cli/commands/push.ts
index 4a41a46d4..0c82fe026 100644
--- a/drizzle-kit/src/cli/commands/push.ts
+++ b/drizzle-kit/src/cli/commands/push.ts
@@ -10,10 +10,18 @@ import { LibSQLCredentials } from '../validations/libsql';
import type { MysqlCredentials } from '../validations/mysql';
import { withStyle } from '../validations/outputs';
import type { PostgresCredentials } from '../validations/postgres';
+import { SingleStoreCredentials } from '../validations/singlestore';
import type { SqliteCredentials } from '../validations/sqlite';
import { libSqlLogSuggestionsAndReturn } from './libSqlPushUtils';
-import { filterStatements, logSuggestionsAndReturn } from './mysqlPushUtils';
+import {
+ filterStatements as mySqlFilterStatements,
+ logSuggestionsAndReturn as mySqlLogSuggestionsAndReturn,
+} from './mysqlPushUtils';
import { pgSuggestions } from './pgPushUtils';
+import {
+ filterStatements as singleStoreFilterStatements,
+ logSuggestionsAndReturn as singleStoreLogSuggestionsAndReturn,
+} from './singlestorePushUtils';
import { logSuggestionsAndReturn as sqliteSuggestions } from './sqlitePushUtils';
export const mysqlPush = async (
@@ -35,7 +43,7 @@ export const mysqlPush = async (
const statements = await prepareMySQLPush(schemaPath, schema, casing);
- const filteredStatements = filterStatements(
+ const filteredStatements = mySqlFilterStatements(
statements.statements ?? [],
statements.validatedCur,
statements.validatedPrev,
@@ -52,8 +60,7 @@ export const mysqlPush = async (
tablesToRemove,
tablesToTruncate,
infoToPrint,
- schemasToRemove,
- } = await logSuggestionsAndReturn(
+ } = await mySqlLogSuggestionsAndReturn(
db,
filteredStatements,
statements.validatedCur,
@@ -156,6 +163,150 @@ export const mysqlPush = async (
}
};
+export const singlestorePush = async (
+ schemaPath: string | string[],
+ credentials: SingleStoreCredentials,
+ tablesFilter: string[],
+ strict: boolean,
+ verbose: boolean,
+ force: boolean,
+ casing: CasingType | undefined,
+) => {
+ const { connectToSingleStore } = await import('../connections');
+ const { singlestorePushIntrospect } = await import('./singlestoreIntrospect');
+
+ const { db, database } = await connectToSingleStore(credentials);
+
+ const { schema } = await singlestorePushIntrospect(
+ db,
+ database,
+ tablesFilter,
+ );
+ const { prepareSingleStorePush } = await import('./migrate');
+
+ const statements = await prepareSingleStorePush(schemaPath, schema, casing);
+
+ const filteredStatements = singleStoreFilterStatements(
+ statements.statements ?? [],
+ statements.validatedCur,
+ statements.validatedPrev,
+ );
+
+ try {
+ if (filteredStatements.length === 0) {
+ render(`[${chalk.blue('i')}] No changes detected`);
+ } else {
+ const {
+ shouldAskForApprove,
+ statementsToExecute,
+ columnsToRemove,
+ tablesToRemove,
+ tablesToTruncate,
+ infoToPrint,
+ schemasToRemove,
+ } = await singleStoreLogSuggestionsAndReturn(
+ db,
+ filteredStatements,
+ statements.validatedCur,
+ );
+
+ const filteredSqlStatements = fromJson(filteredStatements, 'singlestore');
+
+ const uniqueSqlStatementsToExecute: string[] = [];
+ statementsToExecute.forEach((ss) => {
+ if (!uniqueSqlStatementsToExecute.includes(ss)) {
+ uniqueSqlStatementsToExecute.push(ss);
+ }
+ });
+ const uniqueFilteredSqlStatements: string[] = [];
+ filteredSqlStatements.forEach((ss) => {
+ if (!uniqueFilteredSqlStatements.includes(ss)) {
+ uniqueFilteredSqlStatements.push(ss);
+ }
+ });
+
+ if (verbose) {
+ console.log();
+ console.log(
+ withStyle.warning('You are about to execute current statements:'),
+ );
+ console.log();
+ console.log(
+ [...uniqueSqlStatementsToExecute, ...uniqueFilteredSqlStatements]
+ .map((s) => chalk.blue(s))
+ .join('\n'),
+ );
+ console.log();
+ }
+
+ if (!force && strict) {
+ if (!shouldAskForApprove) {
+ const { status, data } = await render(
+ new Select(['No, abort', `Yes, I want to execute all statements`]),
+ );
+ if (data?.index === 0) {
+ render(`[${chalk.red('x')}] All changes were aborted`);
+ process.exit(0);
+ }
+ }
+ }
+
+ if (!force && shouldAskForApprove) {
+ console.log(withStyle.warning('Found data-loss statements:'));
+ console.log(infoToPrint.join('\n'));
+ console.log();
+ console.log(
+ chalk.red.bold(
+ 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n',
+ ),
+ );
+
+ console.log(chalk.white('Do you still want to push changes?'));
+
+ const { status, data } = await render(
+ new Select([
+ 'No, abort',
+ `Yes, I want to${
+ tablesToRemove.length > 0
+ ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},`
+ : ' '
+ }${
+ columnsToRemove.length > 0
+ ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},`
+ : ' '
+ }${
+ tablesToTruncate.length > 0
+ ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}`
+ : ''
+ }`
+ .replace(/(^,)|(,$)/g, '')
+ .replace(/ +(?= )/g, ''),
+ ]),
+ );
+ if (data?.index === 0) {
+ render(`[${chalk.red('x')}] All changes were aborted`);
+ process.exit(0);
+ }
+ }
+
+ for (const dStmnt of uniqueSqlStatementsToExecute) {
+ await db.query(dStmnt);
+ }
+
+ for (const statement of uniqueFilteredSqlStatements) {
+ await db.query(statement);
+ }
+ if (filteredStatements.length > 0) {
+ render(`[${chalk.green('✓')}] Changes applied`);
+ } else {
+ render(`[${chalk.blue('i')}] No changes detected`);
+ }
+ }
+ } catch (e) {
+ console.log(e);
+ }
+};
+
export const pgPush = async (
schemaPath: string | string[],
verbose: boolean,
@@ -171,7 +322,6 @@ export const pgPush = async (
const { pgPushIntrospect } = await import('./pgIntrospect');
const db = await preparePostgresDB(credentials);
-
const serialized = await serializePg(schemaPath, casing, schemasFilter);
const { schema } = await pgPushIntrospect(db, tablesFilter, schemasFilter, entities, serialized);
diff --git a/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts b/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts
new file mode 100644
index 000000000..27d8c59c5
--- /dev/null
+++ b/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts
@@ -0,0 +1,53 @@
+import { renderWithTask } from 'hanji';
+import { Minimatch } from 'minimatch';
+import { originUUID } from '../../global';
+import type { SingleStoreSchema } from '../../serializer/singlestoreSchema';
+import { fromDatabase } from '../../serializer/singlestoreSerializer';
+import type { DB } from '../../utils';
+import { ProgressView } from '../views';
+
+export const singlestorePushIntrospect = async (
+ db: DB,
+ databaseName: string,
+ filters: string[],
+) => {
+ const matchers = filters.map((it) => {
+ return new Minimatch(it);
+ });
+
+ const filter = (tableName: string) => {
+ if (matchers.length === 0) return true;
+
+ let flags: boolean[] = [];
+
+ for (let matcher of matchers) {
+ if (matcher.negate) {
+ if (!matcher.match(tableName)) {
+ flags.push(false);
+ }
+ }
+
+ if (matcher.match(tableName)) {
+ flags.push(true);
+ }
+ }
+
+ if (flags.length > 0) {
+ return flags.every(Boolean);
+ }
+ return false;
+ };
+
+ const progress = new ProgressView(
+ 'Pulling schema from database...',
+ 'Pulling schema from database...',
+ );
+ const res = await renderWithTask(
+ progress,
+ fromDatabase(db, databaseName, filter),
+ );
+
+ const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema;
+ const { internal, ...schemaWithoutInternals } = schema;
+ return { schema: schemaWithoutInternals };
+};
diff --git a/drizzle-kit/src/cli/commands/singlestorePushUtils.ts b/drizzle-kit/src/cli/commands/singlestorePushUtils.ts
new file mode 100644
index 000000000..80fad9b2d
--- /dev/null
+++ b/drizzle-kit/src/cli/commands/singlestorePushUtils.ts
@@ -0,0 +1,352 @@
+import chalk from 'chalk';
+import { render } from 'hanji';
+import { TypeOf } from 'zod';
+import { JsonAlterColumnTypeStatement, JsonStatement } from '../../jsonStatements';
+import { singlestoreSchema, SingleStoreSquasher } from '../../serializer/singlestoreSchema';
+import type { DB } from '../../utils';
+import { Select } from '../selector-ui';
+import { withStyle } from '../validations/outputs';
+
+export const filterStatements = (
+ statements: JsonStatement[],
+ currentSchema: TypeOf,
+ prevSchema: TypeOf,
+) => {
+ return statements.filter((statement) => {
+ if (statement.type === 'alter_table_alter_column_set_type') {
+ // Don't need to handle it on migrations step and introspection
+ // but for both it should be skipped
+ if (
+ statement.oldDataType.startsWith('tinyint')
+ && statement.newDataType.startsWith('boolean')
+ ) {
+ return false;
+ }
+
+ if (
+ statement.oldDataType.startsWith('bigint unsigned')
+ && statement.newDataType.startsWith('serial')
+ ) {
+ return false;
+ }
+
+ if (
+ statement.oldDataType.startsWith('serial')
+ && statement.newDataType.startsWith('bigint unsigned')
+ ) {
+ return false;
+ }
+ } else if (statement.type === 'alter_table_alter_column_set_default') {
+ if (
+ statement.newDefaultValue === false
+ && statement.oldDefaultValue === 0
+ && statement.newDataType === 'boolean'
+ ) {
+ return false;
+ }
+ if (
+ statement.newDefaultValue === true
+ && statement.oldDefaultValue === 1
+ && statement.newDataType === 'boolean'
+ ) {
+ return false;
+ }
+ } else if (statement.type === 'delete_unique_constraint') {
+ const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data);
+ // only if constraint was removed from a serial column, than treat it as removed
+ // const serialStatement = statements.find(
+ // (it) => it.type === "alter_table_alter_column_set_type"
+ // ) as JsonAlterColumnTypeStatement;
+ // if (
+ // serialStatement?.oldDataType.startsWith("bigint unsigned") &&
+ // serialStatement?.newDataType.startsWith("serial") &&
+ // serialStatement.columnName ===
+ // SingleStoreSquasher.unsquashUnique(statement.data).columns[0]
+ // ) {
+ // return false;
+ // }
+ // Check if uniqueindex was only on this column, that is serial
+
+ // if now serial and was not serial and was unique index
+ if (
+ unsquashed.columns.length === 1
+ && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]]
+ .type === 'serial'
+ && prevSchema.tables[statement.tableName].columns[unsquashed.columns[0]]
+ .type === 'serial'
+ && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]]
+ .name === unsquashed.columns[0]
+ ) {
+ return false;
+ }
+ } else if (statement.type === 'alter_table_alter_column_drop_notnull') {
+ // only if constraint was removed from a serial column, than treat it as removed
+ const serialStatement = statements.find(
+ (it) => it.type === 'alter_table_alter_column_set_type',
+ ) as JsonAlterColumnTypeStatement;
+ if (
+ serialStatement?.oldDataType.startsWith('bigint unsigned')
+ && serialStatement?.newDataType.startsWith('serial')
+ && serialStatement.columnName === statement.columnName
+ && serialStatement.tableName === statement.tableName
+ ) {
+ return false;
+ }
+ if (statement.newDataType === 'serial' && !statement.columnNotNull) {
+ return false;
+ }
+ if (statement.columnAutoIncrement) {
+ return false;
+ }
+ }
+
+ return true;
+ });
+};
+
+export const logSuggestionsAndReturn = async (
+ db: DB,
+ statements: JsonStatement[],
+ json2: TypeOf,
+) => {
+ let shouldAskForApprove = false;
+ const statementsToExecute: string[] = [];
+ const infoToPrint: string[] = [];
+
+ const tablesToRemove: string[] = [];
+ const columnsToRemove: string[] = [];
+ const schemasToRemove: string[] = [];
+ const tablesToTruncate: string[] = [];
+
+ for (const statement of statements) {
+ if (statement.type === 'drop_table') {
+ const res = await db.query(
+ `select count(*) as count from \`${statement.tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to delete ${
+ chalk.underline(
+ statement.tableName,
+ )
+ } table with ${count} items`,
+ );
+ tablesToRemove.push(statement.tableName);
+ shouldAskForApprove = true;
+ }
+ } else if (statement.type === 'alter_table_drop_column') {
+ const res = await db.query(
+ `select count(*) as count from \`${statement.tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to delete ${
+ chalk.underline(
+ statement.columnName,
+ )
+ } column in ${statement.tableName} table with ${count} items`,
+ );
+ columnsToRemove.push(`${statement.tableName}_${statement.columnName}`);
+ shouldAskForApprove = true;
+ }
+ } else if (statement.type === 'drop_schema') {
+ const res = await db.query(
+ `select count(*) as count from information_schema.tables where table_schema = \`${statement.name}\`;`,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to delete ${
+ chalk.underline(
+ statement.name,
+ )
+ } schema with ${count} tables`,
+ );
+ schemasToRemove.push(statement.name);
+ shouldAskForApprove = true;
+ }
+ } else if (statement.type === 'alter_table_alter_column_set_type') {
+ const res = await db.query(
+ `select count(*) as count from \`${statement.tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to change ${
+ chalk.underline(
+ statement.columnName,
+ )
+ } column type from ${
+ chalk.underline(
+ statement.oldDataType,
+ )
+ } to ${chalk.underline(statement.newDataType)} with ${count} items`,
+ );
+ statementsToExecute.push(`truncate table ${statement.tableName};`);
+ tablesToTruncate.push(statement.tableName);
+ shouldAskForApprove = true;
+ }
+ } else if (statement.type === 'alter_table_alter_column_drop_default') {
+ if (statement.columnNotNull) {
+ const res = await db.query(
+ `select count(*) as count from \`${statement.tableName}\``,
+ );
+
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to remove default value from ${
+ chalk.underline(
+ statement.columnName,
+ )
+ } not-null column with ${count} items`,
+ );
+
+ tablesToTruncate.push(statement.tableName);
+ statementsToExecute.push(`truncate table ${statement.tableName};`);
+
+ shouldAskForApprove = true;
+ }
+ }
+ // shouldAskForApprove = true;
+ } else if (statement.type === 'alter_table_alter_column_set_notnull') {
+ if (typeof statement.columnDefault === 'undefined') {
+ const res = await db.query(
+ `select count(*) as count from \`${statement.tableName}\``,
+ );
+
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to set not-null constraint to ${
+ chalk.underline(
+ statement.columnName,
+ )
+ } column without default, which contains ${count} items`,
+ );
+
+ tablesToTruncate.push(statement.tableName);
+ statementsToExecute.push(`truncate table ${statement.tableName};`);
+
+ shouldAskForApprove = true;
+ }
+ }
+ } else if (statement.type === 'alter_table_alter_column_drop_pk') {
+ const res = await db.query(
+ `select count(*) as count from \`${statement.tableName}\``,
+ );
+
+ // if drop pk and json2 has autoincrement in table -> exit process with error
+ if (
+ Object.values(json2.tables[statement.tableName].columns).filter(
+ (column) => column.autoincrement,
+ ).length > 0
+ ) {
+ console.log(
+ `${
+ withStyle.errorWarning(
+ `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`,
+ )
+ }`,
+ );
+ process.exit(1);
+ }
+
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to change ${
+ chalk.underline(
+ statement.tableName,
+ )
+ } primary key. This statements may fail and you table may left without primary key`,
+ );
+
+ tablesToTruncate.push(statement.tableName);
+ shouldAskForApprove = true;
+ }
+ } else if (statement.type === 'delete_composite_pk') {
+ // if drop pk and json2 has autoincrement in table -> exit process with error
+ if (
+ Object.values(json2.tables[statement.tableName].columns).filter(
+ (column) => column.autoincrement,
+ ).length > 0
+ ) {
+ console.log(
+ `${
+ withStyle.errorWarning(
+ `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`,
+ )
+ }`,
+ );
+ process.exit(1);
+ }
+ } else if (statement.type === 'alter_table_add_column') {
+ if (
+ statement.column.notNull
+ && typeof statement.column.default === 'undefined'
+ ) {
+ const res = await db.query(
+ `select count(*) as count from \`${statement.tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ infoToPrint.push(
+ `· You're about to add not-null ${
+ chalk.underline(
+ statement.column.name,
+ )
+ } column without default value, which contains ${count} items`,
+ );
+
+ tablesToTruncate.push(statement.tableName);
+ statementsToExecute.push(`truncate table ${statement.tableName};`);
+
+ shouldAskForApprove = true;
+ }
+ }
+ } else if (statement.type === 'create_unique_constraint') {
+ const res = await db.query(
+ `select count(*) as count from \`${statement.tableName}\``,
+ );
+ const count = Number(res[0].count);
+ if (count > 0) {
+ const unsquashedUnique = SingleStoreSquasher.unsquashUnique(statement.data);
+ console.log(
+ `· You're about to add ${
+ chalk.underline(
+ unsquashedUnique.name,
+ )
+ } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. Do you want to truncate ${
+ chalk.underline(
+ statement.tableName,
+ )
+ } table?\n`,
+ );
+ const { status, data } = await render(
+ new Select([
+ 'No, add the constraint without truncating the table',
+ `Yes, truncate the table`,
+ ]),
+ );
+ if (data?.index === 1) {
+ tablesToTruncate.push(statement.tableName);
+ statementsToExecute.push(`truncate table ${statement.tableName};`);
+ shouldAskForApprove = true;
+ }
+ }
+ }
+ }
+
+ return {
+ statementsToExecute,
+ shouldAskForApprove,
+ infoToPrint,
+ columnsToRemove: [...new Set(columnsToRemove)],
+ schemasToRemove: [...new Set(schemasToRemove)],
+ tablesToTruncate: [...new Set(tablesToTruncate)],
+ tablesToRemove: [...new Set(tablesToRemove)],
+ };
+};
diff --git a/drizzle-kit/src/cli/commands/singlestoreUp.ts b/drizzle-kit/src/cli/commands/singlestoreUp.ts
new file mode 100644
index 000000000..dc5004ed0
--- /dev/null
+++ b/drizzle-kit/src/cli/commands/singlestoreUp.ts
@@ -0,0 +1 @@
+export const upSinglestoreHandler = (out: string) => {};
diff --git a/drizzle-kit/src/cli/commands/utils.ts b/drizzle-kit/src/cli/commands/utils.ts
index 7386b74d5..60571ad73 100644
--- a/drizzle-kit/src/cli/commands/utils.ts
+++ b/drizzle-kit/src/cli/commands/utils.ts
@@ -31,6 +31,11 @@ import {
postgresCredentials,
printConfigConnectionIssues as printIssuesPg,
} from '../validations/postgres';
+import {
+ printConfigConnectionIssues as printIssuesSingleStore,
+ SingleStoreCredentials,
+ singlestoreCredentials,
+} from '../validations/singlestore';
import {
printConfigConnectionIssues as printIssuesSqlite,
SqliteCredentials,
@@ -127,6 +132,7 @@ export type GenerateConfig = {
custom: boolean;
bundle: boolean;
casing?: CasingType;
+ driver?: Driver;
};
export const prepareGenerateConfig = async (
@@ -173,8 +179,9 @@ export const prepareGenerateConfig = async (
breakpoints: breakpoints ?? true,
schema: schema,
out: out || 'drizzle',
- bundle: driver === 'expo',
+ bundle: driver === 'expo' || driver === 'durable-sqlite',
casing,
+ driver,
};
};
@@ -222,6 +229,10 @@ export const preparePushConfig = async (
dialect: 'turso';
credentials: LibSQLCredentials;
}
+ | {
+ dialect: 'singlestore';
+ credentials: SingleStoreCredentials;
+ }
) & {
schemaPath: string | string[];
verbose: boolean;
@@ -316,6 +327,25 @@ export const preparePushConfig = async (
};
}
+ if (config.dialect === 'singlestore') {
+ const parsed = singlestoreCredentials.safeParse(config);
+ if (!parsed.success) {
+ printIssuesSingleStore(config);
+ process.exit(1);
+ }
+
+ return {
+ dialect: 'singlestore',
+ schemaPath: config.schema,
+ strict: config.strict ?? false,
+ verbose: config.verbose ?? false,
+ force: (options.force as boolean) ?? false,
+ credentials: parsed.data,
+ tablesFilter,
+ schemasFilter,
+ };
+ }
+
if (config.dialect === 'sqlite') {
const parsed = sqliteCredentials.safeParse(config);
if (!parsed.success) {
@@ -378,6 +408,10 @@ export const preparePullConfig = async (
dialect: 'turso';
credentials: LibSQLCredentials;
}
+ | {
+ dialect: 'singlestore';
+ credentials: SingleStoreCredentials;
+ }
) & {
out: string;
breakpoints: boolean;
@@ -468,6 +502,26 @@ export const preparePullConfig = async (
};
}
+ if (dialect === 'singlestore') {
+ const parsed = singlestoreCredentials.safeParse(config);
+ if (!parsed.success) {
+ printIssuesSingleStore(config);
+ process.exit(1);
+ }
+
+ return {
+ dialect: 'singlestore',
+ out: config.out,
+ breakpoints: config.breakpoints,
+ casing: config.casing,
+ credentials: parsed.data,
+ tablesFilter,
+ schemasFilter,
+ prefix: config.migrations?.prefix || 'index',
+ entities: config.entities,
+ };
+ }
+
if (dialect === 'sqlite') {
const parsed = sqliteCredentials.safeParse(config);
if (!parsed.success) {
@@ -559,6 +613,23 @@ export const prepareStudioConfig = async (options: Record) => {
credentials,
};
}
+
+ if (dialect === 'singlestore') {
+ const parsed = singlestoreCredentials.safeParse(flattened);
+ if (!parsed.success) {
+ printIssuesSingleStore(flattened as Record);
+ process.exit(1);
+ }
+ const credentials = parsed.data;
+ return {
+ dialect,
+ schema,
+ host,
+ port,
+ credentials,
+ };
+ }
+
if (dialect === 'sqlite') {
const parsed = sqliteCredentials.safeParse(flattened);
if (!parsed.success) {
@@ -644,6 +715,23 @@ export const prepareMigrateConfig = async (configPath: string | undefined) => {
table,
};
}
+
+ if (dialect === 'singlestore') {
+ const parsed = singlestoreCredentials.safeParse(flattened);
+ if (!parsed.success) {
+ printIssuesSingleStore(flattened as Record);
+ process.exit(1);
+ }
+ const credentials = parsed.data;
+ return {
+ dialect,
+ out,
+ credentials,
+ schema,
+ table,
+ };
+ }
+
if (dialect === 'sqlite') {
const parsed = sqliteCredentials.safeParse(flattened);
if (!parsed.success) {
diff --git a/drizzle-kit/src/cli/connections.ts b/drizzle-kit/src/cli/connections.ts
index aab1d0ef7..f2cf4817c 100644
--- a/drizzle-kit/src/cli/connections.ts
+++ b/drizzle-kit/src/cli/connections.ts
@@ -19,6 +19,7 @@ import { LibSQLCredentials } from './validations/libsql';
import type { MysqlCredentials } from './validations/mysql';
import { withStyle } from './validations/outputs';
import type { PostgresCredentials } from './validations/postgres';
+import { SingleStoreCredentials } from './validations/singlestore';
import type { SqliteCredentials } from './validations/sqlite';
export const preparePostgresDB = async (
@@ -415,6 +416,85 @@ export const preparePostgresDB = async (
process.exit(1);
};
+const parseSingleStoreCredentials = (credentials: SingleStoreCredentials) => {
+ if ('url' in credentials) {
+ const url = credentials.url;
+
+ const connectionUrl = new URL(url);
+ const pathname = connectionUrl.pathname;
+
+ const database = pathname.split('/')[pathname.split('/').length - 1];
+ if (!database) {
+ console.error(
+ 'You should specify a database name in connection string (singlestore://USER:PASSWORD@HOST:PORT/DATABASE)',
+ );
+ process.exit(1);
+ }
+ return { database, url };
+ } else {
+ return {
+ database: credentials.database,
+ credentials,
+ };
+ }
+};
+
+export const connectToSingleStore = async (
+ it: SingleStoreCredentials,
+): Promise<{
+ db: DB;
+ proxy: Proxy;
+ database: string;
+ migrate: (config: MigrationConfig) => Promise;
+}> => {
+ const result = parseSingleStoreCredentials(it);
+
+ if (await checkPackage('mysql2')) {
+ const { createConnection } = await import('mysql2/promise');
+ const { drizzle } = await import('drizzle-orm/singlestore');
+ const { migrate } = await import('drizzle-orm/singlestore/migrator');
+
+ const connection = result.url
+ ? await createConnection(result.url)
+ : await createConnection(result.credentials!); // needed for some reason!
+
+ const db = drizzle(connection);
+ const migrateFn = async (config: MigrationConfig) => {
+ return migrate(db, config);
+ };
+
+ await connection.connect();
+ const query: DB['query'] = async (
+ sql: string,
+ params?: any[],
+ ): Promise => {
+ const res = await connection.execute(sql, params);
+ return res[0] as any;
+ };
+
+ const proxy: Proxy = async (params: ProxyParams) => {
+ const result = await connection.query({
+ sql: params.sql,
+ values: params.params,
+ rowsAsArray: params.mode === 'array',
+ });
+ return result[0] as any[];
+ };
+
+ return {
+ db: { query },
+ proxy,
+ database: result.database,
+ migrate: migrateFn,
+ };
+ }
+
+ console.error(
+ "To connect to SingleStore database - please install 'singlestore' driver",
+ );
+ process.exit(1);
+};
+
const parseMysqlCredentials = (credentials: MysqlCredentials) => {
if ('url' in credentials) {
const url = credentials.url;
diff --git a/drizzle-kit/src/cli/schema.ts b/drizzle-kit/src/cli/schema.ts
index b03acde95..12153ee74 100644
--- a/drizzle-kit/src/cli/schema.ts
+++ b/drizzle-kit/src/cli/schema.ts
@@ -1,11 +1,19 @@
+import { boolean, command, number, string } from '@drizzle-team/brocli';
import chalk from 'chalk';
-import { checkHandler } from './commands/check';
-import { assertOrmCoreVersion, assertPackages, assertStudioNodeVersion, ormVersionGt } from './utils';
+import 'dotenv/config';
+import { mkdirSync } from 'fs';
+import { renderWithTask } from 'hanji';
+import { dialects } from 'src/schemaValidator';
import '../@types/utils';
+import { assertUnreachable } from '../global';
+import { type Setup } from '../serializer/studio';
import { assertV1OutFolder } from '../utils';
+import { certs } from '../utils/certs';
+import { checkHandler } from './commands/check';
import { dropMigration } from './commands/drop';
import { upMysqlHandler } from './commands/mysqlUp';
import { upPgHandler } from './commands/pgUp';
+import { upSinglestoreHandler } from './commands/singlestoreUp';
import { upSqliteHandler } from './commands/sqliteUp';
import {
prepareCheckParams,
@@ -16,21 +24,16 @@ import {
preparePushConfig,
prepareStudioConfig,
} from './commands/utils';
+import { assertOrmCoreVersion, assertPackages, assertStudioNodeVersion, ormVersionGt } from './utils';
import { assertCollisions, drivers, prefixes } from './validations/common';
import { withStyle } from './validations/outputs';
-import 'dotenv/config';
-import { boolean, command, number, string } from '@drizzle-team/brocli';
-import { mkdirSync } from 'fs';
-import { renderWithTask } from 'hanji';
-import { dialects } from 'src/schemaValidator';
-import { assertUnreachable } from '../global';
-import type { Setup } from '../serializer/studio';
-import { certs } from '../utils/certs';
import { grey, MigrateProgress } from './views';
const optionDialect = string('dialect')
.enum(...dialects)
- .desc(`Database dialect: 'postgresql', 'mysql', 'sqlite' or 'turso'`);
+ .desc(
+ `Database dialect: 'postgresql', 'mysql', 'sqlite', 'turso' or 'singlestore'`,
+ );
const optionOut = string().desc("Output folder, 'drizzle' by default");
const optionConfig = string().desc('Path to drizzle config file');
const optionBreakpoints = boolean().desc(
@@ -81,6 +84,7 @@ export const generate = command({
prepareAndMigrateMysql,
prepareAndMigrateSqlite,
prepareAndMigrateLibSQL,
+ prepareAndMigrateSingleStore,
} = await import('./commands/migrate');
const dialect = opts.dialect;
@@ -92,6 +96,8 @@ export const generate = command({
await prepareAndMigrateSqlite(opts);
} else if (dialect === 'turso') {
await prepareAndMigrateLibSQL(opts);
+ } else if (dialect === 'singlestore') {
+ await prepareAndMigrateSqlite(opts);
} else {
assertUnreachable(dialect);
}
@@ -154,6 +160,17 @@ export const migrate = command({
migrationsSchema: schema,
}),
);
+ } else if (dialect === 'singlestore') {
+ const { connectToSingleStore } = await import('./connections');
+ const { migrate } = await connectToSingleStore(credentials);
+ await renderWithTask(
+ new MigrateProgress(),
+ migrate({
+ migrationsFolder: out,
+ migrationsTable: table,
+ migrationsSchema: schema,
+ }),
+ );
} else if (dialect === 'sqlite') {
const { connectToSQLite } = await import('./connections');
const { migrate } = await connectToSQLite(credentials);
@@ -340,6 +357,17 @@ export const push = command({
force,
casing,
);
+ } else if (dialect === 'singlestore') {
+ const { singlestorePush } = await import('./commands/push');
+ await singlestorePush(
+ schemaPath,
+ credentials,
+ tablesFilter,
+ strict,
+ verbose,
+ force,
+ casing,
+ );
} else {
assertUnreachable(dialect);
}
@@ -398,6 +426,10 @@ export const up = command({
if (dialect === 'sqlite' || dialect === 'turso') {
upSqliteHandler(out);
}
+
+ if (dialect === 'singlestore') {
+ upSinglestoreHandler(out);
+ }
},
});
@@ -531,6 +563,16 @@ export const pull = command({
tablesFilter,
prefix,
);
+ } else if (dialect === 'singlestore') {
+ const { introspectSingleStore } = await import('./commands/introspect');
+ await introspectSingleStore(
+ casing,
+ out,
+ breakpoints,
+ credentials,
+ tablesFilter,
+ prefix,
+ );
} else {
assertUnreachable(dialect);
}
@@ -591,6 +633,8 @@ export const studio = command({
drizzleForMySQL,
prepareSQLiteSchema,
drizzleForSQLite,
+ prepareSingleStoreSchema,
+ drizzleForSingleStore,
drizzleForLibSQL,
} = await import('../serializer/studio');
@@ -637,6 +681,16 @@ export const studio = command({
? await prepareSQLiteSchema(schemaPath)
: { schema: {}, relations: {}, files: [] };
setup = await drizzleForLibSQL(credentials, schema, relations, files);
+ } else if (dialect === 'singlestore') {
+ const { schema, relations, files } = schemaPath
+ ? await prepareSingleStoreSchema(schemaPath)
+ : { schema: {}, relations: {}, files: [] };
+ setup = await drizzleForSingleStore(
+ credentials,
+ schema,
+ relations,
+ files,
+ );
} else {
assertUnreachable(dialect);
}
diff --git a/drizzle-kit/src/cli/validations/common.ts b/drizzle-kit/src/cli/validations/common.ts
index 1662e87bb..7fc6046a7 100644
--- a/drizzle-kit/src/cli/validations/common.ts
+++ b/drizzle-kit/src/cli/validations/common.ts
@@ -63,6 +63,7 @@ export const assertCollisions = <
export const sqliteDriversLiterals = [
literal('d1-http'),
literal('expo'),
+ literal('durable-sqlite'),
] as const;
export const postgresqlDriversLiterals = [
@@ -160,7 +161,7 @@ export const configPushSchema = object({
});
export type CliConfig = TypeOf;
-export const drivers = ['d1-http', 'expo', 'aws-data-api', 'pglite'] as const;
+export const drivers = ['d1-http', 'expo', 'aws-data-api', 'pglite', 'durable-sqlite'] as const;
export type Driver = (typeof drivers)[number];
const _: Driver = '' as TypeOf;
diff --git a/drizzle-kit/src/cli/validations/outputs.ts b/drizzle-kit/src/cli/validations/outputs.ts
index 3ef499651..6e9d520dd 100644
--- a/drizzle-kit/src/cli/validations/outputs.ts
+++ b/drizzle-kit/src/cli/validations/outputs.ts
@@ -26,7 +26,7 @@ export const outputs = {
),
noDialect: () =>
withStyle.error(
- `Please specify 'dialect' param in config, either of 'postgresql', 'mysql', 'sqlite' or 'turso'`,
+ `Please specify 'dialect' param in config, either of 'postgresql', 'mysql', 'sqlite', turso or singlestore`,
),
},
common: {
@@ -79,4 +79,13 @@ export const outputs = {
introspect: {},
push: {},
},
+ singlestore: {
+ connection: {
+ driver: () => withStyle.error(`Only "mysql2" is available options for "--driver"`),
+ required: () =>
+ withStyle.error(
+ `Either "url" or "host", "database" are required for database connection`,
+ ),
+ },
+ },
};
diff --git a/drizzle-kit/src/cli/validations/singlestore.ts b/drizzle-kit/src/cli/validations/singlestore.ts
new file mode 100644
index 000000000..ebe0cc5f0
--- /dev/null
+++ b/drizzle-kit/src/cli/validations/singlestore.ts
@@ -0,0 +1,61 @@
+import { boolean, coerce, object, string, TypeOf, union } from 'zod';
+import { error } from '../views';
+import { wrapParam } from './common';
+import { outputs } from './outputs';
+
+export const singlestoreCredentials = union([
+ object({
+ host: string().min(1),
+ port: coerce.number().min(1).optional(),
+ user: string().min(1).optional(),
+ password: string().min(1).optional(),
+ database: string().min(1),
+ ssl: union([
+ string(),
+ object({
+ pfx: string().optional(),
+ key: string().optional(),
+ passphrase: string().optional(),
+ cert: string().optional(),
+ ca: union([string(), string().array()]).optional(),
+ crl: union([string(), string().array()]).optional(),
+ ciphers: string().optional(),
+ rejectUnauthorized: boolean().optional(),
+ }),
+ ]).optional(),
+ }),
+ object({
+ url: string().min(1),
+ }),
+]);
+
+export type SingleStoreCredentials = TypeOf;
+
+export const printCliConnectionIssues = (options: any) => {
+ const { uri, host, database } = options || {};
+
+ if (!uri && (!host || !database)) {
+ console.log(outputs.singlestore.connection.required());
+ }
+};
+
+export const printConfigConnectionIssues = (
+ options: Record,
+) => {
+ if ('url' in options) {
+ let text = `Please provide required params for SingleStore driver:\n`;
+ console.log(error(text));
+ console.log(wrapParam('url', options.url, false, 'url'));
+ process.exit(1);
+ }
+
+ let text = `Please provide required params for SingleStore driver:\n`;
+ console.log(error(text));
+ console.log(wrapParam('host', options.host));
+ console.log(wrapParam('port', options.port, true));
+ console.log(wrapParam('user', options.user, true));
+ console.log(wrapParam('password', options.password, true, 'secret'));
+ console.log(wrapParam('database', options.database));
+ console.log(wrapParam('ssl', options.ssl, true));
+ process.exit(1);
+};
diff --git a/drizzle-kit/src/cli/validations/sqlite.ts b/drizzle-kit/src/cli/validations/sqlite.ts
index 54178fd4a..bb16492c3 100644
--- a/drizzle-kit/src/cli/validations/sqlite.ts
+++ b/drizzle-kit/src/cli/validations/sqlite.ts
@@ -72,12 +72,27 @@ export const printConfigConnectionIssues = (
console.log(wrapParam('databaseId', options.databaseId));
console.log(wrapParam('token', options.token, false, 'secret'));
process.exit(1);
- } else if (driver === 'turso') {
- let text = `Please provide required params for Turso driver:\n`;
- console.log(error(text));
- console.log(wrapParam('url', options.url));
- console.log(wrapParam('authToken', options.authToken, false, 'secret'));
- return;
+ } else if (driver === 'durable-sqlite') {
+ if (command === 'migrate') {
+ console.log(
+ error(
+ `You can't use 'migrate' command with SQLite Durable Objects`,
+ ),
+ );
+ } else if (command === 'studio') {
+ console.log(
+ error(
+ `You can't use 'migrate' command with SQLite Durable Objects`,
+ ),
+ );
+ } else if (command === 'pull') {
+ console.log(error("You can't use 'pull' command with SQLite Durable Objects"));
+ } else if (command === 'push') {
+ console.log(error("You can't use 'push' command with SQLite Durable Objects"));
+ } else {
+ console.log(error('Unexpected error with SQLite Durable Object driver 🤔'));
+ }
+ process.exit(1);
} else {
softAssertUnreachable(driver);
}
diff --git a/drizzle-kit/src/cli/views.ts b/drizzle-kit/src/cli/views.ts
index 3ec04a588..9106d31cd 100644
--- a/drizzle-kit/src/cli/views.ts
+++ b/drizzle-kit/src/cli/views.ts
@@ -32,7 +32,13 @@ export const schema = (schema: CommonSchema): string => {
.map((t) => {
const columnsCount = Object.values(t.columns).length;
const indexesCount = Object.values(t.indexes).length;
- const foreignKeys = Object.values(t.foreignKeys).length;
+ let foreignKeys: number = 0;
+ // Singlestore doesn't have foreign keys
+ if (schema.dialect !== 'singlestore') {
+ // @ts-expect-error
+ foreignKeys = Object.values(t.foreignKeys).length;
+ }
+
return `${chalk.bold.blue(t.name)} ${
chalk.gray(
`${columnsCount} columns ${indexesCount} indexes ${foreignKeys} fks`,
diff --git a/drizzle-kit/src/index.ts b/drizzle-kit/src/index.ts
index 4a57e59e3..4c55f3eb6 100644
--- a/drizzle-kit/src/index.ts
+++ b/drizzle-kit/src/index.ts
@@ -23,7 +23,7 @@ type Verify = U;
* **Config** usage:
*
* `dialect` - mandatory and is responsible for explicitly providing a databse dialect you are using for all the commands
- * *Possible values*: `postgresql`, `mysql`, `sqlite`
+ * *Possible values*: `postgresql`, `mysql`, `sqlite`, `singlestore
*
* See https://orm.drizzle.team/kit-docs/config-reference#dialect
*
@@ -64,7 +64,7 @@ type Verify = U;
*
* `breakpoints` - param lets you enable/disable SQL statement breakpoints in generated migrations.
* It’s optional and true by default, it’s necessary to properly apply migrations on databases,
- * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite) and
+ * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite, SingleStore) and
* Drizzle ORM has to apply them sequentially one by one.
*
* See https://orm.drizzle.team/kit-docs/config-reference#breakpoints
@@ -209,7 +209,26 @@ export type Config =
dialect: Verify;
driver: Verify;
}
+ | {
+ dialect: Verify;
+ driver: Verify;
+ }
| {}
+ | {
+ dialect: Verify;
+ dbCredentials:
+ | {
+ host: string;
+ port?: number;
+ user?: string;
+ password?: string;
+ database: string;
+ ssl?: string | SslOptions;
+ }
+ | {
+ url: string;
+ };
+ }
);
/**
@@ -219,7 +238,7 @@ export type Config =
* **Config** usage:
*
* `dialect` - mandatory and is responsible for explicitly providing a databse dialect you are using for all the commands
- * *Possible values*: `postgresql`, `mysql`, `sqlite`
+ * *Possible values*: `postgresql`, `mysql`, `sqlite`, `singlestore`
*
* See https://orm.drizzle.team/kit-docs/config-reference#dialect
*
@@ -260,7 +279,7 @@ export type Config =
*
* `breakpoints` - param lets you enable/disable SQL statement breakpoints in generated migrations.
* It’s optional and true by default, it’s necessary to properly apply migrations on databases,
- * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite) and
+ * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite, SingleStore) and
* Drizzle ORM has to apply them sequentially one by one.
*
* See https://orm.drizzle.team/kit-docs/config-reference#breakpoints
diff --git a/drizzle-kit/src/introspect-singlestore.ts b/drizzle-kit/src/introspect-singlestore.ts
new file mode 100644
index 000000000..8f93cdfda
--- /dev/null
+++ b/drizzle-kit/src/introspect-singlestore.ts
@@ -0,0 +1,918 @@
+/* eslint-disable @typescript-eslint/no-unsafe-argument */
+import { toCamelCase } from 'drizzle-orm/casing';
+import './@types/utils';
+import type { Casing } from './cli/validations/common';
+import { assertUnreachable } from './global';
+import {
+ Column,
+ Index,
+ PrimaryKey,
+ SingleStoreSchema,
+ SingleStoreSchemaInternal,
+ UniqueConstraint,
+} from './serializer/singlestoreSchema';
+import { indexName } from './serializer/singlestoreSerializer';
+
+// time precision to fsp
+// {mode: "string"} for timestamp by default
+
+const singlestoreImportsList = new Set([
+ 'singlestoreTable',
+ 'singlestoreEnum',
+ 'bigint',
+ 'binary',
+ 'boolean',
+ 'char',
+ 'date',
+ 'datetime',
+ 'decimal',
+ 'double',
+ 'float',
+ 'int',
+ 'json',
+ // TODO: add new type BSON
+ // TODO: add new type Blob
+ // TODO: add new type UUID
+ // TODO: add new type GUID
+ // TODO: add new type Vector
+ // TODO: add new type GeoPoint
+ 'mediumint',
+ 'real',
+ 'serial',
+ 'smallint',
+ 'text',
+ 'tinytext',
+ 'mediumtext',
+ 'longtext',
+ 'time',
+ 'timestamp',
+ 'tinyint',
+ 'varbinary',
+ 'varchar',
+ 'year',
+ 'enum',
+]);
+
+const objToStatement = (json: any) => {
+ json = Object.fromEntries(Object.entries(json).filter((it) => it[1]));
+
+ const keys = Object.keys(json);
+ if (keys.length === 0) return;
+
+ let statement = '{ ';
+ statement += keys.map((it) => `"${it}": "${json[it]}"`).join(', ');
+ statement += ' }';
+ return statement;
+};
+
+const objToStatement2 = (json: any) => {
+ json = Object.fromEntries(Object.entries(json).filter((it) => it[1]));
+
+ const keys = Object.keys(json);
+ if (keys.length === 0) return;
+
+ let statement = '{ ';
+ statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys
+ statement += ' }';
+ return statement;
+};
+
+const timeConfig = (json: any) => {
+ json = Object.fromEntries(Object.entries(json).filter((it) => it[1]));
+
+ const keys = Object.keys(json);
+ if (keys.length === 0) return;
+
+ let statement = '{ ';
+ statement += keys.map((it) => `${it}: ${json[it]}`).join(', ');
+ statement += ' }';
+ return statement;
+};
+
+const binaryConfig = (json: any) => {
+ json = Object.fromEntries(Object.entries(json).filter((it) => it[1]));
+
+ const keys = Object.keys(json);
+ if (keys.length === 0) return;
+
+ let statement = '{ ';
+ statement += keys.map((it) => `${it}: ${json[it]}`).join(', ');
+ statement += ' }';
+ return statement;
+};
+
+const importsPatch = {
+ 'double precision': 'doublePrecision',
+ 'timestamp without time zone': 'timestamp',
+} as Record;
+
+const escapeColumnKey = (value: string) => {
+ if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) {
+ return `"${value}"`;
+ }
+ return value;
+};
+
+const prepareCasing = (casing?: Casing) => (value: string) => {
+ if (casing === 'preserve') {
+ return escapeColumnKey(value);
+ }
+ if (casing === 'camel') {
+ return escapeColumnKey(value.camelCase());
+ }
+
+ assertUnreachable(casing);
+};
+
+const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => {
+ if (casing === 'preserve') {
+ return '';
+ }
+ if (casing === 'camel') {
+ return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`;
+ }
+
+ assertUnreachable(casing);
+};
+
+export const schemaToTypeScript = (
+ schema: SingleStoreSchemaInternal,
+ casing: Casing,
+) => {
+ const withCasing = prepareCasing(casing);
+
+ const imports = Object.values(schema.tables).reduce(
+ (res, it) => {
+ const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index');
+ const pkImports = Object.values(it.compositePrimaryKeys).map(
+ (it) => 'primaryKey',
+ );
+ const uniqueImports = Object.values(it.uniqueConstraints).map(
+ (it) => 'unique',
+ );
+
+ res.singlestore.push(...idxImports);
+ res.singlestore.push(...pkImports);
+ res.singlestore.push(...uniqueImports);
+
+ const columnImports = Object.values(it.columns)
+ .map((col) => {
+ let patched = importsPatch[col.type] ?? col.type;
+ patched = patched.startsWith('varchar(') ? 'varchar' : patched;
+ patched = patched.startsWith('char(') ? 'char' : patched;
+ patched = patched.startsWith('binary(') ? 'binary' : patched;
+ patched = patched.startsWith('decimal(') ? 'decimal' : patched;
+ patched = patched.startsWith('smallint(') ? 'smallint' : patched;
+ patched = patched.startsWith('enum(') ? 'singlestoreEnum' : patched;
+ patched = patched.startsWith('datetime(') ? 'datetime' : patched;
+ patched = patched.startsWith('varbinary(') ? 'varbinary' : patched;
+ patched = patched.startsWith('int(') ? 'int' : patched;
+ patched = patched.startsWith('double(') ? 'double' : patched;
+ patched = patched.startsWith('float(') ? 'float' : patched;
+ patched = patched.startsWith('int unsigned') ? 'int' : patched;
+ patched = patched.startsWith('tinyint(') ? 'tinyint' : patched;
+ patched = patched.startsWith('mediumint(') ? 'mediumint' : patched;
+ patched = patched.startsWith('bigint(') ? 'bigint' : patched;
+ patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched;
+ patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched;
+ patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched;
+ patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched;
+ return patched;
+ })
+ .filter((type) => {
+ return singlestoreImportsList.has(type);
+ });
+
+ res.singlestore.push(...columnImports);
+ return res;
+ },
+ { singlestore: [] as string[] },
+ );
+
+ /* Object.values(schema.views).forEach((it) => {
+ imports.singlestore.push('singlestoreView');
+
+ const columnImports = Object.values(it.columns)
+ .map((col) => {
+ let patched = importsPatch[col.type] ?? col.type;
+ patched = patched.startsWith('varchar(') ? 'varchar' : patched;
+ patched = patched.startsWith('char(') ? 'char' : patched;
+ patched = patched.startsWith('binary(') ? 'binary' : patched;
+ patched = patched.startsWith('decimal(') ? 'decimal' : patched;
+ patched = patched.startsWith('smallint(') ? 'smallint' : patched;
+ patched = patched.startsWith('enum(') ? 'singlestoreEnum' : patched;
+ patched = patched.startsWith('datetime(') ? 'datetime' : patched;
+ patched = patched.startsWith('varbinary(') ? 'varbinary' : patched;
+ patched = patched.startsWith('int(') ? 'int' : patched;
+ patched = patched.startsWith('double(') ? 'double' : patched;
+ patched = patched.startsWith('float(') ? 'float' : patched;
+ patched = patched.startsWith('int unsigned') ? 'int' : patched;
+ patched = patched.startsWith('tinyint(') ? 'tinyint' : patched;
+ patched = patched.startsWith('mediumint(') ? 'mediumint' : patched;
+ patched = patched.startsWith('bigint(') ? 'bigint' : patched;
+ patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched;
+ patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched;
+ patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched;
+ patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched;
+ return patched;
+ })
+ .filter((type) => {
+ return singlestoreImportsList.has(type);
+ });
+
+ imports.singlestore.push(...columnImports);
+ }); */
+
+ const tableStatements = Object.values(schema.tables).map((table) => {
+ const func = 'singlestoreTable';
+ let statement = '';
+ if (imports.singlestore.includes(withCasing(table.name))) {
+ statement = `// Table name is in conflict with ${
+ withCasing(
+ table.name,
+ )
+ } import.\n// Please change to any other name, that is not in imports list\n`;
+ }
+ statement += `export const ${withCasing(table.name)} = ${func}("${table.name}", {\n`;
+ statement += createTableColumns(
+ Object.values(table.columns),
+ withCasing,
+ casing,
+ table.name,
+ schema,
+ );
+ statement += '}';
+
+ if (
+ Object.keys(table.indexes).length > 0
+ || Object.keys(table.compositePrimaryKeys).length > 0
+ || Object.keys(table.uniqueConstraints).length > 0
+ ) {
+ statement += ',\n';
+ statement += '(table) => {\n';
+ statement += '\treturn {\n';
+ statement += createTableIndexes(
+ table.name,
+ Object.values(table.indexes),
+ withCasing,
+ );
+ statement += createTablePKs(
+ Object.values(table.compositePrimaryKeys),
+ withCasing,
+ );
+ statement += createTableUniques(
+ Object.values(table.uniqueConstraints),
+ withCasing,
+ );
+ statement += '\t}\n';
+ statement += '}';
+ }
+
+ statement += ');';
+ return statement;
+ });
+
+ /* const viewsStatements = Object.values(schema.views).map((view) => {
+ const { columns, name, algorithm, definition, sqlSecurity, withCheckOption } = view;
+ const func = 'singlestoreView';
+ let statement = '';
+
+ if (imports.singlestore.includes(withCasing(name))) {
+ statement = `// Table name is in conflict with ${
+ withCasing(
+ view.name,
+ )
+ } import.\n// Please change to any other name, that is not in imports list\n`;
+ }
+ statement += `export const ${withCasing(name)} = ${func}("${name}", {\n`;
+ statement += createTableColumns(
+ Object.values(columns),
+ withCasing,
+ casing,
+ name,
+ schema,
+ );
+ statement += '})';
+
+ statement += algorithm ? `.algorithm("${algorithm}")` : '';
+ statement += sqlSecurity ? `.sqlSecurity("${sqlSecurity}")` : '';
+ statement += withCheckOption ? `.withCheckOption("${withCheckOption}")` : '';
+ statement += `.as(sql\`${definition?.replaceAll('`', '\\`')}\`);`;
+
+ return statement;
+ }); */
+
+ const uniqueSingleStoreImports = [
+ 'singlestoreTable',
+ 'singlestoreSchema',
+ 'AnySingleStoreColumn',
+ ...new Set(imports.singlestore),
+ ];
+ const importsTs = `import { ${
+ uniqueSingleStoreImports.join(
+ ', ',
+ )
+ } } from "drizzle-orm/singlestore-core"\nimport { sql } from "drizzle-orm"\n\n`;
+
+ let decalrations = '';
+ decalrations += tableStatements.join('\n\n');
+ decalrations += '\n';
+ /* decalrations += viewsStatements.join('\n\n'); */
+
+ const file = importsTs + decalrations;
+
+ const schemaEntry = `
+ {
+ ${
+ Object.values(schema.tables)
+ .map((it) => withCasing(it.name))
+ .join(',')
+ }
+ }
+ `;
+
+ return {
+ file, // backward compatible, print to file
+ imports: importsTs,
+ decalrations,
+ schemaEntry,
+ };
+};
+
+const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => {
+ if (isExpression) {
+ return `sql\`${defaultValue}\``;
+ }
+
+ return defaultValue;
+};
+
+const mapColumnDefaultForJson = (defaultValue: any) => {
+ if (
+ typeof defaultValue === 'string'
+ && defaultValue.startsWith("('")
+ && defaultValue.endsWith("')")
+ ) {
+ return defaultValue.substring(2, defaultValue.length - 2);
+ }
+
+ return defaultValue;
+};
+
+const column = (
+ type: string,
+ name: string,
+ casing: (value: string) => string,
+ rawCasing: Casing,
+ defaultValue?: any,
+ autoincrement?: boolean,
+ onUpdate?: boolean,
+ isExpression?: boolean,
+) => {
+ let lowered = type;
+ if (!type.startsWith('enum(')) {
+ lowered = type.toLowerCase();
+ }
+
+ if (lowered === 'serial') {
+ return `${casing(name)}: serial(${dbColumnName({ name, casing: rawCasing })})`;
+ }
+
+ if (lowered.startsWith('int')) {
+ const isUnsigned = lowered.includes('unsigned');
+ const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned });
+ let out = `${casing(name)}: int(${columnName}${
+ isUnsigned ? `${columnName.length > 0 ? ', ' : ''}{ unsigned: true }` : ''
+ })`;
+ out += autoincrement ? `.autoincrement()` : '';
+ out += typeof defaultValue !== 'undefined'
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('tinyint')) {
+ const isUnsigned = lowered.includes('unsigned');
+ const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned });
+ // let out = `${name.camelCase()}: tinyint("${name}")`;
+ let out: string = `${casing(name)}: tinyint(${columnName}${
+ isUnsigned ? `${columnName.length > 0 ? ', ' : ''}{ unsigned: true }` : ''
+ })`;
+ out += autoincrement ? `.autoincrement()` : '';
+ out += typeof defaultValue !== 'undefined'
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('smallint')) {
+ const isUnsigned = lowered.includes('unsigned');
+ const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned });
+ let out = `${casing(name)}: smallint(${columnName}${
+ isUnsigned ? `${columnName.length > 0 ? ', ' : ''}{ unsigned: true }` : ''
+ })`;
+ out += autoincrement ? `.autoincrement()` : '';
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('mediumint')) {
+ const isUnsigned = lowered.includes('unsigned');
+ const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned });
+ let out = `${casing(name)}: mediumint(${columnName}${
+ isUnsigned ? `${columnName.length > 0 ? ', ' : ''}{ unsigned: true }` : ''
+ })`;
+ out += autoincrement ? `.autoincrement()` : '';
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('bigint')) {
+ const isUnsigned = lowered.includes('unsigned');
+ let out = `${casing(name)}: bigint(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: "number"${
+ isUnsigned ? ', unsigned: true' : ''
+ } })`;
+ out += autoincrement ? `.autoincrement()` : '';
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered === 'boolean') {
+ let out = `${casing(name)}: boolean(${dbColumnName({ name, casing: rawCasing })})`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('double')) {
+ let params:
+ | { precision?: string; scale?: string; unsigned?: boolean }
+ | undefined;
+
+ if (lowered.length > (lowered.includes('unsigned') ? 15 : 6)) {
+ const [precision, scale] = lowered
+ .slice(7, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0)))
+ .split(',');
+ params = { precision, scale };
+ }
+
+ if (lowered.includes('unsigned')) {
+ params = { ...(params ?? {}), unsigned: true };
+ }
+
+ const timeConfigParams = params ? timeConfig(params) : undefined;
+
+ let out = params
+ ? `${casing(name)}: double(${
+ dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined })
+ }${timeConfig(params)})`
+ : `${casing(name)}: double(${dbColumnName({ name, casing: rawCasing })})`;
+
+ // let out = `${name.camelCase()}: double("${name}")`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('float')) {
+ let params:
+ | { precision?: string; scale?: string; unsigned?: boolean }
+ | undefined;
+
+ if (lowered.length > (lowered.includes('unsigned') ? 14 : 5)) {
+ const [precision, scale] = lowered
+ .slice(6, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0)))
+ .split(',');
+ params = { precision, scale };
+ }
+
+ if (lowered.includes('unsigned')) {
+ params = { ...(params ?? {}), unsigned: true };
+ }
+
+ let out = `${casing(name)}: float(${dbColumnName({ name, casing: rawCasing })}${params ? timeConfig(params) : ''})`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered === 'real') {
+ let out = `${casing(name)}: real(${dbColumnName({ name, casing: rawCasing })})`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('timestamp')) {
+ const keyLength = 'timestamp'.length + 1;
+ let fsp = lowered.length > keyLength
+ ? Number(lowered.substring(keyLength, lowered.length - 1))
+ : null;
+ fsp = fsp ? fsp : null;
+
+ const params = timeConfig({ fsp, mode: "'string'" });
+
+ let out = params
+ ? `${casing(name)}: timestamp(${
+ dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })
+ }${params})`
+ : `${casing(name)}: timestamp(${dbColumnName({ name, casing: rawCasing })})`;
+
+ // singlestore has only CURRENT_TIMESTAMP, as I found from docs. But will leave now() for just a case
+ defaultValue = defaultValue === 'now()' || defaultValue === 'CURRENT_TIMESTAMP'
+ ? '.defaultNow()'
+ : defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+
+ out += defaultValue;
+
+ let onUpdateNow = onUpdate ? '.onUpdateNow()' : '';
+ out += onUpdateNow;
+
+ return out;
+ }
+
+ if (lowered.startsWith('time')) {
+ const keyLength = 'time'.length + 1;
+ let fsp = lowered.length > keyLength
+ ? Number(lowered.substring(keyLength, lowered.length - 1))
+ : null;
+ fsp = fsp ? fsp : null;
+
+ const params = timeConfig({ fsp });
+
+ let out = params
+ ? `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})`
+ : `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing })})`;
+
+ defaultValue = defaultValue === 'now()'
+ ? '.defaultNow()'
+ : defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+
+ out += defaultValue;
+ return out;
+ }
+
+ if (lowered === 'date') {
+ let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t${
+ casing(
+ name,
+ )
+ }: date(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string' })`;
+
+ defaultValue = defaultValue === 'now()'
+ ? '.defaultNow()'
+ : defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+
+ out += defaultValue;
+ return out;
+ }
+
+ // in singlestore text can't have default value. Will leave it in case smth ;)
+ if (lowered === 'text') {
+ let out = `${casing(name)}: text(${dbColumnName({ name, casing: rawCasing })})`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ // in singlestore text can't have default value. Will leave it in case smth ;)
+ if (lowered === 'tinytext') {
+ let out = `${casing(name)}: tinytext(${dbColumnName({ name, casing: rawCasing })})`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ // in singlestore text can't have default value. Will leave it in case smth ;)
+ if (lowered === 'mediumtext') {
+ let out = `${casing(name)}: mediumtext(${dbColumnName({ name, casing: rawCasing })})`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ // in singlestore text can't have default value. Will leave it in case smth ;)
+ if (lowered === 'longtext') {
+ let out = `${casing(name)}: longtext(${dbColumnName({ name, casing: rawCasing })})`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered === 'year') {
+ let out = `${casing(name)}: year(${dbColumnName({ name, casing: rawCasing })})`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ // in singlestore json can't have default value. Will leave it in case smth ;)
+ if (lowered === 'json') {
+ let out = `${casing(name)}: json(${dbColumnName({ name, casing: rawCasing })})`;
+
+ out += defaultValue
+ ? `.default(${mapColumnDefaultForJson(defaultValue)})`
+ : '';
+
+ return out;
+ }
+
+ if (lowered.startsWith('varchar')) {
+ let out: string = `${
+ casing(
+ name,
+ )
+ }: varchar(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${
+ lowered.substring(
+ 'varchar'.length + 1,
+ lowered.length - 1,
+ )
+ } })`;
+
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('char')) {
+ let out: string = `${
+ casing(
+ name,
+ )
+ }: char(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${
+ lowered.substring(
+ 'char'.length + 1,
+ lowered.length - 1,
+ )
+ } })`;
+
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('datetime')) {
+ let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t`;
+
+ const fsp = lowered.startsWith('datetime(')
+ ? lowered.substring('datetime'.length + 1, lowered.length - 1)
+ : undefined;
+
+ out = fsp
+ ? `${
+ casing(
+ name,
+ )
+ }: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string', fsp: ${
+ lowered.substring(
+ 'datetime'.length + 1,
+ lowered.length - 1,
+ )
+ } })`
+ : `${casing(name)}: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string'})`;
+
+ defaultValue = defaultValue === 'now()'
+ ? '.defaultNow()'
+ : defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+
+ out += defaultValue;
+ return out;
+ }
+
+ if (lowered.startsWith('decimal')) {
+ let params:
+ | { precision?: string; scale?: string; unsigned?: boolean }
+ | undefined;
+
+ if (lowered.length > (lowered.includes('unsigned') ? 16 : 7)) {
+ const [precision, scale] = lowered
+ .slice(8, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0)))
+ .split(',');
+ params = { precision, scale };
+ }
+
+ if (lowered.includes('unsigned')) {
+ params = { ...(params ?? {}), unsigned: true };
+ }
+
+ const timeConfigParams = params ? timeConfig(params) : undefined;
+
+ let out = params
+ ? `${casing(name)}: decimal(${
+ dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined })
+ }${timeConfigParams})`
+ : `${casing(name)}: decimal(${dbColumnName({ name, casing: rawCasing })})`;
+
+ defaultValue = typeof defaultValue !== 'undefined'
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+
+ out += defaultValue;
+ return out;
+ }
+
+ if (lowered.startsWith('binary')) {
+ const keyLength = 'binary'.length + 1;
+ let length = lowered.length > keyLength
+ ? Number(lowered.substring(keyLength, lowered.length - 1))
+ : null;
+ length = length ? length : null;
+
+ const params = binaryConfig({ length });
+
+ let out = params
+ ? `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})`
+ : `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing })})`;
+
+ defaultValue = defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+
+ out += defaultValue;
+ return out;
+ }
+
+ if (lowered.startsWith('enum')) {
+ const values = lowered.substring('enum'.length + 1, lowered.length - 1);
+ let out = `${casing(name)}: singlestoreEnum(${
+ dbColumnName({ name, casing: rawCasing, withMode: true })
+ }[${values}])`;
+ out += defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+ return out;
+ }
+
+ if (lowered.startsWith('varbinary')) {
+ const keyLength = 'varbinary'.length + 1;
+ let length = lowered.length > keyLength
+ ? Number(lowered.substring(keyLength, lowered.length - 1))
+ : null;
+ length = length ? length : null;
+
+ const params = binaryConfig({ length });
+
+ let out = params
+ ? `${casing(name)}: varbinary(${
+ dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })
+ }${params})`
+ : `${casing(name)}: varbinary(${dbColumnName({ name, casing: rawCasing })})`;
+
+ defaultValue = defaultValue
+ ? `.default(${mapColumnDefault(defaultValue, isExpression)})`
+ : '';
+
+ out += defaultValue;
+ return out;
+ }
+
+ console.log('uknown', type);
+ return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`;
+};
+
+const createTableColumns = (
+ columns: Column[],
+ casing: (val: string) => string,
+ rawCasing: Casing,
+ tableName: string,
+ schema: SingleStoreSchemaInternal,
+): string => {
+ let statement = '';
+
+ columns.forEach((it) => {
+ statement += '\t';
+ statement += column(
+ it.type,
+ it.name,
+ casing,
+ rawCasing,
+ it.default,
+ it.autoincrement,
+ it.onUpdate,
+ schema.internal?.tables![tableName]?.columns[it.name]
+ ?.isDefaultAnExpression ?? false,
+ );
+ statement += it.primaryKey ? '.primaryKey()' : '';
+ statement += it.notNull ? '.notNull()' : '';
+
+ statement += it.generated
+ ? `.generatedAlwaysAs(sql\`${
+ it.generated.as.replace(
+ /`/g,
+ '\\`',
+ )
+ }\`, { mode: "${it.generated.type}" })`
+ : '';
+
+ statement += ',\n';
+ });
+
+ return statement;
+};
+
+const createTableIndexes = (
+ tableName: string,
+ idxs: Index[],
+ casing: (value: string) => string,
+): string => {
+ let statement = '';
+
+ idxs.forEach((it) => {
+ let idxKey = it.name.startsWith(tableName) && it.name !== tableName
+ ? it.name.slice(tableName.length + 1)
+ : it.name;
+ idxKey = idxKey.endsWith('_index')
+ ? idxKey.slice(0, -'_index'.length) + '_idx'
+ : idxKey;
+
+ idxKey = casing(idxKey);
+
+ const indexGeneratedName = indexName(tableName, it.columns);
+ const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`;
+
+ statement += `\t\t${idxKey}: `;
+ statement += it.isUnique ? 'uniqueIndex(' : 'index(';
+ statement += `${escapedIndexName})`;
+ statement += `.on(${
+ it.columns
+ .map((it) => `table.${casing(it)}`)
+ .join(', ')
+ }),`;
+ statement += `\n`;
+ });
+
+ return statement;
+};
+
+const createTableUniques = (
+ unqs: UniqueConstraint[],
+ casing: (value: string) => string,
+): string => {
+ let statement = '';
+
+ unqs.forEach((it) => {
+ const idxKey = casing(it.name);
+
+ statement += `\t\t${idxKey}: `;
+ statement += 'unique(';
+ statement += `"${it.name}")`;
+ statement += `.on(${
+ it.columns
+ .map((it) => `table.${casing(it)}`)
+ .join(', ')
+ }),`;
+ statement += `\n`;
+ });
+
+ return statement;
+};
+
+const createTablePKs = (
+ pks: PrimaryKey[],
+ casing: (value: string) => string,
+): string => {
+ let statement = '';
+
+ pks.forEach((it) => {
+ let idxKey = casing(it.name);
+
+ statement += `\t\t${idxKey}: `;
+ statement += 'primaryKey({ columns: [';
+ statement += `${
+ it.columns
+ .map((c) => {
+ return `table.${casing(c)}`;
+ })
+ .join(', ')
+ }]${it.name ? `, name: "${it.name}"` : ''}}`;
+ statement += '),';
+ statement += `\n`;
+ });
+
+ return statement;
+};
diff --git a/drizzle-kit/src/jsonStatements.ts b/drizzle-kit/src/jsonStatements.ts
index 18b28fac4..f64020f5a 100644
--- a/drizzle-kit/src/jsonStatements.ts
+++ b/drizzle-kit/src/jsonStatements.ts
@@ -13,6 +13,7 @@ import {
View as PgView,
ViewWithOption,
} from './serializer/pgSchema';
+import { SingleStoreKitInternals, SingleStoreSchema, SingleStoreSquasher } from './serializer/singlestoreSchema';
import {
SQLiteKitInternals,
SQLiteSchemaInternal,
@@ -50,7 +51,7 @@ export interface JsonCreateTableStatement {
uniqueConstraints?: string[];
policies?: string[];
checkConstraints?: string[];
- internals?: MySqlKitInternals;
+ internals?: MySqlKitInternals | SingleStoreKitInternals;
isRLSEnabled?: boolean;
}
@@ -306,7 +307,7 @@ export interface JsonCreateIndexStatement {
tableName: string;
data: string;
schema: string;
- internal?: MySqlKitInternals | SQLiteKitInternals;
+ internal?: MySqlKitInternals | SQLiteKitInternals | SingleStoreKitInternals;
}
export interface JsonPgCreateIndexStatement {
@@ -673,6 +674,11 @@ export type JsonCreateMySqlViewStatement = {
replace: boolean;
} & Omit;
+/* export type JsonCreateSingleStoreViewStatement = {
+ type: 'singlestore_create_view';
+ replace: boolean;
+} & Omit; */
+
export type JsonCreateSqliteViewStatement = {
type: 'sqlite_create_view';
} & Omit;
@@ -756,6 +762,10 @@ export type JsonAlterMySqlViewStatement = {
type: 'alter_mysql_view';
} & Omit;
+/* export type JsonAlterSingleStoreViewStatement = {
+ type: 'alter_singlestore_view';
+} & Omit; */
+
export type JsonAlterViewStatement =
| JsonAlterViewAlterSchemaStatement
| JsonAlterViewAddWithOptionStatement
@@ -838,6 +848,8 @@ export type JsonStatement =
| JsonAlterViewStatement
| JsonCreateMySqlViewStatement
| JsonAlterMySqlViewStatement
+ /* | JsonCreateSingleStoreViewStatement
+ | JsonAlterSingleStoreViewStatement */
| JsonCreateSqliteViewStatement
| JsonCreateCheckConstraint
| JsonDeleteCheckConstraint
@@ -906,6 +918,34 @@ export const prepareMySqlCreateTableJson = (
};
};
+export const prepareSingleStoreCreateTableJson = (
+ table: Table,
+ // TODO: remove?
+ json2: SingleStoreSchema,
+ // we need it to know if some of the indexes(and in future other parts) are expressions or columns
+ // didn't change singlestoreserialaizer, because it will break snapshots and diffs and it's hard to detect
+ // if previously it was an expression or column
+ internals: SingleStoreKitInternals,
+): JsonCreateTableStatement => {
+ const { name, schema, columns, compositePrimaryKeys, uniqueConstraints } = table;
+
+ return {
+ type: 'create_table',
+ tableName: name,
+ schema,
+ columns: Object.values(columns),
+ compositePKs: Object.values(compositePrimaryKeys),
+ compositePkName: Object.values(compositePrimaryKeys).length > 0
+ ? json2.tables[name].compositePrimaryKeys[
+ SingleStoreSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0])
+ .name
+ ].name
+ : '',
+ uniqueConstraints: Object.values(uniqueConstraints),
+ internals,
+ };
+};
+
export const prepareSQLiteCreateTable = (
table: Table,
action?: 'push' | undefined,
@@ -1207,7 +1247,7 @@ export const prepareDeleteSchemasJson = (
export const prepareRenameColumns = (
tableName: string,
- // TODO: split for pg and mysql+sqlite without schema
+ // TODO: split for pg and mysql+sqlite and singlestore without schema
schema: string,
pairs: { from: Column; to: Column }[],
): JsonRenameColumnStatement[] => {
@@ -1637,6 +1677,363 @@ export const prepareAlterColumnsMysql = (
return [...dropPkStatements, ...setPkStatements, ...statements];
};
+export const prepareAlterColumnsSingleStore = (
+ tableName: string,
+ schema: string,
+ columns: AlteredColumn[],
+ // TODO: remove?
+ json1: CommonSquashedSchema,
+ json2: CommonSquashedSchema,
+ action?: 'push' | undefined,
+): JsonAlterColumnStatement[] => {
+ let statements: JsonAlterColumnStatement[] = [];
+ let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = [];
+ let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = [];
+
+ for (const column of columns) {
+ const columnName = typeof column.name !== 'string' ? column.name.new : column.name;
+
+ const table = json2.tables[tableName];
+ const snapshotColumn = table.columns[columnName];
+
+ const columnType = snapshotColumn.type;
+ const columnDefault = snapshotColumn.default;
+ const columnOnUpdate = 'onUpdate' in snapshotColumn ? snapshotColumn.onUpdate : undefined;
+ const columnNotNull = table.columns[columnName].notNull;
+
+ const columnAutoIncrement = 'autoincrement' in snapshotColumn
+ ? snapshotColumn.autoincrement ?? false
+ : false;
+
+ const columnPk = table.columns[columnName].primaryKey;
+
+ if (column.autoincrement?.type === 'added') {
+ statements.push({
+ type: 'alter_table_alter_column_set_autoincrement',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+
+ if (column.autoincrement?.type === 'changed') {
+ const type = column.autoincrement.new
+ ? 'alter_table_alter_column_set_autoincrement'
+ : 'alter_table_alter_column_drop_autoincrement';
+
+ statements.push({
+ type,
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+
+ if (column.autoincrement?.type === 'deleted') {
+ statements.push({
+ type: 'alter_table_alter_column_drop_autoincrement',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+ }
+
+ for (const column of columns) {
+ const columnName = typeof column.name !== 'string' ? column.name.new : column.name;
+
+ // I used any, because those fields are available only for mysql and singlestore dialect
+ // For other dialects it will become undefined, that is fine for json statements
+ const columnType = json2.tables[tableName].columns[columnName].type;
+ const columnDefault = json2.tables[tableName].columns[columnName].default;
+ const columnGenerated = json2.tables[tableName].columns[columnName].generated;
+ const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any)
+ .onUpdate;
+ const columnNotNull = json2.tables[tableName].columns[columnName].notNull;
+ const columnAutoIncrement = (
+ json2.tables[tableName].columns[columnName] as any
+ ).autoincrement;
+ const columnPk = (json2.tables[tableName].columns[columnName] as any)
+ .primaryKey;
+
+ const compositePk = json2.tables[tableName].compositePrimaryKeys[
+ `${tableName}_${columnName}`
+ ];
+
+ if (typeof column.name !== 'string') {
+ statements.push({
+ type: 'alter_table_rename_column',
+ tableName,
+ oldColumnName: column.name.old,
+ newColumnName: column.name.new,
+ schema,
+ });
+ }
+
+ if (column.type?.type === 'changed') {
+ statements.push({
+ type: 'alter_table_alter_column_set_type',
+ tableName,
+ columnName,
+ newDataType: column.type.new,
+ oldDataType: column.type.old,
+ schema,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ columnGenerated,
+ });
+ }
+
+ if (
+ column.primaryKey?.type === 'deleted'
+ || (column.primaryKey?.type === 'changed'
+ && !column.primaryKey.new
+ && typeof compositePk === 'undefined')
+ ) {
+ dropPkStatements.push({
+ ////
+ type: 'alter_table_alter_column_drop_pk',
+ tableName,
+ columnName,
+ schema,
+ });
+ }
+
+ if (column.default?.type === 'added') {
+ statements.push({
+ type: 'alter_table_alter_column_set_default',
+ tableName,
+ columnName,
+ newDefaultValue: column.default.value,
+ schema,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ newDataType: columnType,
+ columnPk,
+ });
+ }
+
+ if (column.default?.type === 'changed') {
+ statements.push({
+ type: 'alter_table_alter_column_set_default',
+ tableName,
+ columnName,
+ newDefaultValue: column.default.new,
+ oldDefaultValue: column.default.old,
+ schema,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ newDataType: columnType,
+ columnPk,
+ });
+ }
+
+ if (column.default?.type === 'deleted') {
+ statements.push({
+ type: 'alter_table_alter_column_drop_default',
+ tableName,
+ columnName,
+ schema,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ newDataType: columnType,
+ columnPk,
+ });
+ }
+
+ if (column.notNull?.type === 'added') {
+ statements.push({
+ type: 'alter_table_alter_column_set_notnull',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+
+ if (column.notNull?.type === 'changed') {
+ const type = column.notNull.new
+ ? 'alter_table_alter_column_set_notnull'
+ : 'alter_table_alter_column_drop_notnull';
+ statements.push({
+ type: type,
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+
+ if (column.notNull?.type === 'deleted') {
+ statements.push({
+ type: 'alter_table_alter_column_drop_notnull',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+
+ if (column.generated?.type === 'added') {
+ if (columnGenerated?.type === 'virtual') {
+ // TODO: Change warning message according to SingleStore docs
+ warning(
+ `You are trying to add virtual generated constraint to ${
+ chalk.blue(
+ columnName,
+ )
+ } column. As MySQL docs mention: "Nongenerated columns can be altered to stored but not virtual generated columns". We will drop an existing column and add it with a virtual generated statement. This means that the data previously stored in this column will be wiped, and new data will be generated on each read for this column\n`,
+ );
+ }
+ statements.push({
+ type: 'alter_table_alter_column_set_generated',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ columnGenerated,
+ });
+ }
+
+ if (column.generated?.type === 'changed' && action !== 'push') {
+ statements.push({
+ type: 'alter_table_alter_column_alter_generated',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ columnGenerated,
+ });
+ }
+
+ if (column.generated?.type === 'deleted') {
+ if (columnGenerated?.type === 'virtual') {
+ // TODO: Change warning message according to SingleStore docs
+ warning(
+ `You are trying to remove virtual generated constraint from ${
+ chalk.blue(
+ columnName,
+ )
+ } column. As MySQL docs mention: "Stored but not virtual generated columns can be altered to nongenerated columns. The stored generated values become the values of the nongenerated column". We will drop an existing column and add it without a virtual generated statement. This means that this column will have no data after migration\n`,
+ );
+ }
+ statements.push({
+ type: 'alter_table_alter_column_drop_generated',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ columnGenerated,
+ oldColumn: json1.tables[tableName].columns[columnName],
+ });
+ }
+
+ if (
+ column.primaryKey?.type === 'added'
+ || (column.primaryKey?.type === 'changed' && column.primaryKey.new)
+ ) {
+ const wasAutoincrement = statements.filter(
+ (it) => it.type === 'alter_table_alter_column_set_autoincrement',
+ );
+ if (wasAutoincrement.length === 0) {
+ setPkStatements.push({
+ type: 'alter_table_alter_column_set_pk',
+ tableName,
+ schema,
+ columnName,
+ });
+ }
+ }
+
+ if (column.onUpdate?.type === 'added') {
+ statements.push({
+ type: 'alter_table_alter_column_set_on_update',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+
+ if (column.onUpdate?.type === 'deleted') {
+ statements.push({
+ type: 'alter_table_alter_column_drop_on_update',
+ tableName,
+ columnName,
+ schema,
+ newDataType: columnType,
+ columnDefault,
+ columnOnUpdate,
+ columnNotNull,
+ columnAutoIncrement,
+ columnPk,
+ });
+ }
+ }
+
+ return [...dropPkStatements, ...setPkStatements, ...statements];
+};
+
export const preparePgAlterColumns = (
_tableName: string,
schema: string,
@@ -2704,8 +3101,11 @@ export const prepareAddCompositePrimaryKeyPg = (
tableName: string,
schema: string,
pks: Record,
+ // TODO: remove?
+ json2: PgSchema,
): JsonCreateCompositePK[] => {
return Object.values(pks).map((it) => {
+ const unsquashed = PgSquasher.unsquashPK(it);
return {
type: 'create_composite_pk',
tableName,
@@ -2720,6 +3120,8 @@ export const prepareDeleteCompositePrimaryKeyPg = (
tableName: string,
schema: string,
pks: Record,
+ // TODO: remove?
+ json1: PgSchema,
): JsonDeleteCompositePK[] => {
return Object.values(pks).map((it) => {
return {
@@ -2736,6 +3138,9 @@ export const prepareAlterCompositePrimaryKeyPg = (
tableName: string,
schema: string,
pks: Record,
+ // TODO: remove?
+ json1: PgSchema,
+ json2: PgSchema,
): JsonAlterCompositePK[] => {
return Object.values(pks).map((it) => {
return {
@@ -2876,7 +3281,6 @@ export const prepareDeleteCompositePrimaryKeyMySql = (
type: 'delete_composite_pk',
tableName,
data: it,
- constraintName: unsquashed.name,
} as JsonDeleteCompositePK;
});
};
@@ -2945,6 +3349,24 @@ export const prepareMySqlCreateViewJson = (
};
};
+/* export const prepareSingleStoreCreateViewJson = (
+ name: string,
+ definition: string,
+ meta: string,
+ replace: boolean = false,
+): JsonCreateSingleStoreViewStatement => {
+ const { algorithm, sqlSecurity, withCheckOption } = SingleStoreSquasher.unsquashView(meta);
+ return {
+ type: 'singlestore_create_view',
+ name: name,
+ definition: definition,
+ algorithm,
+ sqlSecurity,
+ withCheckOption,
+ replace,
+ };
+}; */
+
export const prepareSqliteCreateViewJson = (
name: string,
definition: string,
@@ -3070,3 +3492,9 @@ export const prepareMySqlAlterView = (
): JsonAlterMySqlViewStatement => {
return { type: 'alter_mysql_view', ...view };
};
+
+/* export const prepareSingleStoreAlterView = (
+ view: Omit,
+): JsonAlterSingleStoreViewStatement => {
+ return { type: 'alter_singlestore_view', ...view };
+}; */
diff --git a/drizzle-kit/src/migrationPreparator.ts b/drizzle-kit/src/migrationPreparator.ts
index d61f804ca..4e67e8174 100644
--- a/drizzle-kit/src/migrationPreparator.ts
+++ b/drizzle-kit/src/migrationPreparator.ts
@@ -1,9 +1,10 @@
import { randomUUID } from 'crypto';
import fs from 'fs';
import { CasingType } from './cli/validations/common';
-import { serializeMySql, serializePg, serializeSQLite } from './serializer';
+import { serializeMySql, serializePg, serializeSingleStore, serializeSQLite } from './serializer';
import { dryMySql, MySqlSchema, mysqlSchema } from './serializer/mysqlSchema';
import { dryPg, PgSchema, pgSchema, PgSchemaInternal } from './serializer/pgSchema';
+import { drySingleStore, SingleStoreSchema, singlestoreSchema } from './serializer/singlestoreSchema';
import { drySQLite, SQLiteSchema, sqliteSchema } from './serializer/sqliteSchema';
export const prepareMySqlDbPushSnapshot = async (
@@ -22,6 +23,22 @@ export const prepareMySqlDbPushSnapshot = async (
return { prev, cur: result };
};
+export const prepareSingleStoreDbPushSnapshot = async (
+ prev: SingleStoreSchema,
+ schemaPath: string | string[],
+ casing: CasingType | undefined,
+): Promise<{ prev: SingleStoreSchema; cur: SingleStoreSchema }> => {
+ const serialized = await serializeSingleStore(schemaPath, casing);
+
+ const id = randomUUID();
+ const idPrev = prev.id;
+
+ const { version, dialect, ...rest } = serialized;
+ const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest };
+
+ return { prev, cur: result };
+};
+
export const prepareSQLiteDbPushSnapshot = async (
prev: SQLiteSchema,
schemaPath: string | string[],
@@ -89,6 +106,34 @@ export const prepareMySqlMigrationSnapshot = async (
return { prev: prevSnapshot, cur: result, custom };
};
+export const prepareSingleStoreMigrationSnapshot = async (
+ migrationFolders: string[],
+ schemaPath: string | string[],
+ casing: CasingType | undefined,
+): Promise<{ prev: SingleStoreSchema; cur: SingleStoreSchema; custom: SingleStoreSchema }> => {
+ const prevSnapshot = singlestoreSchema.parse(
+ preparePrevSnapshot(migrationFolders, drySingleStore),
+ );
+ const serialized = await serializeSingleStore(schemaPath, casing);
+
+ const id = randomUUID();
+ const idPrev = prevSnapshot.id;
+
+ const { version, dialect, ...rest } = serialized;
+ const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest };
+
+ const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot;
+
+ // that's for custom migrations, when we need new IDs, but old snapshot
+ const custom: SingleStoreSchema = {
+ id,
+ prevId: idPrev,
+ ...prevRest,
+ };
+
+ return { prev: prevSnapshot, cur: result, custom };
+};
+
export const prepareSqliteMigrationSnapshot = async (
snapshots: string[],
schemaPath: string | string[],
diff --git a/drizzle-kit/src/schemaValidator.ts b/drizzle-kit/src/schemaValidator.ts
index 6ad29a544..e91b5ab11 100644
--- a/drizzle-kit/src/schemaValidator.ts
+++ b/drizzle-kit/src/schemaValidator.ts
@@ -1,9 +1,10 @@
import { enum as enumType, TypeOf, union } from 'zod';
import { mysqlSchema, mysqlSchemaSquashed } from './serializer/mysqlSchema';
import { pgSchema, pgSchemaSquashed } from './serializer/pgSchema';
+import { singlestoreSchema, singlestoreSchemaSquashed } from './serializer/singlestoreSchema';
import { sqliteSchema, SQLiteSchemaSquashed } from './serializer/sqliteSchema';
-export const dialects = ['postgresql', 'mysql', 'sqlite', 'turso'] as const;
+export const dialects = ['postgresql', 'mysql', 'sqlite', 'turso', 'singlestore'] as const;
export const dialect = enumType(dialects);
export type Dialect = (typeof dialects)[number];
@@ -13,9 +14,10 @@ const commonSquashedSchema = union([
pgSchemaSquashed,
mysqlSchemaSquashed,
SQLiteSchemaSquashed,
+ singlestoreSchemaSquashed,
]);
-const commonSchema = union([pgSchema, mysqlSchema, sqliteSchema]);
+const commonSchema = union([pgSchema, mysqlSchema, sqliteSchema, singlestoreSchema]);
export type CommonSquashedSchema = TypeOf;
export type CommonSchema = TypeOf;
diff --git a/drizzle-kit/src/serializer/index.ts b/drizzle-kit/src/serializer/index.ts
index cf2ee625a..d24afbab0 100644
--- a/drizzle-kit/src/serializer/index.ts
+++ b/drizzle-kit/src/serializer/index.ts
@@ -6,6 +6,7 @@ import { CasingType } from 'src/cli/validations/common';
import { error } from '../cli/views';
import type { MySqlSchemaInternal } from './mysqlSchema';
import type { PgSchemaInternal } from './pgSchema';
+import { SingleStoreSchemaInternal } from './singlestoreSchema';
import type { SQLiteSchemaInternal } from './sqliteSchema';
export const serializeMySql = async (
@@ -53,6 +54,22 @@ export const serializeSQLite = async (
return generateSqliteSnapshot(tables, views, casing);
};
+export const serializeSingleStore = async (
+ path: string | string[],
+ casing: CasingType | undefined,
+): Promise => {
+ const filenames = prepareFilenames(path);
+
+ console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`));
+
+ const { prepareFromSingleStoreImports } = await import('./singlestoreImports');
+ const { generateSingleStoreSnapshot } = await import('./singlestoreSerializer');
+
+ const { tables /* views */ } = await prepareFromSingleStoreImports(filenames);
+
+ return generateSingleStoreSnapshot(tables, /* views, */ casing);
+};
+
export const prepareFilenames = (path: string | string[]) => {
if (typeof path === 'string') {
path = [path];
diff --git a/drizzle-kit/src/serializer/singlestoreImports.ts b/drizzle-kit/src/serializer/singlestoreImports.ts
new file mode 100644
index 000000000..23c2d66a9
--- /dev/null
+++ b/drizzle-kit/src/serializer/singlestoreImports.ts
@@ -0,0 +1,38 @@
+import { is } from 'drizzle-orm';
+import { AnySingleStoreTable, SingleStoreTable } from 'drizzle-orm/singlestore-core';
+import { safeRegister } from '../cli/commands/utils';
+
+export const prepareFromExports = (exports: Record) => {
+ const tables: AnySingleStoreTable[] = [];
+ /* const views: SingleStoreView[] = []; */
+
+ const i0values = Object.values(exports);
+ i0values.forEach((t) => {
+ if (is(t, SingleStoreTable)) {
+ tables.push(t);
+ }
+
+ /* if (is(t, SingleStoreView)) {
+ views.push(t);
+ } */
+ });
+
+ return { tables /* views */ };
+};
+
+export const prepareFromSingleStoreImports = async (imports: string[]) => {
+ const tables: AnySingleStoreTable[] = [];
+ /* const views: SingleStoreView[] = []; */
+
+ const { unregister } = await safeRegister();
+ for (let i = 0; i < imports.length; i++) {
+ const it = imports[i];
+ const i0: Record = require(`${it}`);
+ const prepared = prepareFromExports(i0);
+
+ tables.push(...prepared.tables);
+ /* views.push(...prepared.views); */
+ }
+ unregister();
+ return { tables: Array.from(new Set(tables)) /* , views */ };
+};
diff --git a/drizzle-kit/src/serializer/singlestoreSchema.ts b/drizzle-kit/src/serializer/singlestoreSchema.ts
new file mode 100644
index 000000000..9ff45ef5a
--- /dev/null
+++ b/drizzle-kit/src/serializer/singlestoreSchema.ts
@@ -0,0 +1,257 @@
+import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod';
+import { mapValues, originUUID, snapshotVersion } from '../global';
+
+// ------- V3 --------
+const index = object({
+ name: string(),
+ columns: string().array(),
+ isUnique: boolean(),
+ using: enumType(['btree', 'hash']).optional(),
+ algorithm: enumType(['default', 'inplace', 'copy']).optional(),
+ lock: enumType(['default', 'none', 'shared', 'exclusive']).optional(),
+}).strict();
+
+const column = object({
+ name: string(),
+ type: string(),
+ primaryKey: boolean(),
+ notNull: boolean(),
+ autoincrement: boolean().optional(),
+ default: any().optional(),
+ onUpdate: any().optional(),
+ generated: object({
+ type: enumType(['stored', 'virtual']),
+ as: string(),
+ }).optional(),
+}).strict();
+
+const compositePK = object({
+ name: string(),
+ columns: string().array(),
+}).strict();
+
+const uniqueConstraint = object({
+ name: string(),
+ columns: string().array(),
+}).strict();
+
+const table = object({
+ name: string(),
+ columns: record(string(), column),
+ indexes: record(string(), index),
+ compositePrimaryKeys: record(string(), compositePK),
+ uniqueConstraints: record(string(), uniqueConstraint).default({}),
+}).strict();
+
+const viewMeta = object({
+ algorithm: enumType(['undefined', 'merge', 'temptable']),
+ sqlSecurity: enumType(['definer', 'invoker']),
+ withCheckOption: enumType(['local', 'cascaded']).optional(),
+}).strict();
+
+/* export const view = object({
+ name: string(),
+ columns: record(string(), column),
+ definition: string().optional(),
+ isExisting: boolean(),
+}).strict().merge(viewMeta);
+type SquasherViewMeta = Omit, 'definer'>; */
+
+export const kitInternals = object({
+ tables: record(
+ string(),
+ object({
+ columns: record(
+ string(),
+ object({ isDefaultAnExpression: boolean().optional() }).optional(),
+ ),
+ }).optional(),
+ ).optional(),
+ indexes: record(
+ string(),
+ object({
+ columns: record(
+ string(),
+ object({ isExpression: boolean().optional() }).optional(),
+ ),
+ }).optional(),
+ ).optional(),
+}).optional();
+
+// use main dialect
+const dialect = literal('singlestore');
+
+const schemaHash = object({
+ id: string(),
+ prevId: string(),
+});
+
+export const schemaInternal = object({
+ version: literal('1'),
+ dialect: dialect,
+ tables: record(string(), table),
+ /* views: record(string(), view).default({}), */
+ _meta: object({
+ tables: record(string(), string()),
+ columns: record(string(), string()),
+ }),
+ internal: kitInternals,
+}).strict();
+
+export const schema = schemaInternal.merge(schemaHash);
+
+const tableSquashed = object({
+ name: string(),
+ columns: record(string(), column),
+ indexes: record(string(), string()),
+ compositePrimaryKeys: record(string(), string()),
+ uniqueConstraints: record(string(), string()).default({}),
+}).strict();
+
+/* const viewSquashed = view.omit({
+ algorithm: true,
+ sqlSecurity: true,
+ withCheckOption: true,
+}).extend({ meta: string() }); */
+
+export const schemaSquashed = object({
+ version: literal('1'),
+ dialect: dialect,
+ tables: record(string(), tableSquashed),
+ /* views: record(string(), viewSquashed), */
+}).strict();
+
+export type Dialect = TypeOf;
+export type Column = TypeOf;
+export type Table = TypeOf;
+export type SingleStoreSchema = TypeOf;
+export type SingleStoreSchemaInternal = TypeOf;
+export type SingleStoreKitInternals = TypeOf;
+export type SingleStoreSchemaSquashed = TypeOf;
+export type Index = TypeOf;
+export type PrimaryKey = TypeOf;
+export type UniqueConstraint = TypeOf;
+/* export type View = TypeOf; */
+/* export type ViewSquashed = TypeOf; */
+
+export const SingleStoreSquasher = {
+ squashIdx: (idx: Index) => {
+ index.parse(idx);
+ return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.using ?? ''};${idx.algorithm ?? ''};${
+ idx.lock ?? ''
+ }`;
+ },
+ unsquashIdx: (input: string): Index => {
+ const [name, columnsString, isUnique, using, algorithm, lock] = input.split(';');
+ const destructed = {
+ name,
+ columns: columnsString.split(','),
+ isUnique: isUnique === 'true',
+ using: using ? using : undefined,
+ algorithm: algorithm ? algorithm : undefined,
+ lock: lock ? lock : undefined,
+ };
+ return index.parse(destructed);
+ },
+ squashPK: (pk: PrimaryKey) => {
+ return `${pk.name};${pk.columns.join(',')}`;
+ },
+ unsquashPK: (pk: string): PrimaryKey => {
+ const splitted = pk.split(';');
+ return { name: splitted[0], columns: splitted[1].split(',') };
+ },
+ squashUnique: (unq: UniqueConstraint) => {
+ return `${unq.name};${unq.columns.join(',')}`;
+ },
+ unsquashUnique: (unq: string): UniqueConstraint => {
+ const [name, columns] = unq.split(';');
+ return { name, columns: columns.split(',') };
+ },
+ /* squashView: (view: View): string => {
+ return `${view.algorithm};${view.sqlSecurity};${view.withCheckOption}`;
+ },
+ unsquashView: (meta: string): SquasherViewMeta => {
+ const [algorithm, sqlSecurity, withCheckOption] = meta.split(';');
+ const toReturn = {
+ algorithm: algorithm,
+ sqlSecurity: sqlSecurity,
+ withCheckOption: withCheckOption !== 'undefined' ? withCheckOption : undefined,
+ };
+
+ return viewMeta.parse(toReturn);
+ }, */
+};
+
+export const squashSingleStoreScheme = (json: SingleStoreSchema): SingleStoreSchemaSquashed => {
+ const mappedTables = Object.fromEntries(
+ Object.entries(json.tables).map((it) => {
+ const squashedIndexes = mapValues(it[1].indexes, (index) => {
+ return SingleStoreSquasher.squashIdx(index);
+ });
+
+ const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => {
+ return SingleStoreSquasher.squashPK(pk);
+ });
+
+ const squashedUniqueConstraints = mapValues(
+ it[1].uniqueConstraints,
+ (unq) => {
+ return SingleStoreSquasher.squashUnique(unq);
+ },
+ );
+
+ return [
+ it[0],
+ {
+ name: it[1].name,
+ columns: it[1].columns,
+ indexes: squashedIndexes,
+ compositePrimaryKeys: squashedPKs,
+ uniqueConstraints: squashedUniqueConstraints,
+ },
+ ];
+ }),
+ );
+
+ /* const mappedViews = Object.fromEntries(
+ Object.entries(json.views).map(([key, value]) => {
+ const meta = SingleStoreSquasher.squashView(value);
+
+ return [key, {
+ name: value.name,
+ isExisting: value.isExisting,
+ columns: value.columns,
+ definition: value.definition,
+ meta,
+ }];
+ }),
+ ); */
+
+ return {
+ version: '1',
+ dialect: json.dialect,
+ tables: mappedTables,
+ /* views: mappedViews, */
+ };
+};
+
+export const singlestoreSchema = schema;
+export const singlestoreSchemaSquashed = schemaSquashed;
+
+// no prev version
+export const backwardCompatibleSingleStoreSchema = union([singlestoreSchema, schema]);
+
+export const drySingleStore = singlestoreSchema.parse({
+ version: '1',
+ dialect: 'singlestore',
+ id: originUUID,
+ prevId: '',
+ tables: {},
+ schemas: {},
+ /* views: {}, */
+ _meta: {
+ schemas: {},
+ tables: {},
+ columns: {},
+ },
+});
diff --git a/drizzle-kit/src/serializer/singlestoreSerializer.ts b/drizzle-kit/src/serializer/singlestoreSerializer.ts
new file mode 100644
index 000000000..e8c89f1d1
--- /dev/null
+++ b/drizzle-kit/src/serializer/singlestoreSerializer.ts
@@ -0,0 +1,767 @@
+import chalk from 'chalk';
+import { is, SQL } from 'drizzle-orm';
+import {
+ AnySingleStoreTable,
+ getTableConfig,
+ type PrimaryKey as PrimaryKeyORM,
+ SingleStoreDialect,
+ uniqueKeyName,
+} from 'drizzle-orm/singlestore-core';
+import { RowDataPacket } from 'mysql2/promise';
+import { withStyle } from '../cli/validations/outputs';
+import { IntrospectStage, IntrospectStatus } from '../cli/views';
+
+import { CasingType } from 'src/cli/validations/common';
+import type { DB } from '../utils';
+import {
+ Column,
+ Index,
+ PrimaryKey,
+ SingleStoreKitInternals,
+ SingleStoreSchemaInternal,
+ Table,
+ UniqueConstraint,
+} from './singlestoreSchema';
+import { sqlToStr } from './utils';
+
+const dialect = new SingleStoreDialect();
+
+export const indexName = (tableName: string, columns: string[]) => {
+ return `${tableName}_${columns.join('_')}_index`;
+};
+
+export const generateSingleStoreSnapshot = (
+ tables: AnySingleStoreTable[],
+ /* views: SingleStoreView[], */
+ casing: CasingType | undefined,
+): SingleStoreSchemaInternal => {
+ const dialect = new SingleStoreDialect({ casing });
+ const result: Record = {};
+ /* const resultViews: Record = {}; */
+ const internal: SingleStoreKitInternals = { tables: {}, indexes: {} };
+ for (const table of tables) {
+ const {
+ name: tableName,
+ columns,
+ indexes,
+ schema,
+ primaryKeys,
+ uniqueConstraints,
+ } = getTableConfig(table);
+ const columnsObject: Record = {};
+ const indexesObject: Record = {};
+ const primaryKeysObject: Record = {};
+ const uniqueConstraintObject: Record = {};
+
+ columns.forEach((column) => {
+ const notNull: boolean = column.notNull;
+ const sqlTypeLowered = column.getSQLType().toLowerCase();
+ const autoIncrement = typeof (column as any).autoIncrement === 'undefined'
+ ? false
+ : (column as any).autoIncrement;
+
+ const generated = column.generated;
+
+ const columnToSet: Column = {
+ name: column.name,
+ type: column.getSQLType(),
+ primaryKey: false,
+ // If field is autoincrement it's notNull by default
+ // notNull: autoIncrement ? true : notNull,
+ notNull,
+ autoincrement: autoIncrement,
+ onUpdate: (column as any).hasOnUpdateNow,
+ generated: generated
+ ? {
+ as: is(generated.as, SQL)
+ ? dialect.sqlToQuery(generated.as as SQL).sql
+ : typeof generated.as === 'function'
+ ? dialect.sqlToQuery(generated.as() as SQL).sql
+ : (generated.as as any),
+ type: generated.mode ?? 'stored',
+ }
+ : undefined,
+ };
+
+ if (column.primary) {
+ primaryKeysObject[`${tableName}_${column.name}`] = {
+ name: `${tableName}_${column.name}`,
+ columns: [column.name],
+ };
+ }
+
+ if (column.isUnique) {
+ const existingUnique = uniqueConstraintObject[column.uniqueName!];
+ if (typeof existingUnique !== 'undefined') {
+ console.log(
+ `\n${
+ withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${
+ chalk.underline.blue(
+ tableName,
+ )
+ } table.
+ The unique constraint ${
+ chalk.underline.blue(
+ column.uniqueName,
+ )
+ } on the ${
+ chalk.underline.blue(
+ column.name,
+ )
+ } column is confilcting with a unique constraint name already defined for ${
+ chalk.underline.blue(
+ existingUnique.columns.join(','),
+ )
+ } columns\n`)
+ }`,
+ );
+ process.exit(1);
+ }
+ uniqueConstraintObject[column.uniqueName!] = {
+ name: column.uniqueName!,
+ columns: [columnToSet.name],
+ };
+ }
+
+ if (column.default !== undefined) {
+ if (is(column.default, SQL)) {
+ columnToSet.default = sqlToStr(column.default, casing);
+ } else {
+ if (typeof column.default === 'string') {
+ columnToSet.default = `'${column.default}'`;
+ } else {
+ if (sqlTypeLowered === 'json') {
+ columnToSet.default = `'${JSON.stringify(column.default)}'`;
+ } else if (column.default instanceof Date) {
+ if (sqlTypeLowered === 'date') {
+ columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`;
+ } else if (
+ sqlTypeLowered.startsWith('datetime')
+ || sqlTypeLowered.startsWith('timestamp')
+ ) {
+ columnToSet.default = `'${
+ column.default
+ .toISOString()
+ .replace('T', ' ')
+ .slice(0, 23)
+ }'`;
+ }
+ } else {
+ columnToSet.default = column.default;
+ }
+ }
+ // if (['blob', 'text', 'json'].includes(column.getSQLType())) {
+ // columnToSet.default = `(${columnToSet.default})`;
+ // }
+ }
+ }
+ columnsObject[column.name] = columnToSet;
+ });
+
+ primaryKeys.map((pk: PrimaryKeyORM) => {
+ const columnNames = pk.columns.map((c: any) => c.name);
+ primaryKeysObject[pk.getName()] = {
+ name: pk.getName(),
+ columns: columnNames,
+ };
+
+ // all composite pk's should be treated as notNull
+ for (const column of pk.columns) {
+ columnsObject[column.name].notNull = true;
+ }
+ });
+
+ uniqueConstraints?.map((unq) => {
+ const columnNames = unq.columns.map((c) => c.name);
+
+ const name = unq.name ?? uniqueKeyName(table, columnNames);
+
+ const existingUnique = uniqueConstraintObject[name];
+ if (typeof existingUnique !== 'undefined') {
+ console.log(
+ `\n${
+ withStyle.errorWarning(
+ `We\'ve found duplicated unique constraint names in ${
+ chalk.underline.blue(
+ tableName,
+ )
+ } table. \nThe unique constraint ${
+ chalk.underline.blue(
+ name,
+ )
+ } on the ${
+ chalk.underline.blue(
+ columnNames.join(','),
+ )
+ } columns is confilcting with a unique constraint name already defined for ${
+ chalk.underline.blue(
+ existingUnique.columns.join(','),
+ )
+ } columns\n`,
+ )
+ }`,
+ );
+ process.exit(1);
+ }
+
+ uniqueConstraintObject[name] = {
+ name: unq.name!,
+ columns: columnNames,
+ };
+ });
+
+ indexes.forEach((value) => {
+ const columns = value.config.columns;
+ const name = value.config.name;
+
+ let indexColumns = columns.map((it) => {
+ if (is(it, SQL)) {
+ const sql = dialect.sqlToQuery(it, 'indexes').sql;
+ if (typeof internal!.indexes![name] === 'undefined') {
+ internal!.indexes![name] = {
+ columns: {
+ [sql]: {
+ isExpression: true,
+ },
+ },
+ };
+ } else {
+ if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') {
+ internal!.indexes![name]!.columns[sql] = {
+ isExpression: true,
+ };
+ } else {
+ internal!.indexes![name]!.columns[sql]!.isExpression = true;
+ }
+ }
+ return sql;
+ } else {
+ return `${it.name}`;
+ }
+ });
+
+ if (value.config.unique) {
+ if (typeof uniqueConstraintObject[name] !== 'undefined') {
+ console.log(
+ `\n${
+ withStyle.errorWarning(
+ `We\'ve found duplicated unique constraint names in ${
+ chalk.underline.blue(
+ tableName,
+ )
+ } table. \nThe unique index ${
+ chalk.underline.blue(
+ name,
+ )
+ } on the ${
+ chalk.underline.blue(
+ indexColumns.join(','),
+ )
+ } columns is confilcting with a unique constraint name already defined for ${
+ chalk.underline.blue(
+ uniqueConstraintObject[name].columns.join(','),
+ )
+ } columns\n`,
+ )
+ }`,
+ );
+ process.exit(1);
+ }
+ }
+
+ indexesObject[name] = {
+ name,
+ columns: indexColumns,
+ isUnique: value.config.unique ?? false,
+ using: value.config.using,
+ algorithm: value.config.algorythm,
+ lock: value.config.lock,
+ };
+ });
+
+ // only handle tables without schemas
+ if (!schema) {
+ result[tableName] = {
+ name: tableName,
+ columns: columnsObject,
+ indexes: indexesObject,
+ compositePrimaryKeys: primaryKeysObject,
+ uniqueConstraints: uniqueConstraintObject,
+ };
+ }
+ }
+
+ /* for (const view of views) {
+ const {
+ isExisting,
+ name,
+ query,
+ schema,
+ selectedFields,
+ algorithm,
+ sqlSecurity,
+ withCheckOption,
+ } = getViewConfig(view);
+
+ const columnsObject: Record = {};
+
+ const existingView = resultViews[name];
+ if (typeof existingView !== 'undefined') {
+ console.log(
+ `\n${
+ withStyle.errorWarning(
+ `We\'ve found duplicated view name across ${
+ chalk.underline.blue(
+ schema ?? 'public',
+ )
+ } schema. Please rename your view`,
+ )
+ }`,
+ );
+ process.exit(1);
+ }
+ for (const key in selectedFields) {
+ if (is(selectedFields[key], SingleStoreColumn)) {
+ const column = selectedFields[key];
+
+ const notNull: boolean = column.notNull;
+ const sqlTypeLowered = column.getSQLType().toLowerCase();
+ const autoIncrement = typeof (column as any).autoIncrement === 'undefined'
+ ? false
+ : (column as any).autoIncrement;
+
+ const generated = column.generated;
+
+ const columnToSet: Column = {
+ name: column.name,
+ type: column.getSQLType(),
+ primaryKey: false,
+ // If field is autoincrement it's notNull by default
+ // notNull: autoIncrement ? true : notNull,
+ notNull,
+ autoincrement: autoIncrement,
+ onUpdate: (column as any).hasOnUpdateNow,
+ generated: generated
+ ? {
+ as: is(generated.as, SQL)
+ ? dialect.sqlToQuery(generated.as as SQL).sql
+ : typeof generated.as === 'function'
+ ? dialect.sqlToQuery(generated.as() as SQL).sql
+ : (generated.as as any),
+ type: generated.mode ?? 'stored',
+ }
+ : undefined,
+ };
+
+ if (column.default !== undefined) {
+ if (is(column.default, SQL)) {
+ columnToSet.default = sqlToStr(column.default, casing);
+ } else {
+ if (typeof column.default === 'string') {
+ columnToSet.default = `'${column.default}'`;
+ } else {
+ if (sqlTypeLowered === 'json') {
+ columnToSet.default = `'${JSON.stringify(column.default)}'`;
+ } else if (column.default instanceof Date) {
+ if (sqlTypeLowered === 'date') {
+ columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`;
+ } else if (
+ sqlTypeLowered.startsWith('datetime')
+ || sqlTypeLowered.startsWith('timestamp')
+ ) {
+ columnToSet.default = `'${
+ column.default
+ .toISOString()
+ .replace('T', ' ')
+ .slice(0, 23)
+ }'`;
+ }
+ } else {
+ columnToSet.default = column.default;
+ }
+ }
+ }
+ }
+ columnsObject[column.name] = columnToSet;
+ }
+ }
+
+ resultViews[name] = {
+ columns: columnsObject,
+ name,
+ isExisting,
+ definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql,
+ withCheckOption,
+ algorithm: algorithm ?? 'undefined', // set default values
+ sqlSecurity: sqlSecurity ?? 'definer', // set default values
+ };
+ } */
+
+ return {
+ version: '1',
+ dialect: 'singlestore',
+ tables: result,
+ /* views: resultViews, */
+ _meta: {
+ tables: {},
+ columns: {},
+ },
+ internal,
+ };
+};
+
+function clearDefaults(defaultValue: any, collate: string) {
+ if (typeof collate === 'undefined' || collate === null) {
+ collate = `utf8mb4`;
+ }
+
+ let resultDefault = defaultValue;
+ collate = `_${collate}`;
+ if (defaultValue.startsWith(collate)) {
+ resultDefault = resultDefault
+ .substring(collate.length, defaultValue.length)
+ .replace(/\\/g, '');
+ if (resultDefault.startsWith("'") && resultDefault.endsWith("'")) {
+ return `('${resultDefault.substring(1, resultDefault.length - 1)}')`;
+ } else {
+ return `'${resultDefault}'`;
+ }
+ } else {
+ return `(${resultDefault})`;
+ }
+}
+
+export const fromDatabase = async (
+ db: DB,
+ inputSchema: string,
+ tablesFilter: (table: string) => boolean = (table) => true,
+ progressCallback?: (
+ stage: IntrospectStage,
+ count: number,
+ status: IntrospectStatus,
+ ) => void,
+): Promise => {
+ const result: Record = {};
+ const internals: SingleStoreKitInternals = { tables: {}, indexes: {} };
+
+ const columns = await db.query(`select * from information_schema.columns
+ where table_schema = '${inputSchema}' and table_name != '__drizzle_migrations'
+ order by table_name, ordinal_position;`);
+
+ const response = columns as RowDataPacket[];
+
+ const schemas: string[] = [];
+
+ let columnsCount = 0;
+ let tablesCount = new Set();
+ let indexesCount = 0;
+ /* let viewsCount = 0; */
+
+ const idxs = await db.query(
+ `select * from INFORMATION_SCHEMA.STATISTICS
+ WHERE INFORMATION_SCHEMA.STATISTICS.TABLE_SCHEMA = '${inputSchema}' and INFORMATION_SCHEMA.STATISTICS.INDEX_NAME != 'PRIMARY';`,
+ );
+
+ const idxRows = idxs as RowDataPacket[];
+
+ for (const column of response) {
+ if (!tablesFilter(column['TABLE_NAME'] as string)) continue;
+
+ columnsCount += 1;
+ if (progressCallback) {
+ progressCallback('columns', columnsCount, 'fetching');
+ }
+ const schema: string = column['TABLE_SCHEMA'];
+ const tableName = column['TABLE_NAME'];
+
+ tablesCount.add(`${schema}.${tableName}`);
+ if (progressCallback) {
+ progressCallback('columns', tablesCount.size, 'fetching');
+ }
+ const columnName: string = column['COLUMN_NAME'];
+ const isNullable = column['IS_NULLABLE'] === 'YES'; // 'YES', 'NO'
+ const dataType = column['DATA_TYPE']; // varchar
+ const columnType = column['COLUMN_TYPE']; // varchar(256)
+ // const columnType = column["DATA_TYPE"];
+ const isPrimary = column['COLUMN_KEY'] === 'PRI'; // 'PRI', ''
+ let columnDefault: string | null = column['COLUMN_DEFAULT'];
+ const collation: string = column['CHARACTER_SET_NAME'];
+ const geenratedExpression: string = column['GENERATION_EXPRESSION'];
+
+ let columnExtra = column['EXTRA'];
+ let isAutoincrement = false; // 'auto_increment', ''
+ let isDefaultAnExpression = false; // 'auto_increment', ''
+
+ if (typeof column['EXTRA'] !== 'undefined') {
+ columnExtra = column['EXTRA'];
+ isAutoincrement = column['EXTRA'] === 'auto_increment'; // 'auto_increment', ''
+ isDefaultAnExpression = column['EXTRA'].includes('DEFAULT_GENERATED'); // 'auto_increment', ''
+ }
+
+ // if (isPrimary) {
+ // if (typeof tableToPk[tableName] === "undefined") {
+ // tableToPk[tableName] = [columnName];
+ // } else {
+ // tableToPk[tableName].push(columnName);
+ // }
+ // }
+
+ if (schema !== inputSchema) {
+ schemas.push(schema);
+ }
+
+ const table = result[tableName];
+
+ // let changedType = columnType.replace("bigint unsigned", "serial")
+ let changedType = columnType;
+
+ if (columnType === 'bigint unsigned' && !isNullable && isAutoincrement) {
+ // check unique here
+ const uniqueIdx = idxRows.filter(
+ (it) =>
+ it['COLUMN_NAME'] === columnName
+ && it['TABLE_NAME'] === tableName
+ && it['NON_UNIQUE'] === 0,
+ );
+ if (uniqueIdx && uniqueIdx.length === 1) {
+ changedType = columnType.replace('bigint unsigned', 'serial');
+ }
+ }
+
+ if (
+ columnType.startsWith('bigint(')
+ || columnType.startsWith('tinyint(')
+ || columnType.startsWith('date(')
+ || columnType.startsWith('int(')
+ || columnType.startsWith('mediumint(')
+ || columnType.startsWith('smallint(')
+ || columnType.startsWith('text(')
+ || columnType.startsWith('time(')
+ || columnType.startsWith('year(')
+ ) {
+ changedType = columnType.replace(/\(\s*[^)]*\)$/, '');
+ }
+
+ if (columnType.includes('decimal(10,0)')) {
+ changedType = columnType.replace('decimal(10,0)', 'decimal');
+ }
+
+ if (columnDefault?.endsWith('.')) {
+ columnDefault = columnDefault.slice(0, -1);
+ }
+
+ let onUpdate: boolean | undefined = undefined;
+ if (
+ columnType.startsWith('timestamp')
+ && typeof columnExtra !== 'undefined'
+ && columnExtra.includes('on update CURRENT_TIMESTAMP')
+ ) {
+ onUpdate = true;
+ }
+
+ const newColumn: Column = {
+ default: columnDefault === null
+ ? undefined
+ : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault)
+ && !['decimal', 'char', 'varchar'].some((type) => columnType.startsWith(type))
+ ? Number(columnDefault)
+ : isDefaultAnExpression
+ ? clearDefaults(columnDefault, collation)
+ : columnDefault.startsWith('CURRENT_TIMESTAMP')
+ ? 'CURRENT_TIMESTAMP'
+ : `'${columnDefault}'`,
+ autoincrement: isAutoincrement,
+ name: columnName,
+ type: changedType,
+ primaryKey: false,
+ notNull: !isNullable,
+ onUpdate,
+ generated: geenratedExpression
+ ? {
+ as: geenratedExpression,
+ type: columnExtra === 'VIRTUAL GENERATED' ? 'virtual' : 'stored',
+ }
+ : undefined,
+ };
+
+ // Set default to internal object
+ if (isDefaultAnExpression) {
+ if (typeof internals!.tables![tableName] === 'undefined') {
+ internals!.tables![tableName] = {
+ columns: {
+ [columnName]: {
+ isDefaultAnExpression: true,
+ },
+ },
+ };
+ } else {
+ if (
+ typeof internals!.tables![tableName]!.columns[columnName]
+ === 'undefined'
+ ) {
+ internals!.tables![tableName]!.columns[columnName] = {
+ isDefaultAnExpression: true,
+ };
+ } else {
+ internals!.tables![tableName]!.columns[
+ columnName
+ ]!.isDefaultAnExpression = true;
+ }
+ }
+ }
+
+ if (!table) {
+ result[tableName] = {
+ name: tableName,
+ columns: {
+ [columnName]: newColumn,
+ },
+ compositePrimaryKeys: {},
+ indexes: {},
+ uniqueConstraints: {},
+ };
+ } else {
+ result[tableName]!.columns[columnName] = newColumn;
+ }
+ }
+
+ const tablePks = await db.query(
+ `SELECT table_name, column_name, ordinal_position
+ FROM information_schema.table_constraints t
+ LEFT JOIN information_schema.key_column_usage k
+ USING(constraint_name,table_schema,table_name)
+ WHERE t.constraint_type='UNIQUE'
+ and table_name != '__drizzle_migrations'
+ AND t.table_schema = '${inputSchema}'
+ ORDER BY ordinal_position`,
+ );
+
+ const tableToPk: { [tname: string]: string[] } = {};
+
+ const tableToPkRows = tablePks as RowDataPacket[];
+ for (const tableToPkRow of tableToPkRows) {
+ const tableName: string = tableToPkRow['table_name'];
+ const columnName: string = tableToPkRow['column_name'];
+ const position: string = tableToPkRow['ordinal_position'];
+
+ if (typeof result[tableName] === 'undefined') {
+ continue;
+ }
+
+ if (typeof tableToPk[tableName] === 'undefined') {
+ tableToPk[tableName] = [columnName];
+ } else {
+ tableToPk[tableName].push(columnName);
+ }
+ }
+
+ for (const [key, value] of Object.entries(tableToPk)) {
+ // if (value.length > 1) {
+ result[key].compositePrimaryKeys = {
+ [`${key}_${value.join('_')}`]: {
+ name: `${key}_${value.join('_')}`,
+ columns: value,
+ },
+ };
+ // } else if (value.length === 1) {
+ // result[key].columns[value[0]].primaryKey = true;
+ // } else {
+ // }
+ }
+ if (progressCallback) {
+ progressCallback('columns', columnsCount, 'done');
+ progressCallback('tables', tablesCount.size, 'done');
+ }
+
+ for (const idxRow of idxRows) {
+ const tableSchema = idxRow['TABLE_SCHEMA'];
+ const tableName = idxRow['TABLE_NAME'];
+ const constraintName = idxRow['INDEX_NAME'];
+ const columnName: string = idxRow['COLUMN_NAME'];
+ const isUnique = idxRow['NON_UNIQUE'] === 0;
+
+ const tableInResult = result[tableName];
+ if (typeof tableInResult === 'undefined') continue;
+
+ // if (tableInResult.columns[columnName].type === "serial") continue;
+
+ indexesCount += 1;
+ if (progressCallback) {
+ progressCallback('indexes', indexesCount, 'fetching');
+ }
+
+ if (isUnique) {
+ if (
+ typeof tableInResult.uniqueConstraints[constraintName] !== 'undefined'
+ ) {
+ tableInResult.uniqueConstraints[constraintName]!.columns.push(
+ columnName,
+ );
+ } else {
+ tableInResult.uniqueConstraints[constraintName] = {
+ name: constraintName,
+ columns: [columnName],
+ };
+ }
+ }
+ }
+
+ /* const views = await db.query(
+ `select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema = '${inputSchema}';`,
+ ); */
+
+ /* const resultViews: Record = {}; */
+
+ /* viewsCount = views.length;
+ if (progressCallback) {
+ progressCallback('views', viewsCount, 'fetching');
+ }
+ for await (const view of views) {
+ const viewName = view['TABLE_NAME'];
+ const definition = view['VIEW_DEFINITION'];
+
+ const withCheckOption = view['CHECK_OPTION'] === 'NONE'
+ ? undefined
+ : view['CHECK_OPTION'].toLowerCase();
+ const sqlSecurity = view['SECURITY_TYPE'].toLowerCase();
+
+ const [createSqlStatement] = await db.query(
+ `SHOW CREATE VIEW \`${viewName}\`;`,
+ );
+ const algorithmMatch = createSqlStatement['Create View'].match(/ALGORITHM=([^ ]+)/);
+ const algorithm = algorithmMatch
+ ? algorithmMatch[1].toLowerCase()
+ : undefined;
+
+ const columns = result[viewName].columns;
+ delete result[viewName];
+
+ resultViews[viewName] = {
+ columns: columns,
+ isExisting: false,
+ name: viewName,
+ algorithm,
+ definition,
+ sqlSecurity,
+ withCheckOption,
+ };
+ } */
+
+ if (progressCallback) {
+ progressCallback('indexes', indexesCount, 'done');
+ // progressCallback("enums", 0, "fetching");
+ progressCallback('enums', 0, 'done');
+ }
+
+ return {
+ version: '1',
+ dialect: 'singlestore',
+ tables: result,
+ /* views: resultViews, */
+ _meta: {
+ tables: {},
+ columns: {},
+ },
+ internal: internals,
+ };
+};
diff --git a/drizzle-kit/src/serializer/studio.ts b/drizzle-kit/src/serializer/studio.ts
index d83a65b08..bbd811627 100644
--- a/drizzle-kit/src/serializer/studio.ts
+++ b/drizzle-kit/src/serializer/studio.ts
@@ -15,6 +15,11 @@ import {
} from 'drizzle-orm';
import { AnyMySqlTable, getTableConfig as mysqlTableConfig, MySqlTable } from 'drizzle-orm/mysql-core';
import { AnyPgTable, getTableConfig as pgTableConfig, PgTable } from 'drizzle-orm/pg-core';
+import {
+ AnySingleStoreTable,
+ getTableConfig as singlestoreTableConfig,
+ SingleStoreTable,
+} from 'drizzle-orm/singlestore-core';
import { AnySQLiteTable, getTableConfig as sqliteTableConfig, SQLiteTable } from 'drizzle-orm/sqlite-core';
import fs from 'fs';
import { Hono } from 'hono';
@@ -28,6 +33,7 @@ import { z } from 'zod';
import { safeRegister } from '../cli/commands/utils';
import type { MysqlCredentials } from '../cli/validations/mysql';
import type { PostgresCredentials } from '../cli/validations/postgres';
+import type { SingleStoreCredentials } from '../cli/validations/singlestore';
import type { SqliteCredentials } from '../cli/validations/sqlite';
import { prepareFilenames } from '.';
@@ -45,7 +51,7 @@ type SchemaFile = {
export type Setup = {
dbHash: string;
- dialect: 'postgresql' | 'mysql' | 'sqlite';
+ dialect: 'postgresql' | 'mysql' | 'sqlite' | 'singlestore';
driver?: 'aws-data-api' | 'd1-http' | 'turso' | 'pglite';
proxy: (params: ProxyParams) => Promise;
customDefaults: CustomDefault[];
@@ -172,6 +178,43 @@ export const prepareSQLiteSchema = async (path: string | string[]) => {
return { schema: sqliteSchema, relations, files };
};
+export const prepareSingleStoreSchema = async (path: string | string[]) => {
+ const imports = prepareFilenames(path);
+ const singlestoreSchema: Record> = {
+ public: {},
+ };
+ const relations: Record = {};
+
+ // files content as string
+ const files = imports.map((it, index) => ({
+ // get the file name from the path
+ name: it.split('/').pop() || `schema${index}.ts`,
+ content: fs.readFileSync(it, 'utf-8'),
+ }));
+
+ const { unregister } = await safeRegister();
+ for (let i = 0; i < imports.length; i++) {
+ const it = imports[i];
+
+ const i0: Record = require(`${it}`);
+ const i0values = Object.entries(i0);
+
+ i0values.forEach(([k, t]) => {
+ if (is(t, SingleStoreTable)) {
+ const schema = singlestoreTableConfig(t).schema || 'public';
+ singlestoreSchema[schema][k] = t;
+ }
+
+ if (is(t, Relations)) {
+ relations[k] = t;
+ }
+ });
+ }
+ unregister();
+
+ return { schema: singlestoreSchema, relations, files };
+};
+
const getCustomDefaults = >(
schema: Record>,
): CustomDefault[] => {
@@ -187,8 +230,10 @@ const getCustomDefaults = >(
tableConfig = pgTableConfig(table);
} else if (is(table, MySqlTable)) {
tableConfig = mysqlTableConfig(table);
- } else {
+ } else if (is(table, SQLiteTable)) {
tableConfig = sqliteTableConfig(table);
+ } else {
+ tableConfig = singlestoreTableConfig(table);
}
tableConfig.columns.map((column) => {
@@ -346,6 +391,39 @@ export const drizzleForLibSQL = async (
};
};
+export const drizzleForSingleStore = async (
+ credentials: SingleStoreCredentials,
+ singlestoreSchema: Record>,
+ relations: Record,
+ schemaFiles?: SchemaFile[],
+): Promise => {
+ const { connectToSingleStore } = await import('../cli/connections');
+ const { proxy } = await connectToSingleStore(credentials);
+
+ const customDefaults = getCustomDefaults(singlestoreSchema);
+
+ let dbUrl: string;
+
+ if ('url' in credentials) {
+ dbUrl = credentials.url;
+ } else {
+ dbUrl =
+ `singlestore://${credentials.user}:${credentials.password}@${credentials.host}:${credentials.port}/${credentials.database}`;
+ }
+
+ const dbHash = createHash('sha256').update(dbUrl).digest('hex');
+
+ return {
+ dbHash,
+ dialect: 'singlestore',
+ proxy,
+ customDefaults,
+ schema: singlestoreSchema,
+ relations,
+ schemaFiles,
+ };
+};
+
export const extractRelations = (tablesConfig: {
tables: TablesRelationalConfig;
tableNamesMap: Record;
@@ -371,6 +449,8 @@ export const extractRelations = (tablesConfig: {
refSchema = mysqlTableConfig(refTable).schema;
} else if (is(refTable, SQLiteTable)) {
refSchema = undefined;
+ } else if (is(refTable, SingleStoreTable)) {
+ refSchema = singlestoreTableConfig(refTable).schema;
} else {
throw new Error('unsupported dialect');
}
diff --git a/drizzle-kit/src/snapshotsDiffer.ts b/drizzle-kit/src/snapshotsDiffer.ts
index 060f12bbd..2db4ad02c 100644
--- a/drizzle-kit/src/snapshotsDiffer.ts
+++ b/drizzle-kit/src/snapshotsDiffer.ts
@@ -117,6 +117,7 @@ import {
prepareRenameSequenceJson,
prepareRenameTableJson,
prepareRenameViewJson,
+ prepareSingleStoreCreateTableJson,
prepareSqliteAlterColumns,
prepareSQLiteCreateTable,
prepareSqliteCreateViewJson,
@@ -131,12 +132,14 @@ import {
PgSchemaSquashed,
PgSquasher,
Policy,
+ policy,
policySquashed,
Role,
roleSchema,
sequenceSquashed,
View,
} from './serializer/pgSchema';
+import { SingleStoreSchema, SingleStoreSchemaSquashed, SingleStoreSquasher } from './serializer/singlestoreSchema';
import { SQLiteSchema, SQLiteSchemaSquashed, SQLiteSquasher, View as SqliteView } from './serializer/sqliteSchema';
import { libSQLCombineStatements, sqliteCombineStatements } from './statementCombiner';
import { copy, prepareMigrationMeta } from './utils';
@@ -393,6 +396,11 @@ export const diffResultSchemeMysql = object({
alteredViews: alteredMySqlViewSchema.array(),
});
+export const diffResultSchemeSingleStore = object({
+ alteredTablesWithColumns: alteredTableScheme.array(),
+ alteredEnums: never().array(),
+});
+
export const diffResultSchemeSQLite = object({
alteredTablesWithColumns: alteredTableScheme.array(),
alteredEnums: never().array(),
@@ -407,6 +415,7 @@ export type Table = TypeOf;
export type AlteredTable = TypeOf;
export type DiffResult = TypeOf;
export type DiffResultMysql = TypeOf;
+export type DiffResultSingleStore = TypeOf;
export type DiffResultSQLite = TypeOf;
export interface ResolverInput {
@@ -1267,17 +1276,21 @@ export const applyPgSnapshotsDiff = async (
it.name,
it.schema,
it.addedCompositePKs,
+ curFull as PgSchema,
);
deletedCompositePKs = prepareDeleteCompositePrimaryKeyPg(
it.name,
it.schema,
it.deletedCompositePKs,
+ prevFull as PgSchema,
);
}
alteredCompositePKs = prepareAlterCompositePrimaryKeyPg(
it.name,
it.schema,
it.alteredCompositePKs,
+ prevFull as PgSchema,
+ curFull as PgSchema,
);
// add logic for unique constraints
@@ -2673,6 +2686,519 @@ export const applyMysqlSnapshotsDiff = async (
};
};
+export const applySingleStoreSnapshotsDiff = async (
+ json1: SingleStoreSchemaSquashed,
+ json2: SingleStoreSchemaSquashed,
+ tablesResolver: (
+ input: ResolverInput,
+ ) => Promise>,
+ columnsResolver: (
+ input: ColumnsResolverInput,
+ ) => Promise>,
+ /* viewsResolver: (
+ input: ResolverInput,
+ ) => Promise>, */
+ prevFull: SingleStoreSchema,
+ curFull: SingleStoreSchema,
+ action?: 'push' | undefined,
+): Promise<{
+ statements: JsonStatement[];
+ sqlStatements: string[];
+ _meta:
+ | {
+ schemas: {};
+ tables: {};
+ columns: {};
+ }
+ | undefined;
+}> => {
+ // squash indexes and fks
+
+ // squash uniqueIndexes and uniqueConstraint into constraints object
+ // it should be done for singlestore only because it has no diffs for it
+
+ // TODO: @AndriiSherman
+ // Add an upgrade to v6 and move all snaphosts to this strcutre
+ // After that we can generate singlestore in 1 object directly(same as sqlite)
+ for (const tableName in json1.tables) {
+ const table = json1.tables[tableName];
+ for (const indexName in table.indexes) {
+ const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]);
+ if (index.isUnique) {
+ table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({
+ name: index.name,
+ columns: index.columns,
+ });
+ delete json1.tables[tableName].indexes[index.name];
+ }
+ }
+ }
+
+ for (const tableName in json2.tables) {
+ const table = json2.tables[tableName];
+ for (const indexName in table.indexes) {
+ const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]);
+ if (index.isUnique) {
+ table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({
+ name: index.name,
+ columns: index.columns,
+ });
+ delete json2.tables[tableName].indexes[index.name];
+ }
+ }
+ }
+
+ const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables);
+
+ const {
+ created: createdTables,
+ deleted: deletedTables,
+ renamed: renamedTables, // renamed or moved
+ } = await tablesResolver({
+ created: tablesDiff.added,
+ deleted: tablesDiff.deleted,
+ });
+
+ const tablesPatchedSnap1 = copy(json1);
+ tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => {
+ const { name } = nameChangeFor(it, renamedTables);
+ it.name = name;
+ return [name, it];
+ });
+
+ const res = diffColumns(tablesPatchedSnap1.tables, json2.tables);
+ const columnRenames = [] as {
+ table: string;
+ renames: { from: Column; to: Column }[];
+ }[];
+
+ const columnCreates = [] as {
+ table: string;
+ columns: Column[];
+ }[];
+
+ const columnDeletes = [] as {
+ table: string;
+ columns: Column[];
+ }[];
+
+ for (let entry of Object.values(res)) {
+ const { renamed, created, deleted } = await columnsResolver({
+ tableName: entry.name,
+ schema: entry.schema,
+ deleted: entry.columns.deleted,
+ created: entry.columns.added,
+ });
+
+ if (created.length > 0) {
+ columnCreates.push({
+ table: entry.name,
+ columns: created,
+ });
+ }
+
+ if (deleted.length > 0) {
+ columnDeletes.push({
+ table: entry.name,
+ columns: deleted,
+ });
+ }
+
+ if (renamed.length > 0) {
+ columnRenames.push({
+ table: entry.name,
+ renames: renamed,
+ });
+ }
+ }
+
+ const columnRenamesDict = columnRenames.reduce(
+ (acc, it) => {
+ acc[it.table] = it.renames;
+ return acc;
+ },
+ {} as Record<
+ string,
+ {
+ from: Named;
+ to: Named;
+ }[]
+ >,
+ );
+
+ const columnsPatchedSnap1 = copy(tablesPatchedSnap1);
+ columnsPatchedSnap1.tables = mapEntries(
+ columnsPatchedSnap1.tables,
+ (tableKey, tableValue) => {
+ const patchedColumns = mapKeys(
+ tableValue.columns,
+ (columnKey, column) => {
+ const rens = columnRenamesDict[tableValue.name] || [];
+ const newName = columnChangeFor(columnKey, rens);
+ column.name = newName;
+ return newName;
+ },
+ );
+
+ tableValue.columns = patchedColumns;
+ return [tableKey, tableValue];
+ },
+ );
+
+ /* const viewsDiff = diffSchemasOrTables(json1.views, json2.views);
+
+ const {
+ created: createdViews,
+ deleted: deletedViews,
+ renamed: renamedViews, // renamed or moved
+ } = await viewsResolver({
+ created: viewsDiff.added,
+ deleted: viewsDiff.deleted,
+ });
+
+ const renamesViewDic: Record = {};
+ renamedViews.forEach((it) => {
+ renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name };
+ });
+
+ const viewsPatchedSnap1 = copy(columnsPatchedSnap1);
+ viewsPatchedSnap1.views = mapEntries(
+ viewsPatchedSnap1.views,
+ (viewKey, viewValue) => {
+ const rename = renamesViewDic[viewValue.name];
+
+ if (rename) {
+ viewValue.name = rename.to;
+ viewKey = rename.to;
+ }
+
+ return [viewKey, viewValue];
+ },
+ );
+
+ */
+ const diffResult = applyJsonDiff(tablesPatchedSnap1, json2); // replace tablesPatchedSnap1 with viewsPatchedSnap1
+
+ const typedResult: DiffResultSingleStore = diffResultSchemeSingleStore.parse(diffResult);
+
+ const jsonStatements: JsonStatement[] = [];
+
+ const jsonCreateIndexesForCreatedTables = createdTables
+ .map((it) => {
+ return prepareCreateIndexesJson(
+ it.name,
+ it.schema,
+ it.indexes,
+ curFull.internal,
+ );
+ })
+ .flat();
+
+ const jsonDropTables = deletedTables.map((it) => {
+ return prepareDropTableJson(it);
+ });
+
+ const jsonRenameTables = renamedTables.map((it) => {
+ return prepareRenameTableJson(it.from, it.to);
+ });
+
+ const alteredTables = typedResult.alteredTablesWithColumns;
+
+ const jsonAddedCompositePKs: JsonCreateCompositePK[] = [];
+
+ const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = [];
+ const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = [];
+ const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = [];
+
+ const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames
+ .map((it) => prepareRenameColumns(it.table, '', it.renames))
+ .flat();
+
+ const jsonAddColumnsStatemets: JsonAddColumnStatement[] = columnCreates
+ .map((it) => _prepareAddColumns(it.table, '', it.columns))
+ .flat();
+
+ const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes
+ .map((it) => _prepareDropColumns(it.table, '', it.columns))
+ .flat();
+
+ alteredTables.forEach((it) => {
+ // This part is needed to make sure that same columns in a table are not triggered for change
+ // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name
+ // We double-check that pk with same set of columns are both in added and deleted diffs
+ let addedColumns: string[] = [];
+ for (const addedPkName of Object.keys(it.addedCompositePKs)) {
+ const addedPkColumns = it.addedCompositePKs[addedPkName];
+ addedColumns = SingleStoreSquasher.unsquashPK(addedPkColumns).columns;
+ }
+
+ let deletedColumns: string[] = [];
+ for (const deletedPkName of Object.keys(it.deletedCompositePKs)) {
+ const deletedPkColumns = it.deletedCompositePKs[deletedPkName];
+ deletedColumns = SingleStoreSquasher.unsquashPK(deletedPkColumns).columns;
+ }
+
+ // Don't need to sort, but need to add tests for it
+ // addedColumns.sort();
+ // deletedColumns.sort();
+ const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns);
+
+ // add logic for unique constraints
+ let addedUniqueConstraints: JsonCreateUniqueConstraint[] = [];
+ let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = [];
+ let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = [];
+
+ let createdCheckConstraints: JsonCreateCheckConstraint[] = [];
+ let deletedCheckConstraints: JsonDeleteCheckConstraint[] = [];
+
+ addedUniqueConstraints = prepareAddUniqueConstraint(
+ it.name,
+ it.schema,
+ it.addedUniqueConstraints,
+ );
+ deletedUniqueConstraints = prepareDeleteUniqueConstraint(
+ it.name,
+ it.schema,
+ it.deletedUniqueConstraints,
+ );
+ if (it.alteredUniqueConstraints) {
+ const added: Record = {};
+ const deleted: Record = {};
+ for (const k of Object.keys(it.alteredUniqueConstraints)) {
+ added[k] = it.alteredUniqueConstraints[k].__new;
+ deleted[k] = it.alteredUniqueConstraints[k].__old;
+ }
+ addedUniqueConstraints.push(
+ ...prepareAddUniqueConstraint(it.name, it.schema, added),
+ );
+ deletedUniqueConstraints.push(
+ ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted),
+ );
+ }
+
+ createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints);
+ deletedCheckConstraints = prepareDeleteCheckConstraint(
+ it.name,
+ it.schema,
+ it.deletedCheckConstraints,
+ );
+
+ // skip for push
+ if (it.alteredCheckConstraints && action !== 'push') {
+ const added: Record = {};
+ const deleted: Record = {};
+
+ for (const k of Object.keys(it.alteredCheckConstraints)) {
+ added[k] = it.alteredCheckConstraints[k].__new;
+ deleted[k] = it.alteredCheckConstraints[k].__old;
+ }
+ createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added));
+ deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted));
+ }
+
+ jsonAddedUniqueConstraints.push(...addedUniqueConstraints);
+ jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints);
+ jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints);
+ });
+
+ const rColumns = jsonRenameColumnsStatements.map((it) => {
+ const tableName = it.tableName;
+ const schema = it.schema;
+ return {
+ from: { schema, table: tableName, column: it.oldColumnName },
+ to: { schema, table: tableName, column: it.newColumnName },
+ };
+ });
+
+ const jsonTableAlternations = alteredTables
+ .map((it) => {
+ return prepareAlterColumnsMysql(
+ it.name,
+ it.schema,
+ it.altered,
+ json1,
+ json2,
+ action,
+ );
+ })
+ .flat();
+
+ const jsonCreateIndexesForAllAlteredTables = alteredTables
+ .map((it) => {
+ return prepareCreateIndexesJson(
+ it.name,
+ it.schema,
+ it.addedIndexes || {},
+ curFull.internal,
+ );
+ })
+ .flat();
+
+ const jsonDropIndexesForAllAlteredTables = alteredTables
+ .map((it) => {
+ return prepareDropIndexesJson(
+ it.name,
+ it.schema,
+ it.deletedIndexes || {},
+ );
+ })
+ .flat();
+
+ alteredTables.forEach((it) => {
+ const droppedIndexes = Object.keys(it.alteredIndexes).reduce(
+ (current, item: string) => {
+ current[item] = it.alteredIndexes[item].__old;
+ return current;
+ },
+ {} as Record,
+ );
+ const createdIndexes = Object.keys(it.alteredIndexes).reduce(
+ (current, item: string) => {
+ current[item] = it.alteredIndexes[item].__new;
+ return current;
+ },
+ {} as Record,
+ );
+
+ jsonCreateIndexesForAllAlteredTables.push(
+ ...prepareCreateIndexesJson(it.name, it.schema, createdIndexes || {}),
+ );
+ jsonDropIndexesForAllAlteredTables.push(
+ ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}),
+ );
+ });
+
+ const jsonSingleStoreCreateTables = createdTables.map((it) => {
+ return prepareSingleStoreCreateTableJson(
+ it,
+ curFull as SingleStoreSchema,
+ curFull.internal,
+ );
+ });
+
+ /* const createViews: JsonCreateSingleStoreViewStatement[] = [];
+ const dropViews: JsonDropViewStatement[] = [];
+ const renameViews: JsonRenameViewStatement[] = [];
+ const alterViews: JsonAlterSingleStoreViewStatement[] = [];
+
+ createViews.push(
+ ...createdViews.filter((it) => !it.isExisting).map((it) => {
+ return prepareSingleStoreCreateViewJson(
+ it.name,
+ it.definition!,
+ it.meta,
+ );
+ }),
+ );
+
+ dropViews.push(
+ ...deletedViews.filter((it) => !it.isExisting).map((it) => {
+ return prepareDropViewJson(it.name);
+ }),
+ );
+
+ renameViews.push(
+ ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[it.from.name].isExisting).map((it) => {
+ return prepareRenameViewJson(it.to.name, it.from.name);
+ }),
+ );
+
+ const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting);
+
+ for (const alteredView of alteredViews) {
+ const { definition, meta } = json2.views[alteredView.name];
+
+ if (alteredView.alteredExisting) {
+ dropViews.push(prepareDropViewJson(alteredView.name));
+
+ createViews.push(
+ prepareSingleStoreCreateViewJson(
+ alteredView.name,
+ definition!,
+ meta,
+ ),
+ );
+
+ continue;
+ }
+
+ if (alteredView.alteredDefinition && action !== 'push') {
+ createViews.push(
+ prepareSingleStoreCreateViewJson(
+ alteredView.name,
+ definition!,
+ meta,
+ true,
+ ),
+ );
+ continue;
+ }
+
+ if (alteredView.alteredMeta) {
+ const view = curFull['views'][alteredView.name];
+ alterViews.push(
+ prepareSingleStoreAlterView(view),
+ );
+ }
+ } */
+
+ jsonStatements.push(...jsonSingleStoreCreateTables);
+
+ jsonStatements.push(...jsonDropTables);
+ jsonStatements.push(...jsonRenameTables);
+ jsonStatements.push(...jsonRenameColumnsStatements);
+
+ /*jsonStatements.push(...createViews);
+ jsonStatements.push(...dropViews);
+ jsonStatements.push(...renameViews);
+ jsonStatements.push(...alterViews);
+ */
+ jsonStatements.push(...jsonDeletedUniqueConstraints);
+
+ // Will need to drop indexes before changing any columns in table
+ // Then should go column alternations and then index creation
+ jsonStatements.push(...jsonDropIndexesForAllAlteredTables);
+
+ jsonStatements.push(...jsonTableAlternations);
+ jsonStatements.push(...jsonAddedCompositePKs);
+
+ jsonStatements.push(...jsonAddedUniqueConstraints);
+ jsonStatements.push(...jsonDeletedUniqueConstraints);
+
+ jsonStatements.push(...jsonAddColumnsStatemets);
+
+ jsonStatements.push(...jsonCreateIndexesForCreatedTables);
+
+ jsonStatements.push(...jsonCreateIndexesForAllAlteredTables);
+
+ jsonStatements.push(...jsonDropColumnsStatemets);
+
+ jsonStatements.push(...jsonAddedCompositePKs);
+
+ jsonStatements.push(...jsonAlteredUniqueConstraints);
+
+ const sqlStatements = fromJson(jsonStatements, 'singlestore');
+
+ const uniqueSqlStatements: string[] = [];
+ sqlStatements.forEach((ss) => {
+ if (!uniqueSqlStatements.includes(ss)) {
+ uniqueSqlStatements.push(ss);
+ }
+ });
+
+ const rTables = renamedTables.map((it) => {
+ return { from: it.from, to: it.to };
+ });
+
+ const _meta = prepareMigrationMeta([], rTables, rColumns);
+
+ return {
+ statements: jsonStatements,
+ sqlStatements: uniqueSqlStatements,
+ _meta,
+ };
+};
+
export const applySqliteSnapshotsDiff = async (
json1: SQLiteSchemaSquashed,
json2: SQLiteSchemaSquashed,
diff --git a/drizzle-kit/src/sqlgenerator.ts b/drizzle-kit/src/sqlgenerator.ts
index 3c88a86ce..a35c001fd 100644
--- a/drizzle-kit/src/sqlgenerator.ts
+++ b/drizzle-kit/src/sqlgenerator.ts
@@ -85,7 +85,9 @@ import {
import { Dialect } from './schemaValidator';
import { MySqlSquasher } from './serializer/mysqlSchema';
import { PgSquasher, policy } from './serializer/pgSchema';
+import { SingleStoreSquasher } from './serializer/singlestoreSchema';
import { SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema';
+
import { escapeSingleQuotes } from './utils';
const parseType = (schemaPrefix: string, type: string) => {
@@ -572,6 +574,81 @@ class MySqlCreateTableConvertor extends Convertor {
return statement;
}
}
+class SingleStoreCreateTableConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'create_table' && dialect === 'singlestore';
+ }
+
+ convert(st: JsonCreateTableStatement) {
+ const {
+ tableName,
+ columns,
+ schema,
+ compositePKs,
+ uniqueConstraints,
+ internals,
+ } = st;
+
+ let statement = '';
+ statement += `CREATE TABLE \`${tableName}\` (\n`;
+ for (let i = 0; i < columns.length; i++) {
+ const column = columns[i];
+
+ const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : '';
+ const notNullStatement = column.notNull ? ' NOT NULL' : '';
+ const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : '';
+
+ const onUpdateStatement = column.onUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+
+ const autoincrementStatement = column.autoincrement
+ ? ' AUTO_INCREMENT'
+ : '';
+
+ const generatedStatement = column.generated
+ ? ` GENERATED ALWAYS AS (${column.generated?.as}) ${column.generated?.type.toUpperCase()}`
+ : '';
+
+ statement += '\t'
+ + `\`${column.name}\` ${column.type}${autoincrementStatement}${primaryKeyStatement}${notNullStatement}${defaultStatement}${onUpdateStatement}${generatedStatement}`;
+ statement += i === columns.length - 1 ? '' : ',\n';
+ }
+
+ if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) {
+ statement += ',\n';
+ const compositePK = SingleStoreSquasher.unsquashPK(compositePKs[0]);
+ statement += `\tCONSTRAINT \`${st.compositePkName}\` PRIMARY KEY(\`${compositePK.columns.join(`\`,\``)}\`)`;
+ }
+
+ if (
+ typeof uniqueConstraints !== 'undefined'
+ && uniqueConstraints.length > 0
+ ) {
+ for (const uniqueConstraint of uniqueConstraints) {
+ statement += ',\n';
+ const unsquashedUnique = SingleStoreSquasher.unsquashUnique(uniqueConstraint);
+
+ const uniqueString = unsquashedUnique.columns
+ .map((it) => {
+ return internals?.indexes
+ ? internals?.indexes[unsquashedUnique.name]?.columns[it]
+ ?.isExpression
+ ? it
+ : `\`${it}\``
+ : `\`${it}\``;
+ })
+ .join(',');
+
+ statement += `\tCONSTRAINT \`${unsquashedUnique.name}\` UNIQUE(${uniqueString})`;
+ }
+ }
+
+ statement += `\n);`;
+ statement += `\n`;
+ return statement;
+ }
+}
export class SQLiteCreateTableConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
@@ -1165,6 +1242,29 @@ class MySqlAlterTableAddCheckConstraintConvertor extends Convertor {
}
}
+class SingleStoreAlterTableAddUniqueConstraintConvertor extends Convertor {
+ can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean {
+ return statement.type === 'create_unique_constraint' && dialect === 'singlestore';
+ }
+ convert(statement: JsonCreateUniqueConstraint): string {
+ const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data);
+
+ return `ALTER TABLE \`${statement.tableName}\` ADD CONSTRAINT \`${unsquashed.name}\` UNIQUE(\`${
+ unsquashed.columns.join('`,`')
+ }\`);`;
+ }
+}
+class SingleStoreAlterTableDropUniqueConstraintConvertor extends Convertor {
+ can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean {
+ return statement.type === 'delete_unique_constraint' && dialect === 'singlestore';
+ }
+ convert(statement: JsonDeleteUniqueConstraint): string {
+ const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data);
+
+ return `ALTER TABLE \`${statement.tableName}\` DROP INDEX \`${unsquashed.name}\`;`;
+ }
+}
+
class MySqlAlterTableDeleteCheckConstraintConvertor extends Convertor {
can(statement: JsonDeleteCheckConstraint, dialect: Dialect): boolean {
return (
@@ -1431,6 +1531,17 @@ class MySQLDropTableConvertor extends Convertor {
}
}
+class SingleStoreDropTableConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'drop_table' && dialect === 'singlestore';
+ }
+
+ convert(statement: JsonDropTableStatement) {
+ const { tableName } = statement;
+ return `DROP TABLE \`${tableName}\`;`;
+ }
+}
+
export class SQLiteDropTableConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return statement.type === 'drop_table' && (dialect === 'sqlite' || dialect === 'turso');
@@ -1479,6 +1590,17 @@ class MySqlRenameTableConvertor extends Convertor {
}
}
+class SingleStoreRenameTableConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'rename_table' && dialect === 'singlestore';
+ }
+
+ convert(statement: JsonRenameTableStatement) {
+ const { tableNameFrom, tableNameTo } = statement;
+ return `RENAME TABLE \`${tableNameFrom}\` TO \`${tableNameTo}\`;`;
+ }
+}
+
class PgAlterTableRenameColumnConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
@@ -1510,6 +1632,19 @@ class MySqlAlterTableRenameColumnConvertor extends Convertor {
}
}
+class SingleStoreAlterTableRenameColumnConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return (
+ statement.type === 'alter_table_rename_column' && dialect === 'singlestore'
+ );
+ }
+
+ convert(statement: JsonRenameColumnStatement) {
+ const { tableName, oldColumnName, newColumnName } = statement;
+ return `ALTER TABLE \`${tableName}\` RENAME COLUMN \`${oldColumnName}\` TO \`${newColumnName}\`;`;
+ }
+}
+
class SQLiteAlterTableRenameColumnConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
@@ -1552,6 +1687,17 @@ class MySqlAlterTableDropColumnConvertor extends Convertor {
}
}
+class SingleStoreAlterTableDropColumnConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'alter_table_drop_column' && dialect === 'singlestore';
+ }
+
+ convert(statement: JsonDropColumnStatement) {
+ const { tableName, columnName } = statement;
+ return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`;
+ }
+}
+
class SQLiteAlterTableDropColumnConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return statement.type === 'alter_table_drop_column' && (dialect === 'sqlite' || dialect === 'turso');
@@ -1659,6 +1805,37 @@ class MySqlAlterTableAddColumnConvertor extends Convertor {
}
}
+class SingleStoreAlterTableAddColumnConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'alter_table_add_column' && dialect === 'singlestore';
+ }
+
+ convert(statement: JsonAddColumnStatement) {
+ const { tableName, column } = statement;
+ const {
+ name,
+ type,
+ notNull,
+ primaryKey,
+ autoincrement,
+ onUpdate,
+ generated,
+ } = column;
+
+ const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`;
+ const notNullStatement = `${notNull ? ' NOT NULL' : ''}`;
+ const primaryKeyStatement = `${primaryKey ? ' PRIMARY KEY' : ''}`;
+ const autoincrementStatement = `${autoincrement ? ' AUTO_INCREMENT' : ''}`;
+ const onUpdateStatement = `${onUpdate ? ' ON UPDATE CURRENT_TIMESTAMP' : ''}`;
+
+ const generatedStatement = generated
+ ? ` GENERATED ALWAYS AS (${generated?.as}) ${generated?.type.toUpperCase()}`
+ : '';
+
+ return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${notNullStatement}${onUpdateStatement}${generatedStatement};`;
+ }
+}
+
export class SQLiteAlterTableAddColumnConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
@@ -2462,76 +2639,429 @@ class MySqlModifyColumn extends Convertor {
}
}
-class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor {
+class SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'create_composite_pk' && dialect === 'postgresql';
+ return (
+ statement.type === 'alter_table_alter_column_alter_generated'
+ && dialect === 'singlestore'
+ );
}
- convert(statement: JsonCreateCompositePK) {
- const { name, columns } = PgSquasher.unsquashPK(statement.data);
+ convert(statement: JsonAlterColumnAlterGeneratedStatement) {
+ const {
+ tableName,
+ columnName,
+ schema,
+ columnNotNull: notNull,
+ columnDefault,
+ columnOnUpdate,
+ columnAutoIncrement,
+ columnPk,
+ columnGenerated,
+ } = statement;
- const tableNameWithSchema = statement.schema
- ? `"${statement.schema}"."${statement.tableName}"`
- : `"${statement.tableName}"`;
+ const tableNameWithSchema = schema
+ ? `\`${schema}\`.\`${tableName}\``
+ : `\`${tableName}\``;
- return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.constraintName}" PRIMARY KEY("${
- columns.join('","')
- }");`;
+ const addColumnStatement = new SingleStoreAlterTableAddColumnConvertor().convert({
+ schema,
+ tableName,
+ column: {
+ name: columnName,
+ type: statement.newDataType,
+ notNull,
+ default: columnDefault,
+ onUpdate: columnOnUpdate,
+ autoincrement: columnAutoIncrement,
+ primaryKey: columnPk,
+ generated: columnGenerated,
+ },
+ type: 'alter_table_add_column',
+ });
+
+ return [
+ `ALTER TABLE ${tableNameWithSchema} drop column \`${columnName}\`;`,
+ addColumnStatement,
+ ];
}
}
-class PgAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor {
+
+class SingleStoreAlterTableAlterColumnSetDefaultConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'delete_composite_pk' && dialect === 'postgresql';
+ return (
+ statement.type === 'alter_table_alter_column_set_default'
+ && dialect === 'singlestore'
+ );
}
- convert(statement: JsonDeleteCompositePK) {
- const { name, columns } = PgSquasher.unsquashPK(statement.data);
-
- const tableNameWithSchema = statement.schema
- ? `"${statement.schema}"."${statement.tableName}"`
- : `"${statement.tableName}"`;
-
- return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`;
+ convert(statement: JsonAlterColumnSetDefaultStatement) {
+ const { tableName, columnName } = statement;
+ return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` SET DEFAULT ${statement.newDefaultValue};`;
}
}
-class PgAlterTableAlterCompositePrimaryKeyConvertor extends Convertor {
+class SingleStoreAlterTableAlterColumnDropDefaultConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'alter_composite_pk' && dialect === 'postgresql';
- }
-
- convert(statement: JsonAlterCompositePK) {
- const { name, columns } = PgSquasher.unsquashPK(statement.old);
- const { name: newName, columns: newColumns } = PgSquasher.unsquashPK(
- statement.new,
+ return (
+ statement.type === 'alter_table_alter_column_drop_default'
+ && dialect === 'singlestore'
);
+ }
- const tableNameWithSchema = statement.schema
- ? `"${statement.schema}"."${statement.tableName}"`
- : `"${statement.tableName}"`;
-
- console.log(statement.oldConstraintName, statement.newConstraintName);
- return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.oldConstraintName}";\n${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.newConstraintName}" PRIMARY KEY("${
- newColumns.join('","')
- }");`;
+ convert(statement: JsonAlterColumnDropDefaultStatement) {
+ const { tableName, columnName } = statement;
+ return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` DROP DEFAULT;`;
}
}
-class MySqlAlterTableCreateCompositePrimaryKeyConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'create_composite_pk' && dialect === 'mysql';
+class SingleStoreAlterTableAddPk extends Convertor {
+ can(statement: JsonStatement, dialect: string): boolean {
+ return (
+ statement.type === 'alter_table_alter_column_set_pk'
+ && dialect === 'singlestore'
+ );
}
-
- convert(statement: JsonCreateCompositePK) {
- const { name, columns } = MySqlSquasher.unsquashPK(statement.data);
- return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY(\`${columns.join('`,`')}\`);`;
+ convert(statement: JsonAlterColumnSetPrimaryKeyStatement): string {
+ return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`;
}
}
-class MySqlAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor {
- can(statement: JsonStatement, dialect: Dialect): boolean {
- return statement.type === 'delete_composite_pk' && dialect === 'mysql';
- }
+class SingleStoreAlterTableDropPk extends Convertor {
+ can(statement: JsonStatement, dialect: string): boolean {
+ return (
+ statement.type === 'alter_table_alter_column_drop_pk'
+ && dialect === 'singlestore'
+ );
+ }
+ convert(statement: JsonAlterColumnDropPrimaryKeyStatement): string {
+ return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`;
+ }
+}
+
+type SingleStoreModifyColumnStatement =
+ | JsonAlterColumnDropNotNullStatement
+ | JsonAlterColumnSetNotNullStatement
+ | JsonAlterColumnTypeStatement
+ | JsonAlterColumnDropOnUpdateStatement
+ | JsonAlterColumnSetOnUpdateStatement
+ | JsonAlterColumnDropAutoincrementStatement
+ | JsonAlterColumnSetAutoincrementStatement
+ | JsonAlterColumnSetDefaultStatement
+ | JsonAlterColumnDropDefaultStatement
+ | JsonAlterColumnSetGeneratedStatement
+ | JsonAlterColumnDropGeneratedStatement;
+
+class SingleStoreModifyColumn extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return (
+ (statement.type === 'alter_table_alter_column_set_type'
+ || statement.type === 'alter_table_alter_column_set_notnull'
+ || statement.type === 'alter_table_alter_column_drop_notnull'
+ || statement.type === 'alter_table_alter_column_drop_on_update'
+ || statement.type === 'alter_table_alter_column_set_on_update'
+ || statement.type === 'alter_table_alter_column_set_autoincrement'
+ || statement.type === 'alter_table_alter_column_drop_autoincrement'
+ || statement.type === 'alter_table_alter_column_set_default'
+ || statement.type === 'alter_table_alter_column_drop_default'
+ || statement.type === 'alter_table_alter_column_set_generated'
+ || statement.type === 'alter_table_alter_column_drop_generated')
+ && dialect === 'singlestore'
+ );
+ }
+
+ convert(statement: SingleStoreModifyColumnStatement) {
+ const { tableName, columnName } = statement;
+ let columnType = ``;
+ let columnDefault: any = '';
+ let columnNotNull = '';
+ let columnOnUpdate = '';
+ let columnAutoincrement = '';
+ let primaryKey = statement.columnPk ? ' PRIMARY KEY' : '';
+ let columnGenerated = '';
+
+ if (statement.type === 'alter_table_alter_column_drop_notnull') {
+ columnType = ` ${statement.newDataType}`;
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnOnUpdate = statement.columnOnUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+ columnAutoincrement = statement.columnAutoIncrement
+ ? ' AUTO_INCREMENT'
+ : '';
+ } else if (statement.type === 'alter_table_alter_column_set_notnull') {
+ columnNotNull = ` NOT NULL`;
+ columnType = ` ${statement.newDataType}`;
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+ columnOnUpdate = statement.columnOnUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+ columnAutoincrement = statement.columnAutoIncrement
+ ? ' AUTO_INCREMENT'
+ : '';
+ } else if (statement.type === 'alter_table_alter_column_drop_on_update') {
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnType = ` ${statement.newDataType}`;
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+ columnOnUpdate = '';
+ columnAutoincrement = statement.columnAutoIncrement
+ ? ' AUTO_INCREMENT'
+ : '';
+ } else if (statement.type === 'alter_table_alter_column_set_on_update') {
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnOnUpdate = ` ON UPDATE CURRENT_TIMESTAMP`;
+ columnType = ` ${statement.newDataType}`;
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+ columnAutoincrement = statement.columnAutoIncrement
+ ? ' AUTO_INCREMENT'
+ : '';
+ } else if (
+ statement.type === 'alter_table_alter_column_set_autoincrement'
+ ) {
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnOnUpdate = columnOnUpdate = statement.columnOnUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+ columnType = ` ${statement.newDataType}`;
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+ columnAutoincrement = ' AUTO_INCREMENT';
+ } else if (
+ statement.type === 'alter_table_alter_column_drop_autoincrement'
+ ) {
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnOnUpdate = columnOnUpdate = statement.columnOnUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+ columnType = ` ${statement.newDataType}`;
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+ columnAutoincrement = '';
+ } else if (statement.type === 'alter_table_alter_column_set_default') {
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnOnUpdate = columnOnUpdate = statement.columnOnUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+ columnType = ` ${statement.newDataType}`;
+ columnDefault = ` DEFAULT ${statement.newDefaultValue}`;
+ columnAutoincrement = statement.columnAutoIncrement
+ ? ' AUTO_INCREMENT'
+ : '';
+ } else if (statement.type === 'alter_table_alter_column_drop_default') {
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnOnUpdate = columnOnUpdate = statement.columnOnUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+ columnType = ` ${statement.newDataType}`;
+ columnDefault = '';
+ columnAutoincrement = statement.columnAutoIncrement
+ ? ' AUTO_INCREMENT'
+ : '';
+ } else if (statement.type === 'alter_table_alter_column_set_generated') {
+ columnType = ` ${statement.newDataType}`;
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnOnUpdate = columnOnUpdate = statement.columnOnUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+ columnAutoincrement = statement.columnAutoIncrement
+ ? ' AUTO_INCREMENT'
+ : '';
+
+ if (statement.columnGenerated?.type === 'virtual') {
+ return [
+ new SingleStoreAlterTableDropColumnConvertor().convert({
+ type: 'alter_table_drop_column',
+ tableName: statement.tableName,
+ columnName: statement.columnName,
+ schema: statement.schema,
+ }),
+ new SingleStoreAlterTableAddColumnConvertor().convert({
+ tableName,
+ column: {
+ name: columnName,
+ type: statement.newDataType,
+ notNull: statement.columnNotNull,
+ default: statement.columnDefault,
+ onUpdate: statement.columnOnUpdate,
+ autoincrement: statement.columnAutoIncrement,
+ primaryKey: statement.columnPk,
+ generated: statement.columnGenerated,
+ },
+ schema: statement.schema,
+ type: 'alter_table_add_column',
+ }),
+ ];
+ } else {
+ columnGenerated = statement.columnGenerated
+ ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}`
+ : '';
+ }
+ } else if (statement.type === 'alter_table_alter_column_drop_generated') {
+ columnType = ` ${statement.newDataType}`;
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnOnUpdate = columnOnUpdate = statement.columnOnUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+ columnAutoincrement = statement.columnAutoIncrement
+ ? ' AUTO_INCREMENT'
+ : '';
+
+ if (statement.oldColumn?.generated?.type === 'virtual') {
+ return [
+ new SingleStoreAlterTableDropColumnConvertor().convert({
+ type: 'alter_table_drop_column',
+ tableName: statement.tableName,
+ columnName: statement.columnName,
+ schema: statement.schema,
+ }),
+ new SingleStoreAlterTableAddColumnConvertor().convert({
+ tableName,
+ column: {
+ name: columnName,
+ type: statement.newDataType,
+ notNull: statement.columnNotNull,
+ default: statement.columnDefault,
+ onUpdate: statement.columnOnUpdate,
+ autoincrement: statement.columnAutoIncrement,
+ primaryKey: statement.columnPk,
+ generated: statement.columnGenerated,
+ },
+ schema: statement.schema,
+ type: 'alter_table_add_column',
+ }),
+ ];
+ }
+ } else {
+ columnType = ` ${statement.newDataType}`;
+ columnNotNull = statement.columnNotNull ? ` NOT NULL` : '';
+ columnOnUpdate = columnOnUpdate = statement.columnOnUpdate
+ ? ` ON UPDATE CURRENT_TIMESTAMP`
+ : '';
+ columnDefault = statement.columnDefault
+ ? ` DEFAULT ${statement.columnDefault}`
+ : '';
+ columnAutoincrement = statement.columnAutoIncrement
+ ? ' AUTO_INCREMENT'
+ : '';
+ columnGenerated = statement.columnGenerated
+ ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}`
+ : '';
+ }
+
+ // Seems like getting value from simple json2 shanpshot makes dates be dates
+ columnDefault = columnDefault instanceof Date
+ ? columnDefault.toISOString()
+ : columnDefault;
+
+ return `ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${columnName}\`${columnType}${columnAutoincrement}${columnNotNull}${columnDefault}${columnOnUpdate}${columnGenerated};`;
+ }
+}
+class SqliteAlterTableAlterColumnDropDefaultConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return (
+ statement.type === 'alter_table_alter_column_drop_default'
+ && dialect === 'sqlite'
+ );
+ }
+
+ convert(statement: JsonAlterColumnDropDefaultStatement) {
+ return (
+ '/*\n SQLite does not support "Drop default from column" out of the box, we do not generate automatic migration for that, so it has to be done manually'
+ + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php'
+ + '\n https://www.sqlite.org/lang_altertable.html'
+ + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3'
+ + "\n\n Due to that we don't generate migration automatically and it has to be done manually"
+ + '\n*/'
+ );
+ }
+}
+
+class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'create_composite_pk' && dialect === 'postgresql';
+ }
+
+ convert(statement: JsonCreateCompositePK) {
+ const { name, columns } = PgSquasher.unsquashPK(statement.data);
+
+ const tableNameWithSchema = statement.schema
+ ? `"${statement.schema}"."${statement.tableName}"`
+ : `"${statement.tableName}"`;
+
+ return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.constraintName}" PRIMARY KEY("${
+ columns.join('","')
+ }");`;
+ }
+}
+class PgAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'delete_composite_pk' && dialect === 'postgresql';
+ }
+
+ convert(statement: JsonDeleteCompositePK) {
+ const { name, columns } = PgSquasher.unsquashPK(statement.data);
+
+ const tableNameWithSchema = statement.schema
+ ? `"${statement.schema}"."${statement.tableName}"`
+ : `"${statement.tableName}"`;
+
+ return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`;
+ }
+}
+
+class PgAlterTableAlterCompositePrimaryKeyConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'alter_composite_pk' && dialect === 'postgresql';
+ }
+
+ convert(statement: JsonAlterCompositePK) {
+ const { name, columns } = PgSquasher.unsquashPK(statement.old);
+ const { name: newName, columns: newColumns } = PgSquasher.unsquashPK(
+ statement.new,
+ );
+
+ const tableNameWithSchema = statement.schema
+ ? `"${statement.schema}"."${statement.tableName}"`
+ : `"${statement.tableName}"`;
+
+ return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.oldConstraintName}";\n${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.newConstraintName}" PRIMARY KEY("${
+ newColumns.join('","')
+ }");`;
+ }
+}
+
+class MySqlAlterTableCreateCompositePrimaryKeyConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'create_composite_pk' && dialect === 'mysql';
+ }
+
+ convert(statement: JsonCreateCompositePK) {
+ const { name, columns } = MySqlSquasher.unsquashPK(statement.data);
+ return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY(\`${columns.join('`,`')}\`);`;
+ }
+}
+
+class MySqlAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'delete_composite_pk' && dialect === 'mysql';
+ }
convert(statement: JsonDeleteCompositePK) {
const { name, columns } = MySqlSquasher.unsquashPK(statement.data);
@@ -2553,6 +3083,89 @@ class MySqlAlterTableAlterCompositePrimaryKeyConvertor extends Convertor {
}
}
+class SqliteAlterTableCreateCompositePrimaryKeyConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'create_composite_pk' && dialect === 'sqlite';
+ }
+
+ convert(statement: JsonCreateCompositePK) {
+ let msg = '/*\n';
+ msg += `You're trying to add PRIMARY KEY(${statement.data}) to '${statement.tableName}' table\n`;
+ msg += 'SQLite does not support adding primary key to an already created table\n';
+ msg += 'You can do it in 3 steps with drizzle orm:\n';
+ msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n';
+ msg += ' - migrate old data from one table to another\n';
+ msg += ' - delete old_table in schema, generate sql\n\n';
+ msg += 'or create manual migration like below:\n\n';
+ msg += 'ALTER TABLE table_name RENAME TO old_table;\n';
+ msg += 'CREATE TABLE table_name (\n';
+ msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n';
+ msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n';
+ msg += '\t...\n';
+ msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n';
+ msg += ' );\n';
+ msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n';
+ msg += "Due to that we don't generate migration automatically and it has to be done manually\n";
+ msg += '*/\n';
+ return msg;
+ }
+}
+class SqliteAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'delete_composite_pk' && dialect === 'sqlite';
+ }
+
+ convert(statement: JsonDeleteCompositePK) {
+ let msg = '/*\n';
+ msg += `You're trying to delete PRIMARY KEY(${statement.data}) from '${statement.tableName}' table\n`;
+ msg += 'SQLite does not supportprimary key deletion from existing table\n';
+ msg += 'You can do it in 3 steps with drizzle orm:\n';
+ msg += ' - create new mirror table table without pk, rename current table to old_table, generate SQL\n';
+ msg += ' - migrate old data from one table to another\n';
+ msg += ' - delete old_table in schema, generate sql\n\n';
+ msg += 'or create manual migration like below:\n\n';
+ msg += 'ALTER TABLE table_name RENAME TO old_table;\n';
+ msg += 'CREATE TABLE table_name (\n';
+ msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n';
+ msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n';
+ msg += '\t...\n';
+ msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n';
+ msg += ' );\n';
+ msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n';
+ msg += "Due to that we don't generate migration automatically and it has to be done manually\n";
+ msg += '*/\n';
+ return msg;
+ }
+}
+
+class SqliteAlterTableAlterCompositePrimaryKeyConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'alter_composite_pk' && dialect === 'sqlite';
+ }
+
+ convert(statement: JsonAlterCompositePK) {
+ let msg = '/*\n';
+ msg += 'SQLite does not support altering primary key\n';
+ msg += 'You can do it in 3 steps with drizzle orm:\n';
+ msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n';
+ msg += ' - migrate old data from one table to another\n';
+ msg += ' - delete old_table in schema, generate sql\n\n';
+ msg += 'or create manual migration like below:\n\n';
+ msg += 'ALTER TABLE table_name RENAME TO old_table;\n';
+ msg += 'CREATE TABLE table_name (\n';
+ msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n';
+ msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n';
+ msg += '\t...\n';
+ msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n';
+ msg += ' );\n';
+ msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n';
+ msg += "Due to that we don't generate migration automatically and it has to be done manually\n";
+ msg += '*/\n';
+
+ return msg;
+ }
+}
+
class PgAlterTableAlterColumnSetPrimaryKeyConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
@@ -2895,6 +3508,32 @@ class CreateMySqlIndexConvertor extends Convertor {
}
}
+class CreateSingleStoreIndexConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'create_index' && dialect === 'singlestore';
+ }
+
+ convert(statement: JsonCreateIndexStatement): string {
+ // should be changed
+ const { name, columns, isUnique } = SingleStoreSquasher.unsquashIdx(
+ statement.data,
+ );
+ const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX';
+
+ const uniqueString = columns
+ .map((it) => {
+ return statement.internal?.indexes
+ ? statement.internal?.indexes[name]?.columns[it]?.isExpression
+ ? it
+ : `\`${it}\``
+ : `\`${it}\``;
+ })
+ .join(',');
+
+ return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString});`;
+ }
+}
+
export class CreateSqliteIndexConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return statement.type === 'create_index' && (dialect === 'sqlite' || dialect === 'turso');
@@ -3039,6 +3678,17 @@ class MySqlDropIndexConvertor extends Convertor {
}
}
+class SingleStoreDropIndexConvertor extends Convertor {
+ can(statement: JsonStatement, dialect: Dialect): boolean {
+ return statement.type === 'drop_index' && dialect === 'singlestore';
+ }
+
+ convert(statement: JsonDropIndexStatement): string {
+ const { name } = SingleStoreSquasher.unsquashIdx(statement.data);
+ return `DROP INDEX \`${name}\` ON \`${statement.tableName}\`;`;
+ }
+}
+
class SQLiteRecreateTableConvertor extends Convertor {
can(statement: JsonStatement, dialect: Dialect): boolean {
return (
@@ -3174,6 +3824,7 @@ class LibSQLRecreateTableConvertor extends Convertor {
const convertors: Convertor[] = [];
convertors.push(new PgCreateTableConvertor());
convertors.push(new MySqlCreateTableConvertor());
+convertors.push(new SingleStoreCreateTableConvertor());
convertors.push(new SQLiteCreateTableConvertor());
convertors.push(new SQLiteRecreateTableConvertor());
convertors.push(new LibSQLRecreateTableConvertor());
@@ -3210,22 +3861,27 @@ convertors.push(new AlterPgSequenceConvertor());
convertors.push(new PgDropTableConvertor());
convertors.push(new MySQLDropTableConvertor());
+convertors.push(new SingleStoreDropTableConvertor());
convertors.push(new SQLiteDropTableConvertor());
convertors.push(new PgRenameTableConvertor());
convertors.push(new MySqlRenameTableConvertor());
+convertors.push(new SingleStoreRenameTableConvertor());
convertors.push(new SqliteRenameTableConvertor());
convertors.push(new PgAlterTableRenameColumnConvertor());
convertors.push(new MySqlAlterTableRenameColumnConvertor());
+convertors.push(new SingleStoreAlterTableRenameColumnConvertor());
convertors.push(new SQLiteAlterTableRenameColumnConvertor());
convertors.push(new PgAlterTableDropColumnConvertor());
convertors.push(new MySqlAlterTableDropColumnConvertor());
+convertors.push(new SingleStoreAlterTableDropColumnConvertor());
convertors.push(new SQLiteAlterTableDropColumnConvertor());
convertors.push(new PgAlterTableAddColumnConvertor());
convertors.push(new MySqlAlterTableAddColumnConvertor());
+convertors.push(new SingleStoreAlterTableAddColumnConvertor());
convertors.push(new SQLiteAlterTableAddColumnConvertor());
convertors.push(new PgAlterTableAlterColumnSetTypeConvertor());
@@ -3241,13 +3897,18 @@ convertors.push(new MySqlAlterTableDeleteCheckConstraintConvertor());
convertors.push(new MySQLAlterTableAddUniqueConstraintConvertor());
convertors.push(new MySQLAlterTableDropUniqueConstraintConvertor());
+convertors.push(new SingleStoreAlterTableAddUniqueConstraintConvertor());
+convertors.push(new SingleStoreAlterTableDropUniqueConstraintConvertor());
+
convertors.push(new CreatePgIndexConvertor());
convertors.push(new CreateMySqlIndexConvertor());
+convertors.push(new CreateSingleStoreIndexConvertor());
convertors.push(new CreateSqliteIndexConvertor());
convertors.push(new PgDropIndexConvertor());
convertors.push(new SqliteDropIndexConvertor());
convertors.push(new MySqlDropIndexConvertor());
+convertors.push(new SingleStoreDropIndexConvertor());
convertors.push(new PgAlterTableAlterColumnSetPrimaryKeyConvertor());
convertors.push(new PgAlterTableAlterColumnDropPrimaryKeyConvertor());
@@ -3281,6 +3942,8 @@ convertors.push(new PgAlterTableAlterColumnAlterrGeneratedConvertor());
convertors.push(new MySqlAlterTableAlterColumnAlterrGeneratedConvertor());
+convertors.push(new SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor());
+
convertors.push(new SqliteAlterTableAlterColumnDropGeneratedConvertor());
convertors.push(new SqliteAlterTableAlterColumnAlterGeneratedConvertor());
convertors.push(new SqliteAlterTableAlterColumnSetExpressionConvertor());
@@ -3290,6 +3953,8 @@ convertors.push(new LibSQLModifyColumn());
// convertors.push(new MySqlAlterTableAlterColumnSetDefaultConvertor());
// convertors.push(new MySqlAlterTableAlterColumnDropDefaultConvertor());
+convertors.push(new SingleStoreModifyColumn());
+
convertors.push(new PgCreateForeignKeyConvertor());
convertors.push(new MySqlCreateForeignKeyConvertor());
@@ -3321,6 +3986,9 @@ convertors.push(new MySqlAlterTableCreateCompositePrimaryKeyConvertor());
convertors.push(new MySqlAlterTableAddPk());
convertors.push(new MySqlAlterTableAlterCompositePrimaryKeyConvertor());
+convertors.push(new SingleStoreAlterTableDropPk());
+convertors.push(new SingleStoreAlterTableAddPk());
+
export function fromJson(
statements: JsonStatement[],
dialect: Dialect,
diff --git a/drizzle-kit/src/utils.ts b/drizzle-kit/src/utils.ts
index 685e2efb5..2638ca4ef 100644
--- a/drizzle-kit/src/utils.ts
+++ b/drizzle-kit/src/utils.ts
@@ -10,6 +10,7 @@ import { assertUnreachable, snapshotVersion } from './global';
import type { Dialect } from './schemaValidator';
import { backwardCompatibleMysqlSchema } from './serializer/mysqlSchema';
import { backwardCompatiblePgSchema } from './serializer/pgSchema';
+import { backwardCompatibleSingleStoreSchema } from './serializer/singlestoreSchema';
import { backwardCompatibleSqliteSchema } from './serializer/sqliteSchema';
import type { ProxyParams } from './serializer/studio';
@@ -123,6 +124,8 @@ const validatorForDialect = (dialect: Dialect) => {
return { validator: backwardCompatibleSqliteSchema, version: 6 };
case 'mysql':
return { validator: backwardCompatibleMysqlSchema, version: 5 };
+ case 'singlestore':
+ return { validator: backwardCompatibleSingleStoreSchema, version: 1 };
}
};
diff --git a/drizzle-kit/tests/cli-generate.test.ts b/drizzle-kit/tests/cli-generate.test.ts
index 6c8cae09e..a4adf979f 100644
--- a/drizzle-kit/tests/cli-generate.test.ts
+++ b/drizzle-kit/tests/cli-generate.test.ts
@@ -39,6 +39,7 @@ test('generate #1', async (t) => {
out: 'drizzle',
bundle: false,
casing: undefined,
+ driver: undefined,
});
});
@@ -59,6 +60,7 @@ test('generate #2', async (t) => {
out: 'out',
bundle: false,
casing: undefined,
+ driver: undefined,
});
});
@@ -76,6 +78,7 @@ test('generate #3', async (t) => {
out: 'drizzle',
bundle: false,
casing: undefined,
+ driver: undefined,
});
});
@@ -94,6 +97,7 @@ test('generate #4', async (t) => {
out: 'drizzle',
bundle: false,
casing: undefined,
+ driver: undefined,
});
});
@@ -111,6 +115,7 @@ test('generate #5', async (t) => {
out: 'drizzle',
bundle: false,
casing: undefined,
+ driver: undefined,
});
});
@@ -128,6 +133,7 @@ test('generate #6', async (t) => {
out: 'drizzle',
bundle: false,
casing: undefined,
+ driver: undefined,
});
});
@@ -148,6 +154,7 @@ test('generate #7', async (t) => {
out: 'drizzle',
bundle: false,
casing: undefined,
+ driver: undefined,
});
});
@@ -166,6 +173,25 @@ test('generate #8', async (t) => {
out: 'drizzle',
bundle: true, // expo driver
casing: undefined,
+ driver: 'expo',
+ });
+});
+
+test('generate #9', async (t) => {
+ const res = await brotest(generate, '--config=durable-sqlite.config.ts');
+ assert.equal(res.type, 'handler');
+ if (res.type !== 'handler') assert.fail(res.type, 'handler');
+ expect(res.options).toStrictEqual({
+ dialect: 'sqlite',
+ name: undefined,
+ custom: false,
+ prefix: 'index',
+ breakpoints: true,
+ schema: './schema.ts',
+ out: 'drizzle',
+ bundle: true, // expo driver
+ casing: undefined,
+ driver: 'durable-sqlite',
});
});
@@ -187,6 +213,7 @@ test('generate #9', async (t) => {
out: 'out',
bundle: false,
casing: undefined,
+ driver: undefined,
});
});
diff --git a/drizzle-kit/tests/cli/durable-sqlite.config.ts b/drizzle-kit/tests/cli/durable-sqlite.config.ts
new file mode 100644
index 000000000..c3f4e44f0
--- /dev/null
+++ b/drizzle-kit/tests/cli/durable-sqlite.config.ts
@@ -0,0 +1,7 @@
+import { defineConfig } from '../../src';
+
+export default defineConfig({
+ schema: './schema.ts',
+ dialect: 'sqlite',
+ driver: 'durable-sqlite',
+});
diff --git a/drizzle-kit/tests/introspect/singlestore.test.ts b/drizzle-kit/tests/introspect/singlestore.test.ts
new file mode 100644
index 000000000..71960c3f7
--- /dev/null
+++ b/drizzle-kit/tests/introspect/singlestore.test.ts
@@ -0,0 +1,275 @@
+import Docker from 'dockerode';
+import 'dotenv/config';
+import { SQL, sql } from 'drizzle-orm';
+import {
+ bigint,
+ char,
+ decimal,
+ double,
+ float,
+ int,
+ mediumint,
+ singlestoreTable,
+ smallint,
+ text,
+ tinyint,
+ varchar,
+} from 'drizzle-orm/singlestore-core';
+import * as fs from 'fs';
+import getPort from 'get-port';
+import { Connection, createConnection } from 'mysql2/promise';
+import { introspectSingleStoreToFile } from 'tests/schemaDiffer';
+import { v4 as uuid } from 'uuid';
+import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest';
+
+let client: Connection;
+let singlestoreContainer: Docker.Container;
+
+async function createDockerDB(): Promise {
+ const docker = new Docker();
+ const port = await getPort({ port: 3306 });
+ const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest';
+
+ const pullStream = await docker.pull(image);
+ await new Promise((resolve, reject) =>
+ docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err))
+ );
+
+ singlestoreContainer = await docker.createContainer({
+ Image: image,
+ Env: ['ROOT_PASSWORD=singlestore'],
+ name: `drizzle-integration-tests-${uuid()}`,
+ HostConfig: {
+ AutoRemove: true,
+ PortBindings: {
+ '3306/tcp': [{ HostPort: `${port}` }],
+ },
+ },
+ });
+
+ await singlestoreContainer.start();
+ await new Promise((resolve) => setTimeout(resolve, 4000));
+
+ return `singlestore://root:singlestore@localhost:${port}/`;
+}
+
+beforeAll(async () => {
+ const connectionString = process.env.SINGLESTORE_CONNECTION_STRING ?? await createDockerDB();
+
+ const sleep = 1000;
+ let timeLeft = 20000;
+ let connected = false;
+ let lastError: unknown | undefined;
+ do {
+ try {
+ client = await createConnection(connectionString);
+ await client.connect();
+ connected = true;
+ break;
+ } catch (e) {
+ lastError = e;
+ await new Promise((resolve) => setTimeout(resolve, sleep));
+ timeLeft -= sleep;
+ }
+ } while (timeLeft > 0);
+ if (!connected) {
+ console.error('Cannot connect to SingleStore');
+ await client?.end().catch(console.error);
+ await singlestoreContainer?.stop().catch(console.error);
+ throw lastError;
+ }
+});
+
+afterAll(async () => {
+ await client?.end().catch(console.error);
+ await singlestoreContainer?.stop().catch(console.error);
+});
+
+beforeEach(async () => {
+ await client.query(`drop database if exists \`drizzle\`;`);
+ await client.query(`create database \`drizzle\`;`);
+ await client.query(`use \`drizzle\`;`);
+});
+
+if (!fs.existsSync('tests/introspect/singlestore')) {
+ fs.mkdirSync('tests/introspect/singlestore');
+}
+
+// TODO: Unskip this test when generated column is implemented
+/* test.skip('generated always column: link to another column', async () => {
+ const schema = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ email: text('email'),
+ generatedEmail: text('generatedEmail').generatedAlwaysAs(
+ (): SQL => sql`\`email\``,
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await introspectSingleStoreToFile(
+ client,
+ schema,
+ 'generated-link-column',
+ 'drizzle',
+ );
+
+ expect(statements.length).toBe(0);
+ expect(sqlStatements.length).toBe(0);
+}); */
+
+// TODO: Unskip this test when generated column is implemented
+/* test.skip('generated always column virtual: link to another column', async () => {
+ const schema = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ email: text('email'),
+ generatedEmail: text('generatedEmail').generatedAlwaysAs(
+ (): SQL => sql`\`email\``,
+ { mode: 'virtual' },
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await introspectSingleStoreToFile(
+ client,
+ schema,
+ 'generated-link-column-virtual',
+ 'drizzle',
+ );
+
+ expect(statements.length).toBe(0);
+ expect(sqlStatements.length).toBe(0);
+}); */
+
+test('Default value of character type column: char', async () => {
+ const schema = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ sortKey: char('sortKey', { length: 255 }).default('0'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await introspectSingleStoreToFile(
+ client,
+ schema,
+ 'default-value-char-column',
+ 'drizzle',
+ );
+
+ expect(statements.length).toBe(0);
+ expect(sqlStatements.length).toBe(0);
+});
+
+test('Default value of character type column: varchar', async () => {
+ const schema = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ sortKey: varchar('sortKey', { length: 255 }).default('0'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await introspectSingleStoreToFile(
+ client,
+ schema,
+ 'default-value-varchar-column',
+ 'drizzle',
+ );
+
+ expect(statements.length).toBe(0);
+ expect(sqlStatements.length).toBe(0);
+});
+
+// TODO: Unskip this test when views are implemented
+/* test('view #1', async () => {
+ const users = singlestoreTable('users', { id: int('id') });
+ const testView = singlestoreView('some_view', { id: int('id') }).as(
+ sql`select \`drizzle\`.\`users\`.\`id\` AS \`id\` from \`drizzle\`.\`users\``,
+ );
+
+ const schema = {
+ users: users,
+ testView,
+ };
+
+ const { statements, sqlStatements } = await introspectSingleStoreToFile(
+ client,
+ schema,
+ 'view-1',
+ 'drizzle',
+ );
+
+ expect(statements.length).toBe(0);
+ expect(sqlStatements.length).toBe(0);
+}); */
+
+// TODO: Unskip this test when views are implemented
+/* test('view #2', async () => {
+ const users = singlestoreTable('some_users', { id: int('id') });
+ const testView = singlestoreView('some_view', { id: int('id') }).algorithm('temptable').sqlSecurity('definer').as(
+ sql`SELECT * FROM ${users}`,
+ );
+
+ const schema = {
+ users: users,
+ testView,
+ };
+
+ const { statements, sqlStatements } = await introspectSingleStoreToFile(
+ client,
+ schema,
+ 'view-2',
+ 'drizzle',
+ );
+
+ expect(statements.length).toBe(0);
+ expect(sqlStatements.length).toBe(0);
+}); */
+
+test('handle float type', async () => {
+ const schema = {
+ table: singlestoreTable('table', {
+ col1: float(),
+ col2: float({ precision: 2 }),
+ col3: float({ precision: 2, scale: 1 }),
+ }),
+ };
+
+ const { statements, sqlStatements } = await introspectSingleStoreToFile(
+ client,
+ schema,
+ 'handle-float-type',
+ 'drizzle',
+ );
+
+ expect(statements.length).toBe(0);
+ expect(sqlStatements.length).toBe(0);
+});
+
+test('handle unsigned numerical types', async () => {
+ const schema = {
+ table: singlestoreTable('table', {
+ col1: int({ unsigned: true }),
+ col2: tinyint({ unsigned: true }),
+ col3: smallint({ unsigned: true }),
+ col4: mediumint({ unsigned: true }),
+ col5: bigint({ mode: 'number', unsigned: true }),
+ col6: float({ unsigned: true }),
+ col7: float({ precision: 2, scale: 1, unsigned: true }),
+ col8: double({ unsigned: true }),
+ col9: double({ precision: 2, scale: 1, unsigned: true }),
+ col10: decimal({ unsigned: true }),
+ col11: decimal({ precision: 2, scale: 1, unsigned: true }),
+ }),
+ };
+
+ const { statements, sqlStatements } = await introspectSingleStoreToFile(
+ client,
+ schema,
+ 'handle-unsigned-numerical-types',
+ 'drizzle',
+ );
+
+ expect(statements.length).toBe(0);
+ expect(sqlStatements.length).toBe(0);
+});
diff --git a/drizzle-kit/tests/push/singlestore-push.test.ts b/drizzle-kit/tests/push/singlestore-push.test.ts
new file mode 100644
index 000000000..4ad3c6c0e
--- /dev/null
+++ b/drizzle-kit/tests/push/singlestore-push.test.ts
@@ -0,0 +1,266 @@
+import Docker from 'dockerode';
+import { int, singlestoreTable } from 'drizzle-orm/singlestore-core';
+import fs from 'fs';
+import getPort from 'get-port';
+import { Connection, createConnection } from 'mysql2/promise';
+import { diffTestSchemasPushSingleStore } from 'tests/schemaDiffer';
+import { v4 as uuid } from 'uuid';
+import { afterAll, beforeAll, expect, test } from 'vitest';
+
+let client: Connection;
+let singlestoreContainer: Docker.Container;
+
+async function createDockerDB(): Promise {
+ const docker = new Docker();
+ const port = await getPort({ port: 3306 });
+ const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest';
+
+ const pullStream = await docker.pull(image);
+ await new Promise((resolve, reject) =>
+ docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err))
+ );
+
+ singlestoreContainer = await docker.createContainer({
+ Image: image,
+ Env: ['ROOT_PASSWORD=singlestore'],
+ name: `drizzle-integration-tests-${uuid()}`,
+ HostConfig: {
+ AutoRemove: true,
+ PortBindings: {
+ '3306/tcp': [{ HostPort: `${port}` }],
+ },
+ },
+ });
+
+ await singlestoreContainer.start();
+ await new Promise((resolve) => setTimeout(resolve, 4000));
+
+ return `singlestore://root:singlestore@localhost:${port}/`;
+}
+
+beforeAll(async () => {
+ const connectionString = process.env.MYSQL_CONNECTION_STRING ?? (await createDockerDB());
+
+ const sleep = 1000;
+ let timeLeft = 20000;
+ let connected = false;
+ let lastError: unknown | undefined;
+ do {
+ try {
+ client = await createConnection(connectionString);
+ await client.connect();
+ connected = true;
+ break;
+ } catch (e) {
+ lastError = e;
+ await new Promise((resolve) => setTimeout(resolve, sleep));
+ timeLeft -= sleep;
+ }
+ } while (timeLeft > 0);
+ if (!connected) {
+ console.error('Cannot connect to MySQL');
+ await client?.end().catch(console.error);
+ await singlestoreContainer?.stop().catch(console.error);
+ throw lastError;
+ }
+
+ await client.query('DROP DATABASE IF EXISTS drizzle;');
+ await client.query('CREATE DATABASE drizzle;');
+ await client.query('USE drizzle;');
+});
+
+afterAll(async () => {
+ await client?.end().catch(console.error);
+ await singlestoreContainer?.stop().catch(console.error);
+});
+
+if (!fs.existsSync('tests/push/singlestore')) {
+ fs.mkdirSync('tests/push/singlestore');
+}
+
+test('db has checks. Push with same names', async () => {
+ const schema1 = {
+ test: singlestoreTable('test', {
+ id: int('id').primaryKey(),
+ values: int('values').default(1),
+ }),
+ };
+ const schema2 = {
+ test: singlestoreTable('test', {
+ id: int('id').primaryKey(),
+ values: int('values').default(1),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasPushSingleStore(
+ client,
+ schema1,
+ schema2,
+ [],
+ 'drizzle',
+ );
+
+ expect(statements).toStrictEqual([]);
+ expect(sqlStatements).toStrictEqual([]);
+
+ await client.query(`DROP TABLE \`test\`;`);
+});
+
+// TODO: Unskip this test when views are implemented
+/* test.skip.skip('create view', async () => {
+ const table = singlestoreTable('test', {
+ id: int('id').primaryKey(),
+ });
+
+ const schema1 = {
+ test: table,
+ };
+
+ const schema2 = {
+ test: table,
+ view: singlestoreView('view').as((qb) => qb.select().from(table)),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasPushSingleStore(
+ client,
+ schema1,
+ schema2,
+ [],
+ 'drizzle',
+ false,
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ definition: 'select `id` from `test`',
+ name: 'view',
+ type: 'singlestore_create_view',
+ replace: false,
+ sqlSecurity: 'definer',
+ withCheckOption: undefined,
+ algorithm: 'undefined',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ `CREATE ALGORITHM = undefined
+SQL SECURITY definer
+VIEW \`view\` AS (select \`id\` from \`test\`);`,
+ ]);
+
+ await client.query(`DROP TABLE \`test\`;`);
+}); */
+
+// TODO: Unskip this test when views are implemented
+/* test.skip('drop view', async () => {
+ const table = singlestoreTable('test', {
+ id: int('id').primaryKey(),
+ });
+
+ const schema1 = {
+ test: table,
+ view: singlestoreView('view').as((qb) => qb.select().from(table)),
+ };
+
+ const schema2 = {
+ test: table,
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasPushSingleStore(
+ client,
+ schema1,
+ schema2,
+ [],
+ 'drizzle',
+ false,
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ name: 'view',
+ type: 'drop_view',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual(['DROP VIEW `view`;']);
+ await client.query(`DROP TABLE \`test\`;`);
+ await client.query(`DROP VIEW \`view\`;`);
+}); */
+
+// TODO: Unskip this test when views are implemented
+/* test.skip('alter view ".as"', async () => {
+ const table = singlestoreTable('test', {
+ id: int('id').primaryKey(),
+ });
+
+ const schema1 = {
+ test: table,
+ view: singlestoreView('view').as((qb) =>
+ qb
+ .select()
+ .from(table)
+ .where(sql`${table.id} = 1`)
+ ),
+ };
+
+ const schema2 = {
+ test: table,
+ view: singlestoreView('view').as((qb) => qb.select().from(table)),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasPushSingleStore(
+ client,
+ schema1,
+ schema2,
+ [],
+ 'drizzle',
+ false,
+ );
+
+ expect(statements.length).toBe(0);
+ expect(sqlStatements.length).toBe(0);
+
+ await client.query(`DROP TABLE \`test\`;`);
+ await client.query(`DROP VIEW \`view\`;`);
+}); */
+
+// TODO: Unskip this test when views are implemented
+/* test.skip('alter meta options with distinct in definition', async () => {
+ const table = singlestoreTable('test', {
+ id: int('id').primaryKey(),
+ });
+
+ const schema1 = {
+ test: table,
+ view: singlestoreView('view')
+ .withCheckOption('cascaded')
+ .sqlSecurity('definer')
+ .algorithm('merge')
+ .as((qb) =>
+ qb
+ .selectDistinct()
+ .from(table)
+ .where(sql`${table.id} = 1`)
+ ),
+ };
+
+ const schema2 = {
+ test: table,
+ view: singlestoreView('view')
+ .withCheckOption('cascaded')
+ .sqlSecurity('definer')
+ .algorithm('undefined')
+ .as((qb) => qb.selectDistinct().from(table)),
+ };
+
+ await expect(
+ diffTestSchemasPushSingleStore(
+ client,
+ schema1,
+ schema2,
+ [],
+ 'drizzle',
+ false,
+ ),
+ ).rejects.toThrowError();
+
+ await client.query(`DROP TABLE \`test\`;`);
+}); */
diff --git a/drizzle-kit/tests/push/singlestore.test.ts b/drizzle-kit/tests/push/singlestore.test.ts
new file mode 100644
index 000000000..82c72063c
--- /dev/null
+++ b/drizzle-kit/tests/push/singlestore.test.ts
@@ -0,0 +1,439 @@
+import Docker from 'dockerode';
+import { SQL, sql } from 'drizzle-orm';
+import {
+ bigint,
+ binary,
+ char,
+ date,
+ datetime,
+ decimal,
+ double,
+ float,
+ int,
+ json,
+ mediumint,
+ primaryKey,
+ serial,
+ singlestoreEnum,
+ singlestoreTable,
+ smallint,
+ text,
+ time,
+ timestamp,
+ tinyint,
+ varbinary,
+ varchar,
+ year,
+} from 'drizzle-orm/singlestore-core';
+import getPort from 'get-port';
+import { Connection, createConnection } from 'mysql2/promise';
+import { diffTestSchemasPushSingleStore, diffTestSchemasSingleStore } from 'tests/schemaDiffer';
+import { v4 as uuid } from 'uuid';
+import { expect } from 'vitest';
+import { DialectSuite, run } from './common';
+
+async function createDockerDB(context: any): Promise {
+ const docker = new Docker();
+ const port = await getPort({ port: 3306 });
+ const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest';
+
+ const pullStream = await docker.pull(image);
+ await new Promise((resolve, reject) =>
+ docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err))
+ );
+
+ context.singlestoreContainer = await docker.createContainer({
+ Image: image,
+ Env: ['ROOT_PASSWORD=singlestore'],
+ name: `drizzle-integration-tests-${uuid()}`,
+ HostConfig: {
+ AutoRemove: true,
+ PortBindings: {
+ '3306/tcp': [{ HostPort: `${port}` }],
+ },
+ },
+ });
+
+ await context.singlestoreContainer.start();
+ await new Promise((resolve) => setTimeout(resolve, 4000));
+
+ return `singlestore://root:singlestore@localhost:${port}/`;
+}
+
+const singlestoreSuite: DialectSuite = {
+ allTypes: async function(context: any): Promise {
+ const schema1 = {
+ allBigInts: singlestoreTable('all_big_ints', {
+ simple: bigint('simple', { mode: 'number' }),
+ columnNotNull: bigint('column_not_null', { mode: 'number' }).notNull(),
+ columnDefault: bigint('column_default', { mode: 'number' }).default(12),
+ columnDefaultSql: bigint('column_default_sql', {
+ mode: 'number',
+ }).default(12),
+ }),
+ allBools: singlestoreTable('all_bools', {
+ simple: tinyint('simple'),
+ columnNotNull: tinyint('column_not_null').notNull(),
+ columnDefault: tinyint('column_default').default(1),
+ }),
+ allChars: singlestoreTable('all_chars', {
+ simple: char('simple', { length: 1 }),
+ columnNotNull: char('column_not_null', { length: 45 }).notNull(),
+ // columnDefault: char("column_default", { length: 1 }).default("h"),
+ columnDefaultSql: char('column_default_sql', { length: 1 }).default(
+ 'h',
+ ),
+ }),
+ // allDateTimes: singlestoreTable("all_date_times", {
+ // simple: datetime("simple", { mode: "string", fsp: 1 }),
+ // columnNotNull: datetime("column_not_null", {
+ // mode: "string",
+ // }).notNull(),
+ // columnDefault: datetime("column_default", { mode: "string" }).default(
+ // "2023-03-01 14:05:29"
+ // ),
+ // }),
+ allDates: singlestoreTable('all_dates', {
+ simple: date('simple', { mode: 'string' }),
+ column_not_null: date('column_not_null', { mode: 'string' }).notNull(),
+ column_default: date('column_default', { mode: 'string' }).default(
+ '2023-03-01',
+ ),
+ }),
+ allDecimals: singlestoreTable('all_decimals', {
+ simple: decimal('simple', { precision: 1, scale: 0 }),
+ columnNotNull: decimal('column_not_null', {
+ precision: 45,
+ scale: 3,
+ }).notNull(),
+ columnDefault: decimal('column_default', {
+ precision: 10,
+ scale: 0,
+ }).default('100'),
+ columnDefaultSql: decimal('column_default_sql', {
+ precision: 10,
+ scale: 0,
+ }).default('101'),
+ }),
+
+ allDoubles: singlestoreTable('all_doubles', {
+ simple: double('simple'),
+ columnNotNull: double('column_not_null').notNull(),
+ columnDefault: double('column_default').default(100),
+ columnDefaultSql: double('column_default_sql').default(101),
+ }),
+
+ allEnums: singlestoreTable('all_enums', {
+ simple: singlestoreEnum('simple', ['hi', 'hello']),
+ }),
+
+ allEnums1: singlestoreTable('all_enums1', {
+ simple: singlestoreEnum('simple', ['hi', 'hello']).default('hi'),
+ }),
+
+ allFloats: singlestoreTable('all_floats', {
+ columnNotNull: float('column_not_null').notNull(),
+ columnDefault: float('column_default').default(100),
+ columnDefaultSql: float('column_default_sql').default(101),
+ }),
+
+ allInts: singlestoreTable('all_ints', {
+ simple: int('simple'),
+ columnNotNull: int('column_not_null').notNull(),
+ columnDefault: int('column_default').default(100),
+ columnDefaultSql: int('column_default_sql').default(101),
+ }),
+
+ allIntsRef: singlestoreTable('all_ints_ref', {
+ simple: int('simple'),
+ columnNotNull: int('column_not_null').notNull(),
+ columnDefault: int('column_default').default(100),
+ columnDefaultSql: int('column_default_sql').default(101),
+ }),
+
+ // allJsons: singlestoreTable("all_jsons", {
+ // columnDefaultObject: json("column_default_object")
+ // .default({ hello: "world world" })
+ // .notNull(),
+ // columnDefaultArray: json("column_default_array").default({
+ // hello: { "world world": ["foo", "bar"] },
+ // foo: "bar",
+ // fe: 23,
+ // }),
+ // column: json("column"),
+ // }),
+
+ allMInts: singlestoreTable('all_m_ints', {
+ simple: mediumint('simple'),
+ columnNotNull: mediumint('column_not_null').notNull(),
+ columnDefault: mediumint('column_default').default(100),
+ columnDefaultSql: mediumint('column_default_sql').default(101),
+ }),
+
+ allReals: singlestoreTable('all_reals', {
+ simple: double('simple', { precision: 5, scale: 2 }),
+ columnNotNull: double('column_not_null').notNull(),
+ columnDefault: double('column_default').default(100),
+ columnDefaultSql: double('column_default_sql').default(101),
+ }),
+
+ allSInts: singlestoreTable('all_s_ints', {
+ simple: smallint('simple'),
+ columnNotNull: smallint('column_not_null').notNull(),
+ columnDefault: smallint('column_default').default(100),
+ columnDefaultSql: smallint('column_default_sql').default(101),
+ }),
+
+ // allSmallSerials: singlestoreTable("all_small_serials", {
+ // columnAll: serial("column_all").notNull(),
+ // }),
+
+ allTInts: singlestoreTable('all_t_ints', {
+ simple: tinyint('simple'),
+ columnNotNull: tinyint('column_not_null').notNull(),
+ columnDefault: tinyint('column_default').default(10),
+ columnDefaultSql: tinyint('column_default_sql').default(11),
+ }),
+
+ allTexts: singlestoreTable('all_texts', {
+ simple: text('simple'),
+ columnNotNull: text('column_not_null').notNull(),
+ columnDefault: text('column_default').default('hello'),
+ columnDefaultSql: text('column_default_sql').default('hello'),
+ }),
+
+ allTimes: singlestoreTable('all_times', {
+ // simple: time("simple", { fsp: 1 }),
+ columnNotNull: time('column_not_null').notNull(),
+ columnDefault: time('column_default').default('22:12:12'),
+ }),
+
+ allTimestamps: singlestoreTable('all_timestamps', {
+ // columnDateNow: timestamp("column_date_now", {
+ // fsp: 1,
+ // mode: "string",
+ // }).default(sql`(now())`),
+ columnAll: timestamp('column_all', { mode: 'string' })
+ .default('2023-03-01 14:05:29')
+ .notNull(),
+ column: timestamp('column', { mode: 'string' }).default(
+ '2023-02-28 16:18:31',
+ ),
+ }),
+
+ allVarChars: singlestoreTable('all_var_chars', {
+ simple: varchar('simple', { length: 100 }),
+ columnNotNull: varchar('column_not_null', { length: 45 }).notNull(),
+ columnDefault: varchar('column_default', { length: 100 }).default(
+ 'hello',
+ ),
+ columnDefaultSql: varchar('column_default_sql', {
+ length: 100,
+ }).default('hello'),
+ }),
+
+ allVarbinaries: singlestoreTable('all_varbinaries', {
+ simple: varbinary('simple', { length: 100 }),
+ columnNotNull: varbinary('column_not_null', { length: 100 }).notNull(),
+ columnDefault: varbinary('column_default', { length: 12 }),
+ }),
+
+ allYears: singlestoreTable('all_years', {
+ simple: year('simple'),
+ columnNotNull: year('column_not_null').notNull(),
+ columnDefault: year('column_default').default(2022),
+ }),
+
+ binafry: singlestoreTable('binary', {
+ simple: binary('simple', { length: 1 }),
+ columnNotNull: binary('column_not_null', { length: 1 }).notNull(),
+ columnDefault: binary('column_default', { length: 12 }),
+ }),
+ };
+
+ const { statements } = await diffTestSchemasPushSingleStore(
+ context.client as Connection,
+ schema1,
+ schema1,
+ [],
+ 'drizzle',
+ false,
+ );
+ console.log(statements);
+ expect(statements.length).toBe(0);
+ expect(statements).toEqual([]);
+
+ const { sqlStatements: dropStatements } = await diffTestSchemasSingleStore(
+ schema1,
+ {},
+ [],
+ false,
+ );
+
+ for (const st of dropStatements) {
+ await context.client.query(st);
+ }
+ },
+ addBasicIndexes: function(context?: any): Promise {
+ return {} as any;
+ },
+ changeIndexFields: function(context?: any): Promise {
+ return {} as any;
+ },
+ dropIndex: function(context?: any): Promise {
+ return {} as any;
+ },
+ indexesToBeNotTriggered: function(context?: any): Promise {
+ return {} as any;
+ },
+ indexesTestCase1: function(context?: any): Promise {
+ return {} as any;
+ },
+ async case1() {
+ // TODO: implement if needed
+ expect(true).toBe(true);
+ },
+ addNotNull: function(context?: any): Promise {
+ return {} as any;
+ },
+ addNotNullWithDataNoRollback: function(context?: any): Promise {
+ return {} as any;
+ },
+ addBasicSequences: function(context?: any): Promise {
+ return {} as any;
+ },
+ addGeneratedColumn: async function(context: any): Promise {
+ return {} as any;
+ },
+ addGeneratedToColumn: async function(context: any): Promise {
+ return {} as any;
+ },
+ dropGeneratedConstraint: async function(context: any): Promise {
+ return {} as any;
+ },
+ alterGeneratedConstraint: async function(context: any): Promise {
+ return {} as any;
+ },
+ createTableWithGeneratedConstraint: function(context?: any): Promise {
+ return {} as any;
+ },
+ createCompositePrimaryKey: async function(context: any): Promise {
+ const schema1 = {};
+
+ const schema2 = {
+ table: singlestoreTable('table', {
+ col1: int('col1').notNull(),
+ col2: int('col2').notNull(),
+ }, (t) => ({
+ pk: primaryKey({
+ columns: [t.col1, t.col2],
+ }),
+ })),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasPushSingleStore(
+ context.client as Connection,
+ schema1,
+ schema2,
+ [],
+ 'drizzle',
+ false,
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ type: 'create_table',
+ tableName: 'table',
+ schema: undefined,
+ internals: {
+ indexes: {},
+ tables: {},
+ },
+ compositePKs: ['table_col1_col2_pk;col1,col2'],
+ compositePkName: 'table_col1_col2_pk',
+ uniqueConstraints: [],
+ columns: [
+ { name: 'col1', type: 'int', primaryKey: false, notNull: true, autoincrement: false },
+ { name: 'col2', type: 'int', primaryKey: false, notNull: true, autoincrement: false },
+ ],
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'CREATE TABLE `table` (\n\t`col1` int NOT NULL,\n\t`col2` int NOT NULL,\n\tCONSTRAINT `table_col1_col2_pk` PRIMARY KEY(`col1`,`col2`)\n);\n',
+ ]);
+ },
+ renameTableWithCompositePrimaryKey: async function(context?: any): Promise {
+ const productsCategoriesTable = (tableName: string) => {
+ return singlestoreTable(tableName, {
+ productId: varchar('product_id', { length: 10 }).notNull(),
+ categoryId: varchar('category_id', { length: 10 }).notNull(),
+ }, (t) => ({
+ pk: primaryKey({
+ columns: [t.productId, t.categoryId],
+ }),
+ }));
+ };
+
+ const schema1 = {
+ table: productsCategoriesTable('products_categories'),
+ };
+ const schema2 = {
+ test: productsCategoriesTable('products_to_categories'),
+ };
+
+ const { sqlStatements } = await diffTestSchemasPushSingleStore(
+ context.client as Connection,
+ schema1,
+ schema2,
+ ['public.products_categories->public.products_to_categories'],
+ 'drizzle',
+ false,
+ );
+
+ // It's not possible to create/alter/drop primary keys in SingleStore
+ expect(sqlStatements).toStrictEqual([
+ 'RENAME TABLE `products_categories` TO `products_to_categories`;',
+ ]);
+
+ await context.client.query(`DROP TABLE \`products_categories\``);
+ },
+};
+
+run(
+ singlestoreSuite,
+ async (context: any) => {
+ const connectionString = process.env.SINGLESTORE_CONNECTION_STRING
+ ?? (await createDockerDB(context));
+
+ const sleep = 1000;
+ let timeLeft = 20000;
+ let connected = false;
+ let lastError: unknown | undefined;
+ do {
+ try {
+ context.client = await createConnection(connectionString);
+ await context.client.connect();
+ connected = true;
+ break;
+ } catch (e) {
+ lastError = e;
+ await new Promise((resolve) => setTimeout(resolve, sleep));
+ timeLeft -= sleep;
+ }
+ } while (timeLeft > 0);
+ if (!connected) {
+ console.error('Cannot connect to SingleStore');
+ await context.client?.end().catch(console.error);
+ await context.singlestoreContainer?.stop().catch(console.error);
+ throw lastError;
+ }
+
+ await context.client.query(`DROP DATABASE IF EXISTS \`drizzle\`;`);
+ await context.client.query('CREATE DATABASE drizzle;');
+ await context.client.query('USE drizzle;');
+ },
+ async (context: any) => {
+ await context.client?.end().catch(console.error);
+ await context.singlestoreContainer?.stop().catch(console.error);
+ },
+);
diff --git a/drizzle-kit/tests/schemaDiffer.ts b/drizzle-kit/tests/schemaDiffer.ts
index adc7aecbf..9c7f212aa 100644
--- a/drizzle-kit/tests/schemaDiffer.ts
+++ b/drizzle-kit/tests/schemaDiffer.ts
@@ -18,6 +18,7 @@ import {
PgTable,
PgView,
} from 'drizzle-orm/pg-core';
+import { SingleStoreSchema, SingleStoreTable } from 'drizzle-orm/singlestore-core';
import { SQLiteTable, SQLiteView } from 'drizzle-orm/sqlite-core';
import * as fs from 'fs';
import { Connection } from 'mysql2/promise';
@@ -42,22 +43,28 @@ import { Entities } from 'src/cli/validations/cli';
import { CasingType } from 'src/cli/validations/common';
import { schemaToTypeScript as schemaToTypeScriptMySQL } from 'src/introspect-mysql';
import { schemaToTypeScript } from 'src/introspect-pg';
+import { schemaToTypeScript as schemaToTypeScriptSingleStore } from 'src/introspect-singlestore';
import { schemaToTypeScript as schemaToTypeScriptSQLite } from 'src/introspect-sqlite';
import { prepareFromMySqlImports } from 'src/serializer/mysqlImports';
import { mysqlSchema, squashMysqlScheme, ViewSquashed } from 'src/serializer/mysqlSchema';
-import { generateMySqlSnapshot } from 'src/serializer/mysqlSerializer';
-import { fromDatabase as fromMySqlDatabase } from 'src/serializer/mysqlSerializer';
+import { fromDatabase as fromMySqlDatabase, generateMySqlSnapshot } from 'src/serializer/mysqlSerializer';
import { prepareFromPgImports } from 'src/serializer/pgImports';
-import { pgSchema, PgSquasher, Policy, Role, squashPgScheme, View } from 'src/serializer/pgSchema';
+import { pgSchema, Policy, Role, squashPgScheme, View } from 'src/serializer/pgSchema';
import { fromDatabase, generatePgSnapshot } from 'src/serializer/pgSerializer';
+import { prepareFromSingleStoreImports } from 'src/serializer/singlestoreImports';
+import { singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema';
+import {
+ fromDatabase as fromSingleStoreDatabase,
+ generateSingleStoreSnapshot,
+} from 'src/serializer/singlestoreSerializer';
import { prepareFromSqliteImports } from 'src/serializer/sqliteImports';
import { sqliteSchema, squashSqliteScheme, View as SqliteView } from 'src/serializer/sqliteSchema';
-import { fromDatabase as fromSqliteDatabase } from 'src/serializer/sqliteSerializer';
-import { generateSqliteSnapshot } from 'src/serializer/sqliteSerializer';
+import { fromDatabase as fromSqliteDatabase, generateSqliteSnapshot } from 'src/serializer/sqliteSerializer';
import {
applyLibSQLSnapshotsDiff,
applyMysqlSnapshotsDiff,
applyPgSnapshotsDiff,
+ applySingleStoreSnapshotsDiff,
applySqliteSnapshotsDiff,
Column,
ColumnsResolverInput,
@@ -78,15 +85,33 @@ import {
export type PostgresSchema = Record<
string,
- PgTable | PgEnum | PgSchema | PgSequence | PgView | PgMaterializedView | PgRole | PgPolicy
+ | PgTable
+ | PgEnum
+ | PgSchema
+ | PgSequence
+ | PgView
+ | PgMaterializedView
+ | PgRole
+ | PgPolicy
+>;
+export type MysqlSchema = Record<
+ string,
+ MySqlTable | MySqlSchema | MySqlView
>;
-export type MysqlSchema = Record | MySqlSchema | MySqlView>;
export type SqliteSchema = Record | SQLiteView>;
+export type SinglestoreSchema = Record<
+ string,
+ SingleStoreTable | SingleStoreSchema /* | SingleStoreView */
+>;
export const testSchemasResolver =
(renames: Set) => async (input: ResolverInput): Promise> => {
try {
- if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) {
+ if (
+ input.created.length === 0
+ || input.deleted.length === 0
+ || renames.size === 0
+ ) {
return {
created: input.created,
renamed: [],
@@ -138,273 +163,297 @@ export const testSchemasResolver =
}
};
-export const testSequencesResolver =
- (renames: Set) => async (input: ResolverInput): Promise> => {
- try {
- if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) {
- return {
- created: input.created,
- moved: [],
- renamed: [],
- deleted: input.deleted,
- };
- }
+export const testSequencesResolver = (renames: Set) =>
+async (
+ input: ResolverInput,
+): Promise> => {
+ try {
+ if (
+ input.created.length === 0
+ || input.deleted.length === 0
+ || renames.size === 0
+ ) {
+ return {
+ created: input.created,
+ moved: [],
+ renamed: [],
+ deleted: input.deleted,
+ };
+ }
- let createdSequences = [...input.created];
- let deletedSequences = [...input.deleted];
+ let createdSequences = [...input.created];
+ let deletedSequences = [...input.deleted];
- const result: {
- created: Sequence[];
- moved: { name: string; schemaFrom: string; schemaTo: string }[];
- renamed: { from: Sequence; to: Sequence }[];
- deleted: Sequence[];
- } = { created: [], renamed: [], deleted: [], moved: [] };
+ const result: {
+ created: Sequence[];
+ moved: { name: string; schemaFrom: string; schemaTo: string }[];
+ renamed: { from: Sequence; to: Sequence }[];
+ deleted: Sequence[];
+ } = { created: [], renamed: [], deleted: [], moved: [] };
- for (let rename of renames) {
- const [from, to] = rename.split('->');
+ for (let rename of renames) {
+ const [from, to] = rename.split('->');
+
+ const idxFrom = deletedSequences.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === from;
+ });
- const idxFrom = deletedSequences.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === from;
+ if (idxFrom >= 0) {
+ const idxTo = createdSequences.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === to;
});
- if (idxFrom >= 0) {
- const idxTo = createdSequences.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === to;
+ const tableFrom = deletedSequences[idxFrom];
+ const tableTo = createdSequences[idxFrom];
+
+ if (tableFrom.schema !== tableTo.schema) {
+ result.moved.push({
+ name: tableFrom.name,
+ schemaFrom: tableFrom.schema,
+ schemaTo: tableTo.schema,
});
+ }
- const tableFrom = deletedSequences[idxFrom];
- const tableTo = createdSequences[idxFrom];
-
- if (tableFrom.schema !== tableTo.schema) {
- result.moved.push({
- name: tableFrom.name,
- schemaFrom: tableFrom.schema,
- schemaTo: tableTo.schema,
- });
- }
-
- if (tableFrom.name !== tableTo.name) {
- result.renamed.push({
- from: deletedSequences[idxFrom],
- to: createdSequences[idxTo],
- });
- }
-
- delete createdSequences[idxTo];
- delete deletedSequences[idxFrom];
-
- createdSequences = createdSequences.filter(Boolean);
- deletedSequences = deletedSequences.filter(Boolean);
+ if (tableFrom.name !== tableTo.name) {
+ result.renamed.push({
+ from: deletedSequences[idxFrom],
+ to: createdSequences[idxTo],
+ });
}
+
+ delete createdSequences[idxTo];
+ delete deletedSequences[idxFrom];
+
+ createdSequences = createdSequences.filter(Boolean);
+ deletedSequences = deletedSequences.filter(Boolean);
}
+ }
- result.created = createdSequences;
- result.deleted = deletedSequences;
+ result.created = createdSequences;
+ result.deleted = deletedSequences;
- return result;
- } catch (e) {
- console.error(e);
- throw e;
+ return result;
+ } catch (e) {
+ console.error(e);
+ throw e;
+ }
+};
+
+export const testEnumsResolver = (renames: Set) =>
+async (
+ input: ResolverInput,
+): Promise> => {
+ try {
+ if (
+ input.created.length === 0
+ || input.deleted.length === 0
+ || renames.size === 0
+ ) {
+ return {
+ created: input.created,
+ moved: [],
+ renamed: [],
+ deleted: input.deleted,
+ };
}
- };
-export const testEnumsResolver =
- (renames: Set) => async (input: ResolverInput): Promise> => {
- try {
- if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) {
- return {
- created: input.created,
- moved: [],
- renamed: [],
- deleted: input.deleted,
- };
- }
+ let createdEnums = [...input.created];
+ let deletedEnums = [...input.deleted];
- let createdEnums = [...input.created];
- let deletedEnums = [...input.deleted];
+ const result: {
+ created: Enum[];
+ moved: { name: string; schemaFrom: string; schemaTo: string }[];
+ renamed: { from: Enum; to: Enum }[];
+ deleted: Enum[];
+ } = { created: [], renamed: [], deleted: [], moved: [] };
- const result: {
- created: Enum[];
- moved: { name: string; schemaFrom: string; schemaTo: string }[];
- renamed: { from: Enum; to: Enum }[];
- deleted: Enum[];
- } = { created: [], renamed: [], deleted: [], moved: [] };
+ for (let rename of renames) {
+ const [from, to] = rename.split('->');
- for (let rename of renames) {
- const [from, to] = rename.split('->');
+ const idxFrom = deletedEnums.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === from;
+ });
- const idxFrom = deletedEnums.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === from;
+ if (idxFrom >= 0) {
+ const idxTo = createdEnums.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === to;
});
- if (idxFrom >= 0) {
- const idxTo = createdEnums.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === to;
+ const tableFrom = deletedEnums[idxFrom];
+ const tableTo = createdEnums[idxFrom];
+
+ if (tableFrom.schema !== tableTo.schema) {
+ result.moved.push({
+ name: tableFrom.name,
+ schemaFrom: tableFrom.schema,
+ schemaTo: tableTo.schema,
});
+ }
- const tableFrom = deletedEnums[idxFrom];
- const tableTo = createdEnums[idxFrom];
-
- if (tableFrom.schema !== tableTo.schema) {
- result.moved.push({
- name: tableFrom.name,
- schemaFrom: tableFrom.schema,
- schemaTo: tableTo.schema,
- });
- }
-
- if (tableFrom.name !== tableTo.name) {
- result.renamed.push({
- from: deletedEnums[idxFrom],
- to: createdEnums[idxTo],
- });
- }
-
- delete createdEnums[idxTo];
- delete deletedEnums[idxFrom];
-
- createdEnums = createdEnums.filter(Boolean);
- deletedEnums = deletedEnums.filter(Boolean);
+ if (tableFrom.name !== tableTo.name) {
+ result.renamed.push({
+ from: deletedEnums[idxFrom],
+ to: createdEnums[idxTo],
+ });
}
+
+ delete createdEnums[idxTo];
+ delete deletedEnums[idxFrom];
+
+ createdEnums = createdEnums.filter(Boolean);
+ deletedEnums = deletedEnums.filter(Boolean);
}
+ }
- result.created = createdEnums;
- result.deleted = deletedEnums;
+ result.created = createdEnums;
+ result.deleted = deletedEnums;
- return result;
- } catch (e) {
- console.error(e);
- throw e;
+ return result;
+ } catch (e) {
+ console.error(e);
+ throw e;
+ }
+};
+
+export const testTablesResolver = (renames: Set) =>
+async (
+ input: ResolverInput,
+): Promise> => {
+ try {
+ if (
+ input.created.length === 0
+ || input.deleted.length === 0
+ || renames.size === 0
+ ) {
+ return {
+ created: input.created,
+ moved: [],
+ renamed: [],
+ deleted: input.deleted,
+ };
}
- };
-export const testTablesResolver =
- (renames: Set) => async (input: ResolverInput): Promise> => {
- try {
- if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) {
- return {
- created: input.created,
- moved: [],
- renamed: [],
- deleted: input.deleted,
- };
- }
+ let createdTables = [...input.created];
+ let deletedTables = [...input.deleted];
- let createdTables = [...input.created];
- let deletedTables = [...input.deleted];
+ const result: {
+ created: Table[];
+ moved: { name: string; schemaFrom: string; schemaTo: string }[];
+ renamed: { from: Table; to: Table }[];
+ deleted: Table[];
+ } = { created: [], renamed: [], deleted: [], moved: [] };
- const result: {
- created: Table[];
- moved: { name: string; schemaFrom: string; schemaTo: string }[];
- renamed: { from: Table; to: Table }[];
- deleted: Table[];
- } = { created: [], renamed: [], deleted: [], moved: [] };
+ for (let rename of renames) {
+ const [from, to] = rename.split('->');
- for (let rename of renames) {
- const [from, to] = rename.split('->');
+ const idxFrom = deletedTables.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === from;
+ });
- const idxFrom = deletedTables.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === from;
+ if (idxFrom >= 0) {
+ const idxTo = createdTables.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === to;
});
- if (idxFrom >= 0) {
- const idxTo = createdTables.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === to;
+ const tableFrom = deletedTables[idxFrom];
+ const tableTo = createdTables[idxFrom];
+
+ if (tableFrom.schema !== tableTo.schema) {
+ result.moved.push({
+ name: tableFrom.name,
+ schemaFrom: tableFrom.schema,
+ schemaTo: tableTo.schema,
});
+ }
- const tableFrom = deletedTables[idxFrom];
- const tableTo = createdTables[idxFrom];
-
- if (tableFrom.schema !== tableTo.schema) {
- result.moved.push({
- name: tableFrom.name,
- schemaFrom: tableFrom.schema,
- schemaTo: tableTo.schema,
- });
- }
-
- if (tableFrom.name !== tableTo.name) {
- result.renamed.push({
- from: deletedTables[idxFrom],
- to: createdTables[idxTo],
- });
- }
-
- delete createdTables[idxTo];
- delete deletedTables[idxFrom];
-
- createdTables = createdTables.filter(Boolean);
- deletedTables = deletedTables.filter(Boolean);
+ if (tableFrom.name !== tableTo.name) {
+ result.renamed.push({
+ from: deletedTables[idxFrom],
+ to: createdTables[idxTo],
+ });
}
+
+ delete createdTables[idxTo];
+ delete deletedTables[idxFrom];
+
+ createdTables = createdTables.filter(Boolean);
+ deletedTables = deletedTables.filter(Boolean);
}
+ }
- result.created = createdTables;
- result.deleted = deletedTables;
+ result.created = createdTables;
+ result.deleted = deletedTables;
- return result;
- } catch (e) {
- console.error(e);
- throw e;
+ return result;
+ } catch (e) {
+ console.error(e);
+ throw e;
+ }
+};
+
+export const testColumnsResolver = (renames: Set) =>
+async (
+ input: ColumnsResolverInput,
+): Promise> => {
+ try {
+ if (
+ input.created.length === 0
+ || input.deleted.length === 0
+ || renames.size === 0
+ ) {
+ return {
+ tableName: input.tableName,
+ schema: input.schema,
+ created: input.created,
+ renamed: [],
+ deleted: input.deleted,
+ };
}
- };
-export const testColumnsResolver =
- (renames: Set) => async (input: ColumnsResolverInput): Promise> => {
- try {
- if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) {
- return {
- tableName: input.tableName,
- schema: input.schema,
- created: input.created,
- renamed: [],
- deleted: input.deleted,
- };
- }
+ let createdColumns = [...input.created];
+ let deletedColumns = [...input.deleted];
- let createdColumns = [...input.created];
- let deletedColumns = [...input.deleted];
+ const renamed: { from: Column; to: Column }[] = [];
- const renamed: { from: Column; to: Column }[] = [];
+ const schema = input.schema || 'public';
- const schema = input.schema || 'public';
+ for (let rename of renames) {
+ const [from, to] = rename.split('->');
- for (let rename of renames) {
- const [from, to] = rename.split('->');
+ const idxFrom = deletedColumns.findIndex((it) => {
+ return `${schema}.${input.tableName}.${it.name}` === from;
+ });
- const idxFrom = deletedColumns.findIndex((it) => {
- return `${schema}.${input.tableName}.${it.name}` === from;
+ if (idxFrom >= 0) {
+ const idxTo = createdColumns.findIndex((it) => {
+ return `${schema}.${input.tableName}.${it.name}` === to;
});
- if (idxFrom >= 0) {
- const idxTo = createdColumns.findIndex((it) => {
- return `${schema}.${input.tableName}.${it.name}` === to;
- });
-
- renamed.push({
- from: deletedColumns[idxFrom],
- to: createdColumns[idxTo],
- });
+ renamed.push({
+ from: deletedColumns[idxFrom],
+ to: createdColumns[idxTo],
+ });
- delete createdColumns[idxTo];
- delete deletedColumns[idxFrom];
+ delete createdColumns[idxTo];
+ delete deletedColumns[idxFrom];
- createdColumns = createdColumns.filter(Boolean);
- deletedColumns = deletedColumns.filter(Boolean);
- }
+ createdColumns = createdColumns.filter(Boolean);
+ deletedColumns = deletedColumns.filter(Boolean);
}
-
- return {
- tableName: input.tableName,
- schema: input.schema,
- created: createdColumns,
- deleted: deletedColumns,
- renamed,
- };
- } catch (e) {
- console.error(e);
- throw e;
}
- };
+
+ return {
+ tableName: input.tableName,
+ schema: input.schema,
+ created: createdColumns,
+ deleted: deletedColumns,
+ renamed,
+ };
+ } catch (e) {
+ console.error(e);
+ throw e;
+ }
+};
export const testPolicyResolver = (renames: Set) =>
async (
@@ -586,208 +635,301 @@ async (
}
};
-export const testViewsResolver =
- (renames: Set) => async (input: ResolverInput): Promise> => {
- try {
- if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) {
- return {
- created: input.created,
- moved: [],
- renamed: [],
- deleted: input.deleted,
- };
- }
+export const testViewsResolver = (renames: Set) =>
+async (
+ input: ResolverInput,
+): Promise> => {
+ try {
+ if (
+ input.created.length === 0
+ || input.deleted.length === 0
+ || renames.size === 0
+ ) {
+ return {
+ created: input.created,
+ moved: [],
+ renamed: [],
+ deleted: input.deleted,
+ };
+ }
- let createdViews = [...input.created];
- let deletedViews = [...input.deleted];
+ let createdViews = [...input.created];
+ let deletedViews = [...input.deleted];
- const result: {
- created: View[];
- moved: { name: string; schemaFrom: string; schemaTo: string }[];
- renamed: { from: View; to: View }[];
- deleted: View[];
- } = { created: [], renamed: [], deleted: [], moved: [] };
+ const result: {
+ created: View[];
+ moved: { name: string; schemaFrom: string; schemaTo: string }[];
+ renamed: { from: View; to: View }[];
+ deleted: View[];
+ } = { created: [], renamed: [], deleted: [], moved: [] };
- for (let rename of renames) {
- const [from, to] = rename.split('->');
+ for (let rename of renames) {
+ const [from, to] = rename.split('->');
- const idxFrom = deletedViews.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === from;
+ const idxFrom = deletedViews.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === from;
+ });
+
+ if (idxFrom >= 0) {
+ const idxTo = createdViews.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === to;
});
- if (idxFrom >= 0) {
- const idxTo = createdViews.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === to;
- });
+ const viewFrom = deletedViews[idxFrom];
+ const viewTo = createdViews[idxFrom];
- const viewFrom = deletedViews[idxFrom];
- const viewTo = createdViews[idxFrom];
-
- if (viewFrom.schema !== viewTo.schema) {
- result.moved.push({
- name: viewFrom.name,
- schemaFrom: viewFrom.schema,
- schemaTo: viewTo.schema,
- });
- }
-
- if (viewFrom.name !== viewTo.name) {
- result.renamed.push({
- from: deletedViews[idxFrom],
- to: createdViews[idxTo],
- });
- }
-
- delete createdViews[idxTo];
- delete deletedViews[idxFrom];
-
- createdViews = createdViews.filter(Boolean);
- deletedViews = deletedViews.filter(Boolean);
+ if (viewFrom.schema !== viewTo.schema) {
+ result.moved.push({
+ name: viewFrom.name,
+ schemaFrom: viewFrom.schema,
+ schemaTo: viewTo.schema,
+ });
}
- }
- result.created = createdViews;
- result.deleted = deletedViews;
+ if (viewFrom.name !== viewTo.name) {
+ result.renamed.push({
+ from: deletedViews[idxFrom],
+ to: createdViews[idxTo],
+ });
+ }
- return result;
- } catch (e) {
- console.error(e);
- throw e;
- }
- };
+ delete createdViews[idxTo];
+ delete deletedViews[idxFrom];
-export const testViewsResolverMySql =
- (renames: Set) =>
- async (input: ResolverInput): Promise> => {
- try {
- if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) {
- return {
- created: input.created,
- moved: [],
- renamed: [],
- deleted: input.deleted,
- };
+ createdViews = createdViews.filter(Boolean);
+ deletedViews = deletedViews.filter(Boolean);
}
+ }
- let createdViews = [...input.created];
- let deletedViews = [...input.deleted];
-
- const result: {
- created: ViewSquashed[];
- moved: { name: string; schemaFrom: string; schemaTo: string }[];
- renamed: { from: ViewSquashed; to: ViewSquashed }[];
- deleted: ViewSquashed[];
- } = { created: [], renamed: [], deleted: [], moved: [] };
+ result.created = createdViews;
+ result.deleted = deletedViews;
- for (let rename of renames) {
- const [from, to] = rename.split('->');
+ return result;
+ } catch (e) {
+ console.error(e);
+ throw e;
+ }
+};
- const idxFrom = deletedViews.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === from;
+export const testViewsResolverMySql = (renames: Set) =>
+async (
+ input: ResolverInput,
+): Promise> => {
+ try {
+ if (
+ input.created.length === 0
+ || input.deleted.length === 0
+ || renames.size === 0
+ ) {
+ return {
+ created: input.created,
+ moved: [],
+ renamed: [],
+ deleted: input.deleted,
+ };
+ }
+
+ let createdViews = [...input.created];
+ let deletedViews = [...input.deleted];
+
+ const result: {
+ created: ViewSquashed[];
+ moved: { name: string; schemaFrom: string; schemaTo: string }[];
+ renamed: { from: ViewSquashed; to: ViewSquashed }[];
+ deleted: ViewSquashed[];
+ } = { created: [], renamed: [], deleted: [], moved: [] };
+
+ for (let rename of renames) {
+ const [from, to] = rename.split('->');
+
+ const idxFrom = deletedViews.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === from;
+ });
+
+ if (idxFrom >= 0) {
+ const idxTo = createdViews.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === to;
});
- if (idxFrom >= 0) {
- const idxTo = createdViews.findIndex((it) => {
- return `${it.schema || 'public'}.${it.name}` === to;
+ const viewFrom = deletedViews[idxFrom];
+ const viewTo = createdViews[idxFrom];
+
+ if (viewFrom.schema !== viewTo.schema) {
+ result.moved.push({
+ name: viewFrom.name,
+ schemaFrom: viewFrom.schema,
+ schemaTo: viewTo.schema,
});
+ }
- const viewFrom = deletedViews[idxFrom];
- const viewTo = createdViews[idxFrom];
-
- if (viewFrom.schema !== viewTo.schema) {
- result.moved.push({
- name: viewFrom.name,
- schemaFrom: viewFrom.schema,
- schemaTo: viewTo.schema,
- });
- }
-
- if (viewFrom.name !== viewTo.name) {
- result.renamed.push({
- from: deletedViews[idxFrom],
- to: createdViews[idxTo],
- });
- }
-
- delete createdViews[idxTo];
- delete deletedViews[idxFrom];
-
- createdViews = createdViews.filter(Boolean);
- deletedViews = deletedViews.filter(Boolean);
+ if (viewFrom.name !== viewTo.name) {
+ result.renamed.push({
+ from: deletedViews[idxFrom],
+ to: createdViews[idxTo],
+ });
}
+
+ delete createdViews[idxTo];
+ delete deletedViews[idxFrom];
+
+ createdViews = createdViews.filter(Boolean);
+ deletedViews = deletedViews.filter(Boolean);
}
+ }
- result.created = createdViews;
- result.deleted = deletedViews;
+ result.created = createdViews;
+ result.deleted = deletedViews;
- return result;
- } catch (e) {
- console.error(e);
- throw e;
+ return result;
+ } catch (e) {
+ console.error(e);
+ throw e;
+ }
+};
+
+export const testViewsResolverSingleStore = (renames: Set) =>
+async (
+ input: ResolverInput,
+): Promise> => {
+ try {
+ if (
+ input.created.length === 0
+ || input.deleted.length === 0
+ || renames.size === 0
+ ) {
+ return {
+ created: input.created,
+ moved: [],
+ renamed: [],
+ deleted: input.deleted,
+ };
}
- };
-export const testViewsResolverSqlite =
- (renames: Set) => async (input: ResolverInput): Promise> => {
- try {
- if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) {
- return {
- created: input.created,
- moved: [],
- renamed: [],
- deleted: input.deleted,
- };
- }
+ let createdViews = [...input.created];
+ let deletedViews = [...input.deleted];
- let createdViews = [...input.created];
- let deletedViews = [...input.deleted];
+ const result: {
+ created: ViewSquashed[];
+ moved: { name: string; schemaFrom: string; schemaTo: string }[];
+ renamed: { from: ViewSquashed; to: ViewSquashed }[];
+ deleted: ViewSquashed[];
+ } = { created: [], renamed: [], deleted: [], moved: [] };
- const result: {
- created: SqliteView[];
- moved: { name: string; schemaFrom: string; schemaTo: string }[];
- renamed: { from: SqliteView; to: SqliteView }[];
- deleted: SqliteView[];
- } = { created: [], renamed: [], deleted: [], moved: [] };
+ for (let rename of renames) {
+ const [from, to] = rename.split('->');
- for (let rename of renames) {
- const [from, to] = rename.split('->');
+ const idxFrom = deletedViews.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === from;
+ });
- const idxFrom = deletedViews.findIndex((it) => {
- return it.name === from;
+ if (idxFrom >= 0) {
+ const idxTo = createdViews.findIndex((it) => {
+ return `${it.schema || 'public'}.${it.name}` === to;
});
- if (idxFrom >= 0) {
- const idxTo = createdViews.findIndex((it) => {
- return it.name === to;
+ const viewFrom = deletedViews[idxFrom];
+ const viewTo = createdViews[idxFrom];
+
+ if (viewFrom.schema !== viewTo.schema) {
+ result.moved.push({
+ name: viewFrom.name,
+ schemaFrom: viewFrom.schema,
+ schemaTo: viewTo.schema,
+ });
+ }
+
+ if (viewFrom.name !== viewTo.name) {
+ result.renamed.push({
+ from: deletedViews[idxFrom],
+ to: createdViews[idxTo],
});
+ }
+
+ delete createdViews[idxTo];
+ delete deletedViews[idxFrom];
+
+ createdViews = createdViews.filter(Boolean);
+ deletedViews = deletedViews.filter(Boolean);
+ }
+ }
+
+ result.created = createdViews;
+ result.deleted = deletedViews;
- const viewFrom = deletedViews[idxFrom];
- const viewTo = createdViews[idxFrom];
+ return result;
+ } catch (e) {
+ console.error(e);
+ throw e;
+ }
+};
- if (viewFrom.name !== viewTo.name) {
- result.renamed.push({
- from: deletedViews[idxFrom],
- to: createdViews[idxTo],
- });
- }
+export const testViewsResolverSqlite = (renames: Set) =>
+async (
+ input: ResolverInput,
+): Promise> => {
+ try {
+ if (
+ input.created.length === 0
+ || input.deleted.length === 0
+ || renames.size === 0
+ ) {
+ return {
+ created: input.created,
+ moved: [],
+ renamed: [],
+ deleted: input.deleted,
+ };
+ }
+
+ let createdViews = [...input.created];
+ let deletedViews = [...input.deleted];
+
+ const result: {
+ created: SqliteView[];
+ moved: { name: string; schemaFrom: string; schemaTo: string }[];
+ renamed: { from: SqliteView; to: SqliteView }[];
+ deleted: SqliteView[];
+ } = { created: [], renamed: [], deleted: [], moved: [] };
+
+ for (let rename of renames) {
+ const [from, to] = rename.split('->');
+
+ const idxFrom = deletedViews.findIndex((it) => {
+ return it.name === from;
+ });
+
+ if (idxFrom >= 0) {
+ const idxTo = createdViews.findIndex((it) => {
+ return it.name === to;
+ });
- delete createdViews[idxTo];
- delete deletedViews[idxFrom];
+ const viewFrom = deletedViews[idxFrom];
+ const viewTo = createdViews[idxFrom];
- createdViews = createdViews.filter(Boolean);
- deletedViews = deletedViews.filter(Boolean);
+ if (viewFrom.name !== viewTo.name) {
+ result.renamed.push({
+ from: deletedViews[idxFrom],
+ to: createdViews[idxTo],
+ });
}
- }
- result.created = createdViews;
- result.deleted = deletedViews;
+ delete createdViews[idxTo];
+ delete deletedViews[idxFrom];
- return result;
- } catch (e) {
- console.error(e);
- throw e;
+ createdViews = createdViews.filter(Boolean);
+ deletedViews = deletedViews.filter(Boolean);
+ }
}
- };
+
+ result.created = createdViews;
+ result.deleted = deletedViews;
+
+ return result;
+ } catch (e) {
+ console.error(e);
+ throw e;
+ }
+};
export const diffTestSchemasPush = async (
client: PGlite,
@@ -798,13 +940,19 @@ export const diffTestSchemasPush = async (
schemas: string[] = ['public'],
casing?: CasingType | undefined,
entities?: Entities,
- sqlStatementsToRun: { before?: string[]; after?: string[]; runApply?: boolean } = {
+ sqlStatementsToRun: {
+ before?: string[];
+ after?: string[];
+ runApply?: boolean;
+ } = {
before: [],
after: [],
runApply: true,
},
) => {
- const shouldRunApply = sqlStatementsToRun.runApply === undefined ? true : sqlStatementsToRun.runApply;
+ const shouldRunApply = sqlStatementsToRun.runApply === undefined
+ ? true
+ : sqlStatementsToRun.runApply;
for (const st of sqlStatementsToRun.before ?? []) {
await client.query(st);
@@ -837,6 +985,19 @@ export const diffTestSchemasPush = async (
);
}
+ // do introspect into PgSchemaInternal
+ const introspectedSchema = await fromDatabase(
+ {
+ query: async (query: string, values?: any[] | undefined) => {
+ const res = await client.query(query, values);
+ return res.rows as any[];
+ },
+ },
+ undefined,
+ schemas,
+ entities,
+ );
+
const leftTables = Object.values(right).filter((it) => is(it, PgTable)) as PgTable[];
const leftSchemas = Object.values(right).filter((it) => is(it, PgSchema)) as PgSchema[];
@@ -865,21 +1026,6 @@ export const diffTestSchemasPush = async (
casing,
);
- // do introspect into PgSchemaInternal
- const introspectedSchema = await fromDatabase(
- {
- query: async (query: string, values?: any[] | undefined) => {
- const res = await client.query(query, values);
- return res.rows as any[];
- },
- },
- undefined,
- schemas,
- entities,
- undefined,
- serialized2,
- );
-
const { version: v1, dialect: d1, ...rest1 } = introspectedSchema;
const { version: v2, dialect: d2, ...rest2 } = serialized2;
@@ -975,7 +1121,10 @@ export const diffTestSchemasPush = async (
}
};
-export const applyPgDiffs = async (sn: PostgresSchema, casing: CasingType | undefined) => {
+export const applyPgDiffs = async (
+ sn: PostgresSchema,
+ casing: CasingType | undefined,
+) => {
const dryRun = {
version: '7',
dialect: 'postgresql',
@@ -1122,81 +1271,366 @@ export const diffTestSchemas = async (
const { version: v2, dialect: d2, ...rest2 } = serialized2;
const sch1 = {
- version: '7',
- dialect: 'postgresql',
+ version: '7',
+ dialect: 'postgresql',
+ id: '0',
+ prevId: '0',
+ ...rest1,
+ } as const;
+
+ const sch2 = {
+ version: '7',
+ dialect: 'postgresql',
+ id: '0',
+ prevId: '0',
+ ...rest2,
+ } as const;
+
+ const sn1 = squashPgScheme(sch1);
+ const sn2 = squashPgScheme(sch2);
+
+ const validatedPrev = pgSchema.parse(sch1);
+ const validatedCur = pgSchema.parse(sch2);
+
+ const renames = new Set(renamesArr);
+
+ if (!cli) {
+ const { sqlStatements, statements } = await applyPgSnapshotsDiff(
+ sn1,
+ sn2,
+ testSchemasResolver(renames),
+ testEnumsResolver(renames),
+ testSequencesResolver(renames),
+ testPolicyResolver(renames),
+ testIndPolicyResolver(renames),
+ testRolesResolver(renames),
+ testTablesResolver(renames),
+ testColumnsResolver(renames),
+ testViewsResolver(renames),
+ validatedPrev,
+ validatedCur,
+ );
+ return { sqlStatements, statements };
+ } else {
+ const { sqlStatements, statements } = await applyPgSnapshotsDiff(
+ sn1,
+ sn2,
+ schemasResolver,
+ enumsResolver,
+ sequencesResolver,
+ policyResolver,
+ indPolicyResolver,
+ roleResolver,
+ tablesResolver,
+ columnsResolver,
+ viewsResolver,
+ validatedPrev,
+ validatedCur,
+ );
+ return { sqlStatements, statements };
+ }
+};
+
+export const diffTestSchemasPushMysql = async (
+ client: Connection,
+ left: MysqlSchema,
+ right: MysqlSchema,
+ renamesArr: string[],
+ schema: string,
+ cli: boolean = false,
+ casing?: CasingType | undefined,
+) => {
+ const { sqlStatements } = await applyMySqlDiffs(left, casing);
+ for (const st of sqlStatements) {
+ await client.query(st);
+ }
+ // do introspect into PgSchemaInternal
+ const introspectedSchema = await fromMySqlDatabase(
+ {
+ query: async (sql: string, params?: any[]) => {
+ const res = await client.execute(sql, params);
+ return res[0] as any;
+ },
+ },
+ schema,
+ );
+
+ const leftTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[];
+
+ const leftViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[];
+
+ const serialized2 = generateMySqlSnapshot(leftTables, leftViews, casing);
+
+ const { version: v1, dialect: d1, ...rest1 } = introspectedSchema;
+ const { version: v2, dialect: d2, ...rest2 } = serialized2;
+
+ const sch1 = {
+ version: '5',
+ dialect: 'mysql',
+ id: '0',
+ prevId: '0',
+ ...rest1,
+ } as const;
+
+ const sch2 = {
+ version: '5',
+ dialect: 'mysql',
+ id: '0',
+ prevId: '0',
+ ...rest2,
+ } as const;
+
+ const sn1 = squashMysqlScheme(sch1);
+ const sn2 = squashMysqlScheme(sch2);
+
+ const validatedPrev = mysqlSchema.parse(sch1);
+ const validatedCur = mysqlSchema.parse(sch2);
+
+ const renames = new Set(renamesArr);
+
+ if (!cli) {
+ const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
+ sn1,
+ sn2,
+ testTablesResolver(renames),
+ testColumnsResolver(renames),
+ testViewsResolverMySql(renames),
+ validatedPrev,
+ validatedCur,
+ 'push',
+ );
+ return { sqlStatements, statements };
+ } else {
+ const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
+ sn1,
+ sn2,
+ tablesResolver,
+ columnsResolver,
+ mySqlViewsResolver,
+ validatedPrev,
+ validatedCur,
+ 'push',
+ );
+ return { sqlStatements, statements };
+ }
+};
+
+export const applyMySqlDiffs = async (
+ sn: MysqlSchema,
+ casing: CasingType | undefined,
+) => {
+ const dryRun = {
+ version: '5',
+ dialect: 'mysql',
+ id: '0',
+ prevId: '0',
+ views: {},
+ tables: {},
+ enums: {},
+ schemas: {},
+ _meta: {
+ schemas: {},
+ tables: {},
+ columns: {},
+ },
+ } as const;
+
+ const tables = Object.values(sn).filter((it) => is(it, MySqlTable)) as MySqlTable[];
+
+ const views = Object.values(sn).filter((it) => is(it, MySqlView)) as MySqlView[];
+
+ const serialized1 = generateMySqlSnapshot(tables, views, casing);
+
+ const { version: v1, dialect: d1, ...rest1 } = serialized1;
+
+ const sch1 = {
+ version: '5',
+ dialect: 'mysql',
+ id: '0',
+ prevId: '0',
+ ...rest1,
+ } as const;
+
+ const sn1 = squashMysqlScheme(sch1);
+
+ const validatedPrev = mysqlSchema.parse(dryRun);
+ const validatedCur = mysqlSchema.parse(sch1);
+
+ const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
+ dryRun,
+ sn1,
+ testTablesResolver(new Set()),
+ testColumnsResolver(new Set()),
+ testViewsResolverMySql(new Set()),
+ validatedPrev,
+ validatedCur,
+ );
+ return { sqlStatements, statements };
+};
+
+export const diffTestSchemasMysql = async (
+ left: MysqlSchema,
+ right: MysqlSchema,
+ renamesArr: string[],
+ cli: boolean = false,
+ casing?: CasingType | undefined,
+) => {
+ const leftTables = Object.values(left).filter((it) => is(it, MySqlTable)) as MySqlTable[];
+
+ const leftViews = Object.values(left).filter((it) => is(it, MySqlView)) as MySqlView[];
+
+ const rightTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[];
+
+ const rightViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[];
+
+ const serialized1 = generateMySqlSnapshot(leftTables, leftViews, casing);
+ const serialized2 = generateMySqlSnapshot(rightTables, rightViews, casing);
+
+ const { version: v1, dialect: d1, ...rest1 } = serialized1;
+ const { version: v2, dialect: d2, ...rest2 } = serialized2;
+
+ const sch1 = {
+ version: '5',
+ dialect: 'mysql',
+ id: '0',
+ prevId: '0',
+ ...rest1,
+ } as const;
+
+ const sch2 = {
+ version: '5',
+ dialect: 'mysql',
+ id: '0',
+ prevId: '0',
+ ...rest2,
+ } as const;
+
+ const sn1 = squashMysqlScheme(sch1);
+ const sn2 = squashMysqlScheme(sch2);
+
+ const validatedPrev = mysqlSchema.parse(sch1);
+ const validatedCur = mysqlSchema.parse(sch2);
+
+ const renames = new Set(renamesArr);
+
+ if (!cli) {
+ const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
+ sn1,
+ sn2,
+ testTablesResolver(renames),
+ testColumnsResolver(renames),
+ testViewsResolverMySql(renames),
+ validatedPrev,
+ validatedCur,
+ );
+ return { sqlStatements, statements };
+ }
+
+ const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
+ sn1,
+ sn2,
+ tablesResolver,
+ columnsResolver,
+ mySqlViewsResolver,
+ validatedPrev,
+ validatedCur,
+ );
+ return { sqlStatements, statements };
+};
+
+export const diffTestSchemasSingleStore = async (
+ left: SinglestoreSchema,
+ right: SinglestoreSchema,
+ renamesArr: string[],
+ cli: boolean = false,
+ casing?: CasingType | undefined,
+) => {
+ const leftTables = Object.values(left).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[];
+
+ /* const leftViews = Object.values(left).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */
+
+ const rightTables = Object.values(right).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[];
+
+ /* const rightViews = Object.values(right).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */
+
+ const serialized1 = generateSingleStoreSnapshot(
+ leftTables,
+ /* leftViews, */
+ casing,
+ );
+ const serialized2 = generateSingleStoreSnapshot(
+ rightTables,
+ /* rightViews, */
+ casing,
+ );
+
+ const { version: v1, dialect: d1, ...rest1 } = serialized1;
+ const { version: v2, dialect: d2, ...rest2 } = serialized2;
+
+ const sch1 = {
+ version: '1',
+ dialect: 'singlestore',
id: '0',
prevId: '0',
...rest1,
} as const;
const sch2 = {
- version: '7',
- dialect: 'postgresql',
+ version: '1',
+ dialect: 'singlestore',
id: '0',
prevId: '0',
...rest2,
} as const;
- const sn1 = squashPgScheme(sch1);
- const sn2 = squashPgScheme(sch2);
+ const sn1 = squashSingleStoreScheme(sch1);
+ const sn2 = squashSingleStoreScheme(sch2);
- const validatedPrev = pgSchema.parse(sch1);
- const validatedCur = pgSchema.parse(sch2);
+ const validatedPrev = singlestoreSchema.parse(sch1);
+ const validatedCur = singlestoreSchema.parse(sch2);
const renames = new Set(renamesArr);
if (!cli) {
- const { sqlStatements, statements } = await applyPgSnapshotsDiff(
+ const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff(
sn1,
sn2,
- testSchemasResolver(renames),
- testEnumsResolver(renames),
- testSequencesResolver(renames),
- testPolicyResolver(renames),
- testIndPolicyResolver(renames),
- testRolesResolver(renames),
testTablesResolver(renames),
testColumnsResolver(renames),
- testViewsResolver(renames),
- validatedPrev,
- validatedCur,
- );
- return { sqlStatements, statements };
- } else {
- const { sqlStatements, statements } = await applyPgSnapshotsDiff(
- sn1,
- sn2,
- schemasResolver,
- enumsResolver,
- sequencesResolver,
- policyResolver,
- indPolicyResolver,
- roleResolver,
- tablesResolver,
- columnsResolver,
- viewsResolver,
+ /* testViewsResolverSingleStore(renames), */
validatedPrev,
validatedCur,
);
return { sqlStatements, statements };
}
+
+ const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff(
+ sn1,
+ sn2,
+ tablesResolver,
+ columnsResolver,
+ /* singleStoreViewsResolver, */
+ validatedPrev,
+ validatedCur,
+ );
+ return { sqlStatements, statements };
};
-export const diffTestSchemasPushMysql = async (
+export const diffTestSchemasPushSingleStore = async (
client: Connection,
- left: MysqlSchema,
- right: MysqlSchema,
+ left: SinglestoreSchema,
+ right: SinglestoreSchema,
renamesArr: string[],
schema: string,
cli: boolean = false,
casing?: CasingType | undefined,
) => {
- const { sqlStatements } = await applyMySqlDiffs(left, casing);
+ const { sqlStatements } = await applySingleStoreDiffs(left, casing);
for (const st of sqlStatements) {
await client.query(st);
}
// do introspect into PgSchemaInternal
- const introspectedSchema = await fromMySqlDatabase(
+ const introspectedSchema = await fromSingleStoreDatabase(
{
query: async (sql: string, params?: any[]) => {
const res = await client.execute(sql, params);
@@ -1206,58 +1640,62 @@ export const diffTestSchemasPushMysql = async (
schema,
);
- const leftTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[];
+ const leftTables = Object.values(right).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[];
- const leftViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[];
+ /* const leftViews = Object.values(right).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */
- const serialized2 = generateMySqlSnapshot(leftTables, leftViews, casing);
+ const serialized2 = generateSingleStoreSnapshot(
+ leftTables,
+ /* leftViews, */
+ casing,
+ );
const { version: v1, dialect: d1, ...rest1 } = introspectedSchema;
const { version: v2, dialect: d2, ...rest2 } = serialized2;
const sch1 = {
- version: '5',
- dialect: 'mysql',
+ version: '1',
+ dialect: 'singlestore',
id: '0',
prevId: '0',
...rest1,
} as const;
const sch2 = {
- version: '5',
- dialect: 'mysql',
+ version: '1',
+ dialect: 'singlestore',
id: '0',
prevId: '0',
...rest2,
} as const;
- const sn1 = squashMysqlScheme(sch1);
- const sn2 = squashMysqlScheme(sch2);
+ const sn1 = squashSingleStoreScheme(sch1);
+ const sn2 = squashSingleStoreScheme(sch2);
- const validatedPrev = mysqlSchema.parse(sch1);
- const validatedCur = mysqlSchema.parse(sch2);
+ const validatedPrev = singlestoreSchema.parse(sch1);
+ const validatedCur = singlestoreSchema.parse(sch2);
const renames = new Set(renamesArr);
if (!cli) {
- const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
+ const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff(
sn1,
sn2,
testTablesResolver(renames),
testColumnsResolver(renames),
- testViewsResolverMySql(renames),
+ /* testViewsResolverSingleStore(renames), */
validatedPrev,
validatedCur,
'push',
);
return { sqlStatements, statements };
} else {
- const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
+ const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff(
sn1,
sn2,
tablesResolver,
columnsResolver,
- mySqlViewsResolver,
+ /* singleStoreViewsResolver, */
validatedPrev,
validatedCur,
'push',
@@ -1266,14 +1704,17 @@ export const diffTestSchemasPushMysql = async (
}
};
-export const applyMySqlDiffs = async (sn: MysqlSchema, casing: CasingType | undefined) => {
+export const applySingleStoreDiffs = async (
+ sn: SinglestoreSchema,
+ casing: CasingType | undefined,
+) => {
const dryRun = {
- version: '5',
- dialect: 'mysql',
+ version: '1',
+ dialect: 'singlestore',
id: '0',
prevId: '0',
- views: {},
tables: {},
+ views: {},
enums: {},
schemas: {},
_meta: {
@@ -1283,103 +1724,33 @@ export const applyMySqlDiffs = async (sn: MysqlSchema, casing: CasingType | unde
},
} as const;
- const tables = Object.values(sn).filter((it) => is(it, MySqlTable)) as MySqlTable[];
+ const tables = Object.values(sn).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[];
- const views = Object.values(sn).filter((it) => is(it, MySqlView)) as MySqlView[];
+ /* const views = Object.values(sn).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */
- const serialized1 = generateMySqlSnapshot(tables, views, casing);
+ const serialized1 = generateSingleStoreSnapshot(tables, /* views, */ casing);
const { version: v1, dialect: d1, ...rest1 } = serialized1;
const sch1 = {
- version: '5',
- dialect: 'mysql',
+ version: '1',
+ dialect: 'singlestore',
id: '0',
prevId: '0',
...rest1,
} as const;
- const sn1 = squashMysqlScheme(sch1);
+ const sn1 = squashSingleStoreScheme(sch1);
- const validatedPrev = mysqlSchema.parse(dryRun);
- const validatedCur = mysqlSchema.parse(sch1);
+ const validatedPrev = singlestoreSchema.parse(dryRun);
+ const validatedCur = singlestoreSchema.parse(sch1);
- const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
+ const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff(
dryRun,
sn1,
testTablesResolver(new Set()),
testColumnsResolver(new Set()),
- testViewsResolverMySql(new Set()),
- validatedPrev,
- validatedCur,
- );
- return { sqlStatements, statements };
-};
-
-export const diffTestSchemasMysql = async (
- left: MysqlSchema,
- right: MysqlSchema,
- renamesArr: string[],
- cli: boolean = false,
- casing?: CasingType | undefined,
-) => {
- const leftTables = Object.values(left).filter((it) => is(it, MySqlTable)) as MySqlTable[];
-
- const leftViews = Object.values(left).filter((it) => is(it, MySqlView)) as MySqlView[];
-
- const rightTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[];
-
- const rightViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[];
-
- const serialized1 = generateMySqlSnapshot(leftTables, leftViews, casing);
- const serialized2 = generateMySqlSnapshot(rightTables, rightViews, casing);
-
- const { version: v1, dialect: d1, ...rest1 } = serialized1;
- const { version: v2, dialect: d2, ...rest2 } = serialized2;
-
- const sch1 = {
- version: '5',
- dialect: 'mysql',
- id: '0',
- prevId: '0',
- ...rest1,
- } as const;
-
- const sch2 = {
- version: '5',
- dialect: 'mysql',
- id: '0',
- prevId: '0',
- ...rest2,
- } as const;
-
- const sn1 = squashMysqlScheme(sch1);
- const sn2 = squashMysqlScheme(sch2);
-
- const validatedPrev = mysqlSchema.parse(sch1);
- const validatedCur = mysqlSchema.parse(sch2);
-
- const renames = new Set(renamesArr);
-
- if (!cli) {
- const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
- sn1,
- sn2,
- testTablesResolver(renames),
- testColumnsResolver(renames),
- testViewsResolverMySql(renames),
- validatedPrev,
- validatedCur,
- );
- return { sqlStatements, statements };
- }
-
- const { sqlStatements, statements } = await applyMysqlSnapshotsDiff(
- sn1,
- sn2,
- tablesResolver,
- columnsResolver,
- mySqlViewsResolver,
+ /* testViewsResolverSingleStore(new Set()), */
validatedPrev,
validatedCur,
);
@@ -1582,22 +1953,28 @@ export async function diffTestSchemasPushLibSQL(
'push',
);
- const { statementsToExecute, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate } =
- await libSqlLogSuggestionsAndReturn(
- {
- query: async (sql: string, params?: any[]) => {
- const res = await client.execute({ sql, args: params || [] });
- return res.rows as T[];
- },
- run: async (query: string) => {
- await client.execute(query);
- },
+ const {
+ statementsToExecute,
+ columnsToRemove,
+ infoToPrint,
+ shouldAskForApprove,
+ tablesToRemove,
+ tablesToTruncate,
+ } = await libSqlLogSuggestionsAndReturn(
+ {
+ query: async (sql: string, params?: any[]) => {
+ const res = await client.execute({ sql, args: params || [] });
+ return res.rows as T[];
+ },
+ run: async (query: string) => {
+ await client.execute(query);
},
- statements,
- sn1,
- sn2,
- _meta!,
- );
+ },
+ statements,
+ sn1,
+ sn2,
+ _meta!,
+ );
return {
sqlStatements: statementsToExecute,
@@ -1911,7 +2288,9 @@ export const introspectPgToFile = async (
fs.writeFileSync(`tests/introspect/postgres/${testName}.ts`, file.file);
// generate snapshot from ts file
- const response = await prepareFromPgImports([`tests/introspect/postgres/${testName}.ts`]);
+ const response = await prepareFromPgImports([
+ `tests/introspect/postgres/${testName}.ts`,
+ ]);
const afterFileImports = generatePgSnapshot(
response.tables,
@@ -1938,7 +2317,10 @@ export const introspectPgToFile = async (
const sn2AfterIm = squashPgScheme(sch2);
const validatedCurAfterImport = pgSchema.parse(sch2);
- const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applyPgSnapshotsDiff(
+ const {
+ sqlStatements: afterFileSqlStatements,
+ statements: afterFileStatements,
+ } = await applyPgSnapshotsDiff(
initSn,
sn2AfterIm,
testSchemasResolver(new Set()),
@@ -2003,9 +2385,15 @@ export const introspectMySQLToFile = async (
fs.writeFileSync(`tests/introspect/mysql/${testName}.ts`, file.file);
- const response = await prepareFromMySqlImports([`tests/introspect/mysql/${testName}.ts`]);
+ const response = await prepareFromMySqlImports([
+ `tests/introspect/mysql/${testName}.ts`,
+ ]);
- const afterFileImports = generateMySqlSnapshot(response.tables, response.views, casing);
+ const afterFileImports = generateMySqlSnapshot(
+ response.tables,
+ response.views,
+ casing,
+ );
const { version: v2, dialect: d2, ...rest2 } = afterFileImports;
@@ -2020,7 +2408,10 @@ export const introspectMySQLToFile = async (
const sn2AfterIm = squashMysqlScheme(sch2);
const validatedCurAfterImport = mysqlSchema.parse(sch2);
- const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applyMysqlSnapshotsDiff(
+ const {
+ sqlStatements: afterFileSqlStatements,
+ statements: afterFileStatements,
+ } = await applyMysqlSnapshotsDiff(
sn2AfterIm,
initSn,
testTablesResolver(new Set()),
@@ -2038,6 +2429,99 @@ export const introspectMySQLToFile = async (
};
};
+export const introspectSingleStoreToFile = async (
+ client: Connection,
+ initSchema: SinglestoreSchema,
+ testName: string,
+ schema: string,
+ casing?: CasingType | undefined,
+) => {
+ // put in db
+ const { sqlStatements } = await applySingleStoreDiffs(initSchema, casing);
+ for (const st of sqlStatements) {
+ await client.query(st);
+ }
+
+ // introspect to schema
+ const introspectedSchema = await fromSingleStoreDatabase(
+ {
+ query: async (sql: string, params?: any[] | undefined) => {
+ const res = await client.execute(sql, params);
+ return res[0] as any;
+ },
+ },
+ schema,
+ );
+
+ const file = schemaToTypeScriptSingleStore(introspectedSchema, 'camel');
+
+ fs.writeFileSync(`tests/introspect/singlestore/${testName}.ts`, file.file);
+
+ const response = await prepareFromSingleStoreImports([
+ `tests/introspect/singlestore/${testName}.ts`,
+ ]);
+
+ const afterFileImports = generateSingleStoreSnapshot(
+ response.tables,
+ /* response.views, */
+ casing,
+ );
+
+ const { version: v2, dialect: d2, ...rest2 } = afterFileImports;
+
+ const sch2 = {
+ version: '1',
+ dialect: 'singlestore',
+ id: '0',
+ prevId: '0',
+ ...rest2,
+ } as const;
+
+ const sn2AfterIm = squashSingleStoreScheme(sch2);
+ const validatedCurAfterImport = singlestoreSchema.parse(sch2);
+
+ const leftTables = Object.values(initSchema).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[];
+
+ const initSnapshot = generateSingleStoreSnapshot(
+ leftTables,
+ /* response.views, */
+ casing,
+ );
+
+ const { version: initV, dialect: initD, ...initRest } = initSnapshot;
+
+ const initSch = {
+ version: '1',
+ dialect: 'singlestore',
+ id: '0',
+ prevId: '0',
+ ...initRest,
+ } as const;
+
+ const initSn = squashSingleStoreScheme(initSch);
+ const validatedCur = singlestoreSchema.parse(initSch);
+
+ const {
+ sqlStatements: afterFileSqlStatements,
+ statements: afterFileStatements,
+ } = await applySingleStoreSnapshotsDiff(
+ sn2AfterIm,
+ initSn,
+ testTablesResolver(new Set()),
+ testColumnsResolver(new Set()),
+ /* testViewsResolverSingleStore(new Set()), */
+ validatedCurAfterImport,
+ validatedCur,
+ );
+
+ fs.rmSync(`tests/introspect/singlestore/${testName}.ts`);
+
+ return {
+ sqlStatements: afterFileSqlStatements,
+ statements: afterFileStatements,
+ };
+};
+
export const introspectSQLiteToFile = async (
client: Database,
initSchema: SqliteSchema,
@@ -2081,9 +2565,15 @@ export const introspectSQLiteToFile = async (
fs.writeFileSync(`tests/introspect/sqlite/${testName}.ts`, file.file);
- const response = await prepareFromSqliteImports([`tests/introspect/sqlite/${testName}.ts`]);
+ const response = await prepareFromSqliteImports([
+ `tests/introspect/sqlite/${testName}.ts`,
+ ]);
- const afterFileImports = generateSqliteSnapshot(response.tables, response.views, casing);
+ const afterFileImports = generateSqliteSnapshot(
+ response.tables,
+ response.views,
+ casing,
+ );
const { version: v2, dialect: d2, ...rest2 } = afterFileImports;
@@ -2098,7 +2588,10 @@ export const introspectSQLiteToFile = async (
const sn2AfterIm = squashSqliteScheme(sch2);
const validatedCurAfterImport = sqliteSchema.parse(sch2);
- const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applySqliteSnapshotsDiff(
+ const {
+ sqlStatements: afterFileSqlStatements,
+ statements: afterFileStatements,
+ } = await applySqliteSnapshotsDiff(
sn2AfterIm,
initSn,
testTablesResolver(new Set()),
@@ -2159,9 +2652,15 @@ export const introspectLibSQLToFile = async (
fs.writeFileSync(`tests/introspect/libsql/${testName}.ts`, file.file);
- const response = await prepareFromSqliteImports([`tests/introspect/libsql/${testName}.ts`]);
+ const response = await prepareFromSqliteImports([
+ `tests/introspect/libsql/${testName}.ts`,
+ ]);
- const afterFileImports = generateSqliteSnapshot(response.tables, response.views, casing);
+ const afterFileImports = generateSqliteSnapshot(
+ response.tables,
+ response.views,
+ casing,
+ );
const { version: v2, dialect: d2, ...rest2 } = afterFileImports;
@@ -2176,7 +2675,10 @@ export const introspectLibSQLToFile = async (
const sn2AfterIm = squashSqliteScheme(sch2);
const validatedCurAfterImport = sqliteSchema.parse(sch2);
- const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applyLibSQLSnapshotsDiff(
+ const {
+ sqlStatements: afterFileSqlStatements,
+ statements: afterFileStatements,
+ } = await applyLibSQLSnapshotsDiff(
sn2AfterIm,
initSn,
testTablesResolver(new Set()),
diff --git a/drizzle-kit/tests/singlestore-generated.test.ts b/drizzle-kit/tests/singlestore-generated.test.ts
new file mode 100644
index 000000000..8944f3b21
--- /dev/null
+++ b/drizzle-kit/tests/singlestore-generated.test.ts
@@ -0,0 +1,1290 @@
+import { SQL, sql } from 'drizzle-orm';
+import { int, singlestoreTable, text } from 'drizzle-orm/singlestore-core';
+import { expect, test } from 'vitest';
+import { diffTestSchemasSingleStore } from './schemaDiffer';
+
+test('generated as callback: add column with generated constraint', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ (): SQL => sql`${to.users.name} || 'hello'`,
+ { mode: 'stored' },
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ column: {
+ generated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'stored',
+ },
+ autoincrement: false,
+ name: 'gen_name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ },
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_add_column',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;",
+ ]);
+});
+
+test('generated as callback: add generated constraint to an exisiting column as stored', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').notNull(),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name')
+ .notNull()
+ .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, {
+ mode: 'stored',
+ }),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'to add'",
+ type: 'stored',
+ },
+ columnAutoIncrement: false,
+ columnName: 'gen_name',
+ columnNotNull: true,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_set_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;",
+ ]);
+});
+
+test('generated as callback: add generated constraint to an exisiting column as virtual', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').notNull(),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name')
+ .notNull()
+ .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, {
+ mode: 'virtual',
+ }),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'to add'",
+ type: 'virtual',
+ },
+ columnName: 'gen_name',
+ columnNotNull: true,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_set_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` DROP COLUMN `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;",
+ ]);
+});
+
+test('generated as callback: drop generated constraint as stored', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ (): SQL => sql`${from.users.name} || 'to delete'`,
+ { mode: 'stored' },
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName1: text('gen_name'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: undefined,
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ oldColumn: {
+ autoincrement: false,
+ generated: {
+ as: "`users`.`name` || 'to delete'",
+ type: 'stored',
+ },
+ name: 'gen_name',
+ notNull: false,
+ onUpdate: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ type: 'alter_table_alter_column_drop_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;',
+ ]);
+});
+
+test('generated as callback: drop generated constraint as virtual', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ (): SQL => sql`${from.users.name} || 'to delete'`,
+ { mode: 'virtual' },
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName1: text('gen_name'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: undefined,
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ oldColumn: {
+ autoincrement: false,
+ generated: {
+ as: "`users`.`name` || 'to delete'",
+ type: 'virtual',
+ },
+ name: 'gen_name',
+ notNull: false,
+ onUpdate: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ tableName: 'users',
+ type: 'alter_table_alter_column_drop_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` DROP COLUMN `gen_name`;',
+ 'ALTER TABLE `users` ADD `gen_name` text;',
+ ]);
+});
+
+test('generated as callback: change generated constraint type from virtual to stored', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ (): SQL => sql`${from.users.name}`,
+ { mode: 'virtual' },
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ (): SQL => sql`${to.users.name} || 'hello'`,
+ { mode: 'stored' },
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'stored',
+ },
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_alter_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` drop column `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;",
+ ]);
+});
+
+test('generated as callback: change generated constraint type from stored to virtual', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ (): SQL => sql`${from.users.name}`,
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ (): SQL => sql`${to.users.name} || 'hello'`,
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'virtual',
+ },
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_alter_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` drop column `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;",
+ ]);
+});
+
+test('generated as callback: change generated constraint', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ (): SQL => sql`${from.users.name}`,
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ (): SQL => sql`${to.users.name} || 'hello'`,
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'virtual',
+ },
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_alter_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` drop column `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;",
+ ]);
+});
+
+// ---
+
+test('generated as sql: add column with generated constraint', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ sql`\`users\`.\`name\` || 'hello'`,
+ { mode: 'stored' },
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ column: {
+ generated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'stored',
+ },
+ autoincrement: false,
+ name: 'gen_name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ },
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_add_column',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;",
+ ]);
+});
+
+test('generated as sql: add generated constraint to an exisiting column as stored', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').notNull(),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name')
+ .notNull()
+ .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, {
+ mode: 'stored',
+ }),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'to add'",
+ type: 'stored',
+ },
+ columnAutoIncrement: false,
+ columnName: 'gen_name',
+ columnNotNull: true,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_set_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;",
+ ]);
+});
+
+test('generated as sql: add generated constraint to an exisiting column as virtual', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').notNull(),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name')
+ .notNull()
+ .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, {
+ mode: 'virtual',
+ }),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'to add'",
+ type: 'virtual',
+ },
+ columnName: 'gen_name',
+ columnNotNull: true,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_set_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` DROP COLUMN `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;",
+ ]);
+});
+
+test('generated as sql: drop generated constraint as stored', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ sql`\`users\`.\`name\` || 'to delete'`,
+ { mode: 'stored' },
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName1: text('gen_name'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: undefined,
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ oldColumn: {
+ autoincrement: false,
+ generated: {
+ as: "`users`.`name` || 'to delete'",
+ type: 'stored',
+ },
+ name: 'gen_name',
+ notNull: false,
+ onUpdate: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ type: 'alter_table_alter_column_drop_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;',
+ ]);
+});
+
+test('generated as sql: drop generated constraint as virtual', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ sql`\`users\`.\`name\` || 'to delete'`,
+ { mode: 'virtual' },
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName1: text('gen_name'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: undefined,
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ oldColumn: {
+ autoincrement: false,
+ generated: {
+ as: "`users`.`name` || 'to delete'",
+ type: 'virtual',
+ },
+ name: 'gen_name',
+ notNull: false,
+ onUpdate: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ tableName: 'users',
+ type: 'alter_table_alter_column_drop_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` DROP COLUMN `gen_name`;',
+ 'ALTER TABLE `users` ADD `gen_name` text;',
+ ]);
+});
+
+test('generated as sql: change generated constraint type from virtual to stored', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ sql`\`users\`.\`name\``,
+ { mode: 'virtual' },
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ sql`\`users\`.\`name\` || 'hello'`,
+ { mode: 'stored' },
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'stored',
+ },
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_alter_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` drop column `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;",
+ ]);
+});
+
+test('generated as sql: change generated constraint type from stored to virtual', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ sql`\`users\`.\`name\``,
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ sql`\`users\`.\`name\` || 'hello'`,
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'virtual',
+ },
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_alter_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` drop column `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;",
+ ]);
+});
+
+test('generated as sql: change generated constraint', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ sql`\`users\`.\`name\``,
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ sql`\`users\`.\`name\` || 'hello'`,
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'virtual',
+ },
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_alter_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` drop column `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;",
+ ]);
+});
+
+// ---
+
+test('generated as string: add column with generated constraint', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ `\`users\`.\`name\` || 'hello'`,
+ { mode: 'stored' },
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ column: {
+ generated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'stored',
+ },
+ autoincrement: false,
+ name: 'gen_name',
+ notNull: false,
+ primaryKey: false,
+ type: 'text',
+ },
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_add_column',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;",
+ ]);
+});
+
+test('generated as string: add generated constraint to an exisiting column as stored', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').notNull(),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name')
+ .notNull()
+ .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, {
+ mode: 'stored',
+ }),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'to add'",
+ type: 'stored',
+ },
+ columnAutoIncrement: false,
+ columnName: 'gen_name',
+ columnNotNull: true,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_set_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;",
+ ]);
+});
+
+test('generated as string: add generated constraint to an exisiting column as virtual', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').notNull(),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name')
+ .notNull()
+ .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, {
+ mode: 'virtual',
+ }),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'to add'",
+ type: 'virtual',
+ },
+ columnName: 'gen_name',
+ columnNotNull: true,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_set_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` DROP COLUMN `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;",
+ ]);
+});
+
+test('generated as string: drop generated constraint as stored', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ `\`users\`.\`name\` || 'to delete'`,
+ { mode: 'stored' },
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName1: text('gen_name'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: undefined,
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ oldColumn: {
+ autoincrement: false,
+ generated: {
+ as: "`users`.`name` || 'to delete'",
+ type: 'stored',
+ },
+ name: 'gen_name',
+ notNull: false,
+ onUpdate: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ type: 'alter_table_alter_column_drop_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;',
+ ]);
+});
+
+test('generated as string: drop generated constraint as virtual', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ `\`users\`.\`name\` || 'to delete'`,
+ { mode: 'virtual' },
+ ),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName1: text('gen_name'),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: undefined,
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ oldColumn: {
+ autoincrement: false,
+ generated: {
+ as: "`users`.`name` || 'to delete'",
+ type: 'virtual',
+ },
+ name: 'gen_name',
+ notNull: false,
+ onUpdate: undefined,
+ primaryKey: false,
+ type: 'text',
+ },
+ tableName: 'users',
+ type: 'alter_table_alter_column_drop_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` DROP COLUMN `gen_name`;',
+ 'ALTER TABLE `users` ADD `gen_name` text;',
+ ]);
+});
+
+test('generated as string: change generated constraint type from virtual to stored', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``, {
+ mode: 'virtual',
+ }),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ `\`users\`.\`name\` || 'hello'`,
+ { mode: 'stored' },
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'stored',
+ },
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_alter_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` drop column `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;",
+ ]);
+});
+
+test('generated as string: change generated constraint type from stored to virtual', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ `\`users\`.\`name\` || 'hello'`,
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'virtual',
+ },
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_alter_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` drop column `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;",
+ ]);
+});
+
+test('generated as string: change generated constraint', async () => {
+ const from = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``),
+ }),
+ };
+ const to = {
+ users: singlestoreTable('users', {
+ id: int('id'),
+ id2: int('id2'),
+ name: text('name'),
+ generatedName: text('gen_name').generatedAlwaysAs(
+ `\`users\`.\`name\` || 'hello'`,
+ ),
+ }),
+ };
+
+ const { statements, sqlStatements } = await diffTestSchemasSingleStore(
+ from,
+ to,
+ [],
+ );
+
+ expect(statements).toStrictEqual([
+ {
+ columnAutoIncrement: false,
+ columnDefault: undefined,
+ columnGenerated: {
+ as: "`users`.`name` || 'hello'",
+ type: 'virtual',
+ },
+ columnName: 'gen_name',
+ columnNotNull: false,
+ columnOnUpdate: undefined,
+ columnPk: false,
+ newDataType: 'text',
+ schema: '',
+ tableName: 'users',
+ type: 'alter_table_alter_column_alter_generated',
+ },
+ ]);
+ expect(sqlStatements).toStrictEqual([
+ 'ALTER TABLE `users` drop column `gen_name`;',
+ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;",
+ ]);
+});
diff --git a/drizzle-kit/tests/singlestore-schemas.test.ts b/drizzle-kit/tests/singlestore-schemas.test.ts
new file mode 100644
index 000000000..db9fe0480
--- /dev/null
+++ b/drizzle-kit/tests/singlestore-schemas.test.ts
@@ -0,0 +1,155 @@
+import { singlestoreSchema, singlestoreTable } from 'drizzle-orm/singlestore-core';
+import { expect, test } from 'vitest';
+import { diffTestSchemasSingleStore } from './schemaDiffer';
+
+// We don't manage databases(schemas) in MySQL with Drizzle Kit
+test('add schema #1', async () => {
+ const to = {
+ devSchema: singlestoreSchema('dev'),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore({}, to, []);
+
+ expect(statements.length).toBe(0);
+});
+
+test('add schema #2', async () => {
+ const from = {
+ devSchema: singlestoreSchema('dev'),
+ };
+ const to = {
+ devSchema: singlestoreSchema('dev'),
+ devSchema2: singlestoreSchema('dev2'),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, []);
+
+ expect(statements.length).toBe(0);
+});
+
+test('delete schema #1', async () => {
+ const from = {
+ devSchema: singlestoreSchema('dev'),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, {}, []);
+
+ expect(statements.length).toBe(0);
+});
+
+test('delete schema #2', async () => {
+ const from = {
+ devSchema: singlestoreSchema('dev'),
+ devSchema2: singlestoreSchema('dev2'),
+ };
+ const to = {
+ devSchema: singlestoreSchema('dev'),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, []);
+
+ expect(statements.length).toBe(0);
+});
+
+test('rename schema #1', async () => {
+ const from = {
+ devSchema: singlestoreSchema('dev'),
+ };
+ const to = {
+ devSchema2: singlestoreSchema('dev2'),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, ['dev->dev2']);
+
+ expect(statements.length).toBe(0);
+});
+
+test('rename schema #2', async () => {
+ const from = {
+ devSchema: singlestoreSchema('dev'),
+ devSchema1: singlestoreSchema('dev1'),
+ };
+ const to = {
+ devSchema: singlestoreSchema('dev'),
+ devSchema2: singlestoreSchema('dev2'),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']);
+
+ expect(statements.length).toBe(0);
+});
+
+test('add table to schema #1', async () => {
+ const dev = singlestoreSchema('dev');
+ const from = {};
+ const to = {
+ dev,
+ users: dev.table('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']);
+
+ expect(statements.length).toBe(0);
+});
+
+test('add table to schema #2', async () => {
+ const dev = singlestoreSchema('dev');
+ const from = { dev };
+ const to = {
+ dev,
+ users: dev.table('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']);
+
+ expect(statements.length).toBe(0);
+});
+
+test('add table to schema #3', async () => {
+ const dev = singlestoreSchema('dev');
+ const from = { dev };
+ const to = {
+ dev,
+ usersInDev: dev.table('users', {}),
+ users: singlestoreTable('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'create_table',
+ tableName: 'users',
+ schema: undefined,
+ columns: [],
+ uniqueConstraints: [],
+ internals: {
+ tables: {},
+ indexes: {},
+ },
+ compositePkName: '',
+ compositePKs: [],
+ });
+});
+
+test('remove table from schema #1', async () => {
+ const dev = singlestoreSchema('dev');
+ const from = { dev, users: dev.table('users', {}) };
+ const to = {
+ dev,
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']);
+
+ expect(statements.length).toBe(0);
+});
+
+test('remove table from schema #2', async () => {
+ const dev = singlestoreSchema('dev');
+ const from = { dev, users: dev.table('users', {}) };
+ const to = {};
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']);
+
+ expect(statements.length).toBe(0);
+});
diff --git a/drizzle-kit/tests/singlestore.test.ts b/drizzle-kit/tests/singlestore.test.ts
new file mode 100644
index 000000000..3bdccab81
--- /dev/null
+++ b/drizzle-kit/tests/singlestore.test.ts
@@ -0,0 +1,580 @@
+import { sql } from 'drizzle-orm';
+import {
+ index,
+ json,
+ primaryKey,
+ serial,
+ singlestoreSchema,
+ singlestoreTable,
+ text,
+ uniqueIndex,
+} from 'drizzle-orm/singlestore-core';
+import { expect, test } from 'vitest';
+import { diffTestSchemasSingleStore } from './schemaDiffer';
+
+test('add table #1', async () => {
+ const to = {
+ users: singlestoreTable('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore({}, to, []);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'create_table',
+ tableName: 'users',
+ schema: undefined,
+ columns: [],
+ compositePKs: [],
+ internals: {
+ tables: {},
+ indexes: {},
+ },
+ uniqueConstraints: [],
+ compositePkName: '',
+ });
+});
+
+test('add table #2', async () => {
+ const to = {
+ users: singlestoreTable('users', {
+ id: serial('id').primaryKey(),
+ }),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore({}, to, []);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'create_table',
+ tableName: 'users',
+ schema: undefined,
+ columns: [
+ {
+ name: 'id',
+ notNull: true,
+ primaryKey: false,
+ type: 'serial',
+ autoincrement: true,
+ },
+ ],
+ compositePKs: ['users_id;id'],
+ compositePkName: 'users_id',
+ uniqueConstraints: [],
+ internals: {
+ tables: {},
+ indexes: {},
+ },
+ });
+});
+
+test('add table #3', async () => {
+ const to = {
+ users: singlestoreTable(
+ 'users',
+ {
+ id: serial('id'),
+ },
+ (t) => {
+ return {
+ pk: primaryKey({
+ name: 'users_pk',
+ columns: [t.id],
+ }),
+ };
+ },
+ ),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore({}, to, []);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'create_table',
+ tableName: 'users',
+ schema: undefined,
+ columns: [
+ {
+ name: 'id',
+ notNull: true,
+ primaryKey: false,
+ type: 'serial',
+ autoincrement: true,
+ },
+ ],
+ compositePKs: ['users_pk;id'],
+ uniqueConstraints: [],
+ compositePkName: 'users_pk',
+ internals: {
+ tables: {},
+ indexes: {},
+ },
+ });
+});
+
+test('add table #4', async () => {
+ const to = {
+ users: singlestoreTable('users', {}),
+ posts: singlestoreTable('posts', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore({}, to, []);
+
+ expect(statements.length).toBe(2);
+ expect(statements[0]).toStrictEqual({
+ type: 'create_table',
+ tableName: 'users',
+ schema: undefined,
+ columns: [],
+ internals: {
+ tables: {},
+ indexes: {},
+ },
+ compositePKs: [],
+ uniqueConstraints: [],
+ compositePkName: '',
+ });
+ expect(statements[1]).toStrictEqual({
+ type: 'create_table',
+ tableName: 'posts',
+ schema: undefined,
+ columns: [],
+ compositePKs: [],
+ internals: {
+ tables: {},
+ indexes: {},
+ },
+ uniqueConstraints: [],
+ compositePkName: '',
+ });
+});
+
+test('add table #5', async () => {
+ const schema = singlestoreSchema('folder');
+ const from = {
+ schema,
+ };
+
+ const to = {
+ schema,
+ users: schema.table('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, []);
+
+ expect(statements.length).toBe(0);
+});
+
+test('add table #6', async () => {
+ const from = {
+ users1: singlestoreTable('users1', {}),
+ };
+
+ const to = {
+ users2: singlestoreTable('users2', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, []);
+
+ expect(statements.length).toBe(2);
+ expect(statements[0]).toStrictEqual({
+ type: 'create_table',
+ tableName: 'users2',
+ schema: undefined,
+ columns: [],
+ internals: {
+ tables: {},
+ indexes: {},
+ },
+ compositePKs: [],
+ uniqueConstraints: [],
+ compositePkName: '',
+ });
+ expect(statements[1]).toStrictEqual({
+ policies: [],
+ type: 'drop_table',
+ tableName: 'users1',
+ schema: undefined,
+ });
+});
+
+test('add table #7', async () => {
+ const from = {
+ users1: singlestoreTable('users1', {}),
+ };
+
+ const to = {
+ users: singlestoreTable('users', {}),
+ users2: singlestoreTable('users2', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, [
+ 'public.users1->public.users2',
+ ]);
+
+ expect(statements.length).toBe(2);
+ expect(statements[0]).toStrictEqual({
+ type: 'create_table',
+ tableName: 'users',
+ schema: undefined,
+ columns: [],
+ compositePKs: [],
+ uniqueConstraints: [],
+ internals: {
+ tables: {},
+ indexes: {},
+ },
+ compositePkName: '',
+ });
+ expect(statements[1]).toStrictEqual({
+ type: 'rename_table',
+ tableNameFrom: 'users1',
+ tableNameTo: 'users2',
+ fromSchema: undefined,
+ toSchema: undefined,
+ });
+});
+
+test('add schema + table #1', async () => {
+ const schema = singlestoreSchema('folder');
+
+ const to = {
+ schema,
+ users: schema.table('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore({}, to, []);
+
+ expect(statements.length).toBe(0);
+});
+
+test('change schema with tables #1', async () => {
+ const schema = singlestoreSchema('folder');
+ const schema2 = singlestoreSchema('folder2');
+ const from = {
+ schema,
+ users: schema.table('users', {}),
+ };
+ const to = {
+ schema2,
+ users: schema2.table('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, [
+ 'folder->folder2',
+ ]);
+
+ expect(statements.length).toBe(0);
+});
+
+test('change table schema #1', async () => {
+ const schema = singlestoreSchema('folder');
+ const from = {
+ schema,
+ users: singlestoreTable('users', {}),
+ };
+ const to = {
+ schema,
+ users: schema.table('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, [
+ 'public.users->folder.users',
+ ]);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ policies: [],
+ type: 'drop_table',
+ tableName: 'users',
+ schema: undefined,
+ });
+});
+
+test('change table schema #2', async () => {
+ const schema = singlestoreSchema('folder');
+ const from = {
+ schema,
+ users: schema.table('users', {}),
+ };
+ const to = {
+ schema,
+ users: singlestoreTable('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, [
+ 'folder.users->public.users',
+ ]);
+
+ expect(statements.length).toBe(1);
+ expect(statements[0]).toStrictEqual({
+ type: 'create_table',
+ tableName: 'users',
+ schema: undefined,
+ columns: [],
+ uniqueConstraints: [],
+ compositePkName: '',
+ compositePKs: [],
+ internals: {
+ tables: {},
+ indexes: {},
+ },
+ });
+});
+
+test('change table schema #3', async () => {
+ const schema1 = singlestoreSchema('folder1');
+ const schema2 = singlestoreSchema('folder2');
+ const from = {
+ schema1,
+ schema2,
+ users: schema1.table('users', {}),
+ };
+ const to = {
+ schema1,
+ schema2,
+ users: schema2.table('users', {}),
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, [
+ 'folder1.users->folder2.users',
+ ]);
+
+ expect(statements.length).toBe(0);
+});
+
+test('change table schema #4', async () => {
+ const schema1 = singlestoreSchema('folder1');
+ const schema2 = singlestoreSchema('folder2');
+ const from = {
+ schema1,
+ users: schema1.table('users', {}),
+ };
+ const to = {
+ schema1,
+ schema2, // add schema
+ users: schema2.table('users', {}), // move table
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, [
+ 'folder1.users->folder2.users',
+ ]);
+
+ expect(statements.length).toBe(0);
+});
+
+test('change table schema #5', async () => {
+ const schema1 = singlestoreSchema('folder1');
+ const schema2 = singlestoreSchema('folder2');
+ const from = {
+ schema1, // remove schema
+ users: schema1.table('users', {}),
+ };
+ const to = {
+ schema2, // add schema
+ users: schema2.table('users', {}), // move table
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, [
+ 'folder1.users->folder2.users',
+ ]);
+
+ expect(statements.length).toBe(0);
+});
+
+test('change table schema #5', async () => {
+ const schema1 = singlestoreSchema('folder1');
+ const schema2 = singlestoreSchema('folder2');
+ const from = {
+ schema1,
+ schema2,
+ users: schema1.table('users', {}),
+ };
+ const to = {
+ schema1,
+ schema2,
+ users: schema2.table('users2', {}), // rename and move table
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, [
+ 'folder1.users->folder2.users2',
+ ]);
+
+ expect(statements.length).toBe(0);
+});
+
+test('change table schema #6', async () => {
+ const schema1 = singlestoreSchema('folder1');
+ const schema2 = singlestoreSchema('folder2');
+ const from = {
+ schema1,
+ users: schema1.table('users', {}),
+ };
+ const to = {
+ schema2, // rename schema
+ users: schema2.table('users2', {}), // rename table
+ };
+
+ const { statements } = await diffTestSchemasSingleStore(from, to, [
+ 'folder1->folder2',
+ 'folder2.users->folder2.users2',
+ ]);
+
+ expect(statements.length).toBe(0);
+});
+
+test('add table #10', async () => {
+ const to = {
+ users: singlestoreTable('table', {
+ json: json('json').default({}),
+ }),
+ };
+
+ const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []);
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ "CREATE TABLE `table` (\n\t`json` json DEFAULT '{}'\n);\n",
+ );
+});
+
+test('add table #11', async () => {
+ const to = {
+ users: singlestoreTable('table', {
+ json: json('json').default([]),
+ }),
+ };
+
+ const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []);
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ "CREATE TABLE `table` (\n\t`json` json DEFAULT '[]'\n);\n",
+ );
+});
+
+test('add table #12', async () => {
+ const to = {
+ users: singlestoreTable('table', {
+ json: json('json').default([1, 2, 3]),
+ }),
+ };
+
+ const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []);
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ "CREATE TABLE `table` (\n\t`json` json DEFAULT '[1,2,3]'\n);\n",
+ );
+});
+
+test('add table #13', async () => {
+ const to = {
+ users: singlestoreTable('table', {
+ json: json('json').default({ key: 'value' }),
+ }),
+ };
+
+ const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []);
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ 'CREATE TABLE `table` (\n\t`json` json DEFAULT \'{"key":"value"}\'\n);\n',
+ );
+});
+
+test('add table #14', async () => {
+ const to = {
+ users: singlestoreTable('table', {
+ json: json('json').default({
+ key: 'value',
+ arr: [1, 2, 3],
+ }),
+ }),
+ };
+
+ const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []);
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe(
+ 'CREATE TABLE `table` (\n\t`json` json DEFAULT \'{"key":"value","arr":[1,2,3]}\'\n);\n',
+ );
+});
+
+// TODO: add bson type tests
+
+// TODO: add blob type tests
+
+// TODO: add uuid type tests
+
+// TODO: add guid type tests
+
+// TODO: add vector type tests
+
+// TODO: add geopoint type tests
+
+test('drop index', async () => {
+ const from = {
+ users: singlestoreTable(
+ 'table',
+ {
+ name: text('name'),
+ },
+ (t) => {
+ return {
+ idx: index('name_idx').on(t.name),
+ };
+ },
+ ),
+ };
+
+ const to = {
+ users: singlestoreTable('table', {
+ name: text('name'),
+ }),
+ };
+
+ const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []);
+ expect(sqlStatements.length).toBe(1);
+ expect(sqlStatements[0]).toBe('DROP INDEX `name_idx` ON `table`;');
+});
+
+test('add table with indexes', async () => {
+ const from = {};
+
+ const to = {
+ users: singlestoreTable(
+ 'users',
+ {
+ id: serial('id').primaryKey(),
+ name: text('name'),
+ email: text('email'),
+ },
+ (t) => ({
+ uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`),
+ indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`),
+ indexExprMultiple: index('indexExprMultiple').on(
+ sql`(lower(${t.email}))`,
+ sql`(lower(${t.email}))`,
+ ),
+
+ uniqueCol: uniqueIndex('uniqueCol').on(t.email),
+ indexCol: index('indexCol').on(t.email),
+ indexColMultiple: index('indexColMultiple').on(t.email, t.email),
+
+ indexColExpr: index('indexColExpr').on(
+ sql`(lower(${t.email}))`,
+ t.email,
+ ),
+ }),
+ ),
+ };
+
+ const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []);
+ expect(sqlStatements.length).toBe(6);
+ expect(sqlStatements).toStrictEqual([
+ `CREATE TABLE \`users\` (\n\t\`id\` serial AUTO_INCREMENT NOT NULL,\n\t\`name\` text,\n\t\`email\` text,\n\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`),\n\tCONSTRAINT \`uniqueExpr\` UNIQUE((lower(\`email\`))),\n\tCONSTRAINT \`uniqueCol\` UNIQUE(\`email\`)
+);
+`,
+ 'CREATE INDEX `indexExpr` ON `users` ((lower(`email`)));',
+ 'CREATE INDEX `indexExprMultiple` ON `users` ((lower(`email`)),(lower(`email`)));',
+ 'CREATE INDEX `indexCol` ON `users` (`email`);',
+ 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);',
+ 'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);',
+ ]);
+});
diff --git a/drizzle-kit/tests/testsinglestore.ts b/drizzle-kit/tests/testsinglestore.ts
new file mode 100644
index 000000000..1dc97d9c3
--- /dev/null
+++ b/drizzle-kit/tests/testsinglestore.ts
@@ -0,0 +1,29 @@
+import { index, singlestoreTable, text } from 'drizzle-orm/singlestore-core';
+import { diffTestSchemasSingleStore } from './schemaDiffer';
+
+const from = {
+ users: singlestoreTable(
+ 'table',
+ {
+ name: text('name'),
+ },
+ (t) => {
+ return {
+ idx: index('name_idx').on(t.name),
+ };
+ },
+ ),
+};
+
+const to = {
+ users: singlestoreTable('table', {
+ name: text('name'),
+ }),
+};
+
+diffTestSchemasSingleStore(from, to, []).then((res) => {
+ const { statements, sqlStatements } = res;
+
+ console.log(statements);
+ console.log(sqlStatements);
+});
diff --git a/drizzle-kit/tests/validations.test.ts b/drizzle-kit/tests/validations.test.ts
index 82731ee25..8a64603bb 100644
--- a/drizzle-kit/tests/validations.test.ts
+++ b/drizzle-kit/tests/validations.test.ts
@@ -1,5 +1,6 @@
import { mysqlCredentials } from 'src/cli/validations/mysql';
import { postgresCredentials } from 'src/cli/validations/postgres';
+import { singlestoreCredentials } from 'src/cli/validations/singlestore';
import { sqliteCredentials } from 'src/cli/validations/sqlite';
import { expect, test } from 'vitest';
@@ -698,3 +699,171 @@ test('mysql #17', () => {
});
}).toThrowError();
});
+
+test('singlestore #1', () => {
+ expect(
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ database: 'database',
+ host: 'host',
+ }),
+ ).toStrictEqual({
+ database: 'database',
+ host: 'host',
+ });
+});
+
+test('singlestore #2', () => {
+ expect(
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ database: 'database',
+ host: 'host',
+ }),
+ ).toStrictEqual({
+ database: 'database',
+ host: 'host',
+ });
+});
+
+test('singlestore #3', () => {
+ expect(
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ host: 'host',
+ port: 1234,
+ user: 'user',
+ password: 'password',
+ database: 'database',
+ ssl: 'require',
+ }),
+ ).toStrictEqual({
+ host: 'host',
+ port: 1234,
+ user: 'user',
+ password: 'password',
+ database: 'database',
+ ssl: 'require',
+ });
+});
+
+test('singlestore #4', () => {
+ expect(
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ host: 'host',
+ database: 'database',
+ ssl: 'allow',
+ }),
+ ).toStrictEqual({
+ host: 'host',
+ database: 'database',
+ ssl: 'allow',
+ });
+});
+
+test('singlestore #5', () => {
+ expect(
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ host: 'host',
+ database: 'database',
+ ssl: {
+ ca: 'ca',
+ cert: 'cert',
+ },
+ }),
+ ).toStrictEqual({
+ host: 'host',
+ database: 'database',
+ ssl: {
+ ca: 'ca',
+ cert: 'cert',
+ },
+ });
+});
+
+test('singlestore #6', () => {
+ expect(() => {
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ });
+ }).toThrowError();
+});
+
+test('singlestore #7', () => {
+ expect(() => {
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ url: undefined,
+ });
+ }).toThrowError();
+});
+
+test('singlestore #8', () => {
+ expect(() => {
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ url: '',
+ });
+ }).toThrowError();
+});
+
+test('singlestore #9', () => {
+ expect(() => {
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ host: '',
+ database: '',
+ });
+ }).toThrowError();
+});
+
+test('singlestore #10', () => {
+ expect(() => {
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ database: '',
+ });
+ }).toThrowError();
+});
+
+test('singlestore #11', () => {
+ expect(() => {
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ host: '',
+ });
+ }).toThrowError();
+});
+
+test('singlestore #12', () => {
+ expect(() => {
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ database: ' ',
+ host: '',
+ });
+ }).toThrowError();
+});
+
+test('singlestore #13', () => {
+ expect(() => {
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ database: '',
+ host: ' ',
+ });
+ }).toThrowError();
+});
+
+test('singlestore #14', () => {
+ expect(() => {
+ singlestoreCredentials.parse({
+ dialect: 'singlestore',
+ database: ' ',
+ host: ' ',
+ port: '',
+ });
+ }).toThrowError();
+});
diff --git a/drizzle-kit/tests/wrap-param.test.ts b/drizzle-kit/tests/wrap-param.test.ts
index 542998bda..a27d27d45 100644
--- a/drizzle-kit/tests/wrap-param.test.ts
+++ b/drizzle-kit/tests/wrap-param.test.ts
@@ -7,6 +7,9 @@ test('wrapParam', () => {
expect(wrapParam('url', 'mysql://user:password@localhost:3306/database', false, 'url')).toBe(
` [${chalk.green('✓')}] url: 'mysql://user:****@localhost:3306/database'`,
);
+ expect(wrapParam('url', 'singlestore://user:password@localhost:3306/database', false, 'url')).toBe(
+ ` [${chalk.green('✓')}] url: 'singlestore://user:****@localhost:3306/database'`,
+ );
expect(wrapParam('url', 'postgresql://user:password@localhost:5432/database', false, 'url')).toBe(
` [${chalk.green('✓')}] url: 'postgresql://user:****@localhost:5432/database'`,
);
diff --git a/drizzle-kit/vitest.config.ts b/drizzle-kit/vitest.config.ts
index 602e96ede..fd728eb11 100644
--- a/drizzle-kit/vitest.config.ts
+++ b/drizzle-kit/vitest.config.ts
@@ -5,8 +5,17 @@ export default defineConfig({
test: {
include: [
'tests/**/*.test.ts',
+ // Need to test it first before pushing changes
+ // 'tests/singlestore-schemas.test.ts',
+ // 'tests/singlestore-views.test.ts',
+ // 'tests/push/singlestore-push.test.ts',
+ // 'tests/push/singlestore.test.ts',
],
+ // This one was excluded because we need to modify an API for SingleStore-generated columns.
+ // It’s in the backlog.
+ exclude: ['tests/**/singlestore-generated.test.ts'],
+
typecheck: {
tsconfig: 'tsconfig.json',
},
diff --git a/drizzle-orm/package.json b/drizzle-orm/package.json
index 87c3f0bc2..3fa8e4bc3 100644
--- a/drizzle-orm/package.json
+++ b/drizzle-orm/package.json
@@ -1,6 +1,6 @@
{
"name": "drizzle-orm",
- "version": "0.36.4",
+ "version": "0.37.0",
"description": "Drizzle ORM package for SQL databases",
"type": "module",
"scripts": {
@@ -29,6 +29,7 @@
"orm",
"pg",
"mysql",
+ "singlestore",
"postgresql",
"postgres",
"sqlite",
@@ -45,7 +46,7 @@
},
"peerDependencies": {
"@aws-sdk/client-rds-data": ">=3",
- "@cloudflare/workers-types": ">=3",
+ "@cloudflare/workers-types": ">=4",
"@electric-sql/pglite": ">=0.2.0",
"@libsql/client": ">=0.10.0",
"@libsql/client-wasm": ">=0.10.0",
@@ -164,7 +165,7 @@
},
"devDependencies": {
"@aws-sdk/client-rds-data": "^3.549.0",
- "@cloudflare/workers-types": "^4.20230904.0",
+ "@cloudflare/workers-types": "^4.20241112.0",
"@electric-sql/pglite": "^0.2.12",
"@libsql/client": "^0.10.0",
"@libsql/client-wasm": "^0.10.0",
diff --git a/drizzle-orm/src/column-builder.ts b/drizzle-orm/src/column-builder.ts
index f621343d9..207f28026 100644
--- a/drizzle-orm/src/column-builder.ts
+++ b/drizzle-orm/src/column-builder.ts
@@ -2,6 +2,7 @@ import { entityKind } from '~/entity.ts';
import type { Column } from './column.ts';
import type { MySqlColumn } from './mysql-core/index.ts';
import type { ExtraConfigColumn, PgColumn, PgSequenceOptions } from './pg-core/index.ts';
+import type { SingleStoreColumn } from './singlestore-core/index.ts';
import type { SQL } from './sql/sql.ts';
import type { SQLiteColumn } from './sqlite-core/index.ts';
import type { Assume, Simplify } from './utils.ts';
@@ -17,7 +18,7 @@ export type ColumnDataType =
| 'custom'
| 'buffer';
-export type Dialect = 'pg' | 'mysql' | 'sqlite' | 'common';
+export type Dialect = 'pg' | 'mysql' | 'sqlite' | 'singlestore' | 'common';
export type GeneratedStorageMode = 'virtual' | 'stored';
@@ -314,6 +315,7 @@ export type BuildColumn<
TDialect extends Dialect,
> = TDialect extends 'pg' ? PgColumn>
: TDialect extends 'mysql' ? MySqlColumn>
+ : TDialect extends 'singlestore' ? SingleStoreColumn>
: TDialect extends 'sqlite' ? SQLiteColumn>
: TDialect extends 'common' ? Column>
: never;
@@ -356,5 +358,6 @@ export type BuildExtraConfigColumns<
export type ChangeColumnTableName =
TDialect extends 'pg' ? PgColumn>
: TDialect extends 'mysql' ? MySqlColumn>
+ : TDialect extends 'singlestore' ? SingleStoreColumn>
: TDialect extends 'sqlite' ? SQLiteColumn>
: never;
diff --git a/drizzle-orm/src/durable-sqlite/driver.ts b/drizzle-orm/src/durable-sqlite/driver.ts
new file mode 100644
index 000000000..0be110084
--- /dev/null
+++ b/drizzle-orm/src/durable-sqlite/driver.ts
@@ -0,0 +1,60 @@
+///
+import { entityKind } from '~/entity.ts';
+import { DefaultLogger } from '~/logger.ts';
+import {
+ createTableRelationsHelpers,
+ extractTablesRelationalConfig,
+ type ExtractTablesWithRelations,
+ type RelationalSchemaConfig,
+ type TablesRelationalConfig,
+} from '~/relations.ts';
+import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts';
+import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts';
+import type { DrizzleConfig } from '~/utils.ts';
+import { SQLiteDOSession } from './session.ts';
+
+export class DrizzleSqliteDODatabase<
+ TSchema extends Record = Record,
+> extends BaseSQLiteDatabase<'sync', SqlStorageCursor>, TSchema> {
+ static override readonly [entityKind]: string = 'DrizzleSqliteDODatabase';
+
+ /** @internal */
+ declare readonly session: SQLiteDOSession>;
+}
+
+export function drizzle<
+ TSchema extends Record = Record,
+ TClient extends DurableObjectStorage = DurableObjectStorage,
+>(
+ client: TClient,
+ config: DrizzleConfig = {},
+): DrizzleSqliteDODatabase & {
+ $client: TClient;
+} {
+ const dialect = new SQLiteSyncDialect({ casing: config.casing });
+ let logger;
+ if (config.logger === true) {
+ logger = new DefaultLogger();
+ } else if (config.logger !== false) {
+ logger = config.logger;
+ }
+
+ let schema: RelationalSchemaConfig | undefined;
+ if (config.schema) {
+ const tablesConfig = extractTablesRelationalConfig(
+ config.schema,
+ createTableRelationsHelpers,
+ );
+ schema = {
+ fullSchema: config.schema,
+ schema: tablesConfig.tables,
+ tableNamesMap: tablesConfig.tableNamesMap,
+ };
+ }
+
+ const session = new SQLiteDOSession(client as DurableObjectStorage, dialect, schema, { logger });
+ const db = new DrizzleSqliteDODatabase('sync', dialect, session, schema) as DrizzleSqliteDODatabase;
+ ( db).$client = client;
+
+ return db as any;
+}
diff --git a/drizzle-orm/src/durable-sqlite/index.ts b/drizzle-orm/src/durable-sqlite/index.ts
new file mode 100644
index 000000000..b1b6a52e7
--- /dev/null
+++ b/drizzle-orm/src/durable-sqlite/index.ts
@@ -0,0 +1,2 @@
+export * from './driver.ts';
+export * from './session.ts';
diff --git a/drizzle-orm/src/durable-sqlite/migrator.ts b/drizzle-orm/src/durable-sqlite/migrator.ts
new file mode 100644
index 000000000..8410b2900
--- /dev/null
+++ b/drizzle-orm/src/durable-sqlite/migrator.ts
@@ -0,0 +1,85 @@
+import type { MigrationMeta } from '~/migrator.ts';
+import { sql } from '~/sql/index.ts';
+import type { DrizzleSqliteDODatabase } from './driver.ts';
+
+interface MigrationConfig {
+ journal: {
+ entries: { idx: number; when: number; tag: string; breakpoints: boolean }[];
+ };
+ migrations: Record;
+}
+
+function readMigrationFiles({ journal, migrations }: MigrationConfig): MigrationMeta[] {
+ const migrationQueries: MigrationMeta[] = [];
+
+ for (const journalEntry of journal.entries) {
+ const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`];
+
+ if (!query) {
+ throw new Error(`Missing migration: ${journalEntry.tag}`);
+ }
+
+ try {
+ const result = query.split('--> statement-breakpoint').map((it) => {
+ return it;
+ });
+
+ migrationQueries.push({
+ sql: result,
+ bps: journalEntry.breakpoints,
+ folderMillis: journalEntry.when,
+ hash: '',
+ });
+ } catch {
+ throw new Error(`Failed to parse migration: ${journalEntry.tag}`);
+ }
+ }
+
+ return migrationQueries;
+}
+
+export async function migrate<
+ TSchema extends Record,
+>(
+ db: DrizzleSqliteDODatabase,
+ config: MigrationConfig,
+): Promise {
+ const migrations = readMigrationFiles(config);
+
+ db.transaction((tx) => {
+ try {
+ const migrationsTable = '__drizzle_migrations';
+
+ const migrationTableCreate = sql`
+ CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} (
+ id SERIAL PRIMARY KEY,
+ hash text NOT NULL,
+ created_at numeric
+ )
+ `;
+ db.run(migrationTableCreate);
+
+ const dbMigrations = db.values<[number, string, string]>(
+ sql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`,
+ );
+
+ const lastDbMigration = dbMigrations[0] ?? undefined;
+
+ for (const migration of migrations) {
+ if (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) {
+ for (const stmt of migration.sql) {
+ db.run(sql.raw(stmt));
+ }
+ db.run(
+ sql`INSERT INTO ${
+ sql.identifier(migrationsTable)
+ } ("hash", "created_at") VALUES(${migration.hash}, ${migration.folderMillis})`,
+ );
+ }
+ }
+ } catch (error: any) {
+ tx.rollback();
+ throw error;
+ }
+ });
+}
diff --git a/drizzle-orm/src/durable-sqlite/session.ts b/drizzle-orm/src/durable-sqlite/session.ts
new file mode 100644
index 000000000..dca5ce7cf
--- /dev/null
+++ b/drizzle-orm/src/durable-sqlite/session.ts
@@ -0,0 +1,181 @@
+import { entityKind } from '~/entity.ts';
+import type { Logger } from '~/logger.ts';
+import { NoopLogger } from '~/logger.ts';
+import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts';
+import { fillPlaceholders, type Query } from '~/sql/sql.ts';
+import { type SQLiteSyncDialect, SQLiteTransaction } from '~/sqlite-core/index.ts';
+import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts';
+import {
+ type PreparedQueryConfig as PreparedQueryConfigBase,
+ type SQLiteExecuteMethod,
+ SQLiteSession,
+ type SQLiteTransactionConfig,
+} from '~/sqlite-core/session.ts';
+import { SQLitePreparedQuery as PreparedQueryBase } from '~/sqlite-core/session.ts';
+import { mapResultRow } from '~/utils.ts';
+
+export interface SQLiteDOSessionOptions {
+ logger?: Logger;
+}
+
+type PreparedQueryConfig = Omit;
+
+export class SQLiteDOSession, TSchema extends TablesRelationalConfig>
+ extends SQLiteSession<
+ 'sync',
+ SqlStorageCursor>,
+ TFullSchema,
+ TSchema
+ >
+{
+ static override readonly [entityKind]: string = 'SQLiteDOSession';
+
+ private logger: Logger;
+
+ constructor(
+ private client: DurableObjectStorage,
+ dialect: SQLiteSyncDialect,
+ private schema: RelationalSchemaConfig | undefined,
+ options: SQLiteDOSessionOptions = {},
+ ) {
+ super(dialect);
+ this.logger = options.logger ?? new NoopLogger();
+ }
+
+ prepareQuery>(
+ query: Query,
+ fields: SelectedFieldsOrdered | undefined,
+ executeMethod: SQLiteExecuteMethod,
+ isResponseInArrayMode: boolean,
+ customResultMapper?: (rows: unknown[][]) => unknown,
+ ): SQLiteDOPreparedQuery {
+ return new SQLiteDOPreparedQuery(
+ this.client,
+ query,
+ this.logger,
+ fields,
+ executeMethod,
+ isResponseInArrayMode,
+ customResultMapper,
+ );
+ }
+
+ override transaction(
+ transaction: (
+ tx: SQLiteTransaction<'sync', SqlStorageCursor>, TFullSchema, TSchema>,
+ ) => T,
+ _config?: SQLiteTransactionConfig,
+ ): T {
+ const tx = new SQLiteDOTransaction('sync', this.dialect, this, this.schema);
+ this.client.transactionSync(() => {
+ transaction(tx);
+ });
+ return {} as any;
+ }
+}
+
+export class SQLiteDOTransaction, TSchema extends TablesRelationalConfig>
+ extends SQLiteTransaction<
+ 'sync',
+ SqlStorageCursor>,
+ TFullSchema,
+ TSchema
+ >
+{
+ static override readonly [entityKind]: string = 'SQLiteDOTransaction';
+
+ override transaction(transaction: (tx: SQLiteDOTransaction) => T): T {
+ const tx = new SQLiteDOTransaction('sync', this.dialect, this.session, this.schema, this.nestedIndex + 1);
+ this.session.transaction(() => transaction(tx));
+
+ return {} as any;
+ }
+}
+
+export class SQLiteDOPreparedQuery extends PreparedQueryBase<{
+ type: 'sync';
+ run: void;
+ all: T['all'];
+ get: T['get'];
+ values: T['values'];
+ execute: T['execute'];
+}> {
+ static override readonly [entityKind]: string = 'SQLiteDOPreparedQuery';
+
+ constructor(
+ private client: DurableObjectStorage,
+ query: Query,
+ private logger: Logger,
+ private fields: SelectedFieldsOrdered | undefined,
+ executeMethod: SQLiteExecuteMethod,
+ private _isResponseInArrayMode: boolean,
+ private customResultMapper?: (rows: unknown[][]) => unknown,
+ ) {
+ super('sync', executeMethod, query);
+ }
+
+ run(placeholderValues?: Record): void {
+ const params = fillPlaceholders(this.query.params, placeholderValues ?? {});
+ this.logger.logQuery(this.query.sql, params);
+
+ params.length > 0 ? this.client.sql.exec(this.query.sql, ...params) : this.client.sql.exec(this.query.sql);
+ }
+
+ all(placeholderValues?: Record): T['all'] {
+ const { fields, joinsNotNullableMap, query, logger, client, customResultMapper } = this;
+ if (!fields && !customResultMapper) {
+ const params = fillPlaceholders(query.params, placeholderValues ?? {});
+ logger.logQuery(query.sql, params);
+
+ return params.length > 0 ? client.sql.exec(query.sql, ...params).toArray() : client.sql.exec(query.sql).toArray();
+ }
+
+ const rows = this.values(placeholderValues) as unknown[][];
+
+ if (customResultMapper) {
+ return customResultMapper(rows) as T['all'];
+ }
+
+ return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap));
+ }
+
+ get(placeholderValues?: Record): T['get'] {
+ const params = fillPlaceholders(this.query.params, placeholderValues ?? {});
+ this.logger.logQuery(this.query.sql, params);
+
+ const { fields, client, joinsNotNullableMap, customResultMapper, query } = this;
+ if (!fields && !customResultMapper) {
+ return params.length > 0 ? client.sql.exec(query.sql, ...params).one() : client.sql.exec(query.sql).one();
+ }
+
+ const rows = this.values(placeholderValues) as unknown[][];
+ const row = rows[0];
+
+ if (!row) {
+ return undefined;
+ }
+
+ if (customResultMapper) {
+ return customResultMapper(rows) as T['get'];
+ }
+
+ return mapResultRow(fields!, row, joinsNotNullableMap);
+ }
+
+ values(placeholderValues?: Record): T['values'] {
+ const params = fillPlaceholders(this.query.params, placeholderValues ?? {});
+ this.logger.logQuery(this.query.sql, params);
+
+ const res = params.length > 0
+ ? this.client.sql.exec(this.query.sql, ...params)
+ : this.client.sql.exec(this.query.sql);
+
+ // @ts-ignore .raw().toArray() exists
+ return res.raw().toArray();
+ }
+
+ /** @internal */
+ isResponseInArrayMode(): boolean {
+ return this._isResponseInArrayMode;
+ }
+}
diff --git a/drizzle-orm/src/neon-http/driver.ts b/drizzle-orm/src/neon-http/driver.ts
index cbe1689fb..209e41963 100644
--- a/drizzle-orm/src/neon-http/driver.ts
+++ b/drizzle-orm/src/neon-http/driver.ts
@@ -1,4 +1,4 @@
-import type { HTTPTransactionOptions, NeonQueryFunction } from '@neondatabase/serverless';
+import type { HTTPQueryOptions, HTTPTransactionOptions, NeonQueryFunction } from '@neondatabase/serverless';
import { neon, types } from '@neondatabase/serverless';
import type { BatchItem, BatchResponse } from '~/batch.ts';
import { entityKind } from '~/entity.ts';
@@ -42,7 +42,7 @@ export class NeonHttpDriver {
function wrap(
target: T,
- token: string,
+ token: Exclude['authToken'], undefined>,
cb: (target: any, p: string | symbol, res: any) => any,
deep?: boolean,
) {
@@ -57,7 +57,7 @@ function wrap(
return new Proxy(element as any, {
apply(target, thisArg, argArray) {
const res = target.call(thisArg, ...argArray);
- if ('setToken' in res && typeof res.setToken === 'function') {
+ if (typeof res === 'object' && res !== null && 'setToken' in res && typeof res.setToken === 'function') {
res.setToken(token);
}
return cb(target, p, res);
@@ -73,7 +73,7 @@ export class NeonHttpDatabase<
static override readonly [entityKind]: string = 'NeonHttpDatabase';
$withAuth(
- token: string,
+ token: Exclude['authToken'], undefined>,
): Omit<
this,
Exclude<
diff --git a/drizzle-orm/src/neon-http/session.ts b/drizzle-orm/src/neon-http/session.ts
index c8f8f5a33..00ffe26e7 100644
--- a/drizzle-orm/src/neon-http/session.ts
+++ b/drizzle-orm/src/neon-http/session.ts
@@ -11,7 +11,7 @@ import { PgPreparedQuery as PgPreparedQuery, PgSession } from '~/pg-core/session
import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts';
import type { PreparedQuery } from '~/session.ts';
import { fillPlaceholders, type Query, type SQL } from '~/sql/sql.ts';
-import { mapResultRow } from '~/utils.ts';
+import { mapResultRow, type NeonAuthToken } from '~/utils.ts';
export type NeonHttpClient = NeonQueryFunction;
@@ -40,11 +40,11 @@ export class NeonHttpPreparedQuery extends PgPrep
async execute(placeholderValues: Record | undefined): Promise;
/** @internal */
- async execute(placeholderValues: Record | undefined, token?: string): Promise;
+ async execute(placeholderValues: Record | undefined, token?: NeonAuthToken): Promise;
/** @internal */
async execute(
placeholderValues: Record | undefined = {},
- token: string | undefined = this.authToken,
+ token: NeonAuthToken | undefined = this.authToken,
): Promise {
const params = fillPlaceholders(this.query.params, placeholderValues);
@@ -108,9 +108,9 @@ export class NeonHttpPreparedQuery extends PgPrep
values(placeholderValues: Record | undefined): Promise;
/** @internal */
- values(placeholderValues: Record | undefined, token?: string): Promise;
+ values(placeholderValues: Record | undefined, token?: NeonAuthToken): Promise;
/** @internal */
- values(placeholderValues: Record | undefined = {}, token?: string): Promise {
+ values(placeholderValues: Record | undefined = {}, token?: NeonAuthToken): Promise {
const params = fillPlaceholders(this.query.params, placeholderValues);
this.logger.logQuery(this.query.sql, params);
return this.client(this.query.sql, params, { arrayMode: true, fullResults: true, authToken: token }).then((
@@ -203,9 +203,9 @@ export class NeonHttpSession<
override async count(sql: SQL): Promise;
/** @internal */
- override async count(sql: SQL, token?: string): Promise;
+ override async count(sql: SQL, token?: NeonAuthToken): Promise;
/** @internal */
- override async count(sql: SQL, token?: string): Promise {
+ override async count(sql: SQL, token?: NeonAuthToken): Promise {
const res = await this.execute<{ rows: [{ count: string }] }>(sql, token);
return Number(
diff --git a/drizzle-orm/src/pg-core/db.ts b/drizzle-orm/src/pg-core/db.ts
index e3c7e4444..29dc4f166 100644
--- a/drizzle-orm/src/pg-core/db.ts
+++ b/drizzle-orm/src/pg-core/db.ts
@@ -21,7 +21,7 @@ import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelation
import { SelectionProxyHandler } from '~/selection-proxy.ts';
import { type ColumnsSelection, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts';
import { WithSubquery } from '~/subquery.ts';
-import type { DrizzleTypeError } from '~/utils.ts';
+import type { DrizzleTypeError, NeonAuthToken } from '~/utils.ts';
import type { PgColumn } from './columns/index.ts';
import { PgCountBuilder } from './query-builders/count.ts';
import { RelationalQueryBuilder } from './query-builders/query.ts';
@@ -597,7 +597,7 @@ export class PgDatabase<
return new PgRefreshMaterializedView(view, this.session, this.dialect);
}
- protected authToken?: string;
+ protected authToken?: NeonAuthToken;
execute = Record