From 955b5fd5edaecf5b65a0152dcc764f841114124d Mon Sep 17 00:00:00 2001 From: Russell Dunphy Date: Tue, 27 Jan 2026 23:34:37 +0000 Subject: [PATCH 1/7] Add database migration system for generating and running migrations Introduces a complete migration workflow: - Schema diffing engine that compares TypeScript config against the live database - SQL migration file generation with timestamped naming - Safety checks inspired by strong_migrations (warns on drops, unsafe type casts, etc.) - Migration runner with PostgreSQL advisory locking for concurrent safety - Migration tracking table (_orm_migrations) with checksum verification - Support for non-transactional migrations via -- orm:no-transaction directive - CLI commands: orm generate migration, orm db migrate, orm db migrate status - Programmatic API: migrate.generate(), migrate.run(), migrate.status() Also enhances getForeignKeys to return onUpdate/onDelete actions and getUniqueConstraints to return structured column data and nullsNotDistinct. 
Co-Authored-By: Claude Opus 4.5 --- .claude/settings.json | 7 + .github/workflows/ci.yml | 8 + .github/workflows/deploy-docs.yml | 4 + .github/workflows/publish-preview.yml | 98 +++ .npmrc | 1 + README.md | 4 +- docs/docs/cli/commands.md | 36 +- docs/docs/cli/configuration.md | 2 +- docs/docs/cli/migrations.md | 229 ++++++ docs/docs/getting-started.md | 4 +- docs/sidebars.ts | 2 +- packages/orm-cli/package.json | 10 +- packages/orm-cli/src/cli.ts | 10 +- .../orm-cli/src/commands/db-migrate.test.ts | 346 ++++++++ packages/orm-cli/src/commands/db-migrate.ts | 9 + .../src/commands/db-migrate/handler.ts | 32 + .../src/commands/db-migrate/options.ts | 3 + .../db-pull/util/relationNames.test.ts | 4 + .../util/renderFieldDefinition.test.ts | 12 + .../db-pull/util/renderFieldDefinition.ts | 5 +- .../util/renderModel.constraints.test.ts | 14 + .../util/renderModel.relations.test.ts | 24 + .../db-pull/util/renderRelations.test.ts | 2 + .../src/commands/generate-migration.ts | 9 + .../commands/generate-migration/handler.ts | 126 +++ .../commands/generate-migration/options.ts | 15 + packages/orm-cli/src/test/setup.ts | 8 +- packages/orm-cli/src/types.ts | 1 + packages/orm-cli/src/util/migrations.ts | 82 ++ packages/orm-config/package.json | 8 + packages/orm-fixtures/package.json | 8 + packages/orm-migrate/package.json | 10 +- packages/orm-migrate/src/index.ts | 14 + .../src/migrations/configToSnapshot.test.ts | 244 ++++++ .../src/migrations/configToSnapshot.ts | 106 +++ .../src/migrations/diff/detectRenames.test.ts | 326 ++++++++ .../src/migrations/diff/detectRenames.ts | 174 ++++ .../src/migrations/diff/diffSnapshots.test.ts | 767 ++++++++++++++++++ .../src/migrations/diff/diffSnapshots.ts | 313 +++++++ .../migrations/diff/operationToSql.test.ts | 333 ++++++++ .../src/migrations/diff/operationToSql.ts | 219 +++++ .../orm-migrate/src/migrations/diff/types.ts | 73 ++ .../orm-migrate/src/migrations/generate.ts | 58 ++ packages/orm-migrate/src/migrations/lock.ts | 29 + 
.../src/migrations/pulledToSnapshot.test.ts | 324 ++++++++ .../src/migrations/pulledToSnapshot.ts | 140 ++++ packages/orm-migrate/src/migrations/run.ts | 87 ++ .../src/migrations/safety/checkSafety.test.ts | 224 +++++ .../src/migrations/safety/checkSafety.ts | 116 +++ .../src/migrations/safety/safeCasts.test.ts | 132 +++ .../src/migrations/safety/safeCasts.ts | 113 +++ .../src/migrations/safety/types.ts | 10 + .../orm-migrate/src/migrations/tracking.ts | 85 ++ packages/orm-migrate/src/migrations/types.ts | 53 ++ .../src/pull/getExtensions.test.ts | 45 + .../orm-migrate/src/pull/getExtensions.ts | 28 + .../orm-migrate/src/pull/getForeignKeys.ts | 32 +- .../src/pull/getUniqueConstraints.ts | 19 +- packages/orm-migrate/src/pull/index.ts | 1 + packages/orm-schema/package.json | 8 + packages/orm-testing/package.json | 8 + packages/orm/package.json | 10 +- packages/orm/src/orm.ts | 8 +- packages/sql/package.json | 10 +- packages/toolbox/package.json | 8 + pnpm-lock.yaml | 22 +- 66 files changed, 5218 insertions(+), 54 deletions(-) create mode 100644 .claude/settings.json create mode 100644 .github/workflows/publish-preview.yml create mode 100644 docs/docs/cli/migrations.md create mode 100644 packages/orm-cli/src/commands/db-migrate.test.ts create mode 100644 packages/orm-cli/src/commands/db-migrate.ts create mode 100644 packages/orm-cli/src/commands/db-migrate/handler.ts create mode 100644 packages/orm-cli/src/commands/db-migrate/options.ts create mode 100644 packages/orm-cli/src/commands/generate-migration.ts create mode 100644 packages/orm-cli/src/commands/generate-migration/handler.ts create mode 100644 packages/orm-cli/src/commands/generate-migration/options.ts create mode 100644 packages/orm-cli/src/util/migrations.ts create mode 100644 packages/orm-migrate/src/migrations/configToSnapshot.test.ts create mode 100644 packages/orm-migrate/src/migrations/configToSnapshot.ts create mode 100644 packages/orm-migrate/src/migrations/diff/detectRenames.test.ts create mode 
100644 packages/orm-migrate/src/migrations/diff/detectRenames.ts create mode 100644 packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts create mode 100644 packages/orm-migrate/src/migrations/diff/diffSnapshots.ts create mode 100644 packages/orm-migrate/src/migrations/diff/operationToSql.test.ts create mode 100644 packages/orm-migrate/src/migrations/diff/operationToSql.ts create mode 100644 packages/orm-migrate/src/migrations/diff/types.ts create mode 100644 packages/orm-migrate/src/migrations/generate.ts create mode 100644 packages/orm-migrate/src/migrations/lock.ts create mode 100644 packages/orm-migrate/src/migrations/pulledToSnapshot.test.ts create mode 100644 packages/orm-migrate/src/migrations/pulledToSnapshot.ts create mode 100644 packages/orm-migrate/src/migrations/run.ts create mode 100644 packages/orm-migrate/src/migrations/safety/checkSafety.test.ts create mode 100644 packages/orm-migrate/src/migrations/safety/checkSafety.ts create mode 100644 packages/orm-migrate/src/migrations/safety/safeCasts.test.ts create mode 100644 packages/orm-migrate/src/migrations/safety/safeCasts.ts create mode 100644 packages/orm-migrate/src/migrations/safety/types.ts create mode 100644 packages/orm-migrate/src/migrations/tracking.ts create mode 100644 packages/orm-migrate/src/migrations/types.ts create mode 100644 packages/orm-migrate/src/pull/getExtensions.test.ts create mode 100644 packages/orm-migrate/src/pull/getExtensions.ts diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 00000000..94e878ec --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,7 @@ +{ + "permissions": { + "allow": [ + "Bash(pnpm --filter @casekit/orm-cli --filter @casekit/orm-migrate test)" + ] + } +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8a791f81..4030e09a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,6 +2,11 @@ name: CI env: DO_NOT_TRACK: 1 +permissions: + contents: read + packages: read + 
pull-requests: write + on: push: branches: [main] @@ -43,6 +48,7 @@ jobs: with: node-version-file: ".tool-versions" cache: "pnpm" + registry-url: "https://npm.pkg.github.com" - name: Get pnpm store directory shell: bash @@ -51,6 +57,8 @@ jobs: - name: Install dependencies run: pnpm install --frozen-lockfile + env: + NODE_AUTH_TOKEN: ${{ secrets.GH_PACKAGES_TOKEN }} - name: Run tests run: pnpm test diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml index b7cbcc87..676a9956 100644 --- a/.github/workflows/deploy-docs.yml +++ b/.github/workflows/deploy-docs.yml @@ -8,6 +8,7 @@ on: permissions: contents: read + packages: read pages: write id-token: write @@ -27,9 +28,12 @@ jobs: with: node-version: 20 cache: pnpm + registry-url: "https://npm.pkg.github.com" - name: Install dependencies run: pnpm install + env: + NODE_AUTH_TOKEN: ${{ secrets.GH_PACKAGES_TOKEN }} - name: Build docs run: pnpm --filter @casekit/orm-docs build diff --git a/.github/workflows/publish-preview.yml b/.github/workflows/publish-preview.yml new file mode 100644 index 00000000..66279e59 --- /dev/null +++ b/.github/workflows/publish-preview.yml @@ -0,0 +1,98 @@ +name: Publish Preview Packages + +on: + workflow_dispatch: + push: + branches-ignore: + - "dependabot/**" + paths: + - "packages/**" + +jobs: + publish: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - uses: actions/checkout@v6 + + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + run_install: false + + - name: Setup Node.js + uses: actions/setup-node@v6 + with: + node-version-file: ".tool-versions" + cache: "pnpm" + registry-url: "https://npm.pkg.github.com" + + - name: Install dependencies + run: pnpm install --frozen-lockfile + env: + NODE_AUTH_TOKEN: ${{ secrets.GH_PACKAGES_TOKEN }} + + - name: Build packages + run: pnpm build + + - name: Generate version string + id: version + run: | + BRANCH="${GITHUB_REF_NAME}" + # Sanitize branch name for npm (replace / 
with -) + BRANCH_SAFE=$(echo "$BRANCH" | sed 's/[^a-zA-Z0-9-]/-/g' | sed 's/--*/-/g' | sed 's/^-//' | sed 's/-$//') + SHORT_SHA="${GITHUB_SHA::7}" + VERSION="0.0.0-${BRANCH_SAFE}.${SHORT_SHA}" + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "branch=$BRANCH" >> $GITHUB_OUTPUT + echo "Publishing version: $VERSION" + + - name: Update package versions + run: pnpm -r exec npm version ${{ steps.version.outputs.version }} --no-git-tag-version + + - name: Publish packages + run: pnpm -r publish --access public --tag preview --no-git-checks + env: + NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Summary + run: | + echo "## Published Packages" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "Version: \`${{ steps.version.outputs.version }}\`" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "Install with:" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`bash" >> $GITHUB_STEP_SUMMARY + echo "pnpm add @casekit/orm@${{ steps.version.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + + cleanup: + runs-on: ubuntu-latest + needs: publish + permissions: + packages: write + strategy: + matrix: + package: + - orm + - orm-cli + - orm-schema + - orm-config + - orm-migrate + - orm-testing + - orm-fixtures + - sql + - toolbox + + steps: + - name: Delete old ${{ matrix.package }} versions + uses: actions/delete-package-versions@v5 + with: + package-name: ${{ matrix.package }} + package-type: npm + min-versions-to-keep: 50 + delete-only-pre-release-versions: true + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.npmrc b/.npmrc index e69de29b..24586ee6 100644 --- a/.npmrc +++ b/.npmrc @@ -0,0 +1 @@ +@casekit:registry=https://npm.pkg.github.com diff --git a/README.md b/README.md index ce37dca7..ee304e12 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ npm add @casekit/orm @casekit/orm-cli @casekit/orm-migrate pg zod ### 1. 
Initialize your project ```bash -npx orm init --directory ./src/db +pnpm orm init --directory ./src/db ``` ### 2. Define your models @@ -69,7 +69,7 @@ export const db = orm(config); ### 4. Push schema to database ```bash -npx orm db push +pnpm orm db push ``` ### 5. Query with full type safety diff --git a/docs/docs/cli/commands.md b/docs/docs/cli/commands.md index e471c2ca..f17c7b87 100644 --- a/docs/docs/cli/commands.md +++ b/docs/docs/cli/commands.md @@ -20,7 +20,7 @@ All commands support these options: Initialize a new project with ORM configuration. ```bash -npx orm init [options] +pnpm orm init [options] ``` ### Options @@ -33,7 +33,7 @@ npx orm init [options] ### Example ```bash -npx orm init --directory ./src/db +pnpm orm init --directory ./src/db ``` ### Generated Files @@ -48,12 +48,12 @@ npx orm init --directory ./src/db **DANGER** DO NOT USE THIS ON A PRODUCTION DATABASE, OR DATA LOSS MAY OCCUR. ::: -Proper migration support will come in a future release. +This command is for development only. For production environments, use [migrations](./migrations.md) instead. Push the schema to the database, creating tables and constraints. ```bash -npx orm db push +pnpm orm db push ``` This command: @@ -66,7 +66,7 @@ This command: ### Example ```bash -npx orm db push +pnpm orm db push ``` Output: @@ -85,7 +85,7 @@ Pushing schemas public to database Introspect the database and generate model files. ```bash -npx orm db pull [options] +pnpm orm db pull [options] ``` ### Options @@ -99,13 +99,13 @@ npx orm db pull [options] ```bash # Pull from default schema -npx orm db pull +pnpm orm db pull # Pull from specific schemas -npx orm db pull --schema public --schema audit +pnpm orm db pull --schema public --schema audit # Force overwrite -npx orm db pull --force +pnpm orm db pull --force ``` ### Generated Files @@ -134,7 +134,7 @@ src/db/models/ Drop all schemas used by your models. 
```bash -npx orm db drop +pnpm orm db drop ``` :::warning @@ -144,7 +144,7 @@ This is a destructive operation that deletes all data. Use with caution. ### Example ```bash -npx orm db drop +pnpm orm db drop ``` Output: @@ -158,7 +158,7 @@ Output: Generate a skeleton model file. ```bash -npx orm generate model [options] +pnpm orm generate model [options] ``` ### Arguments @@ -176,7 +176,7 @@ npx orm generate model [options] ### Example ```bash -npx orm generate model user +pnpm orm generate model user ``` Creates `src/db/models/user.ts`: @@ -197,22 +197,22 @@ And updates `src/db/models/index.ts` to export it. ```bash # 1. Initialize project -npx orm init --directory ./src/db +pnpm orm init --directory ./src/db # 2. Write your models in ./src/db/models/ # 3. Push schema to database -npx orm db push +pnpm orm db push ``` ### Database-First (Generate Models from Database) ```bash # 1. Initialize project -npx orm init --directory ./src/db +pnpm orm init --directory ./src/db # 2. Pull schema from existing database -npx orm db pull --schema public +pnpm orm db pull --schema public # 3. 
Customize generated models as needed ``` @@ -221,5 +221,5 @@ npx orm db pull --schema public ```bash # Drop and recreate -npx orm db drop && npx orm db push +pnpm orm db drop && pnpm orm db push ``` diff --git a/docs/docs/cli/configuration.md b/docs/docs/cli/configuration.md index f4d4efb8..78f3ee18 100644 --- a/docs/docs/cli/configuration.md +++ b/docs/docs/cli/configuration.md @@ -148,7 +148,7 @@ DB_PASSWORD=secret Specify a different config file: ```bash -npx orm db push --config ./config/orm.config.ts +pnpm orm db push --config ./config/orm.config.ts ``` ## Complete Example diff --git a/docs/docs/cli/migrations.md b/docs/docs/cli/migrations.md new file mode 100644 index 00000000..2fb27352 --- /dev/null +++ b/docs/docs/cli/migrations.md @@ -0,0 +1,229 @@ +--- +sidebar_position: 3 +--- + +# Migrations + +:::warning[Alpha Feature] +**The migrations functionality is currently in alpha and not ready for production use.** The API may change, and there may be bugs or missing features. Use with caution and always read the generated SQL before running migrations. +::: + +Database migrations allow you to evolve your schema over time in a controlled, versioned way. Instead of directly pushing schema changes (which can cause data loss), migrations generate SQL files that can be reviewed, version-controlled, and applied incrementally. + +## Overview + +The migration system: + +1. **Compares** your TypeScript model definitions against the current database state +2. **Generates** SQL migration files with the necessary changes +3. **Tracks** which migrations have been applied +4. **Warns** about potentially unsafe operations + +## Commands + +### orm generate migration + +Generate a new migration from schema changes. 
+ +```bash +pnpm orm generate migration --name <name> [options] +``` + +#### Options + +| Option | Alias | Description | +|--------|-------|-------------| +| `--name` | `-n` | Short description for the migration filename (required) | +| `--unsafe` | | Proceed even with unsafe operations without prompting | + +#### Example + +```bash +pnpm orm generate migration --name add-users-table +``` + +Output: +``` +Generated migration: ./migrations/20240115143022_add-users-table.sql + +CREATE SCHEMA IF NOT EXISTS "app"; + +CREATE TABLE "app"."user" ( + "id" uuid NOT NULL DEFAULT gen_random_uuid(), + "email" text NOT NULL, + "name" text, + PRIMARY KEY ("id") +); + +CREATE UNIQUE INDEX "user_email_key" ON "app"."user" ("email"); + +✅ Done +``` + +### orm db migrate + +Run all pending migrations. + +```bash +pnpm orm db migrate +``` + +#### Example + +```bash +pnpm orm db migrate +``` + +Output: +``` +Applied 2 migration(s): + - 20240115143022_add-users-table + - 20240116091500_add-posts-table +✅ Done +``` + +If already up to date: +``` +Already up to date. +``` + +## Migration Files + +Migrations are stored as SQL files in the migrations directory (default: `./migrations`). You can configure this in your `orm.config.ts`: + +```typescript +export default { + db: orm({ ... }), + directory: "./src/db", + migrate: { + migrationsPath: "./migrations", + }, +}; +``` + +### File Format + +Migration files are named with a timestamp prefix for ordering: + +``` +migrations/ +├── 20240115143022_add-users-table.sql +├── 20240116091500_add-posts-table.sql +└── 20240120102030_add-user-email-index.sql +``` + +### Non-Transactional Migrations + +Some operations (like `CREATE INDEX CONCURRENTLY`) cannot run inside a transaction. 
Add a special comment at the top of the migration file: + +```sql +-- orm:no-transaction +CREATE INDEX CONCURRENTLY "user_email_idx" ON "app"."user" ("email"); +``` + +## Safety Checks + +The migration generator analyzes operations and warns about potentially dangerous changes: + +### Unsafe Operations + +These operations may cause data loss or application errors: + +- **Dropping tables** — Permanent data loss +- **Dropping columns** — Data loss and potential application errors if code still references the column +- **Dropping schemas** — Removes all objects within the schema +- **Unsafe type changes** — Converting between incompatible types (e.g., `text` to `integer`) + +### Cautious Operations + +These operations may cause issues on large tables: + +- **Adding foreign keys** — Validates all existing rows while holding a lock +- **Adding unique constraints** — Blocks writes while building the index +- **Setting NOT NULL** — Scans the entire table while holding a lock + +When unsafe operations are detected, you'll be prompted to confirm: + +``` +🚨 Unsafe operations: + - Dropping column "legacy_field" from "app"."user" may cause errors if application code still references it. + Suggestion: Deploy code changes that stop using this column first, then drop it in a subsequent migration. + +This migration contains unsafe operations. Do you want to keep it? (y/N) +``` + +## Column Renames + +When the migration generator detects a column being dropped and another added in the same table, it will ask if this is a rename: + +``` +Column "old_name" is being dropped and "new_name" is being added in "app"."user". Is this a rename? (Y/n) +``` + +If confirmed, it generates a `RENAME COLUMN` statement instead of separate `DROP` and `ADD` operations, preserving data. 
+ +## Migration Tracking + +Applied migrations are tracked in a `_orm_migrations` table in the `public` schema: + +```sql +CREATE TABLE public._orm_migrations ( + id serial PRIMARY KEY, + name text NOT NULL UNIQUE, + applied_at timestamptz NOT NULL DEFAULT now(), + checksum text NOT NULL +); +``` + +### Checksum Verification + +Migration file checksums are stored when applied. If a migration file is modified after being applied, the next `db migrate` will fail with a checksum mismatch error. This prevents accidentally running modified migrations. + +## Workflow + +### Typical Development Workflow + +```bash +# 1. Make changes to your model definitions + +# 2. Generate a migration +pnpm orm generate migration --name describe_your_changes + +# 3. Review the generated SQL file + +# 4. Apply the migration +pnpm orm db migrate + +# 5. Commit both model changes and migration file +git add . +git commit -m "Add user email field" +``` + +### Team Workflow + +1. Each developer generates migrations locally +2. Migration files are committed to version control +3. Migrations are applied in order on all environments +4. The timestamp prefix ensures correct ordering even with parallel development + +## Limitations + +Current limitations of the alpha migration system: + +- No automatic rollback/down migrations +- No migration squashing +- Limited support for complex ALTER operations +- No dry-run mode for `db migrate` +- Extension changes may require manual intervention + +## Comparison with db push + +| Feature | `db push` | `db migrate` | +|---------|-----------|--------------| +| Safe for production | No | Yes (with caution) | +| Versioned changes | No | Yes | +| Reviewable SQL | No | Yes | +| Data preservation | No guarantee | By design | +| Speed | Fast | Depends on migration | + +Use `db push` for rapid development and prototyping. Use migrations when you need controlled, versioned schema changes. 
diff --git a/docs/docs/getting-started.md b/docs/docs/getting-started.md index 4622088f..09a35233 100644 --- a/docs/docs/getting-started.md +++ b/docs/docs/getting-started.md @@ -37,7 +37,7 @@ npm add @casekit/orm @casekit/orm-cli @casekit/orm-migrate pg zod The CLI can scaffold your project structure: ```bash -npx orm init --directory ./src/db +pnpm orm init --directory ./src/db ``` This creates: @@ -147,7 +147,7 @@ Create the tables in your database: ```bash createdb myapp -npx orm db push +pnpm orm db push ``` ## Basic Operations diff --git a/docs/sidebars.ts b/docs/sidebars.ts index 459e92c5..aa67609d 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -33,7 +33,7 @@ const sidebars: SidebarsConfig = { type: "category", label: "CLI", collapsed: true, - items: ["cli/commands", "cli/configuration"], + items: ["cli/commands", "cli/configuration", "cli/migrations"], }, ], }; diff --git a/packages/orm-cli/package.json b/packages/orm-cli/package.json index d3c58ce3..628d1b8c 100644 --- a/packages/orm-cli/package.json +++ b/packages/orm-cli/package.json @@ -11,7 +11,7 @@ "@casekit/orm-migrate": "workspace:*", "@casekit/sql": "workspace:*", "@casekit/toolbox": "workspace:*", - "@casekit/unindent": "^1.0.5", + "@casekit/unindent": "0.0.0-gh-packages.634dab0", "@inquirer/prompts": "^8.2.0", "@inquirer/testing": "^3.0.4", "byline": "^5.0.0", @@ -52,6 +52,14 @@ }, "keywords": [], "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/casekit/orm.git", + "directory": "packages/orm-cli" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + }, "peerDependencies": { "pg": "^8.13.1" }, diff --git a/packages/orm-cli/src/cli.ts b/packages/orm-cli/src/cli.ts index 95f32d35..ed7ea32b 100644 --- a/packages/orm-cli/src/cli.ts +++ b/packages/orm-cli/src/cli.ts @@ -2,8 +2,10 @@ import yargs from "yargs"; import { hideBin } from "yargs/helpers"; import { dbDrop } from "#commands/db-drop.js"; +import { dbMigrate } from 
"#commands/db-migrate.js"; import { dbPush } from "#commands/db-push.js"; import { dbPull } from "./commands/db-pull.js"; +import { generateMigration } from "./commands/generate-migration.js"; import { generateModel } from "./commands/generate-model.js"; import { init } from "./commands/init.js"; import { globalOptions } from "./options.js"; @@ -13,10 +15,14 @@ await yargs(hideBin(process.argv)) .scriptName("orm") .options(globalOptions) .command("db", "Commands for managing your database", (yargs) => - yargs.command(dbDrop).command(dbPush).command(dbPull), + yargs + .command(dbDrop) + .command(dbPush) + .command(dbPull) + .command(dbMigrate), ) .command("generate", "Commands for generating files", (yargs) => - yargs.command(generateModel), + yargs.command(generateModel).command(generateMigration), ) .command(init) .help() diff --git a/packages/orm-cli/src/commands/db-migrate.test.ts b/packages/orm-cli/src/commands/db-migrate.test.ts new file mode 100644 index 00000000..114c8509 --- /dev/null +++ b/packages/orm-cli/src/commands/db-migrate.test.ts @@ -0,0 +1,346 @@ +import { fs, vol } from "memfs"; +import { randomUUID } from "node:crypto"; +import pg from "pg"; +import { afterEach, beforeEach, describe, expect, test, vi } from "vitest"; +import yargs from "yargs"; + +import { orm, sql } from "@casekit/orm"; +import { unindent } from "@casekit/unindent"; + +import { globalOptions } from "#options.js"; +import { OrmCLIConfig } from "#types.js"; +import * as loadConfig from "#util/loadConfig.js"; +import { dbMigrate } from "./db-migrate.js"; +import { dbPush } from "./db-push.js"; +import { generateMigration } from "./generate-migration.js"; + +describe("db migrate", () => { + const schema = `orm_migrate_${randomUUID().replace(/-/g, "_")}`; + let db: pg.Client; + + const migrationsPath = "/project/migrations"; + + beforeEach(async () => { + db = new pg.Client(); + await db.connect(); + await db.query(`CREATE SCHEMA IF NOT EXISTS "${schema}"`); + + vi.spyOn(process, 
"cwd").mockReturnValue("/project"); + + vol.fromJSON( + { + "orm.config.ts": "// placeholder", + }, + "/project", + ); + }); + + afterEach(async () => { + await db.query(`DROP SCHEMA IF EXISTS "${schema}" CASCADE`); + await db.query(`DROP TABLE IF EXISTS public._orm_migrations`); + await db.end(); + vol.reset(); + vi.restoreAllMocks(); + }); + + test("generates migration for new table", async () => { + const user = { + schema, + fields: { + id: { + type: "uuid", + primaryKey: true, + default: sql`gen_random_uuid()`, + }, + email: { type: "text", unique: true }, + name: { type: "text", nullable: true }, + }, + } as const; + + const config = { + db: orm({ schema, models: { user } }), + directory: "./app/db.server", + migrate: { migrationsPath }, + } satisfies OrmCLIConfig; + + await config.db.connect(); + vi.spyOn(loadConfig, "loadConfig").mockResolvedValue(config); + + await yargs() + .options(globalOptions) + .command(generateMigration) + .parseAsync("migration --name create_users --unsafe"); + + await config.db.close(); + + const migrations = fs.readdirSync(migrationsPath) as string[]; + expect(migrations.length).toBe(1); + expect(migrations[0]).toMatch(/^\d{14}_create-users\.sql$/); + + const content = fs.readFileSync( + `${migrationsPath}/${migrations[0]}`, + "utf8", + ) as string; + + expect(content.trim()).toEqual( + unindent` + CREATE SCHEMA IF NOT EXISTS "${schema}"; + + CREATE TABLE "${schema}"."user" ( + "id" uuid NOT NULL DEFAULT gen_random_uuid(), + "email" text NOT NULL, + "name" text, + PRIMARY KEY ("id") + ); + `, + ); + }); + + test("runs pending migrations", async () => { + const user = { + schema, + fields: { + id: { + type: "uuid", + primaryKey: true, + default: sql`gen_random_uuid()`, + }, + email: { type: "text", unique: true }, + name: { type: "text", nullable: true }, + }, + } as const; + + const config = { + db: orm({ schema, models: { user } }), + directory: "./app/db.server", + migrate: { migrationsPath }, + } satisfies OrmCLIConfig; + + 
await config.db.connect(); + vi.spyOn(loadConfig, "loadConfig").mockResolvedValue(config); + + await yargs() + .options(globalOptions) + .command(generateMigration) + .parseAsync("migration --name create_users --unsafe"); + + await yargs() + .options(globalOptions) + .command(dbMigrate) + .parseAsync("migrate"); + + await config.db.close(); + + const result = await db.query( + ` + SELECT column_name + FROM information_schema.columns + WHERE table_schema = $1 AND table_name = 'user' + ORDER BY ordinal_position + `, + [schema], + ); + + expect(result.rows.map((r) => r.column_name)).toEqual([ + "id", + "email", + "name", + ]); + + const migrationRecord = await db.query(` + SELECT name, checksum FROM public._orm_migrations + `); + expect(migrationRecord.rows.length).toBe(1); + expect(migrationRecord.rows[0].name).toMatch(/^\d{14}_create-users$/); + }); + + test("returns no changes when schema is in sync", async () => { + const user = { + schema, + fields: { + id: { type: "uuid", primaryKey: true }, + }, + } as const; + + const config = { + db: orm({ schema, models: { user } }), + directory: "./app/db.server", + migrate: { migrationsPath }, + } satisfies OrmCLIConfig; + + await config.db.connect(); + vi.spyOn(loadConfig, "loadConfig").mockResolvedValue(config); + + await yargs().options(globalOptions).command(dbPush).parseAsync("push"); + + const consoleSpy = vi.spyOn(console, "log"); + + await yargs() + .options(globalOptions) + .command(generateMigration) + .parseAsync("migration --name no_changes --unsafe"); + + expect(consoleSpy).toHaveBeenCalledWith("No changes detected."); + + await config.db.close(); + }); + + test("generates incremental migration for schema changes", async () => { + const userV1 = { + schema, + fields: { + id: { type: "uuid", primaryKey: true }, + email: { type: "text" }, + }, + } as const; + + const configV1 = { + db: orm({ schema, models: { user: userV1 } }), + directory: "./app/db.server", + migrate: { migrationsPath }, + } satisfies 
OrmCLIConfig; + + await configV1.db.connect(); + vi.spyOn(loadConfig, "loadConfig").mockResolvedValue(configV1); + + await yargs().options(globalOptions).command(dbPush).parseAsync("push"); + await configV1.db.close(); + + const userV2 = { + schema, + fields: { + id: { type: "uuid", primaryKey: true }, + email: { type: "text" }, + createdAt: { type: "timestamptz", default: sql`now()` }, + }, + } as const; + + const configV2 = { + db: orm({ schema, models: { user: userV2 } }), + directory: "./app/db.server", + migrate: { migrationsPath }, + } satisfies OrmCLIConfig; + + await configV2.db.connect(); + vi.spyOn(loadConfig, "loadConfig").mockResolvedValue(configV2); + + await yargs() + .options(globalOptions) + .command(generateMigration) + .parseAsync("migration --name add_created_at --unsafe"); + + await configV2.db.close(); + + const migrations = fs.readdirSync(migrationsPath) as string[]; + expect(migrations.length).toBe(1); + expect(migrations[0]).toMatch(/^\d{14}_add-created-at\.sql$/); + + const content = fs.readFileSync( + `${migrationsPath}/${migrations[0]}`, + "utf8", + ) as string; + + expect(content.trim()).toEqual( + unindent` + ALTER TABLE "${schema}"."user" ADD COLUMN "createdAt" timestamptz NOT NULL DEFAULT now(); + `, + ); + }); + + test("multiple migrations are applied in order", async () => { + const user = { + schema, + fields: { + id: { type: "uuid", primaryKey: true }, + }, + } as const; + + const config = { + db: orm({ schema, models: { user } }), + directory: "./app/db.server", + migrate: { migrationsPath }, + } satisfies OrmCLIConfig; + + await config.db.connect(); + vi.spyOn(loadConfig, "loadConfig").mockResolvedValue(config); + + fs.mkdirSync(migrationsPath, { recursive: true }); + fs.writeFileSync( + `${migrationsPath}/20240101000000_first.sql`, + `CREATE TABLE "${schema}"."first_table" (id serial PRIMARY KEY);`, + ); + fs.writeFileSync( + `${migrationsPath}/20240101000001_second.sql`, + `CREATE TABLE "${schema}"."second_table" (id serial 
PRIMARY KEY);`, + ); + + await yargs() + .options(globalOptions) + .command(dbMigrate) + .parseAsync("migrate"); + + await config.db.close(); + + const tables = await db.query( + ` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = $1 + ORDER BY table_name + `, + [schema], + ); + + expect(tables.rows.map((r) => r.table_name)).toEqual([ + "first_table", + "second_table", + ]); + + const migrationRecords = await db.query(` + SELECT name FROM public._orm_migrations ORDER BY id + `); + expect(migrationRecords.rows.map((r) => r.name)).toEqual([ + "20240101000000_first", + "20240101000001_second", + ]); + }); + + test("already up to date when no pending migrations", async () => { + const user = { + schema, + fields: { + id: { type: "uuid", primaryKey: true }, + }, + } as const; + + const config = { + db: orm({ schema, models: { user } }), + directory: "./app/db.server", + migrate: { migrationsPath }, + } satisfies OrmCLIConfig; + + await config.db.connect(); + vi.spyOn(loadConfig, "loadConfig").mockResolvedValue(config); + + await yargs() + .options(globalOptions) + .command(generateMigration) + .parseAsync("migration --name create_users --unsafe"); + + await yargs() + .options(globalOptions) + .command(dbMigrate) + .parseAsync("migrate"); + + const consoleSpy = vi.spyOn(console, "log"); + + await yargs() + .options(globalOptions) + .command(dbMigrate) + .parseAsync("migrate"); + + expect(consoleSpy).toHaveBeenCalledWith("Already up to date."); + + await config.db.close(); + }); +}); diff --git a/packages/orm-cli/src/commands/db-migrate.ts b/packages/orm-cli/src/commands/db-migrate.ts new file mode 100644 index 00000000..46347798 --- /dev/null +++ b/packages/orm-cli/src/commands/db-migrate.ts @@ -0,0 +1,9 @@ +import { handler } from "./db-migrate/handler.js"; +import { builder } from "./db-migrate/options.js"; + +export const dbMigrate = { + command: "migrate", + desc: "Run pending migrations", + builder, + handler, +}; diff --git 
a/packages/orm-cli/src/commands/db-migrate/handler.ts b/packages/orm-cli/src/commands/db-migrate/handler.ts new file mode 100644 index 00000000..6c8067c6 --- /dev/null +++ b/packages/orm-cli/src/commands/db-migrate/handler.ts @@ -0,0 +1,32 @@ +import { migrate } from "@casekit/orm-migrate"; + +import { Handler } from "#types.js"; +import { loadConfig } from "#util/loadConfig.js"; +import { readMigrationFiles } from "#util/migrations.js"; +import { builder } from "./options.js"; + +export const handler: Handler = async (opts) => { + const config = await loadConfig(opts); + const { db } = config; + + const migrationsPath = config.migrate?.migrationsPath ?? "./migrations"; + + try { + const migrations = readMigrationFiles(migrationsPath); + const result = await migrate.run(db, migrations); + + if (result.alreadyUpToDate) { + console.log("Already up to date."); + } else { + console.log(`Applied ${result.applied.length} migration(s):`); + for (const name of result.applied) { + console.log(` - ${name}`); + } + console.log("✅ Done"); + } + } catch (e) { + console.error("Error running migrations", e); + process.exitCode = 1; + throw e; + } +}; diff --git a/packages/orm-cli/src/commands/db-migrate/options.ts b/packages/orm-cli/src/commands/db-migrate/options.ts new file mode 100644 index 00000000..b7ed1bc6 --- /dev/null +++ b/packages/orm-cli/src/commands/db-migrate/options.ts @@ -0,0 +1,3 @@ +import { Builder } from "#types.js"; + +export const builder: Builder = {} as const; diff --git a/packages/orm-cli/src/commands/db-pull/util/relationNames.test.ts b/packages/orm-cli/src/commands/db-pull/util/relationNames.test.ts index dc8dda2d..c8757927 100644 --- a/packages/orm-cli/src/commands/db-pull/util/relationNames.test.ts +++ b/packages/orm-cli/src/commands/db-pull/util/relationNames.test.ts @@ -19,6 +19,8 @@ describe("guessManyToOneRelationName", () => { tableTo, columnsFrom, columnsTo: ["id"], + onUpdate: null, + onDelete: null, }); it("handles single column foreign keys", 
() => { @@ -103,6 +105,8 @@ describe("guessOneToManyRelationName", () => { tableTo, columnsFrom, columnsTo: ["id"], + onUpdate: null, + onDelete: null, }); it("handles standard one-to-many relationships", () => { diff --git a/packages/orm-cli/src/commands/db-pull/util/renderFieldDefinition.test.ts b/packages/orm-cli/src/commands/db-pull/util/renderFieldDefinition.test.ts index 6a68c6d3..4cd7cc88 100644 --- a/packages/orm-cli/src/commands/db-pull/util/renderFieldDefinition.test.ts +++ b/packages/orm-cli/src/commands/db-pull/util/renderFieldDefinition.test.ts @@ -107,6 +107,8 @@ describe("renderFieldDefinition", () => { table: "test", name: "test_email_key", definition: "UNIQUE (email)", + columns: ["email"], + nullsNotDistinct: false, }, ], }); @@ -125,6 +127,8 @@ describe("renderFieldDefinition", () => { table: "test", name: "test_email_key", definition: "UNIQUE NULLS NOT DISTINCT (email)", + columns: ["email"], + nullsNotDistinct: true, }, ], }); @@ -145,6 +149,8 @@ describe("renderFieldDefinition", () => { tableTo: "users", columnsFrom: ["user_id"], columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, ], }); @@ -165,6 +171,8 @@ describe("renderFieldDefinition", () => { tableTo: "users", columnsFrom: ["email"], columnsTo: ["email"], + onUpdate: null, + onDelete: null, }, ], }); @@ -185,6 +193,8 @@ describe("renderFieldDefinition", () => { tableTo: "users", columnsFrom: ["user_id", "tenant_id"], columnsTo: ["id", "tenant_id"], + onUpdate: null, + onDelete: null, }, ], }); @@ -208,6 +218,8 @@ describe("renderFieldDefinition", () => { table: "test", name: "test_email_key", definition: 'UNIQUE ("email")', + columns: ["email"], + nullsNotDistinct: false, }, ], }); diff --git a/packages/orm-cli/src/commands/db-pull/util/renderFieldDefinition.ts b/packages/orm-cli/src/commands/db-pull/util/renderFieldDefinition.ts index 31f176dc..318dfdd6 100644 --- a/packages/orm-cli/src/commands/db-pull/util/renderFieldDefinition.ts +++
b/packages/orm-cli/src/commands/db-pull/util/renderFieldDefinition.ts @@ -19,9 +19,8 @@ export const renderFieldDefinition = (column: Column, table: Table): string => { // Primary key (single column) const isPrimaryKey = - table.primaryKey && - table.primaryKey.columns.length === 1 && - table.primaryKey.columns[0] === column.column; + table.primaryKey?.columns.length === 1 && + table.primaryKey?.columns[0] === column.column; if (isPrimaryKey) { parts.push("primaryKey: true"); } diff --git a/packages/orm-cli/src/commands/db-pull/util/renderModel.constraints.test.ts b/packages/orm-cli/src/commands/db-pull/util/renderModel.constraints.test.ts index 56dfe55d..82278659 100644 --- a/packages/orm-cli/src/commands/db-pull/util/renderModel.constraints.test.ts +++ b/packages/orm-cli/src/commands/db-pull/util/renderModel.constraints.test.ts @@ -196,6 +196,8 @@ describe("renderModel - constraints", () => { name: "users_email_key", definition: "CREATE UNIQUE INDEX users_email_key ON public.users USING btree (email)", + columns: ["email"], + nullsNotDistinct: false, }, ], }; @@ -261,6 +263,8 @@ describe("renderModel - constraints", () => { name: "users_email_key", definition: "CREATE UNIQUE INDEX users_email_key ON public.users USING btree (email) NULLS NOT DISTINCT", + columns: ["email"], + nullsNotDistinct: true, }, ], }; @@ -346,6 +350,8 @@ describe("renderModel - constraints", () => { name: "products_name_category_key", definition: "CREATE UNIQUE INDEX products_name_category_key ON public.products USING btree (name, category)", + columns: ["name", "category"], + nullsNotDistinct: false, }, ], }; @@ -429,6 +435,8 @@ describe("renderModel - constraints", () => { name: "users_email_deleted_at_key", definition: "CREATE UNIQUE INDEX users_email_deleted_at_key ON public.users USING btree (email, deleted_at) NULLS NOT DISTINCT", + columns: ["email", "deleted_at"], + nullsNotDistinct: true, }, ], }; @@ -497,6 +505,8 @@ describe("renderModel - constraints", () => { columnsFrom:
["author_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -575,6 +585,8 @@ describe("renderModel - constraints", () => { columnsFrom: ["color_hex"], tableTo: "colors", columnsTo: ["hex"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -671,6 +683,8 @@ describe("renderModel - constraints", () => { columnsFrom: ["company_id", "company_code"], tableTo: "companies", columnsTo: ["id", "code"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, diff --git a/packages/orm-cli/src/commands/db-pull/util/renderModel.relations.test.ts b/packages/orm-cli/src/commands/db-pull/util/renderModel.relations.test.ts index 91acb8c9..75360708 100644 --- a/packages/orm-cli/src/commands/db-pull/util/renderModel.relations.test.ts +++ b/packages/orm-cli/src/commands/db-pull/util/renderModel.relations.test.ts @@ -52,6 +52,8 @@ describe("renderModel - relations", () => { columnsFrom: ["author_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -158,6 +160,8 @@ describe("renderModel - relations", () => { columnsFrom: ["author_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -233,6 +237,8 @@ describe("renderModel - relations", () => { columnsFrom: ["color_hex"], tableTo: "colors", columnsTo: ["hex"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -313,6 +319,8 @@ describe("renderModel - relations", () => { columnsFrom: ["created_by_user_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -403,6 +411,8 @@ describe("renderModel - relations", () => { columnsFrom: ["created_by_user_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -493,6 +503,8 @@ describe("renderModel - relations", () => { columnsFrom: ["company_id", "company_code"], tableTo: "companies", columnsTo: ["id", "code"], + 
onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -609,6 +621,8 @@ describe("renderModel - relations", () => { columnsFrom: ["author_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, { schema: "public", @@ -617,6 +631,8 @@ describe("renderModel - relations", () => { columnsFrom: ["editor_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, { schema: "public", @@ -625,6 +641,8 @@ describe("renderModel - relations", () => { columnsFrom: ["category_id"], tableTo: "categories", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -755,6 +773,8 @@ describe("renderModel - relations", () => { columnsFrom: ["author_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, { schema: "public", @@ -763,6 +783,8 @@ describe("renderModel - relations", () => { columnsFrom: ["editor_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, @@ -797,6 +819,8 @@ describe("renderModel - relations", () => { columnsFrom: ["author_id"], tableTo: "users", columnsTo: ["id"], + onUpdate: null, + onDelete: null, }, ], primaryKey: null, diff --git a/packages/orm-cli/src/commands/db-pull/util/renderRelations.test.ts b/packages/orm-cli/src/commands/db-pull/util/renderRelations.test.ts index 4ad561de..d436e3d1 100644 --- a/packages/orm-cli/src/commands/db-pull/util/renderRelations.test.ts +++ b/packages/orm-cli/src/commands/db-pull/util/renderRelations.test.ts @@ -29,6 +29,8 @@ describe("renderRelations", () => { tableTo: "users", columnsFrom: ["user_id"], columnsTo: ["id"], + onUpdate: null, + onDelete: null, ...overrides, }); diff --git a/packages/orm-cli/src/commands/generate-migration.ts b/packages/orm-cli/src/commands/generate-migration.ts new file mode 100644 index 00000000..c8239e43 --- /dev/null +++ b/packages/orm-cli/src/commands/generate-migration.ts @@ -0,0 +1,9 @@ +import { handler } from 
"./generate-migration/handler.js"; +import { builder } from "./generate-migration/options.js"; + +export const generateMigration = { + command: "migration", + desc: "Generate a new migration from schema changes", + builder, + handler, +}; diff --git a/packages/orm-cli/src/commands/generate-migration/handler.ts b/packages/orm-cli/src/commands/generate-migration/handler.ts new file mode 100644 index 00000000..20d02bca --- /dev/null +++ b/packages/orm-cli/src/commands/generate-migration/handler.ts @@ -0,0 +1,126 @@ +import { confirm } from "@inquirer/prompts"; +import { unlinkSync } from "fs"; + +import { + type PotentialRename, + applyRenames, + detectPotentialRenames, + migrate, + operationsToSql, +} from "@casekit/orm-migrate"; + +import { Handler } from "#types.js"; +import { loadConfig } from "#util/loadConfig.js"; +import { + generateMigrationFilename, + writeMigrationFile, +} from "#util/migrations.js"; +import { builder } from "./options.js"; + +/** + * Prompt the user to confirm each potential column rename. + * Returns the list of confirmed renames. + */ +const promptForRenames = async ( + potentialRenames: PotentialRename[], +): Promise => { + const confirmed: PotentialRename[] = []; + + for (const rename of potentialRenames) { + const isRename = await confirm({ + message: `Column "${rename.dropColumn}" is being dropped and "${rename.addColumn}" is being added in "${rename.schema}"."${rename.table}". Is this a rename?`, + default: true, + }); + + if (isRename) { + confirmed.push(rename); + } + } + + return confirmed; +}; + +export const handler: Handler = async (opts) => { + const config = await loadConfig(opts); + const { db } = config; + + const migrationsPath = config.migrate?.migrationsPath ?? 
"./migrations"; + + try { + const result = await migrate.generate(db); + + if (!result) { + console.log("No changes detected."); + return; + } + + // Detect potential column renames and prompt the user + let operations = result.operations; + let sql = result.sql; + + const potentialRenames = detectPotentialRenames(operations); + if (potentialRenames.length > 0) { + const confirmedRenames = await promptForRenames(potentialRenames); + if (confirmedRenames.length > 0) { + operations = applyRenames(operations, confirmedRenames); + sql = operationsToSql(operations).join("\n\n") + "\n"; + } + } + + // Write the migration file + const filename = generateMigrationFilename(opts.name); + const filePath = writeMigrationFile(migrationsPath, filename, sql); + + console.log(`\nGenerated migration: ${filePath}\n`); + console.log(sql); + + if (result.warnings.length > 0) { + const unsafeWarnings = result.warnings.filter( + (w) => w.level === "unsafe", + ); + const cautiousWarnings = result.warnings.filter( + (w) => w.level === "cautious", + ); + + if (cautiousWarnings.length > 0) { + console.log("\n⚠️ Cautious operations:"); + for (const w of cautiousWarnings) { + console.log(` - ${w.message}`); + if (w.suggestion) { + console.log(` Suggestion: ${w.suggestion}`); + } + } + } + + if (unsafeWarnings.length > 0) { + console.log("\n🚨 Unsafe operations:"); + for (const w of unsafeWarnings) { + console.log(` - ${w.message}`); + if (w.suggestion) { + console.log(` Suggestion: ${w.suggestion}`); + } + } + + if (!opts.force && !opts.unsafe) { + const proceed = await confirm({ + message: + "This migration contains unsafe operations. 
Do you want to keep it?", + default: false, + }); + + if (!proceed) { + unlinkSync(filePath); + console.log("Migration file removed."); + return; + } + } + } + } + + console.log("✅ Done"); + } catch (e) { + console.error("Error generating migration", e); + process.exitCode = 1; + throw e; + } +}; diff --git a/packages/orm-cli/src/commands/generate-migration/options.ts b/packages/orm-cli/src/commands/generate-migration/options.ts new file mode 100644 index 00000000..2d91fb45 --- /dev/null +++ b/packages/orm-cli/src/commands/generate-migration/options.ts @@ -0,0 +1,15 @@ +import { Options } from "yargs"; + +export const builder = { + name: { + type: "string", + alias: "n", + desc: "Short description for the migration filename", + demandOption: true, + }, + unsafe: { + type: "boolean", + desc: "Proceed even with unsafe operations", + default: false, + }, +} as const satisfies Record; diff --git a/packages/orm-cli/src/test/setup.ts b/packages/orm-cli/src/test/setup.ts index ca351dd7..e3675801 100644 --- a/packages/orm-cli/src/test/setup.ts +++ b/packages/orm-cli/src/test/setup.ts @@ -5,7 +5,13 @@ import { afterAll, afterEach, beforeAll, beforeEach, vi } from "vitest"; import * as loadConfig from "#util/loadConfig.js"; -vi.mock("fs"); +vi.mock("fs", async () => { + const memfs = await import("memfs"); + return { + ...memfs.fs, + default: memfs.fs, + }; +}); vi.mock("prettier"); vi.mock("@inquirer/prompts"); diff --git a/packages/orm-cli/src/types.ts b/packages/orm-cli/src/types.ts index e984a3c7..d4ca3c61 100644 --- a/packages/orm-cli/src/types.ts +++ b/packages/orm-cli/src/types.ts @@ -10,6 +10,7 @@ export interface OrmCLIConfig { directory: string; migrate?: { connection?: ConnectionConfig; + migrationsPath?: string; }; } diff --git a/packages/orm-cli/src/util/migrations.ts b/packages/orm-cli/src/util/migrations.ts new file mode 100644 index 00000000..ebbfc200 --- /dev/null +++ b/packages/orm-cli/src/util/migrations.ts @@ -0,0 +1,82 @@ +import { createHash } from 
"crypto"; +import { + existsSync, + mkdirSync, + readFileSync, + readdirSync, + writeFileSync, +} from "fs"; +import { join } from "path"; + +export interface MigrationFile { + name: string; + path: string; + sql: string; + checksum: string; +} + +/** + * Generate a timestamped migration filename. + * Format: YYYYMMDDHHMMSS_description.sql + */ +export const generateMigrationFilename = (description: string): string => { + // toISOString() returns UTC in ISO 8601 format which sorts alphabetically + const timestamp = new Date() + .toISOString() + .replace(/[-:T.Z]/g, "") + .slice(0, 14); + + const slug = description + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-|-$/g, ""); + + return `${timestamp}_${slug}.sql`; +}; + +/** + * Compute a SHA-256 checksum of content. + */ +export const checksum = (content: string): string => { + return createHash("sha256").update(content).digest("hex"); +}; + +/** + * Read all migration SQL files from a directory, sorted by filename. + */ +export const readMigrationFiles = (migrationsPath: string): MigrationFile[] => { + if (!existsSync(migrationsPath)) { + return []; + } + + const files = readdirSync(migrationsPath) + .filter((f) => f.endsWith(".sql")) + .sort(); + + return files.map((file) => { + const filePath = join(migrationsPath, file); + const content = readFileSync(filePath, "utf-8"); + return { + name: file.replace(/\.sql$/, ""), + path: filePath, + sql: content, + checksum: checksum(content), + }; + }); +}; + +/** + * Write a migration file to the migrations directory. + * Creates the directory if it doesn't exist. + * Returns the full path of the written file. 
+ */ +export const writeMigrationFile = ( + migrationsPath: string, + filename: string, + content: string, +): string => { + mkdirSync(migrationsPath, { recursive: true }); + const filePath = join(migrationsPath, filename); + writeFileSync(filePath, content, "utf-8"); + return filePath; +}; diff --git a/packages/orm-config/package.json b/packages/orm-config/package.json index 9fff9b71..2ceab080 100644 --- a/packages/orm-config/package.json +++ b/packages/orm-config/package.json @@ -36,6 +36,14 @@ }, "keywords": [], "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/casekit/orm.git", + "directory": "packages/orm-config" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + }, "peerDependencies": { "pg": "^8.13.1", "zod": "^4.0.17" diff --git a/packages/orm-fixtures/package.json b/packages/orm-fixtures/package.json index f568b15d..4c7ffb25 100644 --- a/packages/orm-fixtures/package.json +++ b/packages/orm-fixtures/package.json @@ -33,6 +33,14 @@ }, "keywords": [], "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/casekit/orm.git", + "directory": "packages/orm-fixtures" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + }, "peerDependencies": { "pg": "^8.13.1", "zod": "^4.0.17" diff --git a/packages/orm-migrate/package.json b/packages/orm-migrate/package.json index eb9b14ec..db797176 100644 --- a/packages/orm-migrate/package.json +++ b/packages/orm-migrate/package.json @@ -16,7 +16,7 @@ "@casekit/orm-fixtures": "workspace:*", "@casekit/prettier-config": "workspace:*", "@casekit/tsconfig": "workspace:*", - "@casekit/unindent": "^1.0.5", + "@casekit/unindent": "0.0.0-gh-packages.634dab0", "@trivago/prettier-plugin-sort-imports": "^5.2.2", "@types/node": "^24.0.3", "@types/pg": "^8.15.4", @@ -41,6 +41,14 @@ }, "keywords": [], "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/casekit/orm.git", + "directory": "packages/orm-migrate" + }, + 
"publishConfig": { + "registry": "https://npm.pkg.github.com" + }, "peerDependencies": { "pg": "^8.13.1", "zod": "^4.0.17" diff --git a/packages/orm-migrate/src/index.ts b/packages/orm-migrate/src/index.ts index 865a9797..0791709e 100644 --- a/packages/orm-migrate/src/index.ts +++ b/packages/orm-migrate/src/index.ts @@ -1,4 +1,6 @@ import { drop } from "#drop.js"; +import { generate } from "#migrations/generate.js"; +import { run } from "#migrations/run.js"; import { pull } from "#pull.js"; import { push } from "#push.js"; @@ -6,6 +8,8 @@ export const migrate = { drop, push, pull, + generate, + run, }; export type { Table } from "#pull.js"; @@ -15,3 +19,13 @@ export type { PrimaryKey, UniqueConstraint, } from "#pull/index.js"; +export type { GenerateResult } from "#migrations/generate.js"; +export type { Migration, RunResult } from "#migrations/run.js"; +export type { SafetyWarning, SafetyLevel } from "#migrations/safety/types.js"; +export type { SchemaDiffOperation } from "#migrations/diff/types.js"; +export { + detectPotentialRenames, + applyRenames, +} from "#migrations/diff/detectRenames.js"; +export type { PotentialRename } from "#migrations/diff/detectRenames.js"; +export { operationsToSql } from "#migrations/diff/operationToSql.js"; diff --git a/packages/orm-migrate/src/migrations/configToSnapshot.test.ts b/packages/orm-migrate/src/migrations/configToSnapshot.test.ts new file mode 100644 index 00000000..ebb4f846 --- /dev/null +++ b/packages/orm-migrate/src/migrations/configToSnapshot.test.ts @@ -0,0 +1,244 @@ +import { describe, expect, test } from "vitest"; +import { z } from "zod"; + +import { orm, sql } from "@casekit/orm"; + +import { configToSnapshot } from "./configToSnapshot.js"; + +describe("configToSnapshot", () => { + test("converts a simple model to a snapshot", () => { + const db = orm({ + schema: "app", + models: { + user: { + fields: { + id: { type: "serial", primaryKey: true }, + name: { type: "text" }, + }, + }, + }, + }); + + const snapshot 
= configToSnapshot(db.config); + + expect(snapshot.schemas).toEqual(["app"]); + expect(snapshot.tables).toHaveLength(1); + expect(snapshot.tables[0]!.schema).toBe("app"); + expect(snapshot.tables[0]!.name).toBe("user"); + expect(snapshot.tables[0]!.primaryKey).toEqual({ + name: "user_pkey", + columns: ["id"], + }); + expect(snapshot.tables[0]!.columns).toEqual([ + { name: "id", type: "serial", nullable: false, default: null }, + { name: "name", type: "text", nullable: false, default: null }, + ]); + }); + + test("handles nullable fields", () => { + const db = orm({ + schema: "app", + models: { + user: { + fields: { + id: { type: "serial", primaryKey: true }, + bio: { type: "text", nullable: true }, + }, + }, + }, + }); + + const snapshot = configToSnapshot(db.config); + const bioCol = snapshot.tables[0]!.columns.find( + (c) => c.name === "bio", + ); + expect(bioCol!.nullable).toBe(true); + }); + + test("handles string defaults", () => { + const db = orm({ + schema: "app", + models: { + user: { + fields: { + id: { type: "serial", primaryKey: true }, + role: { type: "text", default: "user" }, + }, + }, + }, + }); + + const snapshot = configToSnapshot(db.config); + const roleCol = snapshot.tables[0]!.columns.find( + (c) => c.name === "role", + ); + expect(roleCol!.default).toBe("'user'"); + }); + + test("handles SQL statement defaults", () => { + const db = orm({ + schema: "app", + models: { + user: { + fields: { + id: { type: "serial", primaryKey: true }, + createdAt: { + type: "timestamptz", + default: sql`now()`, + }, + }, + }, + }, + }); + + const snapshot = configToSnapshot(db.config); + const col = snapshot.tables[0]!.columns.find( + (c) => c.name === "createdAt", + ); + expect(col!.default).toBe("now()"); + }); + + test("handles numeric defaults", () => { + const db = orm({ + schema: "app", + models: { + item: { + fields: { + id: { type: "serial", primaryKey: true }, + quantity: { type: "integer", default: 0 }, + }, + }, + }, + }); + + const snapshot = 
configToSnapshot(db.config); + const col = snapshot.tables[0]!.columns.find( + (c) => c.name === "quantity", + ); + expect(col!.default).toBe("0"); + }); + + test("handles boolean defaults", () => { + const db = orm({ + schema: "app", + models: { + user: { + fields: { + id: { type: "serial", primaryKey: true }, + active: { type: "boolean", default: true }, + }, + }, + }, + }); + + const snapshot = configToSnapshot(db.config); + const col = snapshot.tables[0]!.columns.find( + (c) => c.name === "active", + ); + expect(col!.default).toBe("true"); + }); + + test("converts foreign keys", () => { + const db = orm({ + schema: "app", + models: { + user: { + fields: { + id: { type: "serial", primaryKey: true }, + }, + }, + post: { + fields: { + id: { type: "serial", primaryKey: true }, + authorId: { type: "integer" }, + }, + foreignKeys: [ + { + fields: ["authorId"], + references: { + model: "user", + fields: ["id"], + }, + onDelete: "CASCADE", + }, + ], + }, + }, + }); + + const snapshot = configToSnapshot(db.config); + const postTable = snapshot.tables.find((t) => t.name === "post"); + expect(postTable!.foreignKeys).toHaveLength(1); + expect(postTable!.foreignKeys[0]).toEqual( + expect.objectContaining({ + columns: ["authorId"], + referencesTable: "user", + referencesColumns: ["id"], + onDelete: "CASCADE", + }), + ); + }); + + test("converts unique constraints", () => { + const db = orm({ + schema: "app", + models: { + user: { + fields: { + id: { type: "serial", primaryKey: true }, + email: { type: "text" }, + }, + uniqueConstraints: [{ fields: ["email"] }], + }, + }, + }); + + const snapshot = configToSnapshot(db.config); + const table = snapshot.tables[0]!; + expect(table.uniqueConstraints).toHaveLength(1); + expect(table.uniqueConstraints[0]!.columns).toEqual(["email"]); + }); + + test("converts extensions", () => { + const db = orm({ + schema: "app", + models: { + user: { + fields: { + id: { type: "serial", primaryKey: true }, + }, + }, + }, + extensions: 
["uuid-ossp"], + }); + + const snapshot = configToSnapshot(db.config); + expect(snapshot.extensions).toContainEqual({ + name: "uuid-ossp", + schema: "app", + }); + }); + + test("collects schemas from multiple models", () => { + const db = orm({ + schema: "app", + models: { + user: { + fields: { + id: { type: "serial", primaryKey: true }, + }, + }, + auditLog: { + schema: "audit", + fields: { + id: { type: "serial", primaryKey: true }, + }, + }, + }, + }); + + const snapshot = configToSnapshot(db.config); + expect(snapshot.schemas).toContain("app"); + expect(snapshot.schemas).toContain("audit"); + }); +}); diff --git a/packages/orm-migrate/src/migrations/configToSnapshot.ts b/packages/orm-migrate/src/migrations/configToSnapshot.ts new file mode 100644 index 00000000..a47da684 --- /dev/null +++ b/packages/orm-migrate/src/migrations/configToSnapshot.ts @@ -0,0 +1,106 @@ +import { NormalizedConfig } from "@casekit/orm-config"; +import { SQLStatement } from "@casekit/sql"; + +import { arrayToSqlArray } from "#push/arrayToSqlArray.js"; +import type { + ColumnSnapshot, + ForeignKeySnapshot, + SchemaSnapshot, + TableSnapshot, + UniqueConstraintSnapshot, +} from "./types.js"; + +/** + * Serialise a field default value to a string that can be compared + * with the normalised default values returned by pullDefault(). + */ +const serialiseDefault = (value: unknown): string | null => { + if (value === null || value === undefined) { + return null; + } + + if (value instanceof SQLStatement) { + return value.text; + } + + if (typeof value === "string") { + return `'${value}'`; + } + + if (typeof value === "number" || typeof value === "bigint") { + return String(value); + } + + if (typeof value === "boolean") { + return value ? 
"true" : "false"; + } + + if (Array.isArray(value)) { + return `'${arrayToSqlArray(value)}'`; + } + + // Objects (JSON) + return `'${JSON.stringify(value)}'`; +}; + +/** + * Convert a NormalizedConfig (the desired TypeScript schema state) + * into a SchemaSnapshot for diffing against the database state. + */ +export const configToSnapshot = (config: NormalizedConfig): SchemaSnapshot => { + const models = Object.values(config.models); + + const schemas = [...new Set(models.map((m) => m.schema))].sort(); + + const extensions = config.extensions.flatMap((ext) => + schemas.map((schema) => ({ name: ext, schema })), + ); + + const tables: TableSnapshot[] = models.map((model) => { + const fields = Object.values(model.fields); + + const columns: ColumnSnapshot[] = fields.map((field) => ({ + name: field.column, + type: field.type, + nullable: field.nullable, + default: serialiseDefault(field.default), + })); + + const pkColumns = model.primaryKey.map((pk) => pk.column); + const primaryKey = { + name: pkColumns.length > 0 ? `${model.table}_pkey` : null, + columns: pkColumns, + }; + + const foreignKeys: ForeignKeySnapshot[] = model.foreignKeys.map( + (fk) => ({ + name: fk.name, + columns: fk.columns, + referencesSchema: fk.references.schema, + referencesTable: fk.references.table, + referencesColumns: fk.references.columns, + onDelete: fk.onDelete ?? null, + onUpdate: fk.onUpdate ?? null, + }), + ); + + const uniqueConstraints: UniqueConstraintSnapshot[] = + model.uniqueConstraints.map((uc) => ({ + name: uc.name, + columns: uc.columns, + nullsNotDistinct: uc.nullsNotDistinct ?? false, + where: uc.where ? 
uc.where.text : null, + })); + + return { + schema: model.schema, + name: model.table, + columns, + primaryKey, + foreignKeys, + uniqueConstraints, + }; + }); + + return { schemas, extensions, tables }; +}; diff --git a/packages/orm-migrate/src/migrations/diff/detectRenames.test.ts b/packages/orm-migrate/src/migrations/diff/detectRenames.test.ts new file mode 100644 index 00000000..80320392 --- /dev/null +++ b/packages/orm-migrate/src/migrations/diff/detectRenames.test.ts @@ -0,0 +1,326 @@ +import { describe, expect, test } from "vitest"; + +import { applyRenames, detectPotentialRenames } from "./detectRenames.js"; +import type { SchemaDiffOperation } from "./types.js"; + +describe("detectPotentialRenames", () => { + test("returns empty array when no drops or adds", () => { + const ops: SchemaDiffOperation[] = [ + { type: "createSchema", schema: "app" }, + ]; + expect(detectPotentialRenames(ops)).toEqual([]); + }); + + test("returns empty array when only drops", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "dropColumn", + schema: "app", + table: "users", + column: "old", + }, + ]; + expect(detectPotentialRenames(ops)).toEqual([]); + }); + + test("returns empty array when only adds", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "new", + type: "text", + nullable: false, + default: null, + }, + }, + ]; + expect(detectPotentialRenames(ops)).toEqual([]); + }); + + test("detects potential rename when drop and add in same table", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "dropColumn", + schema: "app", + table: "users", + column: "old_name", + }, + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "new_name", + type: "text", + nullable: false, + default: null, + }, + }, + ]; + + const renames = detectPotentialRenames(ops); + expect(renames).toHaveLength(1); + expect(renames[0]).toEqual({ + schema: "app", + table: "users", + 
dropColumn: "old_name", + addColumn: "new_name", + type: "text", + nullable: false, + default: null, + }); + }); + + test("does not pair drop and add from different tables", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "dropColumn", + schema: "app", + table: "users", + column: "old", + }, + { + type: "addColumn", + schema: "app", + table: "posts", + column: { + name: "new", + type: "text", + nullable: false, + default: null, + }, + }, + ]; + + expect(detectPotentialRenames(ops)).toEqual([]); + }); + + test("pairs multiple drops and adds in same table", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "dropColumn", + schema: "app", + table: "users", + column: "old1", + }, + { + type: "dropColumn", + schema: "app", + table: "users", + column: "old2", + }, + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "new1", + type: "text", + nullable: false, + default: null, + }, + }, + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "new2", + type: "text", + nullable: true, + default: null, + }, + }, + ]; + + const renames = detectPotentialRenames(ops); + expect(renames).toHaveLength(2); + }); + + test("does not reuse adds for multiple drops", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "dropColumn", + schema: "app", + table: "users", + column: "old1", + }, + { + type: "dropColumn", + schema: "app", + table: "users", + column: "old2", + }, + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "new1", + type: "text", + nullable: false, + default: null, + }, + }, + ]; + + const renames = detectPotentialRenames(ops); + // Only one rename possible since there's only one add + expect(renames).toHaveLength(1); + }); +}); + +describe("applyRenames", () => { + test("replaces drop+add with rename", () => { + const ops: SchemaDiffOperation[] = [ + { type: "createSchema", schema: "app" }, + { + type: "dropColumn", + schema: "app", + table: "users", + column: 
"old_name", + }, + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "new_name", + type: "text", + nullable: false, + default: null, + }, + }, + ]; + + const confirmedRenames = [ + { + schema: "app", + table: "users", + dropColumn: "old_name", + addColumn: "new_name", + type: "text", + nullable: false, + default: null, + }, + ]; + + const result = applyRenames(ops, confirmedRenames); + + expect(result).toHaveLength(2); + expect(result[0]).toEqual({ type: "createSchema", schema: "app" }); + expect(result[1]).toEqual({ + type: "renameColumn", + schema: "app", + table: "users", + oldName: "old_name", + newName: "new_name", + }); + }); + + test("preserves unaffected operations", () => { + const ops: SchemaDiffOperation[] = [ + { type: "createSchema", schema: "app" }, + { + type: "dropColumn", + schema: "app", + table: "users", + column: "keep_drop", + }, + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "keep_add", + type: "text", + nullable: false, + default: null, + }, + }, + ]; + + // No confirmed renames + const result = applyRenames(ops, []); + + expect(result).toEqual(ops); + }); + + test("handles partial confirmations", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "dropColumn", + schema: "app", + table: "users", + column: "rename_me", + }, + { + type: "dropColumn", + schema: "app", + table: "users", + column: "delete_me", + }, + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "renamed", + type: "text", + nullable: false, + default: null, + }, + }, + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "new_col", + type: "text", + nullable: true, + default: null, + }, + }, + ]; + + // Only confirm the first rename + const confirmedRenames = [ + { + schema: "app", + table: "users", + dropColumn: "rename_me", + addColumn: "renamed", + type: "text", + nullable: false, + default: null, + }, + ]; + + const result = applyRenames(ops, 
confirmedRenames); + + // Should have: rename, drop, add (for the unconfirmed pair) + expect(result).toHaveLength(3); + expect(result.find((op) => op.type === "renameColumn")).toEqual({ + type: "renameColumn", + schema: "app", + table: "users", + oldName: "rename_me", + newName: "renamed", + }); + expect(result.find((op) => op.type === "dropColumn")).toEqual({ + type: "dropColumn", + schema: "app", + table: "users", + column: "delete_me", + }); + expect( + result.find( + (op) => op.type === "addColumn" && op.column.name === "new_col", + ), + ).toBeDefined(); + }); +}); diff --git a/packages/orm-migrate/src/migrations/diff/detectRenames.ts b/packages/orm-migrate/src/migrations/diff/detectRenames.ts new file mode 100644 index 00000000..73440ce4 --- /dev/null +++ b/packages/orm-migrate/src/migrations/diff/detectRenames.ts @@ -0,0 +1,174 @@ +import type { SchemaDiffOperation } from "./types.js"; + +export interface PotentialRename { + schema: string; + table: string; + dropColumn: string; + addColumn: string; + type: string; + nullable: boolean; + default: string | null; +} + +/** + * Detect potential column renames in a list of operations. + * + * A potential rename is when: + * 1. A column is dropped + * 2. A column is added to the same table + * 3. Both columns have the same type, nullable, and default + * + * Returns pairs of (dropColumn, addColumn) that could be renames. 
+ */ +export const detectPotentialRenames = ( + ops: SchemaDiffOperation[], +): PotentialRename[] => { + const potentialRenames: PotentialRename[] = []; + + // Group operations by table + const dropsByTable = new Map< + string, + Array<{ + column: string; + op: SchemaDiffOperation & { type: "dropColumn" }; + }> + >(); + const addsByTable = new Map< + string, + Array<{ + column: SchemaDiffOperation & { type: "addColumn" }; + }> + >(); + + for (const op of ops) { + if (op.type === "dropColumn") { + const key = `${op.schema}.${op.table}`; + if (!dropsByTable.has(key)) { + dropsByTable.set(key, []); + } + dropsByTable.get(key)!.push({ column: op.column, op }); + } else if (op.type === "addColumn") { + const key = `${op.schema}.${op.table}`; + if (!addsByTable.has(key)) { + addsByTable.set(key, []); + } + addsByTable.get(key)!.push({ column: op }); + } + } + + // For each table with both drops and adds, find matching pairs + for (const [tableKey, drops] of dropsByTable) { + const adds = addsByTable.get(tableKey); + if (!adds) continue; + + // Track which adds have been matched to avoid duplicates + const matchedAdds = new Set(); + + for (const drop of drops) { + for (let i = 0; i < adds.length; i++) { + if (matchedAdds.has(i)) continue; + + const add = adds[i]!; + const addCol = add.column.column; + + // Check if columns have matching attributes + // (we can't know the dropped column's attributes, so we just pair them up) + // The user will be prompted to confirm + if (columnsCouldBeRename(drop.op, add.column)) { + const [schema, table] = tableKey.split("."); + potentialRenames.push({ + schema: schema!, + table: table!, + dropColumn: drop.column, + addColumn: addCol.name, + type: addCol.type, + nullable: addCol.nullable, + default: addCol.default, + }); + matchedAdds.add(i); + break; // Only match one add per drop + } + } + } + } + + return potentialRenames; +}; + +/** + * Check if a drop and add could reasonably be a rename. 
+ * Currently just checks that they're in the same table. + * More sophisticated checks could be added later. + */ +const columnsCouldBeRename = ( + _drop: SchemaDiffOperation & { type: "dropColumn" }, + _add: SchemaDiffOperation & { type: "addColumn" }, +): boolean => { + // For now, any drop+add in the same table is a potential rename + // The user will confirm via prompt + return true; +}; + +/** + * Apply confirmed renames to an operations list. + * + * For each confirmed rename: + * 1. Remove the dropColumn operation + * 2. Remove the addColumn operation + * 3. Add a renameColumn operation + * + * Returns the modified operations list. + */ +export const applyRenames = ( + ops: SchemaDiffOperation[], + confirmedRenames: PotentialRename[], +): SchemaDiffOperation[] => { + const result: SchemaDiffOperation[] = []; + + // Create a set of operations to remove + const dropsToRemove = new Set(); + const addsToRemove = new Set(); + + for (const rename of confirmedRenames) { + dropsToRemove.add( + `${rename.schema}.${rename.table}.${rename.dropColumn}`, + ); + addsToRemove.add( + `${rename.schema}.${rename.table}.${rename.addColumn}`, + ); + } + + // Filter out the drops and adds, and add the renames + for (const op of ops) { + if (op.type === "dropColumn") { + const key = `${op.schema}.${op.table}.${op.column}`; + if (dropsToRemove.has(key)) { + // Find the corresponding rename and add it + const rename = confirmedRenames.find( + (r) => + r.schema === op.schema && + r.table === op.table && + r.dropColumn === op.column, + ); + if (rename) { + result.push({ + type: "renameColumn", + schema: rename.schema, + table: rename.table, + oldName: rename.dropColumn, + newName: rename.addColumn, + }); + } + continue; + } + } else if (op.type === "addColumn") { + const key = `${op.schema}.${op.table}.${op.column.name}`; + if (addsToRemove.has(key)) { + continue; // Skip, already handled by renameColumn + } + } + result.push(op); + } + + return result; +}; diff --git 
a/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts new file mode 100644 index 00000000..e52d09a8 --- /dev/null +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts @@ -0,0 +1,767 @@ +import { describe, expect, test } from "vitest"; + +import type { SchemaSnapshot, TableSnapshot } from "../types.js"; +import { diffSnapshots } from "./diffSnapshots.js"; + +const emptySnapshot: SchemaSnapshot = { + schemas: [], + extensions: [], + tables: [], +}; + +const makeTable = ( + overrides: Partial> & { + name: string; + primaryKey?: string[]; + }, +): TableSnapshot => { + const { primaryKey: pkColumns, ...rest } = overrides; + return { + schema: "app", + columns: [], + primaryKey: { + name: pkColumns?.length ? `${overrides.name}_pkey` : null, + columns: pkColumns ?? [], + }, + foreignKeys: [], + uniqueConstraints: [], + ...rest, + }; +}; + +describe("diffSnapshots", () => { + test("returns no operations when snapshots are identical", () => { + const snapshot: SchemaSnapshot = { + schemas: ["app"], + extensions: [{ name: "uuid-ossp", schema: "app" }], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "id", + type: "serial", + nullable: false, + default: null, + }, + ], + primaryKey: ["id"], + }), + ], + }; + + const ops = diffSnapshots(snapshot, snapshot); + expect(ops).toEqual([]); + }); + + describe("schemas", () => { + test("detects added schema", () => { + const current = emptySnapshot; + const desired: SchemaSnapshot = { + ...emptySnapshot, + schemas: ["app"], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "createSchema", + schema: "app", + }); + }); + + test("detects removed schema", () => { + const current: SchemaSnapshot = { + ...emptySnapshot, + schemas: ["app"], + }; + const desired = emptySnapshot; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "dropSchema", + schema: 
"app", + }); + }); + }); + + describe("extensions", () => { + test("detects added extension", () => { + const current: SchemaSnapshot = { + ...emptySnapshot, + schemas: ["app"], + }; + const desired: SchemaSnapshot = { + ...emptySnapshot, + schemas: ["app"], + extensions: [{ name: "uuid-ossp", schema: "app" }], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "createExtension", + name: "uuid-ossp", + schema: "app", + }); + }); + + test("detects removed extension", () => { + const current: SchemaSnapshot = { + ...emptySnapshot, + schemas: ["app"], + extensions: [{ name: "uuid-ossp", schema: "app" }], + }; + const desired: SchemaSnapshot = { + ...emptySnapshot, + schemas: ["app"], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "dropExtension", + name: "uuid-ossp", + schema: "app", + }); + }); + }); + + describe("tables", () => { + test("detects new table", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "id", + type: "serial", + nullable: false, + default: null, + }, + ], + primaryKey: ["id"], + }), + ], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "createTable", + table: desired.tables[0], + }); + }); + + test("detects dropped table", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "old_table" })], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "dropTable", + schema: "app", + table: "old_table", + }); + }); + }); + + describe("columns", () => { + test("detects added column", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + 
extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "id", + type: "serial", + nullable: false, + default: null, + }, + ], + }), + ], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "id", + type: "serial", + nullable: false, + default: null, + }, + { + name: "email", + type: "text", + nullable: false, + default: null, + }, + ], + }), + ], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "email", + type: "text", + nullable: false, + default: null, + }, + }); + }); + + test("detects dropped column", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "id", + type: "serial", + nullable: false, + default: null, + }, + { + name: "legacy", + type: "text", + nullable: true, + default: null, + }, + ], + }), + ], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "id", + type: "serial", + nullable: false, + default: null, + }, + ], + }), + ], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "dropColumn", + schema: "app", + table: "users", + column: "legacy", + }); + }); + + test("detects type change", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "age", + type: "text", + nullable: true, + default: null, + }, + ], + }), + ], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "age", + type: "integer", + nullable: true, + default: null, + }, + ], + }), + ], + }; + + const ops = diffSnapshots(current, desired); 
+ expect(ops).toContainEqual({ + type: "alterColumn", + schema: "app", + table: "users", + column: "age", + changes: { type: { from: "text", to: "integer" } }, + }); + }); + + test("detects nullability change", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "email", + type: "text", + nullable: true, + default: null, + }, + ], + }), + ], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "email", + type: "text", + nullable: false, + default: null, + }, + ], + }), + ], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "alterColumn", + schema: "app", + table: "users", + column: "email", + changes: { nullable: { from: true, to: false } }, + }); + }); + + test("detects default change", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "role", + type: "text", + nullable: false, + default: null, + }, + ], + }), + ], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "role", + type: "text", + nullable: false, + default: "'user'", + }, + ], + }), + ], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "alterColumn", + schema: "app", + table: "users", + column: "role", + changes: { default: { from: null, to: "'user'" } }, + }); + }); + + test("detects multiple changes on the same column", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + columns: [ + { + name: "status", + type: "text", + nullable: true, + default: null, + }, + ], + }), + ], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: 
[ + makeTable({ + name: "users", + columns: [ + { + name: "status", + type: "varchar(50)", + nullable: false, + default: "'active'", + }, + ], + }), + ], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "alterColumn", + schema: "app", + table: "users", + column: "status", + changes: { + type: { from: "text", to: "varchar(50)" }, + nullable: { from: true, to: false }, + default: { from: null, to: "'active'" }, + }, + }); + }); + + test("does not emit alterColumn when column is unchanged", () => { + const col = { + name: "id", + type: "serial", + nullable: false, + default: null, + }; + const snapshot: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users", columns: [col] })], + }; + + const ops = diffSnapshots(snapshot, snapshot); + expect(ops).toEqual([]); + }); + }); + + describe("foreign keys", () => { + test("detects added foreign key", () => { + const fk = { + name: "fk_posts_user", + columns: ["user_id"], + referencesSchema: "app", + referencesTable: "users", + referencesColumns: ["id"], + onDelete: null, + onUpdate: null, + }; + + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "posts" })], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "posts", foreignKeys: [fk] })], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "addForeignKey", + schema: "app", + table: "posts", + foreignKey: fk, + }); + }); + + test("detects dropped foreign key", () => { + const fk = { + name: "fk_posts_user", + columns: ["user_id"], + referencesSchema: "app", + referencesTable: "users", + referencesColumns: ["id"], + onDelete: null, + onUpdate: null, + }; + + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "posts", foreignKeys: [fk] })], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], 
+ extensions: [], + tables: [makeTable({ name: "posts" })], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "dropForeignKey", + schema: "app", + table: "posts", + constraintName: "fk_posts_user", + }); + }); + + test("detects changed foreign key (drop + add)", () => { + const oldFk = { + name: "fk_posts_user", + columns: ["user_id"], + referencesSchema: "app", + referencesTable: "users", + referencesColumns: ["id"], + onDelete: null, + onUpdate: null, + }; + const newFk = { + ...oldFk, + onDelete: "CASCADE", + }; + + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "posts", foreignKeys: [oldFk] })], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "posts", foreignKeys: [newFk] })], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "dropForeignKey", + schema: "app", + table: "posts", + constraintName: "fk_posts_user", + }); + expect(ops).toContainEqual({ + type: "addForeignKey", + schema: "app", + table: "posts", + foreignKey: newFk, + }); + }); + }); + + describe("unique constraints", () => { + test("detects added unique constraint", () => { + const uc = { + name: "users_email_key", + columns: ["email"], + }; + + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users" })], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users", uniqueConstraints: [uc] })], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "addUniqueConstraint", + schema: "app", + table: "users", + constraint: uc, + }); + }); + + test("detects dropped unique constraint", () => { + const uc = { + name: "users_email_key", + columns: ["email"], + }; + + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: 
"users", uniqueConstraints: [uc] })], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users" })], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "dropUniqueConstraint", + schema: "app", + table: "users", + constraintName: "users_email_key", + }); + }); + }); + + describe("primary keys", () => { + test("detects changed primary key", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users", primaryKey: ["id"] })], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "users", + primaryKey: ["id", "tenant_id"], + }), + ], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "alterPrimaryKey", + schema: "app", + table: "users", + oldConstraintName: "users_pkey", + oldColumns: ["id"], + newColumns: ["id", "tenant_id"], + }); + }); + + test("does not emit when primary key is unchanged", () => { + const snapshot: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users", primaryKey: ["id"] })], + }; + + const ops = diffSnapshots(snapshot, snapshot); + expect(ops).toEqual([]); + }); + }); + + describe("operation ordering", () => { + test("creates schemas before tables", () => { + const ops = diffSnapshots(emptySnapshot, { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users" })], + }); + + const createSchemaIdx = ops.findIndex( + (o) => o.type === "createSchema", + ); + const createTableIdx = ops.findIndex( + (o) => o.type === "createTable", + ); + expect(createSchemaIdx).toBeLessThan(createTableIdx); + }); + + test("drops foreign keys before dropping columns", () => { + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "posts", + columns: [ + { + name: "user_id", + type: "integer", + nullable: 
false, + default: null, + }, + ], + foreignKeys: [ + { + name: "fk_posts_user", + columns: ["user_id"], + referencesSchema: "app", + referencesTable: "users", + referencesColumns: ["id"], + onDelete: null, + onUpdate: null, + }, + ], + }), + ], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "posts" })], + }; + + const ops = diffSnapshots(current, desired); + const dropFkIdx = ops.findIndex((o) => o.type === "dropForeignKey"); + const dropColIdx = ops.findIndex((o) => o.type === "dropColumn"); + expect(dropFkIdx).toBeLessThan(dropColIdx); + }); + + test("drops tables before schemas", () => { + const current: SchemaSnapshot = { + schemas: ["old"], + extensions: [], + tables: [makeTable({ name: "users", schema: "old" })], + }; + const desired = emptySnapshot; + + const ops = diffSnapshots(current, desired); + const dropTableIdx = ops.findIndex((o) => o.type === "dropTable"); + const dropSchemaIdx = ops.findIndex((o) => o.type === "dropSchema"); + expect(dropTableIdx).toBeLessThan(dropSchemaIdx); + }); + }); +}); diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts new file mode 100644 index 00000000..f71dfaaf --- /dev/null +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts @@ -0,0 +1,313 @@ +import type { + ColumnSnapshot, + ForeignKeySnapshot, + PrimaryKeySnapshot, + SchemaSnapshot, + TableSnapshot, + UniqueConstraintSnapshot, +} from "../types.js"; +import type { ColumnChanges, SchemaDiffOperation } from "./types.js"; + +/** + * Diff two schema snapshots and return the operations needed + * to transform `current` (the database) into `desired` (the config). + * + * Operations are returned in a safe execution order. + */ +export const diffSnapshots = ( + current: SchemaSnapshot, + desired: SchemaSnapshot, +): SchemaDiffOperation[] => { + const ops: SchemaDiffOperation[] = []; + + // 1. 
Schemas + ops.push(...diffSchemas(current.schemas, desired.schemas)); + + // 2. Extensions + ops.push(...diffExtensions(current.extensions, desired.extensions)); + + // Build lookup maps for tables + const currentTables = new Map( + current.tables.map((t) => [`${t.schema}.${t.name}`, t]), + ); + const desiredTables = new Map( + desired.tables.map((t) => [`${t.schema}.${t.name}`, t]), + ); + + // 3. New tables (create entire table) + for (const [key, table] of desiredTables) { + if (!currentTables.has(key)) { + ops.push({ type: "createTable", table }); + } + } + + // 4. Modified tables (diff columns, constraints) + for (const [key, desiredTable] of desiredTables) { + const currentTable = currentTables.get(key); + if (currentTable) { + ops.push(...diffTable(currentTable, desiredTable)); + } + } + + // 5. Dropped tables + for (const [key, table] of currentTables) { + if (!desiredTables.has(key)) { + ops.push({ + type: "dropTable", + schema: table.schema, + table: table.name, + }); + } + } + + // 6. 
Drop schemas (only schemas that no longer have any tables) + // Schemas are dropped last + for (const schema of current.schemas) { + if (!desired.schemas.includes(schema)) { + ops.push({ type: "dropSchema", schema }); + } + } + + return ops; +}; + +const diffSchemas = ( + current: string[], + desired: string[], +): SchemaDiffOperation[] => { + const ops: SchemaDiffOperation[] = []; + for (const schema of desired) { + if (!current.includes(schema)) { + ops.push({ type: "createSchema", schema }); + } + } + // Schema drops are handled at the top level after table drops + return ops; +}; + +const diffExtensions = ( + current: SchemaSnapshot["extensions"], + desired: SchemaSnapshot["extensions"], +): SchemaDiffOperation[] => { + const ops: SchemaDiffOperation[] = []; + const currentSet = new Set(current.map((e) => `${e.schema}.${e.name}`)); + const desiredSet = new Set(desired.map((e) => `${e.schema}.${e.name}`)); + + for (const ext of desired) { + if (!currentSet.has(`${ext.schema}.${ext.name}`)) { + ops.push({ + type: "createExtension", + name: ext.name, + schema: ext.schema, + }); + } + } + for (const ext of current) { + if (!desiredSet.has(`${ext.schema}.${ext.name}`)) { + ops.push({ + type: "dropExtension", + name: ext.name, + schema: ext.schema, + }); + } + } + return ops; +}; + +const diffTable = ( + current: TableSnapshot, + desired: TableSnapshot, +): SchemaDiffOperation[] => { + const ops: SchemaDiffOperation[] = []; + const { schema, name: table } = desired; + + const currentColMap = new Map(current.columns.map((c) => [c.name, c])); + const desiredColMap = new Map(desired.columns.map((c) => [c.name, c])); + + const currentFkMap = new Map( + current.foreignKeys.map((fk) => [fk.name, fk]), + ); + const desiredFkMap = new Map( + desired.foreignKeys.map((fk) => [fk.name, fk]), + ); + + const currentUcMap = new Map( + current.uniqueConstraints.map((uc) => [uc.name, uc]), + ); + const desiredUcMap = new Map( + desired.uniqueConstraints.map((uc) => [uc.name, uc]), + 
); + + // 1. Add columns + for (const [name, col] of desiredColMap) { + if (!currentColMap.has(name)) { + ops.push({ type: "addColumn", schema, table, column: col }); + } + } + + // 2. Alter columns + for (const [name, desiredCol] of desiredColMap) { + const currentCol = currentColMap.get(name); + if (currentCol) { + const changes = diffColumn(currentCol, desiredCol); + if (changes) { + ops.push({ + type: "alterColumn", + schema, + table, + column: name, + changes, + }); + } + } + } + + // 3. Alter primary key + ops.push( + ...diffPrimaryKey( + schema, + table, + current.primaryKey, + desired.primaryKey, + ), + ); + + // 4. Drop foreign keys (before dropping columns they may reference) + for (const [name, currentFk] of currentFkMap) { + const desiredFk = desiredFkMap.get(name); + if (!desiredFk || !foreignKeysEqual(currentFk, desiredFk)) { + ops.push({ + type: "dropForeignKey", + schema, + table, + constraintName: name, + }); + } + } + + // 5. Drop unique constraints (before dropping columns they may reference) + for (const [name, currentUc] of currentUcMap) { + const desiredUc = desiredUcMap.get(name); + if (!desiredUc || !uniqueConstraintsEqual(currentUc, desiredUc)) { + ops.push({ + type: "dropUniqueConstraint", + schema, + table, + constraintName: name, + }); + } + } + + // 6. Drop columns + for (const [name] of currentColMap) { + if (!desiredColMap.has(name)) { + ops.push({ type: "dropColumn", schema, table, column: name }); + } + } + + // 7. Add foreign keys + for (const [name, desiredFk] of desiredFkMap) { + const currentFk = currentFkMap.get(name); + if (!currentFk || !foreignKeysEqual(currentFk, desiredFk)) { + ops.push({ + type: "addForeignKey", + schema, + table, + foreignKey: desiredFk, + }); + } + } + + // 8. 
Add unique constraints + for (const [name, desiredUc] of desiredUcMap) { + const currentUc = currentUcMap.get(name); + if (!currentUc || !uniqueConstraintsEqual(currentUc, desiredUc)) { + ops.push({ + type: "addUniqueConstraint", + schema, + table, + constraint: desiredUc, + }); + } + } + + return ops; +}; + +const diffColumn = ( + current: ColumnSnapshot, + desired: ColumnSnapshot, +): ColumnChanges | null => { + const changes: ColumnChanges = {}; + let hasChanges = false; + + if (current.type !== desired.type) { + changes.type = { from: current.type, to: desired.type }; + hasChanges = true; + } + + if (current.nullable !== desired.nullable) { + changes.nullable = { from: current.nullable, to: desired.nullable }; + hasChanges = true; + } + + if (current.default !== desired.default) { + changes.default = { from: current.default, to: desired.default }; + hasChanges = true; + } + + return hasChanges ? changes : null; +}; + +const diffPrimaryKey = ( + schema: string, + table: string, + current: PrimaryKeySnapshot, + desired: PrimaryKeySnapshot, +): SchemaDiffOperation[] => { + // Compare columns in order - PK column order is semantically significant + if (current.columns.join(",") === desired.columns.join(",")) { + return []; + } + + // Only emit if both are non-empty (actual change) or one is empty (add/remove) + if (desired.columns.length === 0 && current.columns.length === 0) { + return []; + } + + return [ + { + type: "alterPrimaryKey", + schema, + table, + oldConstraintName: current.name, + oldColumns: current.columns, + newColumns: desired.columns, + }, + ]; +}; + +const foreignKeysEqual = ( + a: ForeignKeySnapshot, + b: ForeignKeySnapshot, +): boolean => { + return ( + a.columns.join(",") === b.columns.join(",") && + a.referencesSchema === b.referencesSchema && + a.referencesTable === b.referencesTable && + a.referencesColumns.join(",") === b.referencesColumns.join(",") && + a.onDelete === b.onDelete && + a.onUpdate === b.onUpdate + ); +}; + +const 
uniqueConstraintsEqual = ( + a: UniqueConstraintSnapshot, + b: UniqueConstraintSnapshot, +): boolean => { + return ( + a.columns.join(",") === b.columns.join(",") && + (a.nullsNotDistinct ?? false) === (b.nullsNotDistinct ?? false) && + (a.where ?? null) === (b.where ?? null) + ); +}; diff --git a/packages/orm-migrate/src/migrations/diff/operationToSql.test.ts b/packages/orm-migrate/src/migrations/diff/operationToSql.test.ts new file mode 100644 index 00000000..6163b4a4 --- /dev/null +++ b/packages/orm-migrate/src/migrations/diff/operationToSql.test.ts @@ -0,0 +1,333 @@ +import { describe, expect, test } from "vitest"; + +import { operationToSql, operationsToSql } from "./operationToSql.js"; +import type { SchemaDiffOperation } from "./types.js"; + +describe("operationToSql", () => { + test("createSchema", () => { + const sql = operationToSql({ + type: "createSchema", + schema: "app", + }).text; + expect(sql).toContain('CREATE SCHEMA IF NOT EXISTS "app"'); + }); + + test("dropSchema", () => { + const sql = operationToSql({ + type: "dropSchema", + schema: "app", + }).text; + expect(sql).toContain('DROP SCHEMA IF EXISTS "app"'); + }); + + test("createExtension", () => { + const sql = operationToSql({ + type: "createExtension", + name: "uuid-ossp", + schema: "public", + }).text; + expect(sql).toContain('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"'); + expect(sql).toContain('SCHEMA "public"'); + }); + + test("dropExtension", () => { + const sql = operationToSql({ + type: "dropExtension", + name: "uuid-ossp", + schema: "public", + }).text; + expect(sql).toContain('DROP EXTENSION IF EXISTS "uuid-ossp"'); + }); + + test("createTable with columns and primary key", () => { + const sql = operationToSql({ + type: "createTable", + table: { + schema: "app", + name: "users", + columns: [ + { + name: "id", + type: "serial", + nullable: false, + default: null, + }, + { + name: "name", + type: "text", + nullable: false, + default: null, + }, + { + name: "email", + type: "text", + 
nullable: true, + default: null, + }, + ], + primaryKey: { name: "users_pkey", columns: ["id"] }, + foreignKeys: [], + uniqueConstraints: [], + }, + }).text; + expect(sql).toContain('CREATE TABLE "app"."users"'); + expect(sql).toContain('"id" serial NOT NULL'); + expect(sql).toContain('"name" text NOT NULL'); + expect(sql).toContain('"email" text'); + expect(sql).toContain('PRIMARY KEY ("id")'); + }); + + test("createTable with column default", () => { + const sql = operationToSql({ + type: "createTable", + table: { + schema: "app", + name: "users", + columns: [ + { + name: "role", + type: "text", + nullable: false, + default: "'user'", + }, + ], + primaryKey: { name: null, columns: [] }, + foreignKeys: [], + uniqueConstraints: [], + }, + }).text; + expect(sql).toContain("DEFAULT 'user'"); + }); + + test("dropTable", () => { + const sql = operationToSql({ + type: "dropTable", + schema: "app", + table: "users", + }).text; + expect(sql).toContain('DROP TABLE IF EXISTS "app"."users"'); + }); + + test("addColumn", () => { + const sql = operationToSql({ + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "email", + type: "text", + nullable: false, + default: null, + }, + }).text; + expect(sql).toContain( + 'ALTER TABLE "app"."users" ADD COLUMN "email" text NOT NULL', + ); + }); + + test("addColumn with default", () => { + const sql = operationToSql({ + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "role", + type: "text", + nullable: false, + default: "'user'", + }, + }).text; + expect(sql).toContain("DEFAULT 'user'"); + }); + + test("dropColumn", () => { + const sql = operationToSql({ + type: "dropColumn", + schema: "app", + table: "users", + column: "email", + }).text; + expect(sql).toContain('ALTER TABLE "app"."users" DROP COLUMN "email"'); + }); + + test("alterColumn type change", () => { + const sql = operationToSql({ + type: "alterColumn", + schema: "app", + table: "users", + column: "age", + changes: { type: { 
from: "text", to: "integer" } }, + }).text; + expect(sql).toContain('ALTER COLUMN "age" TYPE integer'); + }); + + test("alterColumn set NOT NULL", () => { + const sql = operationToSql({ + type: "alterColumn", + schema: "app", + table: "users", + column: "email", + changes: { nullable: { from: true, to: false } }, + }).text; + expect(sql).toContain('ALTER COLUMN "email" SET NOT NULL'); + }); + + test("alterColumn drop NOT NULL", () => { + const sql = operationToSql({ + type: "alterColumn", + schema: "app", + table: "users", + column: "email", + changes: { nullable: { from: false, to: true } }, + }).text; + expect(sql).toContain('ALTER COLUMN "email" DROP NOT NULL'); + }); + + test("alterColumn set default", () => { + const sql = operationToSql({ + type: "alterColumn", + schema: "app", + table: "users", + column: "role", + changes: { default: { from: null, to: "'admin'" } }, + }).text; + expect(sql).toContain("SET DEFAULT 'admin'"); + }); + + test("alterColumn drop default", () => { + const sql = operationToSql({ + type: "alterColumn", + schema: "app", + table: "users", + column: "role", + changes: { default: { from: "'user'", to: null } }, + }).text; + expect(sql).toContain("DROP DEFAULT"); + }); + + test("addForeignKey", () => { + const sql = operationToSql({ + type: "addForeignKey", + schema: "app", + table: "posts", + foreignKey: { + name: "fk_posts_user", + columns: ["user_id"], + referencesSchema: "app", + referencesTable: "users", + referencesColumns: ["id"], + onDelete: "CASCADE", + onUpdate: null, + }, + }).text; + expect(sql).toContain('ADD CONSTRAINT "fk_posts_user"'); + expect(sql).toContain('FOREIGN KEY ("user_id")'); + expect(sql).toContain('REFERENCES "app"."users" ("id")'); + expect(sql).toContain("ON DELETE CASCADE"); + expect(sql).not.toContain("ON UPDATE"); + }); + + test("dropForeignKey", () => { + const sql = operationToSql({ + type: "dropForeignKey", + schema: "app", + table: "posts", + constraintName: "fk_posts_user", + }).text; + 
expect(sql).toContain('DROP CONSTRAINT "fk_posts_user"'); + }); + + test("addUniqueConstraint", () => { + const sql = operationToSql({ + type: "addUniqueConstraint", + schema: "app", + table: "users", + constraint: { + name: "users_email_key", + columns: ["email"], + }, + }).text; + expect(sql).toContain('CREATE UNIQUE INDEX "users_email_key"'); + expect(sql).toContain('ON "app"."users" ("email")'); + }); + + test("addUniqueConstraint with nullsNotDistinct", () => { + const sql = operationToSql({ + type: "addUniqueConstraint", + schema: "app", + table: "users", + constraint: { + name: "users_email_key", + columns: ["email"], + nullsNotDistinct: true, + }, + }).text; + expect(sql).toContain("NULLS NOT DISTINCT"); + }); + + test("addUniqueConstraint with where clause", () => { + const sql = operationToSql({ + type: "addUniqueConstraint", + schema: "app", + table: "users", + constraint: { + name: "users_email_key", + columns: ["email"], + where: "deleted_at IS NULL", + }, + }).text; + expect(sql).toContain("WHERE (deleted_at IS NULL)"); + }); + + test("dropUniqueConstraint", () => { + const sql = operationToSql({ + type: "dropUniqueConstraint", + schema: "app", + table: "users", + constraintName: "users_email_key", + }).text; + expect(sql).toContain('DROP INDEX IF EXISTS "app"."users_email_key"'); + }); + + test("alterPrimaryKey", () => { + const sql = operationToSql({ + type: "alterPrimaryKey", + schema: "app", + table: "users", + oldConstraintName: "users_pkey", + oldColumns: ["id"], + newColumns: ["id", "tenant_id"], + }).text; + expect(sql).toContain("DROP CONSTRAINT"); + expect(sql).toContain('ADD PRIMARY KEY ("id", "tenant_id")'); + }); + + test("renameColumn", () => { + const sql = operationToSql({ + type: "renameColumn", + schema: "app", + table: "users", + oldName: "old_name", + newName: "new_name", + }).text; + expect(sql).toBe( + 'ALTER TABLE "app"."users" RENAME COLUMN "old_name" TO "new_name";', + ); + }); +}); + +describe("operationsToSql", () => { + 
test("converts multiple operations to trimmed SQL strings", () => { + const ops: SchemaDiffOperation[] = [ + { type: "createSchema", schema: "app" }, + { type: "dropSchema", schema: "old" }, + ]; + const sqls = operationsToSql(ops); + expect(sqls).toHaveLength(2); + expect(sqls[0]).toContain("CREATE SCHEMA"); + expect(sqls[1]).toContain("DROP SCHEMA"); + // Should be trimmed + expect(sqls[0]![0]).not.toBe(" "); + expect(sqls[0]![0]).not.toBe("\n"); + }); +}); diff --git a/packages/orm-migrate/src/migrations/diff/operationToSql.ts b/packages/orm-migrate/src/migrations/diff/operationToSql.ts new file mode 100644 index 00000000..d641e93b --- /dev/null +++ b/packages/orm-migrate/src/migrations/diff/operationToSql.ts @@ -0,0 +1,219 @@ +import { SQLStatement, sql } from "@casekit/sql"; + +import type { ColumnSnapshot, TableSnapshot } from "../types.js"; +import type { ColumnChanges, SchemaDiffOperation } from "./types.js"; + +/** + * Convert a list of diff operations into SQL statements. + * Returns an array of SQL strings ready to be written to a migration file. + */ +export const operationsToSql = (ops: SchemaDiffOperation[]): string[] => { + return ops.map((op) => operationToSql(op).text.trim()); +}; + +/** + * Convert a single diff operation into a SQLStatement. 
+ */ +export const operationToSql = (op: SchemaDiffOperation): SQLStatement => { + switch (op.type) { + case "createSchema": + return sql`CREATE SCHEMA IF NOT EXISTS ${sql.ident(op.schema)};`; + + case "dropSchema": + return sql`DROP SCHEMA IF EXISTS ${sql.ident(op.schema)};`; + + case "createExtension": + return sql`CREATE EXTENSION IF NOT EXISTS ${sql.ident(op.name)} SCHEMA ${sql.ident(op.schema)};`; + + case "dropExtension": + return sql`DROP EXTENSION IF EXISTS ${sql.ident(op.name)};`; + + case "createTable": + return createTableSql(op.table); + + case "dropTable": + return sql`DROP TABLE IF EXISTS ${sql.ident(op.schema)}.${sql.ident(op.table)};`; + + case "addColumn": + return addColumnSql(op.schema, op.table, op.column); + + case "dropColumn": + return sql`ALTER TABLE ${sql.ident(op.schema)}.${sql.ident(op.table)} DROP COLUMN ${sql.ident(op.column)};`; + + case "renameColumn": + return sql`ALTER TABLE ${sql.ident(op.schema)}.${sql.ident(op.table)} RENAME COLUMN ${sql.ident(op.oldName)} TO ${sql.ident(op.newName)};`; + + case "alterColumn": + return alterColumnSql(op.schema, op.table, op.column, op.changes); + + case "addForeignKey": + return addForeignKeySql(op.schema, op.table, op.foreignKey); + + case "dropForeignKey": + return sql`ALTER TABLE ${sql.ident(op.schema)}.${sql.ident(op.table)} DROP CONSTRAINT ${sql.ident(op.constraintName)};`; + + case "addUniqueConstraint": + return addUniqueConstraintSql(op.schema, op.table, op.constraint); + + case "dropUniqueConstraint": + return sql`DROP INDEX IF EXISTS ${sql.ident(op.schema)}.${sql.ident(op.constraintName)};`; + + case "alterPrimaryKey": + return alterPrimaryKeySql(op); + } +}; + +const createTableSql = (table: TableSnapshot): SQLStatement => { + const statement = sql`CREATE TABLE ${sql.ident(table.schema)}.${sql.ident(table.name)} (\n`; + + table.columns.forEach((col, i) => { + statement.append` ${sql.ident(col.name)} `; + statement.push(new SQLStatement(col.type)); + + if (!col.nullable) 
statement.append` NOT NULL`; + + if (col.default !== null) { + statement.append` DEFAULT `; + statement.push(new SQLStatement(col.default)); + } + + if (i < table.columns.length - 1) statement.append`,\n`; + }); + + if (table.primaryKey.columns.length > 0) { + statement.append`,\n PRIMARY KEY (${sql.join( + table.primaryKey.columns.map(sql.ident), + ", ", + )})`; + } + + statement.append`\n);`; + return statement; +}; + +const addColumnSql = ( + schema: string, + table: string, + column: ColumnSnapshot, +): SQLStatement => { + const statement = sql`ALTER TABLE ${sql.ident(schema)}.${sql.ident(table)} ADD COLUMN ${sql.ident(column.name)} `; + statement.push(new SQLStatement(column.type)); + + if (!column.nullable) statement.append` NOT NULL`; + + if (column.default !== null) { + statement.append` DEFAULT `; + statement.push(new SQLStatement(column.default)); + } + + statement.append`;`; + return statement; +}; + +const alterColumnSql = ( + schema: string, + table: string, + column: string, + changes: ColumnChanges, +): SQLStatement => { + const statements: SQLStatement[] = []; + + if (changes.type) { + statements.push( + sql`ALTER TABLE ${sql.ident(schema)}.${sql.ident(table)} ALTER COLUMN ${sql.ident(column)} TYPE ${new SQLStatement(changes.type.to)};`, + ); + } + + if (changes.nullable) { + if (changes.nullable.to) { + statements.push( + sql`ALTER TABLE ${sql.ident(schema)}.${sql.ident(table)} ALTER COLUMN ${sql.ident(column)} DROP NOT NULL;`, + ); + } else { + statements.push( + sql`ALTER TABLE ${sql.ident(schema)}.${sql.ident(table)} ALTER COLUMN ${sql.ident(column)} SET NOT NULL;`, + ); + } + } + + if (changes.default) { + if (changes.default.to === null) { + statements.push( + sql`ALTER TABLE ${sql.ident(schema)}.${sql.ident(table)} ALTER COLUMN ${sql.ident(column)} DROP DEFAULT;`, + ); + } else { + const stmt = sql`ALTER TABLE ${sql.ident(schema)}.${sql.ident(table)} ALTER COLUMN ${sql.ident(column)} SET DEFAULT `; + stmt.push(new 
SQLStatement(changes.default.to)); + stmt.append`;`; + statements.push(stmt); + } + } + + return sql.join(statements, "\n"); +}; + +const addForeignKeySql = ( + schema: string, + table: string, + fk: import("../types.js").ForeignKeySnapshot, +): SQLStatement => { + const statement = sql`ALTER TABLE ${sql.ident(schema)}.${sql.ident(table)} ADD CONSTRAINT ${sql.ident(fk.name)} FOREIGN KEY (${sql.join(fk.columns.map(sql.ident), ", ")}) REFERENCES ${sql.ident(fk.referencesSchema)}.${sql.ident(fk.referencesTable)} (${sql.join(fk.referencesColumns.map(sql.ident), ", ")})`; + + if (fk.onDelete) { + statement.append` ON DELETE `; + statement.push(new SQLStatement(fk.onDelete)); + } + + if (fk.onUpdate) { + statement.append` ON UPDATE `; + statement.push(new SQLStatement(fk.onUpdate)); + } + + statement.append`;`; + return statement; +}; + +const addUniqueConstraintSql = ( + schema: string, + table: string, + constraint: import("../types.js").UniqueConstraintSnapshot, +): SQLStatement => { + const statement = sql`CREATE UNIQUE INDEX ${sql.ident(constraint.name)} ON ${sql.ident(schema)}.${sql.ident(table)} (${sql.join(constraint.columns.map(sql.ident), ", ")})`; + + if (constraint.nullsNotDistinct) { + statement.append` NULLS NOT DISTINCT`; + } + + if (constraint.where) { + statement.append` WHERE (`; + statement.push(new SQLStatement(constraint.where)); + statement.append`)`; + } + + statement.append`;`; + return statement; +}; + +const alterPrimaryKeySql = (op: { + schema: string; + table: string; + oldConstraintName: string | null; + oldColumns: string[]; + newColumns: string[]; +}): SQLStatement => { + const statements: SQLStatement[] = []; + + if (op.oldColumns.length > 0 && op.oldConstraintName) { + statements.push( + sql`ALTER TABLE ${sql.ident(op.schema)}.${sql.ident(op.table)} DROP CONSTRAINT IF EXISTS ${sql.ident(op.oldConstraintName)};`, + ); + } + + if (op.newColumns.length > 0) { + statements.push( + sql`ALTER TABLE ${sql.ident(op.schema)}.${sql.ident(op.table)} 
ADD PRIMARY KEY (${sql.join(op.newColumns.map(sql.ident), ", ")});`, + ); + } + + return sql.join(statements, "\n"); +}; diff --git a/packages/orm-migrate/src/migrations/diff/types.ts b/packages/orm-migrate/src/migrations/diff/types.ts new file mode 100644 index 00000000..8786b275 --- /dev/null +++ b/packages/orm-migrate/src/migrations/diff/types.ts @@ -0,0 +1,73 @@ +import type { + ColumnSnapshot, + ForeignKeySnapshot, + TableSnapshot, + UniqueConstraintSnapshot, +} from "../types.js"; + +export interface ColumnChanges { + type?: { from: string; to: string }; + nullable?: { from: boolean; to: boolean }; + default?: { from: string | null; to: string | null }; +} + +export type SchemaDiffOperation = + | { type: "createSchema"; schema: string } + | { type: "dropSchema"; schema: string } + | { type: "createExtension"; name: string; schema: string } + | { type: "dropExtension"; name: string; schema: string } + | { type: "createTable"; table: TableSnapshot } + | { type: "dropTable"; schema: string; table: string } + | { + type: "addColumn"; + schema: string; + table: string; + column: ColumnSnapshot; + } + | { type: "dropColumn"; schema: string; table: string; column: string } + | { + type: "renameColumn"; + schema: string; + table: string; + oldName: string; + newName: string; + } + | { + type: "alterColumn"; + schema: string; + table: string; + column: string; + changes: ColumnChanges; + } + | { + type: "addForeignKey"; + schema: string; + table: string; + foreignKey: ForeignKeySnapshot; + } + | { + type: "dropForeignKey"; + schema: string; + table: string; + constraintName: string; + } + | { + type: "addUniqueConstraint"; + schema: string; + table: string; + constraint: UniqueConstraintSnapshot; + } + | { + type: "dropUniqueConstraint"; + schema: string; + table: string; + constraintName: string; + } + | { + type: "alterPrimaryKey"; + schema: string; + table: string; + oldConstraintName: string | null; + oldColumns: string[]; + newColumns: string[]; + }; diff --git 
a/packages/orm-migrate/src/migrations/generate.ts b/packages/orm-migrate/src/migrations/generate.ts new file mode 100644 index 00000000..86f82ab5 --- /dev/null +++ b/packages/orm-migrate/src/migrations/generate.ts @@ -0,0 +1,58 @@ +import { Orm } from "@casekit/orm"; + +import { pull } from "#pull.js"; +import { getExtensions } from "#pull/getExtensions.js"; +import { configToSnapshot } from "./configToSnapshot.js"; +import { diffSnapshots } from "./diff/diffSnapshots.js"; +import { operationsToSql } from "./diff/operationToSql.js"; +import type { SchemaDiffOperation } from "./diff/types.js"; +import { pulledToSnapshot } from "./pulledToSnapshot.js"; +import { checkSafety } from "./safety/checkSafety.js"; +import type { SafetyWarning } from "./safety/types.js"; + +export interface GenerateResult { + operations: SchemaDiffOperation[]; + warnings: SafetyWarning[]; + sql: string; +} + +/** + * Generate migration SQL by diffing the TypeScript config + * against the current database state. + * + * Returns null if there are no differences. + */ +export const generate = async (db: Orm): Promise => { + // 1. Get the desired state from the TypeScript config + const desiredSnapshot = configToSnapshot(db.config); + + // 2. Get the current state from the database + const schemas = [ + ...new Set(Object.values(db.config.models).map((m) => m.schema)), + ]; + const [tables, extensions] = await Promise.all([ + pull(db, schemas), + db.query(getExtensions(schemas)), + ]); + const currentSnapshot = pulledToSnapshot(tables, extensions); + + // 3. Diff + const operations = diffSnapshots(currentSnapshot, desiredSnapshot); + + if (operations.length === 0) { + return null; + } + + // 4. Safety checks + const warnings = checkSafety(operations); + + // 5. 
Generate SQL + const sqlStatements = operationsToSql(operations); + const sql = sqlStatements.join("\n\n") + "\n"; + + return { + operations, + warnings, + sql, + }; +}; diff --git a/packages/orm-migrate/src/migrations/lock.ts b/packages/orm-migrate/src/migrations/lock.ts new file mode 100644 index 00000000..f43c90bf --- /dev/null +++ b/packages/orm-migrate/src/migrations/lock.ts @@ -0,0 +1,29 @@ +import { Orm } from "@casekit/orm"; + +/** + * Advisory lock key derived from CRC32("casekit-orm-migrate"). + * PostgreSQL advisory locks require integer keys, so we use a + * deterministic hash of a meaningful string to avoid collisions. + */ +const MIGRATION_LOCK_KEY = 3315941887; + +/** + * Acquire a session-level advisory lock for migrations. + * Blocks until the lock is available, ensuring only one migrator + * can run at a time. + * + * Session-level locks (not transaction-level) are used because some + * migrations may need to run outside transactions + * (e.g. CREATE INDEX CONCURRENTLY). + */ +export const acquireMigrationLock = async (db: Orm): Promise => { + await db.query`SELECT pg_advisory_lock(${MIGRATION_LOCK_KEY})`; +}; + +/** + * Release the session-level advisory lock for migrations. + * Should always be called in a finally block after acquireMigrationLock. 
+ */ +export const releaseMigrationLock = async (db: Orm): Promise => { + await db.query`SELECT pg_advisory_unlock(${MIGRATION_LOCK_KEY})`; +}; diff --git a/packages/orm-migrate/src/migrations/pulledToSnapshot.test.ts b/packages/orm-migrate/src/migrations/pulledToSnapshot.test.ts new file mode 100644 index 00000000..8f474350 --- /dev/null +++ b/packages/orm-migrate/src/migrations/pulledToSnapshot.test.ts @@ -0,0 +1,324 @@ +import { describe, expect, test } from "vitest"; + +import type { Table } from "#pull.js"; +import { pulledToSnapshot } from "./pulledToSnapshot.js"; + +const makeColumn = ( + overrides: Partial & { column: string }, +): import("#pull/index.js").Column => ({ + schema: "app", + table: "users", + ordinalPosition: 1, + type: "text", + default: null, + nullable: false, + udtSchema: "pg_catalog", + udt: "text", + elementType: null, + elementTypeSchema: null, + cardinality: 0, + size: null, + isSerial: false, + ...overrides, +}); + +describe("pulledToSnapshot", () => { + test("converts a simple table", () => { + const tables: Table[] = [ + { + schema: "app", + name: "users", + columns: [ + makeColumn({ + column: "id", + type: "integer", + isSerial: true, + }), + makeColumn({ column: "name", type: "text" }), + ], + primaryKey: { + schema: "app", + table: "users", + constraintName: "users_pkey", + columns: ["id"], + }, + foreignKeys: [], + uniqueConstraints: [], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + + expect(snapshot.schemas).toEqual(["app"]); + expect(snapshot.extensions).toEqual([]); + expect(snapshot.tables).toHaveLength(1); + expect(snapshot.tables[0]!.schema).toBe("app"); + expect(snapshot.tables[0]!.name).toBe("users"); + expect(snapshot.tables[0]!.primaryKey).toEqual({ + name: "users_pkey", + columns: ["id"], + }); + }); + + test("serial columns are mapped to serial type and null default", () => { + const tables: Table[] = [ + { + schema: "app", + name: "users", + columns: [ + makeColumn({ + column: "id", + type: "integer", + 
isSerial: true, + default: "nextval('users_id_seq'::regclass)", + }), + ], + primaryKey: null, + foreignKeys: [], + uniqueConstraints: [], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + const col = snapshot.tables[0]!.columns[0]!; + expect(col.type).toBe("serial"); + expect(col.default).toBeNull(); + }); + + test("smallserial and bigserial types", () => { + const tables: Table[] = [ + { + schema: "app", + name: "items", + columns: [ + makeColumn({ + column: "small_id", + type: "smallint", + isSerial: true, + }), + makeColumn({ + column: "big_id", + type: "bigint", + isSerial: true, + }), + ], + primaryKey: null, + foreignKeys: [], + uniqueConstraints: [], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + expect(snapshot.tables[0]!.columns[0]!.type).toBe("smallserial"); + expect(snapshot.tables[0]!.columns[1]!.type).toBe("bigserial"); + }); + + test("array columns", () => { + const tables: Table[] = [ + { + schema: "app", + name: "items", + columns: [ + makeColumn({ + column: "tags", + type: "ARRAY", + elementType: "text", + cardinality: 1, + }), + ], + primaryKey: null, + foreignKeys: [], + uniqueConstraints: [], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + expect(snapshot.tables[0]!.columns[0]!.type).toBe("text[]"); + }); + + test("sized character types", () => { + const tables: Table[] = [ + { + schema: "app", + name: "items", + columns: [ + makeColumn({ + column: "code", + type: "character varying", + size: 50, + }), + ], + primaryKey: null, + foreignKeys: [], + uniqueConstraints: [], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + expect(snapshot.tables[0]!.columns[0]!.type).toBe("varchar(50)"); + }); + + test("converts foreign keys with actions", () => { + const tables: Table[] = [ + { + schema: "app", + name: "posts", + columns: [makeColumn({ column: "user_id", type: "integer" })], + primaryKey: null, + foreignKeys: [ + { + schema: "app", + constraintName: "fk_posts_user", + tableFrom: "posts", + columnsFrom: 
["user_id"], + tableTo: "users", + columnsTo: ["id"], + onDelete: "CASCADE", + onUpdate: null, + }, + ], + uniqueConstraints: [], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + expect(snapshot.tables[0]!.foreignKeys).toEqual([ + { + name: "fk_posts_user", + columns: ["user_id"], + referencesSchema: "app", + referencesTable: "users", + referencesColumns: ["id"], + onDelete: "CASCADE", + onUpdate: null, + }, + ]); + }); + + test("converts unique constraints", () => { + const tables: Table[] = [ + { + schema: "app", + name: "users", + columns: [makeColumn({ column: "email", type: "text" })], + primaryKey: null, + foreignKeys: [], + uniqueConstraints: [ + { + schema: "app", + table: "users", + name: "users_email_key", + columns: ["email"], + definition: + "CREATE UNIQUE INDEX users_email_key ON app.users USING btree (email)", + nullsNotDistinct: false, + }, + ], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + expect(snapshot.tables[0]!.uniqueConstraints).toEqual([ + { + name: "users_email_key", + columns: ["email"], + nullsNotDistinct: false, + where: null, + }, + ]); + }); + + test("extracts WHERE clause from unique constraint definition", () => { + const tables: Table[] = [ + { + schema: "app", + name: "users", + columns: [makeColumn({ column: "email", type: "text" })], + primaryKey: null, + foreignKeys: [], + uniqueConstraints: [ + { + schema: "app", + table: "users", + name: "users_email_active_key", + columns: ["email"], + definition: + "CREATE UNIQUE INDEX users_email_active_key ON app.users USING btree (email) WHERE (deleted_at IS NULL)", + nullsNotDistinct: false, + }, + ], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + expect(snapshot.tables[0]!.uniqueConstraints[0]!.where).toBe( + "deleted_at IS NULL", + ); + }); + + test("handles table with no primary key", () => { + const tables: Table[] = [ + { + schema: "app", + name: "logs", + columns: [makeColumn({ column: "message", type: "text" })], + primaryKey: null, + foreignKeys: 
[], + uniqueConstraints: [], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + expect(snapshot.tables[0]!.primaryKey).toEqual({ + name: null, + columns: [], + }); + }); + + test("preserves column defaults for non-serial columns", () => { + const tables: Table[] = [ + { + schema: "app", + name: "users", + columns: [ + makeColumn({ + column: "role", + type: "text", + default: "'user'", + }), + ], + primaryKey: null, + foreignKeys: [], + uniqueConstraints: [], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + expect(snapshot.tables[0]!.columns[0]!.default).toBe("'user'"); + }); + + test("collects schemas from multiple tables", () => { + const tables: Table[] = [ + { + schema: "app", + name: "users", + columns: [], + primaryKey: null, + foreignKeys: [], + uniqueConstraints: [], + }, + { + schema: "audit", + name: "logs", + columns: [], + primaryKey: null, + foreignKeys: [], + uniqueConstraints: [], + }, + ]; + + const snapshot = pulledToSnapshot(tables); + expect(snapshot.schemas).toContain("app"); + expect(snapshot.schemas).toContain("audit"); + }); +}); diff --git a/packages/orm-migrate/src/migrations/pulledToSnapshot.ts b/packages/orm-migrate/src/migrations/pulledToSnapshot.ts new file mode 100644 index 00000000..23732b4c --- /dev/null +++ b/packages/orm-migrate/src/migrations/pulledToSnapshot.ts @@ -0,0 +1,140 @@ +import type { Table } from "#pull.js"; +import type { Extension } from "#pull/getExtensions.js"; +import type { + ColumnSnapshot, + ExtensionSnapshot, + ForeignKeySnapshot, + SchemaSnapshot, + TableSnapshot, + UniqueConstraintSnapshot, +} from "./types.js"; + +/** + * Extract a WHERE clause from a unique index definition string. + * e.g. "CREATE UNIQUE INDEX idx ON schema.table (col) WHERE (active = true)" + * returns "active = true" + */ +const extractWhereClause = (definition: string): string | null => { + const match = /\bWHERE\s+\((.+)\)\s*$/i.exec(definition); + return match?.[1] ?? 
null; +}; + +/** + * Convert the Table[] and Extension[] returned by pull (the current database state) + * into a SchemaSnapshot for diffing against the config state. + */ +export const pulledToSnapshot = ( + tables: Table[], + pulledExtensions: Extension[] = [], +): SchemaSnapshot => { + const schemas = [...new Set(tables.map((t) => t.schema))].sort(); + + const extensions: ExtensionSnapshot[] = pulledExtensions.map((ext) => ({ + name: ext.name, + schema: ext.schema, + })); + + const snapshotTables: TableSnapshot[] = tables.map((table) => { + const columns: ColumnSnapshot[] = table.columns.map((col) => ({ + name: col.column, + type: col.isSerial ? serialType(col.type) : columnType(col), + nullable: col.nullable, + default: col.isSerial ? null : (col.default ?? null), + })); + + const primaryKey = { + name: table.primaryKey?.constraintName ?? null, + columns: table.primaryKey?.columns ?? [], + }; + + const foreignKeys: ForeignKeySnapshot[] = table.foreignKeys.map( + (fk) => ({ + name: fk.constraintName, + columns: fk.columnsFrom, + referencesSchema: fk.schema, + referencesTable: fk.tableTo, + referencesColumns: fk.columnsTo, + onDelete: fk.onDelete ?? null, + onUpdate: fk.onUpdate ?? null, + }), + ); + + const uniqueConstraints: UniqueConstraintSnapshot[] = + table.uniqueConstraints.map((uc) => ({ + name: uc.name, + columns: uc.columns, + nullsNotDistinct: uc.nullsNotDistinct, + where: extractWhereClause(uc.definition), + })); + + return { + schema: table.schema, + name: table.name, + columns, + primaryKey, + foreignKeys, + uniqueConstraints, + }; + }); + + return { schemas, extensions, tables: snapshotTables }; +}; + +/** + * Map serial-type columns back to their config type name. + * When PG reports a column as integer/smallint/bigint with isSerial=true, + * the config would have declared it as serial/smallserial/bigserial. 
+ */ +const serialType = (pgType: string): string => { + switch (pgType.toLowerCase()) { + case "integer": + case "int": + case "int4": + return "serial"; + case "smallint": + case "int2": + return "smallserial"; + case "bigint": + case "int8": + return "bigserial"; + default: + return pgType; + } +}; + +/** + * Build the type string for a pulled column, handling arrays and sized types. + */ +const columnType = (col: { + type: string; + elementType: string | null; + cardinality: number; + size: number | null; +}): string => { + // Array types + if (col.type === "ARRAY" && col.elementType) { + const base = col.elementType.toLowerCase(); + return base + "[]".repeat(Math.max(col.cardinality, 1)); + } + + const base = col.type.toLowerCase(); + + // Sized character types + if (col.size !== null && col.size > 0) { + if ( + base === "character varying" || + base === "varchar" || + base === "character" || + base === "char" || + base === "bpchar" + ) { + const typeName = + base === "character varying" || base === "varchar" + ? "varchar" + : "char"; + return `${typeName}(${col.size})`; + } + } + + return base; +}; diff --git a/packages/orm-migrate/src/migrations/run.ts b/packages/orm-migrate/src/migrations/run.ts new file mode 100644 index 00000000..87db77ec --- /dev/null +++ b/packages/orm-migrate/src/migrations/run.ts @@ -0,0 +1,87 @@ +import { Orm, SQLStatement } from "@casekit/orm"; + +import { acquireMigrationLock, releaseMigrationLock } from "./lock.js"; +import { + ensureMigrationTable, + getAppliedMigrations, + recordMigration, + verifyChecksums, +} from "./tracking.js"; + +export interface Migration { + name: string; + sql: string; + checksum: string; +} + +export interface RunResult { + applied: string[]; + alreadyUpToDate: boolean; +} + +/** + * Run all pending migrations in order. 
+ * + * - Acquires an advisory lock to prevent concurrent migration runs + * - Verifies checksums of already-applied migrations + * - Runs each pending migration in its own transaction + * (unless marked with -- orm:no-transaction) + * - Records each applied migration in the tracking table + */ +export const run = async ( + db: Orm, + migrations: Migration[], +): Promise => { + await ensureMigrationTable(db); + + try { + await acquireMigrationLock(db); + const applied = await getAppliedMigrations(db); + + // Verify checksums of already-applied migrations + const mismatches = verifyChecksums(applied, migrations); + if (mismatches.length > 0) { + throw new Error( + `Migration checksum verification failed:\n${mismatches.join("\n")}`, + ); + } + + // Determine pending migrations + const appliedNames = new Set(applied.map((m) => m.name)); + const pending = migrations.filter((m) => !appliedNames.has(m.name)); + + if (pending.length === 0) { + return { applied: [], alreadyUpToDate: true }; + } + + const appliedMigrationNames: string[] = []; + + for (const migration of pending) { + const isNoTransaction = migration.sql + .trimStart() + .startsWith("-- orm:no-transaction"); + + if (isNoTransaction) { + // Execute outside a transaction (for CREATE INDEX CONCURRENTLY etc.) 
+ await db.query(new SQLStatement(migration.sql)); + await recordMigration(db, migration.name, migration.checksum); + } else { + // Execute within a transaction + await db.transact(async (db) => { + await db.query(new SQLStatement(migration.sql)); + await recordMigration( + db, + migration.name, + migration.checksum, + ); + }); + } + + appliedMigrationNames.push(migration.name); + } + + return { applied: appliedMigrationNames, alreadyUpToDate: false }; + } finally { + await releaseMigrationLock(db); + } +}; diff --git a/packages/orm-migrate/src/migrations/safety/checkSafety.test.ts b/packages/orm-migrate/src/migrations/safety/checkSafety.test.ts new file mode 100644 index 00000000..dd962a58 --- /dev/null +++ b/packages/orm-migrate/src/migrations/safety/checkSafety.test.ts @@ -0,0 +1,224 @@ +import { describe, expect, test } from "vitest"; + +import type { SchemaDiffOperation } from "../diff/types.js"; +import { checkSafety } from "./checkSafety.js"; + +describe("checkSafety", () => { + test("returns no warnings for safe operations", () => { + const ops: SchemaDiffOperation[] = [ + { type: "createSchema", schema: "app" }, + { type: "createExtension", name: "uuid-ossp", schema: "app" }, + { + type: "createTable", + table: { + schema: "app", + name: "users", + columns: [ + { + name: "id", + type: "serial", + nullable: false, + default: null, + }, + ], + primaryKey: { name: "users_pkey", columns: ["id"] }, + foreignKeys: [], + uniqueConstraints: [], + }, + }, + { + type: "addColumn", + schema: "app", + table: "users", + column: { + name: "email", + type: "text", + nullable: true, + default: null, + }, + }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toEqual([]); + }); + + test("flags dropTable as unsafe", () => { + const ops: SchemaDiffOperation[] = [ + { type: "dropTable", schema: "app", table: "users" }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toHaveLength(1); + expect(warnings[0]!.level).toBe("unsafe"); + 
expect(warnings[0]!.message).toContain("users"); + }); + + test("flags dropColumn as unsafe", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "dropColumn", + schema: "app", + table: "users", + column: "email", + }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toHaveLength(1); + expect(warnings[0]!.level).toBe("unsafe"); + expect(warnings[0]!.message).toContain("email"); + }); + + test("flags dropSchema as unsafe", () => { + const ops: SchemaDiffOperation[] = [ + { type: "dropSchema", schema: "old_app" }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toHaveLength(1); + expect(warnings[0]!.level).toBe("unsafe"); + expect(warnings[0]!.message).toContain("old_app"); + }); + + test("flags unsafe type change as unsafe", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "alterColumn", + schema: "app", + table: "users", + column: "age", + changes: { + type: { from: "text", to: "integer" }, + }, + }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toHaveLength(1); + expect(warnings[0]!.level).toBe("unsafe"); + expect(warnings[0]!.message).toContain("text"); + expect(warnings[0]!.message).toContain("integer"); + }); + + test("does not flag safe type change", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "alterColumn", + schema: "app", + table: "users", + column: "name", + changes: { + type: { from: "varchar(50)", to: "varchar(255)" }, + }, + }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toEqual([]); + }); + + test("flags setting NOT NULL as cautious", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "alterColumn", + schema: "app", + table: "users", + column: "email", + changes: { + nullable: { from: true, to: false }, + }, + }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toHaveLength(1); + expect(warnings[0]!.level).toBe("cautious"); + }); + + test("does not flag dropping NOT NULL", () => { + const ops: SchemaDiffOperation[] = [ + { + type: 
"alterColumn", + schema: "app", + table: "users", + column: "email", + changes: { + nullable: { from: false, to: true }, + }, + }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toEqual([]); + }); + + test("flags addForeignKey as cautious", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "addForeignKey", + schema: "app", + table: "posts", + foreignKey: { + name: "fk_posts_user", + columns: ["user_id"], + referencesSchema: "app", + referencesTable: "users", + referencesColumns: ["id"], + onDelete: null, + onUpdate: null, + }, + }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toHaveLength(1); + expect(warnings[0]!.level).toBe("cautious"); + }); + + test("flags addUniqueConstraint as cautious", () => { + const ops: SchemaDiffOperation[] = [ + { + type: "addUniqueConstraint", + schema: "app", + table: "users", + constraint: { + name: "users_email_key", + columns: ["email"], + }, + }, + ]; + + const warnings = checkSafety(ops); + expect(warnings).toHaveLength(1); + expect(warnings[0]!.level).toBe("cautious"); + }); + + test("collects multiple warnings from multiple operations", () => { + const ops: SchemaDiffOperation[] = [ + { type: "dropTable", schema: "app", table: "old_table" }, + { + type: "dropColumn", + schema: "app", + table: "users", + column: "legacy", + }, + { + type: "alterColumn", + schema: "app", + table: "users", + column: "age", + changes: { + type: { from: "text", to: "integer" }, + nullable: { from: true, to: false }, + }, + }, + ]; + + const warnings = checkSafety(ops); + // dropTable (1) + dropColumn (1) + unsafe type change (1) + set NOT NULL (1) = 4 + expect(warnings).toHaveLength(4); + expect(warnings.filter((w) => w.level === "unsafe")).toHaveLength(3); + expect(warnings.filter((w) => w.level === "cautious")).toHaveLength(1); + }); +}); diff --git a/packages/orm-migrate/src/migrations/safety/checkSafety.ts b/packages/orm-migrate/src/migrations/safety/checkSafety.ts new file mode 100644 index 
00000000..0c5e6c5b --- /dev/null +++ b/packages/orm-migrate/src/migrations/safety/checkSafety.ts @@ -0,0 +1,116 @@ +import type { SchemaDiffOperation } from "../diff/types.js"; +import { isSafeCast } from "./safeCasts.js"; +import type { SafetyWarning } from "./types.js"; + +/** + * Analyse a list of schema diff operations and return warnings + * for operations that may be unsafe or require caution. + * + * Inspired by the strong_migrations Ruby gem. + */ +export const checkSafety = (ops: SchemaDiffOperation[]): SafetyWarning[] => { + const warnings: SafetyWarning[] = []; + + for (const op of ops) { + warnings.push(...checkOperation(op)); + } + + return warnings; +}; + +const checkOperation = (op: SchemaDiffOperation): SafetyWarning[] => { + switch (op.type) { + case "dropTable": + return [ + { + level: "unsafe", + operation: op, + message: `Dropping table "${op.schema}"."${op.table}" will cause permanent data loss.`, + suggestion: + "Ensure all data has been migrated before dropping. Consider renaming the table first to verify nothing depends on it.", + }, + ]; + + case "dropColumn": + return [ + { + level: "unsafe", + operation: op, + message: `Dropping column "${op.column}" from "${op.schema}"."${op.table}" may cause errors if application code still references it.`, + suggestion: + "Deploy code changes that stop using this column first, then drop it in a subsequent migration.", + }, + ]; + + case "dropSchema": + return [ + { + level: "unsafe", + operation: op, + message: `Dropping schema "${op.schema}" will remove all objects within it.`, + suggestion: + "Ensure all tables have been migrated out of this schema first.", + }, + ]; + + case "alterColumn": + return checkAlterColumn(op); + + case "addForeignKey": + return [ + { + level: "cautious", + operation: op, + message: `Adding foreign key "${op.foreignKey.name}" on "${op.schema}"."${op.table}" will validate all existing rows while holding a lock, blocking writes.`, + suggestion: + "For large tables, consider 
adding the constraint with NOT VALID first, then validating in a separate step.", + }, + ]; + + case "addUniqueConstraint": + return [ + { + level: "cautious", + operation: op, + message: `Adding unique constraint "${op.constraint.name}" on "${op.schema}"."${op.table}" will block writes while the index is built.`, + suggestion: + "For large tables, consider using CREATE INDEX CONCURRENTLY in a non-transactional migration instead.", + }, + ]; + + default: + return []; + } +}; + +const checkAlterColumn = ( + op: Extract, +): SafetyWarning[] => { + const warnings: SafetyWarning[] = []; + + // Type change + if (op.changes.type) { + if (!isSafeCast(op.changes.type.from, op.changes.type.to)) { + warnings.push({ + level: "unsafe", + operation: op, + message: `Changing type of "${op.column}" in "${op.schema}"."${op.table}" from ${op.changes.type.from} to ${op.changes.type.to} may rewrite the entire table, blocking reads and writes.`, + suggestion: + "Consider adding a new column with the desired type, migrating data, then dropping the old column.", + }); + } + } + + // Setting NOT NULL on an existing column + if (op.changes.nullable && !op.changes.nullable.to) { + warnings.push({ + level: "cautious", + operation: op, + message: `Setting NOT NULL on "${op.column}" in "${op.schema}"."${op.table}" will scan the entire table while holding a lock.`, + suggestion: + "For large tables, consider adding a CHECK constraint with NOT VALID first, validating it separately, then setting NOT NULL (PostgreSQL 12+ can leverage the validated check constraint).", + }); + } + + return warnings; +}; diff --git a/packages/orm-migrate/src/migrations/safety/safeCasts.test.ts b/packages/orm-migrate/src/migrations/safety/safeCasts.test.ts new file mode 100644 index 00000000..9ce2018f --- /dev/null +++ b/packages/orm-migrate/src/migrations/safety/safeCasts.test.ts @@ -0,0 +1,132 @@ +import { describe, expect, test } from "vitest"; + +import { isSafeCast } from "./safeCasts.js"; + 
+describe("isSafeCast", () => { + test("same type is always safe", () => { + expect(isSafeCast("integer", "integer")).toBe(true); + expect(isSafeCast("text", "text")).toBe(true); + expect(isSafeCast("varchar(255)", "varchar(255)")).toBe(true); + }); + + test("normalised aliases are treated as same type", () => { + expect(isSafeCast("int", "integer")).toBe(true); + expect(isSafeCast("int4", "integer")).toBe(true); + expect(isSafeCast("int2", "smallint")).toBe(true); + expect(isSafeCast("int8", "bigint")).toBe(true); + expect(isSafeCast("bool", "boolean")).toBe(true); + expect(isSafeCast("float4", "real")).toBe(true); + expect(isSafeCast("float8", "double precision")).toBe(true); + }); + + describe("varchar widening", () => { + test("varchar(n) -> varchar(m) where m > n is safe", () => { + expect(isSafeCast("varchar(50)", "varchar(100)")).toBe(true); + expect(isSafeCast("varchar(1)", "varchar(255)")).toBe(true); + }); + + test("varchar(n) -> varchar(m) where m < n is unsafe", () => { + expect(isSafeCast("varchar(100)", "varchar(50)")).toBe(false); + }); + + test("varchar(n) -> text is safe", () => { + expect(isSafeCast("varchar(255)", "text")).toBe(true); + expect(isSafeCast("varchar(1)", "text")).toBe(true); + }); + + test("varchar(n) -> varchar (unlimited) is safe", () => { + expect(isSafeCast("varchar(255)", "varchar")).toBe(true); + }); + + test("character varying is normalised to varchar", () => { + expect(isSafeCast("character varying(50)", "varchar(100)")).toBe( + true, + ); + }); + }); + + describe("char widening", () => { + test("char(n) -> char(m) where m > n is safe", () => { + expect(isSafeCast("char(1)", "char(10)")).toBe(true); + }); + + test("char(n) -> char(m) where m < n is unsafe", () => { + expect(isSafeCast("char(10)", "char(1)")).toBe(false); + }); + }); + + describe("integer widening", () => { + test("smallint -> integer is safe", () => { + expect(isSafeCast("smallint", "integer")).toBe(true); + }); + + test("smallint -> bigint is safe", () => { 
+ expect(isSafeCast("smallint", "bigint")).toBe(true); + }); + + test("integer -> bigint is safe", () => { + expect(isSafeCast("integer", "bigint")).toBe(true); + }); + + test("bigint -> integer is unsafe", () => { + expect(isSafeCast("bigint", "integer")).toBe(false); + }); + + test("integer -> smallint is unsafe", () => { + expect(isSafeCast("integer", "smallint")).toBe(false); + }); + + test("aliases work for integer widening", () => { + expect(isSafeCast("int2", "int4")).toBe(true); + expect(isSafeCast("int4", "int8")).toBe(true); + expect(isSafeCast("int", "bigint")).toBe(true); + }); + }); + + describe("numeric precision increase", () => { + test("increasing precision and scale is safe", () => { + expect(isSafeCast("numeric(5,2)", "numeric(10,4)")).toBe(true); + expect(isSafeCast("numeric(5,2)", "numeric(5,2)")).toBe(true); + }); + + test("decreasing precision is unsafe", () => { + expect(isSafeCast("numeric(10,4)", "numeric(5,2)")).toBe(false); + }); + + test("numeric(p,s) -> numeric (unlimited) is safe", () => { + expect(isSafeCast("numeric(10,2)", "numeric")).toBe(true); + }); + }); + + describe("cidr -> inet", () => { + test("cidr -> inet is safe", () => { + expect(isSafeCast("cidr", "inet")).toBe(true); + }); + + test("inet -> cidr is unsafe", () => { + expect(isSafeCast("inet", "cidr")).toBe(false); + }); + }); + + describe("unsafe casts", () => { + test("text -> integer is unsafe", () => { + expect(isSafeCast("text", "integer")).toBe(false); + }); + + test("integer -> text is unsafe", () => { + expect(isSafeCast("integer", "text")).toBe(false); + }); + + test("varchar -> integer is unsafe", () => { + expect(isSafeCast("varchar(255)", "integer")).toBe(false); + }); + + test("text -> varchar is unsafe", () => { + expect(isSafeCast("text", "varchar(255)")).toBe(false); + }); + + test("timestamp -> date is unsafe", () => { + expect(isSafeCast("timestamp", "date")).toBe(false); + }); + }); +}); diff --git 
a/packages/orm-migrate/src/migrations/safety/safeCasts.ts b/packages/orm-migrate/src/migrations/safety/safeCasts.ts new file mode 100644 index 00000000..160e8434 --- /dev/null +++ b/packages/orm-migrate/src/migrations/safety/safeCasts.ts @@ -0,0 +1,113 @@ +/** + * Determines whether a PostgreSQL column type change is safe, + * meaning it will NOT cause a full table rewrite. + * + * Safe casts include: + * - varchar(n) -> varchar(m) where m > n + * - varchar(n) -> text + * - char(n) -> char(m) where m > n + * - int -> bigint + * - smallint -> int + * - smallint -> bigint + * - numeric(a,b) -> numeric(c,d) where c >= a and d >= b + * - cidr -> inet + */ +export const isSafeCast = (from: string, to: string): boolean => { + const normFrom = normalise(from); + const normTo = normalise(to); + + if (normFrom === normTo) return true; + + // varchar(n) -> text + if (isVarchar(normFrom) && normTo === "text") return true; + + // varchar(n) -> varchar(m) where m > n + if (isVarchar(normFrom) && isVarchar(normTo)) { + const fromSize = extractSize(normFrom); + const toSize = extractSize(normTo); + if (fromSize !== null && toSize !== null && toSize > fromSize) { + return true; + } + // varchar(n) -> varchar (unlimited) + if (fromSize !== null && toSize === null) return true; + } + + // char(n) -> char(m) where m > n + if (isChar(normFrom) && isChar(normTo)) { + const fromSize = extractSize(normFrom); + const toSize = extractSize(normTo); + if (fromSize !== null && toSize !== null && toSize > fromSize) { + return true; + } + } + + // Integer widening + if ( + normFrom === "smallint" && + (normTo === "integer" || normTo === "bigint") + ) + return true; + if (normFrom === "integer" && normTo === "bigint") return true; + + // Numeric precision increase + if (isNumeric(normFrom) && isNumeric(normTo)) { + const fromParts = extractNumericPrecision(normFrom); + const toParts = extractNumericPrecision(normTo); + if (fromParts && toParts) { + if ( + toParts.precision >= fromParts.precision 
&& + toParts.scale >= fromParts.scale + ) { + return true; + } + } + // numeric(p,s) -> numeric (unlimited) + if (fromParts && !toParts && normTo === "numeric") return true; + } + + // cidr -> inet + if (normFrom === "cidr" && normTo === "inet") return true; + + return false; +}; + +const normalise = (type: string): string => { + const t = type.toLowerCase().trim(); + // Normalise common aliases + if (t === "int" || t === "int4") return "integer"; + if (t === "int2") return "smallint"; + if (t === "int8") return "bigint"; + if (t === "float4") return "real"; + if (t === "float8") return "double precision"; + if (t === "bool") return "boolean"; + if (t.startsWith("character varying")) + return t.replace("character varying", "varchar"); + if (t === "character" || t === "bpchar") return "char"; + if (t === "decimal") return "numeric"; + return t; +}; + +const isVarchar = (type: string): boolean => + type === "varchar" || type.startsWith("varchar("); + +const isChar = (type: string): boolean => + type === "char" || type.startsWith("char("); + +const isNumeric = (type: string): boolean => + type === "numeric" || type.startsWith("numeric("); + +const extractSize = (type: string): number | null => { + const match = /\((\d+)\)/.exec(type); + return match ? 
parseInt(match[1]!, 10) : null; +}; + +const extractNumericPrecision = ( + type: string, +): { precision: number; scale: number } | null => { + const match = /\((\d+),\s*(\d+)\)/.exec(type); + if (!match) return null; + return { + precision: parseInt(match[1]!, 10), + scale: parseInt(match[2]!, 10), + }; +}; diff --git a/packages/orm-migrate/src/migrations/safety/types.ts b/packages/orm-migrate/src/migrations/safety/types.ts new file mode 100644 index 00000000..c539dfd4 --- /dev/null +++ b/packages/orm-migrate/src/migrations/safety/types.ts @@ -0,0 +1,10 @@ +import type { SchemaDiffOperation } from "../diff/types.js"; + +export type SafetyLevel = "safe" | "cautious" | "unsafe"; + +export interface SafetyWarning { + level: SafetyLevel; + operation: SchemaDiffOperation; + message: string; + suggestion?: string; +} diff --git a/packages/orm-migrate/src/migrations/tracking.ts b/packages/orm-migrate/src/migrations/tracking.ts new file mode 100644 index 00000000..b2280225 --- /dev/null +++ b/packages/orm-migrate/src/migrations/tracking.ts @@ -0,0 +1,85 @@ +import { z } from "zod"; + +import { Orm } from "@casekit/orm"; + +export interface AppliedMigration { + id: number; + name: string; + appliedAt: Date; + checksum: string; +} + +const AppliedMigrationSchema = z.object({ + id: z.number(), + name: z.string(), + appliedAt: z.date(), + checksum: z.string(), +}); + +/** + * Ensure the _orm_migrations tracking table exists. + */ +export const ensureMigrationTable = async (db: Orm): Promise => { + await db.query` + CREATE TABLE IF NOT EXISTS public._orm_migrations ( + id serial PRIMARY KEY, + name text NOT NULL UNIQUE, + applied_at timestamptz NOT NULL DEFAULT now(), + checksum text NOT NULL + ); + `; +}; + +/** + * Get all migrations that have been applied, ordered by id. 
+ */ +export const getAppliedMigrations = async ( + db: Orm, +): Promise => { + return db.query(AppliedMigrationSchema)` + SELECT + id, + name, + applied_at AS "appliedAt", + checksum + FROM public._orm_migrations + ORDER BY id + `; +}; + +/** + * Record a migration as applied. + */ +export const recordMigration = async ( + db: Orm, + name: string, + checksum: string, +): Promise => { + await db.query` + INSERT INTO public._orm_migrations (name, checksum) + VALUES (${name}, ${checksum}) + `; +}; + +/** + * Verify that previously applied migrations have not been tampered with. + * Returns an array of mismatch error messages. + */ +export const verifyChecksums = ( + applied: AppliedMigration[], + migrations: { name: string; checksum: string }[], +): string[] => { + const migrationMap = new Map(migrations.map((m) => [m.name, m])); + const mismatches: string[] = []; + + for (const appliedMigration of applied) { + const migration = migrationMap.get(appliedMigration.name); + if (migration && migration.checksum !== appliedMigration.checksum) { + mismatches.push( + `Migration "${appliedMigration.name}" has been modified since it was applied (expected checksum ${appliedMigration.checksum}, got ${migration.checksum})`, + ); + } + } + + return mismatches; +}; diff --git a/packages/orm-migrate/src/migrations/types.ts b/packages/orm-migrate/src/migrations/types.ts new file mode 100644 index 00000000..7f30ca82 --- /dev/null +++ b/packages/orm-migrate/src/migrations/types.ts @@ -0,0 +1,53 @@ +/** + * A normalised, diffable representation of a database schema. + * Both the TypeScript config and the pulled DB state + * get converted into this shape before diffing. 
+ */ +export interface SchemaSnapshot { + schemas: string[]; + extensions: ExtensionSnapshot[]; + tables: TableSnapshot[]; +} + +export interface ExtensionSnapshot { + name: string; + schema: string; +} + +export interface TableSnapshot { + schema: string; + name: string; + columns: ColumnSnapshot[]; + primaryKey: PrimaryKeySnapshot; + foreignKeys: ForeignKeySnapshot[]; + uniqueConstraints: UniqueConstraintSnapshot[]; +} + +export interface PrimaryKeySnapshot { + name: string | null; // constraint name, null if no PK + columns: string[]; +} + +export interface ColumnSnapshot { + name: string; + type: string; + nullable: boolean; + default: string | null; +} + +export interface ForeignKeySnapshot { + name: string; + columns: string[]; + referencesSchema: string; + referencesTable: string; + referencesColumns: string[]; + onDelete: string | null; + onUpdate: string | null; +} + +export interface UniqueConstraintSnapshot { + name: string; + columns: string[]; + nullsNotDistinct?: boolean; + where?: string | null; +} diff --git a/packages/orm-migrate/src/pull/getExtensions.test.ts b/packages/orm-migrate/src/pull/getExtensions.test.ts new file mode 100644 index 00000000..c4cf217b --- /dev/null +++ b/packages/orm-migrate/src/pull/getExtensions.test.ts @@ -0,0 +1,45 @@ +import pg from "pg"; +import { afterEach, beforeEach, describe, expect, test } from "vitest"; + +import { getExtensions } from "./getExtensions.js"; + +describe("getExtensions", () => { + let client: pg.Client; + const testSchema = "ext_test_schema"; + + beforeEach(async () => { + client = new pg.Client(); + await client.connect(); + await client.query(`CREATE SCHEMA IF NOT EXISTS "${testSchema}"`); + }); + + afterEach(async () => { + await client.query(`DROP SCHEMA IF EXISTS "${testSchema}" CASCADE`); + await client.end(); + }); + + test("returns empty array when no extensions in schema", async () => { + const result = await client.query(getExtensions([testSchema])); + expect(result.rows).toEqual([]); + 
}); + + test("returns extensions installed in pg_catalog schema", async () => { + // plpgsql is always installed in pg_catalog + const result = await client.query<{ name: string }>( + getExtensions(["pg_catalog"]), + ); + + expect(result.rows.some((e) => e.name === "plpgsql")).toBe(true); + expect(result.rows[0]).toHaveProperty("name"); + expect(result.rows[0]).toHaveProperty("schema"); + }); + + test("only returns extensions from specified schemas", async () => { + const result = await client.query<{ name: string }>( + getExtensions([testSchema]), + ); + + // plpgsql is in public, not in our test schema + expect(result.rows.find((e) => e.name === "plpgsql")).toBeUndefined(); + }); +}); diff --git a/packages/orm-migrate/src/pull/getExtensions.ts b/packages/orm-migrate/src/pull/getExtensions.ts new file mode 100644 index 00000000..270c3462 --- /dev/null +++ b/packages/orm-migrate/src/pull/getExtensions.ts @@ -0,0 +1,28 @@ +import { z } from "zod"; + +import { sql } from "@casekit/orm"; + +export const ExtensionSchema = z.object({ + name: z.string(), + schema: z.string(), +}); + +export type Extension = z.infer; + +/** + * Get all extensions installed in the specified schemas. 
+ */ +export const getExtensions = (schemas: string[]) => + sql(ExtensionSchema)` + SELECT + e.extname AS "name", + n.nspname AS "schema" + FROM + pg_extension e + JOIN pg_namespace n ON e.extnamespace = n.oid + WHERE + n.nspname IN (${schemas}) + ORDER BY + n.nspname, + e.extname + `; diff --git a/packages/orm-migrate/src/pull/getForeignKeys.ts b/packages/orm-migrate/src/pull/getForeignKeys.ts index 9e4c46b3..9f7ae602 100644 --- a/packages/orm-migrate/src/pull/getForeignKeys.ts +++ b/packages/orm-migrate/src/pull/getForeignKeys.ts @@ -9,6 +9,8 @@ export const ForeignKeySchema = z.object({ columnsFrom: z.array(z.string()), tableTo: z.string(), columnsTo: z.array(z.string()), + onUpdate: z.string().nullable(), + onDelete: z.string().nullable(), }); export type ForeignKey = z.infer; @@ -21,7 +23,21 @@ export const getForeignKeys = (schemas: string[]) => table_from AS "tableFrom", array_agg(columns_from::text ORDER BY ordinality) AS "columnsFrom", table_to AS "tableTo", - array_agg(columns_to::text ORDER BY ordinality) AS "columnsTo" + array_agg(columns_to::text ORDER BY ordinality) AS "columnsTo", + CASE confupdtype + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "onUpdate", + CASE confdeltype + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "onDelete" FROM ( SELECT conname, @@ -30,7 +46,9 @@ export const getForeignKeys = (schemas: string[]) => cf.relname AS table_to, af.attname AS columns_to, n.nspname, - ss2.ordinality + ss2.ordinality, + ss2.confupdtype, + ss2.confdeltype FROM pg_attribute AS af, pg_attribute AS a, @@ -44,7 +62,9 @@ export const getForeignKeys = (schemas: string[]) => confrelid, conkey[i] AS conkey, confkey[i] AS confkey, - i AS ordinality + i AS ordinality, + confupdtype, + confdeltype FROM ( SELECT conname, @@ -52,6 +72,8 @@ export const getForeignKeys = (schemas: string[]) => 
confrelid, conkey, confkey, + confupdtype, + confdeltype, generate_series(1, array_upper(conkey, 1)) AS i FROM pg_constraint @@ -70,7 +92,9 @@ export const getForeignKeys = (schemas: string[]) => nspname, conname, table_to, - table_from + table_from, + confupdtype, + confdeltype ORDER BY nspname, table_from, diff --git a/packages/orm-migrate/src/pull/getUniqueConstraints.ts b/packages/orm-migrate/src/pull/getUniqueConstraints.ts index 0af2ac85..7b4edd4c 100644 --- a/packages/orm-migrate/src/pull/getUniqueConstraints.ts +++ b/packages/orm-migrate/src/pull/getUniqueConstraints.ts @@ -6,7 +6,9 @@ export const UniqueConstraintSchema = z.object({ schema: z.string(), table: z.string(), name: z.string(), + columns: z.array(z.string()), definition: z.string(), + nullsNotDistinct: z.boolean(), }); export type UniqueConstraint = z.infer; @@ -17,15 +19,30 @@ export const getUniqueConstraints = (schemas: string[]) => i.schemaname AS "schema", i.tablename AS "table", i.indexname AS "name", - i.indexdef AS "definition" + array_agg(a.attname::text ORDER BY k.n) AS "columns", + i.indexdef AS "definition", + COALESCE(ix.indnullsnotdistinct, false) AS "nullsNotDistinct" FROM pg_indexes i LEFT JOIN pg_constraint c ON i.indexname = c.conname AND c.contype IN ('p', 'x') + JOIN pg_class cl ON cl.relname = i.indexname + JOIN pg_namespace ns ON ns.oid = cl.relnamespace + AND ns.nspname = i.schemaname + JOIN pg_index ix ON ix.indexrelid = cl.oid + CROSS JOIN LATERAL unnest(ix.indkey) WITH ORDINALITY AS k(attnum, n) + JOIN pg_attribute a ON a.attrelid = ix.indrelid + AND a.attnum = k.attnum WHERE i.schemaname IN (${schemas}) AND c.conname IS NULL AND i.indexdef LIKE 'CREATE UNIQUE INDEX%' + GROUP BY + i.schemaname, + i.tablename, + i.indexname, + i.indexdef, + ix.indnullsnotdistinct ORDER BY i.schemaname, i.tablename, diff --git a/packages/orm-migrate/src/pull/index.ts b/packages/orm-migrate/src/pull/index.ts index d47bb54e..51d6d98c 100644 --- a/packages/orm-migrate/src/pull/index.ts +++ 
b/packages/orm-migrate/src/pull/index.ts @@ -1,4 +1,5 @@ export type { Column } from "./getColumns.js"; +export type { Extension } from "./getExtensions.js"; export type { ForeignKey } from "./getForeignKeys.js"; export type { PrimaryKey } from "./getPrimaryKeys.js"; export type { UniqueConstraint } from "./getUniqueConstraints.js"; diff --git a/packages/orm-schema/package.json b/packages/orm-schema/package.json index 56f151e3..68a082bc 100644 --- a/packages/orm-schema/package.json +++ b/packages/orm-schema/package.json @@ -32,6 +32,14 @@ }, "keywords": [], "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/casekit/orm.git", + "directory": "packages/orm-schema" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + }, "peerDependencies": { "pg": "^8.13.1", "zod": "^4.0.17" diff --git a/packages/orm-testing/package.json b/packages/orm-testing/package.json index da468bdb..e823a79c 100644 --- a/packages/orm-testing/package.json +++ b/packages/orm-testing/package.json @@ -33,6 +33,14 @@ }, "keywords": [], "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/casekit/orm.git", + "directory": "packages/orm-testing" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + }, "peerDependencies": { "zod": "^4.0.17" }, diff --git a/packages/orm/package.json b/packages/orm/package.json index 92f4fb65..22ff2ae4 100644 --- a/packages/orm/package.json +++ b/packages/orm/package.json @@ -18,7 +18,7 @@ "@casekit/orm-testing": "workspace:*", "@casekit/prettier-config": "workspace:*", "@casekit/tsconfig": "workspace:*", - "@casekit/unindent": "^1.0.5", + "@casekit/unindent": "0.0.0-gh-packages.634dab0", "@eslint/js": "^9.29.0", "@faker-js/faker": "^10.2.0", "@trivago/prettier-plugin-sort-imports": "^5.2.2", @@ -47,6 +47,14 @@ }, "keywords": [], "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/casekit/orm.git", + "directory": "packages/orm" + }, + "publishConfig": 
{ + "registry": "https://npm.pkg.github.com" + }, "peerDependencies": { "pg": "^8.13.1", "zod": "^4.0.17" diff --git a/packages/orm/src/orm.ts b/packages/orm/src/orm.ts index c1841d51..aed800f4 100644 --- a/packages/orm/src/orm.ts +++ b/packages/orm/src/orm.ts @@ -67,10 +67,10 @@ export const orm = (config: C): Orm => { export class Orm< const C extends Config = Config, const Models extends ModelDefinitions = C["models"], - const Operators extends - OperatorDefinitions = C["operators"] extends NonNullable - ? C["operators"] - : { where: never }, + const Operators extends OperatorDefinitions = + C["operators"] extends NonNullable + ? C["operators"] + : { where: never }, > { public readonly config: NormalizedConfig; private readonly _connection: Connection | Transaction; diff --git a/packages/sql/package.json b/packages/sql/package.json index cd620448..9b510f7d 100644 --- a/packages/sql/package.json +++ b/packages/sql/package.json @@ -5,7 +5,7 @@ "author": "", "dependencies": { "@casekit/toolbox": "workspace:*", - "@casekit/unindent": "^1.0.5", + "@casekit/unindent": "0.0.0-gh-packages.634dab0", "es-toolkit": "^1.39.3", "sql-formatter": "^15.6.5" }, @@ -34,6 +34,14 @@ }, "keywords": [], "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/casekit/orm.git", + "directory": "packages/sql" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + }, "peerDependencies": { "pg": "^8.13.1", "zod": "^4.0.17" diff --git a/packages/toolbox/package.json b/packages/toolbox/package.json index d0c2db67..8f8f2177 100644 --- a/packages/toolbox/package.json +++ b/packages/toolbox/package.json @@ -28,6 +28,14 @@ }, "keywords": [], "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/casekit/orm.git", + "directory": "packages/toolbox" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + }, "peerDependencies": { "pg": "^8.13.1", "zod": "^4.0.17" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 
afd5404c..5eb8f68c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -159,8 +159,8 @@ importers: specifier: workspace:* version: link:../../configs/tsconfig '@casekit/unindent': - specifier: ^1.0.5 - version: 1.0.5 + specifier: 0.0.0-gh-packages.634dab0 + version: 0.0.0-gh-packages.634dab0 '@eslint/js': specifier: ^9.29.0 version: 9.39.2 @@ -225,8 +225,8 @@ importers: specifier: workspace:* version: link:../toolbox '@casekit/unindent': - specifier: ^1.0.5 - version: 1.0.5 + specifier: 0.0.0-gh-packages.634dab0 + version: 0.0.0-gh-packages.634dab0 '@inquirer/prompts': specifier: ^8.2.0 version: 8.2.0(@types/node@24.10.9) @@ -466,8 +466,8 @@ importers: specifier: workspace:* version: link:../../configs/tsconfig '@casekit/unindent': - specifier: ^1.0.5 - version: 1.0.5 + specifier: 0.0.0-gh-packages.634dab0 + version: 0.0.0-gh-packages.634dab0 '@trivago/prettier-plugin-sort-imports': specifier: ^5.2.2 version: 5.2.2(@vue/compiler-sfc@3.5.19)(prettier-plugin-svelte@3.4.1(prettier@3.8.0)(svelte@5.8.1))(prettier@3.8.0)(svelte@5.8.1) @@ -615,8 +615,8 @@ importers: specifier: workspace:* version: link:../toolbox '@casekit/unindent': - specifier: ^1.0.5 - version: 1.0.5 + specifier: 0.0.0-gh-packages.634dab0 + version: 0.0.0-gh-packages.634dab0 es-toolkit: specifier: ^1.39.3 version: 1.44.0 @@ -1404,8 +1404,8 @@ packages: resolution: {integrity: sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==} engines: {node: '>=18'} - '@casekit/unindent@1.0.5': - resolution: {integrity: sha512-psC2hzmmgBh76EHWTKiLXP6LDZjyBmdCLND/in/7q+asW83wjRRANUUfL9zOzIC6R1sxbR29X9b0VGBOomK5Mg==} + '@casekit/unindent@0.0.0-gh-packages.634dab0': + resolution: {integrity: sha512-AMx0+IWrJ/X/FSA9AZaTy2zPxw7+RnMDeKwMxyVPY3SUy9wgdONXYYM7hv/OqUi/anihgXV/KogxjQTRUnw87Q==, tarball: https://npm.pkg.github.com/download/@casekit/unindent/0.0.0-gh-packages.634dab0/861c611f7b508405a826466b356d95fae584f86a} '@colors/colors@1.5.0': resolution: {integrity: 
sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} @@ -8314,7 +8314,7 @@ snapshots: '@bcoe/v8-coverage@1.0.2': {} - '@casekit/unindent@1.0.5': {} + '@casekit/unindent@0.0.0-gh-packages.634dab0': {} '@colors/colors@1.5.0': optional: true From cf2f656468c4a6a21ab2fb5788847a6ef8e2a4cb Mon Sep 17 00:00:00 2001 From: Russell Dunphy Date: Sat, 31 Jan 2026 11:29:55 +0000 Subject: [PATCH 2/7] wip --- .../orm-cli/src/commands/db-migrate.test.ts | 4 +- packages/orm-cli/src/util/migrations.ts | 4 +- .../src/migrations/diff/diffSnapshots.ts | 80 ++++++++++--------- 3 files changed, 45 insertions(+), 43 deletions(-) diff --git a/packages/orm-cli/src/commands/db-migrate.test.ts b/packages/orm-cli/src/commands/db-migrate.test.ts index 114c8509..5a214d40 100644 --- a/packages/orm-cli/src/commands/db-migrate.test.ts +++ b/packages/orm-cli/src/commands/db-migrate.test.ts @@ -75,7 +75,7 @@ describe("db migrate", () => { const migrations = fs.readdirSync(migrationsPath) as string[]; expect(migrations.length).toBe(1); - expect(migrations[0]).toMatch(/^\d{14}_create-users\.sql$/); + expect(migrations[0]).toMatch(/^\d{14}-create-users\.sql$/); const content = fs.readFileSync( `${migrationsPath}/${migrations[0]}`, @@ -233,7 +233,7 @@ describe("db migrate", () => { const migrations = fs.readdirSync(migrationsPath) as string[]; expect(migrations.length).toBe(1); - expect(migrations[0]).toMatch(/^\d{14}_add-created-at\.sql$/); + expect(migrations[0]).toMatch(/^\d{14}-add-created-at\.sql$/); const content = fs.readFileSync( `${migrationsPath}/${migrations[0]}`, diff --git a/packages/orm-cli/src/util/migrations.ts b/packages/orm-cli/src/util/migrations.ts index ebbfc200..7d85a745 100644 --- a/packages/orm-cli/src/util/migrations.ts +++ b/packages/orm-cli/src/util/migrations.ts @@ -17,7 +17,7 @@ export interface MigrationFile { /** * Generate a timestamped migration filename. 
- * Format: YYYYMMDDHHMMSS_description.sql + * Format: YYYYMMDDHHMMSS-description.sql */ export const generateMigrationFilename = (description: string): string => { // toISOString() returns UTC in ISO 8601 format which sorts alphabetically @@ -31,7 +31,7 @@ export const generateMigrationFilename = (description: string): string => { .replace(/[^a-z0-9]+/g, "-") .replace(/^-|-$/g, ""); - return `${timestamp}_${slug}.sql`; + return `${timestamp}-${slug}.sql`; }; /** diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts index f71dfaaf..4d9122ea 100644 --- a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts @@ -125,17 +125,17 @@ const diffTable = ( const desiredColMap = new Map(desired.columns.map((c) => [c.name, c])); const currentFkMap = new Map( - current.foreignKeys.map((fk) => [fk.name, fk]), + current.foreignKeys.map((fk) => [foreignKeyContentKey(fk), fk]), ); const desiredFkMap = new Map( - desired.foreignKeys.map((fk) => [fk.name, fk]), + desired.foreignKeys.map((fk) => [foreignKeyContentKey(fk), fk]), ); const currentUcMap = new Map( - current.uniqueConstraints.map((uc) => [uc.name, uc]), + current.uniqueConstraints.map((uc) => [uniqueConstraintContentKey(uc), uc]), ); const desiredUcMap = new Map( - desired.uniqueConstraints.map((uc) => [uc.name, uc]), + desired.uniqueConstraints.map((uc) => [uniqueConstraintContentKey(uc), uc]), ); // 1. Add columns @@ -173,27 +173,27 @@ const diffTable = ( ); // 4. 
Drop foreign keys (before dropping columns they may reference) - for (const [name, currentFk] of currentFkMap) { - const desiredFk = desiredFkMap.get(name); - if (!desiredFk || !foreignKeysEqual(currentFk, desiredFk)) { + // Constraints are matched by content, not by name - if the content exists in both, keep it + for (const [contentKey, currentFk] of currentFkMap) { + if (!desiredFkMap.has(contentKey)) { ops.push({ type: "dropForeignKey", schema, table, - constraintName: name, + constraintName: currentFk.name, }); } } // 5. Drop unique constraints (before dropping columns they may reference) - for (const [name, currentUc] of currentUcMap) { - const desiredUc = desiredUcMap.get(name); - if (!desiredUc || !uniqueConstraintsEqual(currentUc, desiredUc)) { + // Constraints are matched by content, not by name - if the content exists in both, keep it + for (const [contentKey, currentUc] of currentUcMap) { + if (!desiredUcMap.has(contentKey)) { ops.push({ type: "dropUniqueConstraint", schema, table, - constraintName: name, + constraintName: currentUc.name, }); } } @@ -206,9 +206,9 @@ const diffTable = ( } // 7. Add foreign keys - for (const [name, desiredFk] of desiredFkMap) { - const currentFk = currentFkMap.get(name); - if (!currentFk || !foreignKeysEqual(currentFk, desiredFk)) { + // Constraints are matched by content, not by name - only add if content doesn't exist + for (const [contentKey, desiredFk] of desiredFkMap) { + if (!currentFkMap.has(contentKey)) { ops.push({ type: "addForeignKey", schema, @@ -219,9 +219,9 @@ const diffTable = ( } // 8. 
Add unique constraints - for (const [name, desiredUc] of desiredUcMap) { - const currentUc = currentUcMap.get(name); - if (!currentUc || !uniqueConstraintsEqual(currentUc, desiredUc)) { + // Constraints are matched by content, not by name - only add if content doesn't exist + for (const [contentKey, desiredUc] of desiredUcMap) { + if (!currentUcMap.has(contentKey)) { ops.push({ type: "addUniqueConstraint", schema, @@ -287,27 +287,29 @@ const diffPrimaryKey = ( ]; }; -const foreignKeysEqual = ( - a: ForeignKeySnapshot, - b: ForeignKeySnapshot, -): boolean => { - return ( - a.columns.join(",") === b.columns.join(",") && - a.referencesSchema === b.referencesSchema && - a.referencesTable === b.referencesTable && - a.referencesColumns.join(",") === b.referencesColumns.join(",") && - a.onDelete === b.onDelete && - a.onUpdate === b.onUpdate - ); +/** + * Generate a content-based key for a foreign key constraint. + * This allows matching constraints by their actual definition rather than name. + */ +const foreignKeyContentKey = (fk: ForeignKeySnapshot): string => { + return [ + fk.columns.join(","), + fk.referencesSchema, + fk.referencesTable, + fk.referencesColumns.join(","), + fk.onDelete ?? "", + fk.onUpdate ?? "", + ].join("|"); }; -const uniqueConstraintsEqual = ( - a: UniqueConstraintSnapshot, - b: UniqueConstraintSnapshot, -): boolean => { - return ( - a.columns.join(",") === b.columns.join(",") && - (a.nullsNotDistinct ?? false) === (b.nullsNotDistinct ?? false) && - (a.where ?? null) === (b.where ?? null) - ); +/** + * Generate a content-based key for a unique constraint. + * This allows matching constraints by their actual definition rather than name. + */ +const uniqueConstraintContentKey = (uc: UniqueConstraintSnapshot): string => { + return [ + uc.columns.join(","), + uc.nullsNotDistinct ?? false, + uc.where ?? 
"", + ].join("|"); }; From f65690add11a9952f595c67150de87ef14e89817 Mon Sep 17 00:00:00 2001 From: Russell Dunphy Date: Sat, 31 Jan 2026 11:32:34 +0000 Subject: [PATCH 3/7] aoeu --- .../src/migrations/diff/diffSnapshots.test.ts | 115 ++++++++++++++++++ .../src/migrations/diff/diffSnapshots.ts | 31 ++++- .../src/migrations/diff/operationToSql.ts | 6 + .../orm-migrate/src/migrations/diff/types.ts | 13 ++ 4 files changed, 159 insertions(+), 6 deletions(-) diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts index e52d09a8..cc1bcbb2 100644 --- a/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts @@ -595,6 +595,69 @@ describe("diffSnapshots", () => { foreignKey: newFk, }); }); + + test("detects renamed foreign key (same content, different name)", () => { + const oldFk = { + name: "fk_posts_user", + columns: ["user_id"], + referencesSchema: "app", + referencesTable: "users", + referencesColumns: ["id"], + onDelete: null, + onUpdate: null, + }; + const newFk = { + ...oldFk, + name: "posts_user_id_fkey", + }; + + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "posts", foreignKeys: [oldFk] })], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "posts", foreignKeys: [newFk] })], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "renameForeignKey", + schema: "app", + table: "posts", + oldName: "fk_posts_user", + newName: "posts_user_id_fkey", + }); + expect(ops).not.toContainEqual( + expect.objectContaining({ type: "dropForeignKey" }), + ); + expect(ops).not.toContainEqual( + expect.objectContaining({ type: "addForeignKey" }), + ); + }); + + test("does not emit rename when foreign key is unchanged", () => { + const fk = { + name: "fk_posts_user", + columns: 
["user_id"], + referencesSchema: "app", + referencesTable: "users", + referencesColumns: ["id"], + onDelete: null, + onUpdate: null, + }; + + const snapshot: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "posts", foreignKeys: [fk] })], + }; + + const ops = diffSnapshots(snapshot, snapshot); + expect(ops).toEqual([]); + }); }); describe("unique constraints", () => { @@ -649,6 +712,58 @@ describe("diffSnapshots", () => { constraintName: "users_email_key", }); }); + + test("detects renamed unique constraint (same content, different name)", () => { + const oldUc = { + name: "users_email_key", + columns: ["email"], + }; + const newUc = { + ...oldUc, + name: "users_email_unique", + }; + + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users", uniqueConstraints: [oldUc] })], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users", uniqueConstraints: [newUc] })], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "renameUniqueConstraint", + schema: "app", + oldName: "users_email_key", + newName: "users_email_unique", + }); + expect(ops).not.toContainEqual( + expect.objectContaining({ type: "dropUniqueConstraint" }), + ); + expect(ops).not.toContainEqual( + expect.objectContaining({ type: "addUniqueConstraint" }), + ); + }); + + test("does not emit rename when unique constraint is unchanged", () => { + const uc = { + name: "users_email_key", + columns: ["email"], + }; + + const snapshot: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [makeTable({ name: "users", uniqueConstraints: [uc] })], + }; + + const ops = diffSnapshots(snapshot, snapshot); + expect(ops).toEqual([]); + }); }); describe("primary keys", () => { diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts index 
4d9122ea..573652fb 100644 --- a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts @@ -205,29 +205,48 @@ const diffTable = ( } } - // 7. Add foreign keys - // Constraints are matched by content, not by name - only add if content doesn't exist + // 7. Add or rename foreign keys for (const [contentKey, desiredFk] of desiredFkMap) { - if (!currentFkMap.has(contentKey)) { + const currentFk = currentFkMap.get(contentKey); + if (!currentFk) { + // Content doesn't exist - add new constraint ops.push({ type: "addForeignKey", schema, table, foreignKey: desiredFk, }); + } else if (currentFk.name !== desiredFk.name) { + // Content matches but name differs - rename + ops.push({ + type: "renameForeignKey", + schema, + table, + oldName: currentFk.name, + newName: desiredFk.name, + }); } } - // 8. Add unique constraints - // Constraints are matched by content, not by name - only add if content doesn't exist + // 8. Add or rename unique constraints for (const [contentKey, desiredUc] of desiredUcMap) { - if (!currentUcMap.has(contentKey)) { + const currentUc = currentUcMap.get(contentKey); + if (!currentUc) { + // Content doesn't exist - add new constraint ops.push({ type: "addUniqueConstraint", schema, table, constraint: desiredUc, }); + } else if (currentUc.name !== desiredUc.name) { + // Content matches but name differs - rename + ops.push({ + type: "renameUniqueConstraint", + schema, + oldName: currentUc.name, + newName: desiredUc.name, + }); } } diff --git a/packages/orm-migrate/src/migrations/diff/operationToSql.ts b/packages/orm-migrate/src/migrations/diff/operationToSql.ts index d641e93b..f4267b12 100644 --- a/packages/orm-migrate/src/migrations/diff/operationToSql.ts +++ b/packages/orm-migrate/src/migrations/diff/operationToSql.ts @@ -58,6 +58,12 @@ export const operationToSql = (op: SchemaDiffOperation): SQLStatement => { case "dropUniqueConstraint": return sql`DROP INDEX IF EXISTS 
${sql.ident(op.schema)}.${sql.ident(op.constraintName)};`; + case "renameForeignKey": + return sql`ALTER TABLE ${sql.ident(op.schema)}.${sql.ident(op.table)} RENAME CONSTRAINT ${sql.ident(op.oldName)} TO ${sql.ident(op.newName)};`; + + case "renameUniqueConstraint": + return sql`ALTER INDEX ${sql.ident(op.schema)}.${sql.ident(op.oldName)} RENAME TO ${sql.ident(op.newName)};`; + case "alterPrimaryKey": return alterPrimaryKeySql(op); } diff --git a/packages/orm-migrate/src/migrations/diff/types.ts b/packages/orm-migrate/src/migrations/diff/types.ts index 8786b275..7a59622a 100644 --- a/packages/orm-migrate/src/migrations/diff/types.ts +++ b/packages/orm-migrate/src/migrations/diff/types.ts @@ -63,6 +63,19 @@ export type SchemaDiffOperation = table: string; constraintName: string; } + | { + type: "renameForeignKey"; + schema: string; + table: string; + oldName: string; + newName: string; + } + | { + type: "renameUniqueConstraint"; + schema: string; + oldName: string; + newName: string; + } | { type: "alterPrimaryKey"; schema: string; From 9fb9814e66a3ed1a8be06ac4519e71cde5d20773 Mon Sep 17 00:00:00 2001 From: Russell Dunphy Date: Sat, 31 Jan 2026 11:50:12 +0000 Subject: [PATCH 4/7] oeu --- packages/orm-cli/src/commands/db-migrate.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/orm-cli/src/commands/db-migrate.test.ts b/packages/orm-cli/src/commands/db-migrate.test.ts index 5a214d40..a8e6c936 100644 --- a/packages/orm-cli/src/commands/db-migrate.test.ts +++ b/packages/orm-cli/src/commands/db-migrate.test.ts @@ -151,7 +151,7 @@ describe("db migrate", () => { SELECT name, checksum FROM public._orm_migrations `); expect(migrationRecord.rows.length).toBe(1); - expect(migrationRecord.rows[0].name).toMatch(/^\d{14}_create-users$/); + expect(migrationRecord.rows[0].name).toMatch(/^\d{14}-create-users$/); }); test("returns no changes when schema is in sync", async () => { From d01f22e2368c95230907ccfff12952d2c392693c Mon Sep 17 00:00:00 2001 
From: Russell Dunphy Date: Sat, 31 Jan 2026 12:25:18 +0000 Subject: [PATCH 5/7] rename constraint fix --- packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts | 1 + packages/orm-migrate/src/migrations/diff/diffSnapshots.ts | 1 + packages/orm-migrate/src/migrations/diff/types.ts | 1 + 3 files changed, 3 insertions(+) diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts index cc1bcbb2..0aa4d5f7 100644 --- a/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts @@ -738,6 +738,7 @@ describe("diffSnapshots", () => { expect(ops).toContainEqual({ type: "renameUniqueConstraint", schema: "app", + table: "users", oldName: "users_email_key", newName: "users_email_unique", }); diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts index 573652fb..a36b8dc9 100644 --- a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts @@ -244,6 +244,7 @@ const diffTable = ( ops.push({ type: "renameUniqueConstraint", schema, + table, oldName: currentUc.name, newName: desiredUc.name, }); diff --git a/packages/orm-migrate/src/migrations/diff/types.ts b/packages/orm-migrate/src/migrations/diff/types.ts index 7a59622a..427e40c9 100644 --- a/packages/orm-migrate/src/migrations/diff/types.ts +++ b/packages/orm-migrate/src/migrations/diff/types.ts @@ -73,6 +73,7 @@ export type SchemaDiffOperation = | { type: "renameUniqueConstraint"; schema: string; + table: string; oldName: string; newName: string; } From 0269305280d3efe2df7275d0d783ccf2f9911cad Mon Sep 17 00:00:00 2001 From: Russell Dunphy Date: Sat, 31 Jan 2026 12:28:42 +0000 Subject: [PATCH 6/7] format --- .../src/migrations/diff/diffSnapshots.test.ts | 8 ++++++-- .../orm-migrate/src/migrations/diff/diffSnapshots.ts | 10 
++++++++-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts index 0aa4d5f7..471358c8 100644 --- a/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts @@ -726,12 +726,16 @@ describe("diffSnapshots", () => { const current: SchemaSnapshot = { schemas: ["app"], extensions: [], - tables: [makeTable({ name: "users", uniqueConstraints: [oldUc] })], + tables: [ + makeTable({ name: "users", uniqueConstraints: [oldUc] }), + ], }; const desired: SchemaSnapshot = { schemas: ["app"], extensions: [], - tables: [makeTable({ name: "users", uniqueConstraints: [newUc] })], + tables: [ + makeTable({ name: "users", uniqueConstraints: [newUc] }), + ], }; const ops = diffSnapshots(current, desired); diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts index a36b8dc9..3c0fa0c7 100644 --- a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts @@ -132,10 +132,16 @@ const diffTable = ( ); const currentUcMap = new Map( - current.uniqueConstraints.map((uc) => [uniqueConstraintContentKey(uc), uc]), + current.uniqueConstraints.map((uc) => [ + uniqueConstraintContentKey(uc), + uc, + ]), ); const desiredUcMap = new Map( - desired.uniqueConstraints.map((uc) => [uniqueConstraintContentKey(uc), uc]), + desired.uniqueConstraints.map((uc) => [ + uniqueConstraintContentKey(uc), + uc, + ]), ); // 1. 
Add columns From fde2b5c11b221614baadd4540342b15eaf276574 Mon Sep 17 00:00:00 2001 From: Russell Dunphy Date: Sat, 31 Jan 2026 12:43:19 +0000 Subject: [PATCH 7/7] help --- .../src/migrations/configToSnapshot.ts | 2 +- .../src/migrations/diff/diffSnapshots.test.ts | 49 +++++++++++++++++++ .../src/migrations/diff/diffSnapshots.ts | 2 +- .../src/migrations/pulledToSnapshot.ts | 2 +- 4 files changed, 52 insertions(+), 3 deletions(-) diff --git a/packages/orm-migrate/src/migrations/configToSnapshot.ts b/packages/orm-migrate/src/migrations/configToSnapshot.ts index a47da684..76987a42 100644 --- a/packages/orm-migrate/src/migrations/configToSnapshot.ts +++ b/packages/orm-migrate/src/migrations/configToSnapshot.ts @@ -89,7 +89,7 @@ export const configToSnapshot = (config: NormalizedConfig): SchemaSnapshot => { name: uc.name, columns: uc.columns, nullsNotDistinct: uc.nullsNotDistinct ?? false, - where: uc.where ? uc.where.text : null, + where: uc.where ? uc.where.text.trim() : null, })); return { diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts index 471358c8..b843426f 100644 --- a/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.test.ts @@ -754,6 +754,55 @@ describe("diffSnapshots", () => { ); }); + test("detects renamed unique constraint with WHERE clause", () => { + const oldUc = { + name: "activity_field_activity_id_field_id_ordinal", + columns: ["activity_id", "field_id", "ordinal"], + nullsNotDistinct: false, + where: "deleted_at IS NULL", + }; + const newUc = { + ...oldUc, + name: "activity_field_activity_id_field_id_ordinal_ukey", + }; + + const current: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + makeTable({ + name: "activity_field", + uniqueConstraints: [oldUc], + }), + ], + }; + const desired: SchemaSnapshot = { + schemas: ["app"], + extensions: [], + tables: [ + 
makeTable({ + name: "activity_field", + uniqueConstraints: [newUc], + }), + ], + }; + + const ops = diffSnapshots(current, desired); + expect(ops).toContainEqual({ + type: "renameUniqueConstraint", + schema: "app", + table: "activity_field", + oldName: "activity_field_activity_id_field_id_ordinal", + newName: "activity_field_activity_id_field_id_ordinal_ukey", + }); + expect(ops).not.toContainEqual( + expect.objectContaining({ type: "dropUniqueConstraint" }), + ); + expect(ops).not.toContainEqual( + expect.objectContaining({ type: "addUniqueConstraint" }), + ); + }); + test("does not emit rename when unique constraint is unchanged", () => { const uc = { name: "users_email_key", diff --git a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts index 3c0fa0c7..9f7b924a 100644 --- a/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts +++ b/packages/orm-migrate/src/migrations/diff/diffSnapshots.ts @@ -336,6 +336,6 @@ const uniqueConstraintContentKey = (uc: UniqueConstraintSnapshot): string => { return [ uc.columns.join(","), uc.nullsNotDistinct ?? false, - uc.where ?? "", + (uc.where ?? "").trim(), ].join("|"); }; diff --git a/packages/orm-migrate/src/migrations/pulledToSnapshot.ts b/packages/orm-migrate/src/migrations/pulledToSnapshot.ts index 23732b4c..8e5b852a 100644 --- a/packages/orm-migrate/src/migrations/pulledToSnapshot.ts +++ b/packages/orm-migrate/src/migrations/pulledToSnapshot.ts @@ -16,7 +16,7 @@ import type { */ const extractWhereClause = (definition: string): string | null => { const match = /\bWHERE\s+\((.+)\)\s*$/i.exec(definition); - return match?.[1] ?? null; + return match?.[1]?.trim() ?? null; }; /**