Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
158 changes: 151 additions & 7 deletions internal/ir/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import (
"fmt"
"math"
"regexp"
"sort"
"strconv"
"strings"

Expand Down Expand Up @@ -1056,15 +1057,21 @@ func (p *Parser) parseConstraint(constraint *pg_query.Constraint, schemaName, ta
if constraint.Conname != "" {
constraintName = constraint.Conname
} else {
// For foreign key constraints, use FkAttrs if Keys is empty
var nameKeys []*pg_query.Node
if constraintType == ConstraintTypeForeignKey && len(constraint.Keys) == 0 && len(constraint.FkAttrs) > 0 {
nameKeys = constraint.FkAttrs
// For CHECK constraints, extract column names from the expression
if constraintType == ConstraintTypeCheck && constraint.RawExpr != nil {
columnNames := p.extractColumnNamesFromExpression(constraint.RawExpr)
constraintName = p.generateConstraintNameFromColumns(constraintType, tableName, columnNames)
} else {
nameKeys = constraint.Keys
// For other constraint types, use the Keys field
var nameKeys []*pg_query.Node
if constraintType == ConstraintTypeForeignKey && len(constraint.Keys) == 0 && len(constraint.FkAttrs) > 0 {
nameKeys = constraint.FkAttrs
} else {
nameKeys = constraint.Keys
}
// Generate default name based on type and columns
constraintName = p.generateConstraintName(constraintType, tableName, nameKeys)
}
// Generate default name based on type and columns
constraintName = p.generateConstraintName(constraintType, tableName, nameKeys)
}

c := &Constraint{
Expand Down Expand Up @@ -1217,6 +1224,75 @@ func (p *Parser) generateConstraintName(constraintType ConstraintType, tableName
}
}

// generateConstraintNameFromColumns generates a constraint name from column names
// generateConstraintNameFromColumns generates a default constraint name from
// explicit column names, mirroring PostgreSQL's own naming conventions
// (e.g. "users_pkey", "users_email_key", "orders_total_check").
func (p *Parser) generateConstraintNameFromColumns(constraintType ConstraintType, tableName string, columnNames []string) string {
	var suffix string
	switch constraintType {
	case ConstraintTypePrimaryKey:
		suffix = "pkey"
	case ConstraintTypeUnique:
		suffix = "key"
	case ConstraintTypeForeignKey:
		suffix = "fkey"
	case ConstraintTypeCheck:
		suffix = "check"
	default:
		suffix = "constraint"
	}

	// truncate enforces PostgreSQL's 63-byte identifier limit (NAMEDATALEN-1),
	// trimming the prefix while keeping the type suffix intact.
	truncate := func(name string) string {
		if len(name) <= 63 {
			return name
		}
		maxPrefixLen := 63 - len(suffix) - 1
		if maxPrefixLen > 0 {
			return name[:maxPrefixLen] + "_" + suffix
		}
		return name
	}

	// Primary keys in PostgreSQL always use the table_pkey format and never
	// include column names.
	if constraintType == ConstraintTypePrimaryKey {
		return fmt.Sprintf("%s_%s", tableName, suffix)
	}

	if len(columnNames) == 0 {
		return fmt.Sprintf("%s_%s", tableName, suffix)
	}

	// CHECK constraints match PostgreSQL's actual naming behavior:
	//   - single column:   tableName_columnName_check
	//   - multiple columns: tableName_check (PostgreSQL doesn't include column
	//     names for complex expressions)
	if constraintType == ConstraintTypeCheck {
		if len(columnNames) == 1 {
			return truncate(fmt.Sprintf("%s_%s_%s", tableName, columnNames[0], suffix))
		}
		return fmt.Sprintf("%s_%s", tableName, suffix)
	}

	// UNIQUE and FOREIGN KEY constraints include all column names.
	if constraintType == ConstraintTypeUnique || constraintType == ConstraintTypeForeignKey {
		allColumns := strings.Join(columnNames, "_")
		return truncate(fmt.Sprintf("%s_%s_%s", tableName, allColumns, suffix))
	}

	// Default fallback for any other constraint type: use the first column.
	// Bug fix: this path previously skipped the 63-character truncation that
	// every other branch applied.
	return truncate(fmt.Sprintf("%s_%s_%s", tableName, columnNames[0], suffix))
}

// mapReferentialAction maps pg_query referential action to string
func (p *Parser) mapReferentialAction(action string) string {
switch action {
Expand Down Expand Up @@ -1260,6 +1336,74 @@ func (p *Parser) extractExpressionText(expr *pg_query.Node) string {
}
}

// extractColumnNamesFromExpression recursively extracts the distinct column
// names referenced by a CHECK constraint expression. The result is sorted
// alphabetically so generated constraint names are deterministic; it returns
// nil when the expression references no columns.
func (p *Parser) extractColumnNamesFromExpression(expr *pg_query.Node) []string {
	if expr == nil {
		return nil
	}

	// Collect into a set so duplicate references to the same column count once.
	columnSet := make(map[string]bool)
	p.collectColumnNamesFromNode(expr, columnSet)

	// Preserve the nil return for empty results (callers check len, but nil
	// keeps behavior identical to the previous implementation).
	if len(columnSet) == 0 {
		return nil
	}

	// Pre-size the slice: the final length is known, so avoid append growth.
	columnNames := make([]string, 0, len(columnSet))
	for columnName := range columnSet {
		columnNames = append(columnNames, columnName)
	}

	// Sort for consistent ordering (map iteration order is random).
	sort.Strings(columnNames)

	return columnNames
}

// collectColumnNamesFromNode recursively walks an expression AST node and
// records every referenced column name into columnSet.
func (p *Parser) collectColumnNamesFromNode(node *pg_query.Node, columnSet map[string]bool) {
	if node == nil {
		return
	}

	switch n := node.Node.(type) {
	case *pg_query.Node_ColumnRef:
		// The last field of a ColumnRef is the column name itself; earlier
		// fields, if present, are schema/table qualifiers.
		if len(n.ColumnRef.Fields) > 0 {
			if str := n.ColumnRef.Fields[len(n.ColumnRef.Fields)-1].GetString_(); str != nil {
				columnName := str.Sval
				// Defensive: skip anything that still looks qualified.
				// NOTE(review): a single ColumnRef field should never contain
				// a dot, so this check is likely always true — kept for safety.
				if !strings.Contains(columnName, ".") {
					columnSet[columnName] = true
				}
			}
		}
	case *pg_query.Node_AExpr:
		// Arithmetic/comparison expressions: visit both operands.
		p.collectColumnNamesFromNode(n.AExpr.Lexpr, columnSet)
		p.collectColumnNamesFromNode(n.AExpr.Rexpr, columnSet)
	case *pg_query.Node_BoolExpr:
		// AND/OR/NOT expressions: visit every argument.
		for _, arg := range n.BoolExpr.Args {
			p.collectColumnNamesFromNode(arg, columnSet)
		}
	case *pg_query.Node_FuncCall:
		// Function calls: visit each argument. Ranging over a nil slice is a
		// no-op, so the previous explicit nil check was redundant.
		for _, arg := range n.FuncCall.Args {
			p.collectColumnNamesFromNode(arg, columnSet)
		}
	case *pg_query.Node_TypeCast:
		// Casts: visit the expression being cast.
		p.collectColumnNamesFromNode(n.TypeCast.Arg, columnSet)
	case *pg_query.Node_List:
		// Lists (e.g. IN (...) operands): visit each item.
		for _, item := range n.List.Items {
			p.collectColumnNamesFromNode(item, columnSet)
		}
		// Other node types (constants, etc.) reference no columns.
	}
}

// parseAExpr parses arithmetic/comparison expressions
func (p *Parser) parseAExpr(expr *pg_query.A_Expr) string {
// Handle IN expressions
Expand Down
11 changes: 10 additions & 1 deletion internal/plan/plan.go
Original file line number Diff line number Diff line change
Expand Up @@ -140,10 +140,19 @@ func groupDiffs(diffs []diff.Diff) []ExecutionGroup {
var groups []ExecutionGroup
var transactionalSteps []Step

// Track newly created tables to avoid concurrent rewrites for their indexes
newlyCreatedTables := make(map[string]bool)
for _, d := range diffs {
if d.Type == diff.DiffTypeTable && d.Operation == diff.DiffOperationCreate {
// Extract table name from path (schema.table)
newlyCreatedTables[d.Path] = true
}
}

// Convert diffs to steps
for _, d := range diffs {
// Try to generate rewrites if online operations are enabled
rewriteSteps := generateRewrite(d)
rewriteSteps := generateRewrite(d, newlyCreatedTables)

if len(rewriteSteps) > 0 {
// For operations with rewrites, create one step per rewrite statement
Expand Down
12 changes: 11 additions & 1 deletion internal/plan/rewrite.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,13 +16,18 @@ type RewriteStep struct {
}

// generateRewrite generates rewrite steps for a diff if online operations are enabled
func generateRewrite(d diff.Diff) []RewriteStep {
func generateRewrite(d diff.Diff, newlyCreatedTables map[string]bool) []RewriteStep {
// Dispatch to specific rewrite generators based on diff type and source
switch d.Type {
case diff.DiffTypeTableIndex:
switch d.Operation {
case diff.DiffOperationCreate:
if index, ok := d.Source.(*ir.Index); ok {
// Skip rewrite for indexes on newly created tables
tableKey := index.Schema + "." + index.Table
if newlyCreatedTables[tableKey] {
return nil // No rewrite needed for indexes on new tables
}
return generateIndexRewrite(index)
}
case diff.DiffOperationAlter:
Expand All @@ -37,6 +42,11 @@ func generateRewrite(d diff.Diff) []RewriteStep {
case diff.DiffTypeTableConstraint:
if d.Operation == diff.DiffOperationCreate {
if constraint, ok := d.Source.(*ir.Constraint); ok {
// Skip rewrite for constraints on newly created tables
tableKey := constraint.Schema + "." + constraint.Table
if newlyCreatedTables[tableKey] {
return nil // No rewrite needed for constraints on new tables
}
switch constraint.Type {
case ir.ConstraintTypeCheck:
return generateConstraintRewrite(constraint)
Expand Down
30 changes: 4 additions & 26 deletions testdata/diff/create_table/add_table_like/plan.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"version": "1.0.0",
"pgschema_version": "1.0.2",
"pgschema_version": "1.0.3",
"created_at": "1970-01-01T00:00:00Z",
"source_fingerprint": {
"hash": "bcda23084995439e43e6387779062084fe7c9fab8123ca140161baf8ec4d2be6"
Expand All @@ -25,35 +25,13 @@
"type": "table.comment",
"operation": "create",
"path": "public.users"
}
]
},
{
"steps": [
{
"sql": "CREATE INDEX CONCURRENTLY IF NOT EXISTS users_created_at_idx ON users (created_at);",
"type": "table.index",
"operation": "create",
"path": "public.users.users_created_at_idx"
}
]
},
{
"steps": [
},
{
"sql": "SELECT \n COALESCE(i.indisvalid, false) as done,\n CASE \n WHEN p.blocks_total > 0 THEN p.blocks_done * 100 / p.blocks_total\n ELSE 0\n END as progress\nFROM pg_class c\nLEFT JOIN pg_index i ON c.oid = i.indexrelid\nLEFT JOIN pg_stat_progress_create_index p ON c.oid = p.index_relid\nWHERE c.relname = 'users_created_at_idx';",
"directive": {
"type": "wait",
"message": "Creating index users_created_at_idx"
},
"sql": "CREATE INDEX IF NOT EXISTS users_created_at_idx ON users (created_at);",
"type": "table.index",
"operation": "create",
"path": "public.users.users_created_at_idx"
}
]
},
{
"steps": [
},
{
"sql": "COMMENT ON COLUMN _template_timestamps.created_at IS NULL;",
"type": "table.column.comment",
Expand Down
14 changes: 1 addition & 13 deletions testdata/diff/create_table/add_table_like/plan.sql
Original file line number Diff line number Diff line change
Expand Up @@ -15,18 +15,6 @@ CREATE TABLE IF NOT EXISTS users (

COMMENT ON TABLE users IS 'Template for timestamp fields';

CREATE INDEX CONCURRENTLY IF NOT EXISTS users_created_at_idx ON users (created_at);

-- pgschema:wait
SELECT
COALESCE(i.indisvalid, false) as done,
CASE
WHEN p.blocks_total > 0 THEN p.blocks_done * 100 / p.blocks_total
ELSE 0
END as progress
FROM pg_class c
LEFT JOIN pg_index i ON c.oid = i.indexrelid
LEFT JOIN pg_stat_progress_create_index p ON c.oid = p.index_relid
WHERE c.relname = 'users_created_at_idx';
CREATE INDEX IF NOT EXISTS users_created_at_idx ON users (created_at);

COMMENT ON COLUMN _template_timestamps.created_at IS NULL;
20 changes: 2 additions & 18 deletions testdata/diff/create_table/add_table_like/plan.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ Tables:
DDL to be executed:
--------------------------------------------------

-- Transaction Group #1
CREATE TABLE IF NOT EXISTS products (
id SERIAL PRIMARY KEY,
created_at timestamptz DEFAULT now() NOT NULL,
Expand All @@ -32,21 +31,6 @@ CREATE TABLE IF NOT EXISTS users (

COMMENT ON TABLE users IS 'Template for timestamp fields';

-- Transaction Group #2
CREATE INDEX CONCURRENTLY IF NOT EXISTS users_created_at_idx ON users (created_at);

-- Transaction Group #3
-- pgschema:wait
SELECT
COALESCE(i.indisvalid, false) as done,
CASE
WHEN p.blocks_total > 0 THEN p.blocks_done * 100 / p.blocks_total
ELSE 0
END as progress
FROM pg_class c
LEFT JOIN pg_index i ON c.oid = i.indexrelid
LEFT JOIN pg_stat_progress_create_index p ON c.oid = p.index_relid
WHERE c.relname = 'users_created_at_idx';

-- Transaction Group #4
CREATE INDEX IF NOT EXISTS users_created_at_idx ON users (created_at);

COMMENT ON COLUMN _template_timestamps.created_at IS NULL;
15 changes: 15 additions & 0 deletions testdata/diff/create_table/add_table_no_online_rewrite/diff.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
CREATE TABLE IF NOT EXISTS companies (
id integer PRIMARY KEY,
name text NOT NULL
);

CREATE TABLE IF NOT EXISTS departments (
id integer PRIMARY KEY,
name text NOT NULL,
company_id integer NOT NULL REFERENCES companies (id),
budget numeric(10,2),
created_at timestamp DEFAULT now(),
CHECK (budget > 0)
);

CREATE INDEX IF NOT EXISTS idx_departments_name ON departments (name);
17 changes: 17 additions & 0 deletions testdata/diff/create_table/add_table_no_online_rewrite/new.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
-- First create a referenced table for FK constraint
CREATE TABLE public.companies (
id integer PRIMARY KEY,
name text NOT NULL
);

-- Main table with constraints and index
CREATE TABLE public.departments (
id integer PRIMARY KEY,
name text NOT NULL,
company_id integer NOT NULL REFERENCES companies(id),
budget numeric(10,2),
created_at timestamp DEFAULT now(),
CHECK (budget > 0)
);

CREATE INDEX idx_departments_name ON public.departments (name);
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
-- Empty schema (no tables)
Loading