Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions internal/diff/index.go
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,12 @@ func generateIndexSQLWithName(index *ir.Index, indexName string, targetSchema st
// - Expressions: ((expression))
builder.WriteString(col.Name)

// Add operator class if specified (non-default operator class)
if col.Operator != "" {
builder.WriteString(" ")
builder.WriteString(col.Operator)
}

// Add direction if specified
if col.Direction != "" && col.Direction != "ASC" {
builder.WriteString(" ")
Expand Down
31 changes: 18 additions & 13 deletions ir/inspector.go
Original file line number Diff line number Diff line change
Expand Up @@ -385,7 +385,6 @@ func (i *Inspector) buildPartitions(ctx context.Context, schema *IR, targetSchem
return nil
}


func (i *Inspector) buildConstraints(ctx context.Context, schema *IR, targetSchema string) error {
constraints, err := i.queries.GetConstraintsForSchema(ctx, sql.NullString{String: targetSchema, Valid: true})
if err != nil {
Expand Down Expand Up @@ -555,15 +554,15 @@ func (i *Inspector) buildConstraints(ctx context.Context, schema *IR, targetSche
sort.Slice(constraint.Columns, func(i, j int) bool {
return constraint.Columns[i].Position < constraint.Columns[j].Position
})

// Also sort referenced columns for foreign keys
if constraint.Type == ConstraintTypeForeignKey && len(constraint.ReferencedColumns) > 0 {
sort.Slice(constraint.ReferencedColumns, func(i, j int) bool {
return constraint.ReferencedColumns[i].Position < constraint.ReferencedColumns[j].Position
})
}
}

table.Constraints[key.name] = constraint

// For partitioned tables, ensure primary key columns are ordered with partition key first
Expand Down Expand Up @@ -717,20 +716,28 @@ func (i *Inspector) buildIndexes(ctx context.Context, schema *IR, targetSchema s
index.Where = indexRow.PartialPredicate.String
}

// Extract columns directly from query results (no parsing needed!)
// Extract columns directly from query results
// The query uses pg_get_indexdef(indexrelid, column_position, true) for each column
// and extracts ASC/DESC from the indoption array
// and operator class names from pg_index.indclass joined with pg_opclass
for idx := 0; idx < len(indexRow.ColumnDefinitions); idx++ {
columnName := indexRow.ColumnDefinitions[idx]
direction := "ASC" // Default
if idx < len(indexRow.ColumnDirections) {
direction = indexRow.ColumnDirections[idx]
}

// Get operator class from the ColumnOpclasses array
operatorClass := ""
if idx < len(indexRow.ColumnOpclasses) {
operatorClass = indexRow.ColumnOpclasses[idx]
}

indexColumn := &IndexColumn{
Name: columnName,
Position: idx + 1,
Direction: direction,
Operator: operatorClass,
}

index.Columns = append(index.Columns, indexColumn)
Expand All @@ -751,7 +758,6 @@ func (i *Inspector) buildIndexes(ctx context.Context, schema *IR, targetSchema s
return nil
}


func (i *Inspector) buildSequences(ctx context.Context, schema *IR, targetSchema string) error {
sequences, err := i.queries.GetSequencesForSchema(ctx, sql.NullString{String: targetSchema, Valid: true})
if err != nil {
Expand All @@ -774,8 +780,8 @@ func (i *Inspector) buildSequences(ctx context.Context, schema *IR, targetSchema
if dataType == "bigint" {
// Check if this is a default bigint by looking at min/max values
// Default bigint sequences have min_value=1 and max_value=9223372036854775807
if seq.MinimumValue.Valid && seq.MinimumValue.Int64 == 1 &&
seq.MaximumValue.Valid && seq.MaximumValue.Int64 == 9223372036854775807 {
if seq.MinimumValue.Valid && seq.MinimumValue.Int64 == 1 &&
seq.MaximumValue.Valid && seq.MaximumValue.Int64 == 9223372036854775807 {
dataType = "" // This means it was not explicitly specified
}
}
Expand Down Expand Up @@ -863,13 +869,13 @@ func (i *Inspector) isIdentityColumn(ctx context.Context, schemaName, tableName,
WHERE table_schema = $1
AND table_name = $2
AND column_name = $3`

var isIdentity string
err := i.db.QueryRowContext(ctx, query, schemaName, tableName, columnName).Scan(&isIdentity)
if err != nil {
return false
}

return isIdentity == "YES"
}

Expand Down Expand Up @@ -935,8 +941,8 @@ func (i *Inspector) buildFunctions(ctx context.Context, schema *IR, targetSchema
func splitParameterString(signature string) []string {
var params []string
var current strings.Builder
depth := 0 // Track nesting depth of (), [], {}
inQuote := false // Track if we're inside a string literal
depth := 0 // Track nesting depth of (), [], {}
inQuote := false // Track if we're inside a string literal

i := 0
for i < len(signature) {
Expand Down Expand Up @@ -1061,7 +1067,6 @@ func (i *Inspector) parseParametersFromSignature(signature string) []*Parameter
return parameters
}


// lookupTypeNameFromOID converts PostgreSQL type OID to type name
func (i *Inspector) lookupTypeNameFromOID(oid int64) string {
// Common type OID mappings (can be extended as needed)
Expand Down Expand Up @@ -1273,7 +1278,7 @@ func extractFunctionCallFromTriggerDef(triggerDef string) string {

// Start after "EXECUTE FUNCTION " or "EXECUTE PROCEDURE "
start := strings.Index(triggerDef[executeIdx:], " ") + executeIdx + 1 // Skip "EXECUTE"
start = strings.Index(triggerDef[start:], " ") + start + 1 // Skip "FUNCTION"/"PROCEDURE"
start = strings.Index(triggerDef[start:], " ") + start + 1 // Skip "FUNCTION"/"PROCEDURE"

// The function call extends to the end of the definition (or a semicolon if present)
end := len(triggerDef)
Expand Down
10 changes: 9 additions & 1 deletion ir/queries/queries.sql
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,15 @@ SELECT
ELSE 'ASC'
END
FROM generate_series(1, idx.indnatts) k
) as column_directions
) as column_directions,
ARRAY(
SELECT CASE
WHEN opc.opcdefault THEN '' -- Omit default operator classes
ELSE COALESCE(opc.opcname, '')
END
FROM generate_series(1, idx.indnatts) k
LEFT JOIN pg_opclass opc ON opc.oid = idx.indclass[k-1]
) as column_opclasses
FROM pg_index idx
JOIN pg_class i ON i.oid = idx.indexrelid
JOIN pg_class t ON t.oid = idx.indrelid
Expand Down
23 changes: 12 additions & 11 deletions ir/queries/queries.sql.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

12 changes: 12 additions & 0 deletions testdata/diff/create_index/add_index/diff.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
CREATE TABLE IF NOT EXISTS users (
id integer,
email varchar(255) NOT NULL,
name varchar(100),
CONSTRAINT users_pkey PRIMARY KEY (id)
);

CREATE INDEX IF NOT EXISTS idx_users_email ON users (email varchar_pattern_ops);

CREATE INDEX IF NOT EXISTS idx_users_id ON users (id);

CREATE INDEX IF NOT EXISTS idx_users_name ON users (name);
10 changes: 10 additions & 0 deletions testdata/diff/create_index/add_index/new.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
-- Create a new table with a simple index
-- Test fixture: the desired ("new") schema state for the create_index/add_index
-- diff test. Starting from an empty schema (old.sql), the planner must emit
-- CREATE TABLE plus one CREATE INDEX per index below.
CREATE TABLE public.users (
id INTEGER PRIMARY KEY,
email VARCHAR(255) NOT NULL,
name VARCHAR(100)
);

CREATE INDEX idx_users_name ON public.users (name);
-- Non-default operator class: the generated DDL must preserve
-- varchar_pattern_ops after the column name (the behavior under test).
CREATE INDEX idx_users_email ON public.users (email varchar_pattern_ops);
CREATE INDEX idx_users_id ON public.users (id);
1 change: 1 addition & 0 deletions testdata/diff/create_index/add_index/old.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
-- Empty schema (starting state)
38 changes: 38 additions & 0 deletions testdata/diff/create_index/add_index/plan.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
{
"version": "1.0.0",
"pgschema_version": "1.4.0",
"created_at": "1970-01-01T00:00:00Z",
"source_fingerprint": {
"hash": "965b1131737c955e24c7f827c55bd78e4cb49a75adfd04229e0ba297376f5085"
},
"groups": [
{
"steps": [
{
"sql": "CREATE TABLE IF NOT EXISTS users (\n id integer,\n email varchar(255) NOT NULL,\n name varchar(100),\n CONSTRAINT users_pkey PRIMARY KEY (id)\n);",
"type": "table",
"operation": "create",
"path": "public.users"
},
{
"sql": "CREATE INDEX IF NOT EXISTS idx_users_email ON users (email varchar_pattern_ops);",
"type": "table.index",
"operation": "create",
"path": "public.users.idx_users_email"
},
{
"sql": "CREATE INDEX IF NOT EXISTS idx_users_id ON users (id);",
"type": "table.index",
"operation": "create",
"path": "public.users.idx_users_id"
},
{
"sql": "CREATE INDEX IF NOT EXISTS idx_users_name ON users (name);",
"type": "table.index",
"operation": "create",
"path": "public.users.idx_users_name"
}
]
}
]
}
12 changes: 12 additions & 0 deletions testdata/diff/create_index/add_index/plan.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
CREATE TABLE IF NOT EXISTS users (
id integer,
email varchar(255) NOT NULL,
name varchar(100),
CONSTRAINT users_pkey PRIMARY KEY (id)
);

CREATE INDEX IF NOT EXISTS idx_users_email ON users (email varchar_pattern_ops);

CREATE INDEX IF NOT EXISTS idx_users_id ON users (id);

CREATE INDEX IF NOT EXISTS idx_users_name ON users (name);
26 changes: 26 additions & 0 deletions testdata/diff/create_index/add_index/plan.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
Plan: 1 to add.

Summary by type:
tables: 1 to add

Tables:
+ users
+ idx_users_email (index)
+ idx_users_id (index)
+ idx_users_name (index)

DDL to be executed:
--------------------------------------------------

CREATE TABLE IF NOT EXISTS users (
id integer,
email varchar(255) NOT NULL,
name varchar(100),
CONSTRAINT users_pkey PRIMARY KEY (id)
);

CREATE INDEX IF NOT EXISTS idx_users_email ON users (email varchar_pattern_ops);

CREATE INDEX IF NOT EXISTS idx_users_id ON users (id);

CREATE INDEX IF NOT EXISTS idx_users_name ON users (name);