4 Commits

Author SHA1 Message Date
f2d500f98d feat(merge): 🎉 Add support for constraints and indexes in merge results
All checks were successful
CI / Test (1.24) (push) Successful
CI / Test (1.25) (push) Successful
CI / Lint (push) Successful
CI / Build (push) Successful
Release / Build and Release (push) Successful
Integration Tests / Integration Tests (push) Successful
* Enhance MergeResult to track added constraints and indexes.
* Update merge logic to increment counters for added constraints and indexes.
* Modify GetMergeSummary to include constraints and indexes in the output.
* Add comprehensive tests for merging constraints and indexes.
2026-01-31 21:30:55 +02:00
2ec9991324 feat(merge): 🎉 Add support for merging constraints and indexes
Some checks failed
CI / Test (1.24) (push) Failing
CI / Test (1.25) (push) Successful
CI / Lint (push) Successful
CI / Build (push) Successful
Release / Build and Release (push) Successful
Integration Tests / Integration Tests (push) Successful
* Implement mergeConstraints to handle table constraints
* Implement mergeIndexes to handle table indexes
* Update mergeTables to include constraints and indexes during merge
2026-01-31 21:27:28 +02:00
a3e45c206d feat(writer): 🎉 Enhance SQL execution logging and add statement type detection
All checks were successful
CI / Test (1.24) (push) Successful
CI / Test (1.25) (push) Successful
CI / Build (push) Successful
CI / Lint (push) Successful
Release / Build and Release (push) Successful
Integration Tests / Integration Tests (push) Successful
* Log statement type during execution for better debugging
* Introduce detectStatementType function to categorize SQL statements
* Update unique constraint naming convention in tests
2026-01-31 21:19:48 +02:00
165623bb1d feat(pgsql): Add templates for constraints and sequences
All checks were successful
CI / Test (1.24) (push) Successful
CI / Test (1.25) (push) Successful
CI / Build (push) Successful
CI / Lint (push) Successful
Release / Build and Release (push) Successful
Integration Tests / Integration Tests (push) Successful
* Introduce new templates for creating unique, check, and foreign key constraints with existence checks.
* Add templates for setting sequence values and creating sequences.
* Refactor existing SQL generation logic to utilize new templates for better maintainability and readability.
* Ensure identifiers are properly quoted to handle special characters and reserved keywords.
2026-01-31 21:04:43 +02:00
27 changed files with 1362 additions and 262 deletions

View File

@@ -12,14 +12,16 @@ import (
// MergeResult represents the result of a merge operation
type MergeResult struct {
    SchemasAdded     int
    TablesAdded      int
    ColumnsAdded     int
    ConstraintsAdded int
    IndexesAdded     int
    RelationsAdded   int
    DomainsAdded     int
    EnumsAdded       int
    ViewsAdded       int
    SequencesAdded   int
}
// MergeOptions contains options for merge operations
@@ -120,8 +122,10 @@ func (r *MergeResult) mergeTables(schema *models.Schema, source *models.Schema,
        }

        if tgtTable, exists := existingTables[tableName]; exists {
            // Table exists, merge its columns, constraints, and indexes
            r.mergeColumns(tgtTable, srcTable)
            r.mergeConstraints(tgtTable, srcTable)
            r.mergeIndexes(tgtTable, srcTable)
        } else {
            // Table doesn't exist, add it
            newTable := cloneTable(srcTable)
@@ -151,6 +155,52 @@ func (r *MergeResult) mergeColumns(table *models.Table, srcTable *models.Table)
    }
}
func (r *MergeResult) mergeConstraints(table *models.Table, srcTable *models.Table) {
// Initialize constraints map if nil
if table.Constraints == nil {
table.Constraints = make(map[string]*models.Constraint)
}
// Create map of existing constraints
existingConstraints := make(map[string]*models.Constraint)
for constName := range table.Constraints {
existingConstraints[constName] = table.Constraints[constName]
}
// Merge constraints
for constName, srcConst := range srcTable.Constraints {
if _, exists := existingConstraints[constName]; !exists {
// Constraint doesn't exist, add it
newConst := cloneConstraint(srcConst)
table.Constraints[constName] = newConst
r.ConstraintsAdded++
}
}
}
func (r *MergeResult) mergeIndexes(table *models.Table, srcTable *models.Table) {
// Initialize indexes map if nil
if table.Indexes == nil {
table.Indexes = make(map[string]*models.Index)
}
// Create map of existing indexes
existingIndexes := make(map[string]*models.Index)
for idxName := range table.Indexes {
existingIndexes[idxName] = table.Indexes[idxName]
}
// Merge indexes
for idxName, srcIdx := range srcTable.Indexes {
if _, exists := existingIndexes[idxName]; !exists {
// Index doesn't exist, add it
newIdx := cloneIndex(srcIdx)
table.Indexes[idxName] = newIdx
r.IndexesAdded++
}
}
}
func (r *MergeResult) mergeViews(schema *models.Schema, source *models.Schema) {
    // Create map of existing views
    existingViews := make(map[string]*models.View)
@@ -552,6 +602,8 @@ func GetMergeSummary(result *MergeResult) string {
fmt.Sprintf("Schemas added: %d", result.SchemasAdded), fmt.Sprintf("Schemas added: %d", result.SchemasAdded),
fmt.Sprintf("Tables added: %d", result.TablesAdded), fmt.Sprintf("Tables added: %d", result.TablesAdded),
fmt.Sprintf("Columns added: %d", result.ColumnsAdded), fmt.Sprintf("Columns added: %d", result.ColumnsAdded),
fmt.Sprintf("Constraints added: %d", result.ConstraintsAdded),
fmt.Sprintf("Indexes added: %d", result.IndexesAdded),
fmt.Sprintf("Views added: %d", result.ViewsAdded), fmt.Sprintf("Views added: %d", result.ViewsAdded),
fmt.Sprintf("Sequences added: %d", result.SequencesAdded), fmt.Sprintf("Sequences added: %d", result.SequencesAdded),
fmt.Sprintf("Enums added: %d", result.EnumsAdded), fmt.Sprintf("Enums added: %d", result.EnumsAdded),
@@ -560,6 +612,7 @@ func GetMergeSummary(result *MergeResult) string {
    }

    totalAdded := result.SchemasAdded + result.TablesAdded + result.ColumnsAdded +
        result.ConstraintsAdded + result.IndexesAdded +
        result.ViewsAdded + result.SequencesAdded + result.EnumsAdded +
        result.RelationsAdded + result.DomainsAdded

pkg/merge/merge_test.go (new file, 617 lines added)
View File

@@ -0,0 +1,617 @@
package merge
import (
"testing"
"git.warky.dev/wdevs/relspecgo/pkg/models"
)
func TestMergeDatabases_NilInputs(t *testing.T) {
result := MergeDatabases(nil, nil, nil)
if result == nil {
t.Fatal("Expected non-nil result")
}
if result.SchemasAdded != 0 {
t.Errorf("Expected 0 schemas added, got %d", result.SchemasAdded)
}
}
func TestMergeDatabases_NewSchema(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{Name: "public"},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{Name: "auth"},
},
}
result := MergeDatabases(target, source, nil)
if result.SchemasAdded != 1 {
t.Errorf("Expected 1 schema added, got %d", result.SchemasAdded)
}
if len(target.Schemas) != 2 {
t.Errorf("Expected 2 schemas in target, got %d", len(target.Schemas))
}
}
func TestMergeDatabases_ExistingSchema(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{Name: "public"},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{Name: "public"},
},
}
result := MergeDatabases(target, source, nil)
if result.SchemasAdded != 0 {
t.Errorf("Expected 0 schemas added, got %d", result.SchemasAdded)
}
if len(target.Schemas) != 1 {
t.Errorf("Expected 1 schema in target, got %d", len(target.Schemas))
}
}
func TestMergeTables_NewTable(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "posts",
Schema: "public",
Columns: map[string]*models.Column{},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.TablesAdded != 1 {
t.Errorf("Expected 1 table added, got %d", result.TablesAdded)
}
if len(target.Schemas[0].Tables) != 2 {
t.Errorf("Expected 2 tables in target schema, got %d", len(target.Schemas[0].Tables))
}
}
func TestMergeColumns_NewColumn(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"id": {Name: "id", Type: "int"},
},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"email": {Name: "email", Type: "varchar"},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.ColumnsAdded != 1 {
t.Errorf("Expected 1 column added, got %d", result.ColumnsAdded)
}
if len(target.Schemas[0].Tables[0].Columns) != 2 {
t.Errorf("Expected 2 columns in target table, got %d", len(target.Schemas[0].Tables[0].Columns))
}
}
func TestMergeConstraints_NewConstraint(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Constraints: map[string]*models.Constraint{},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Constraints: map[string]*models.Constraint{
"ukey_users_email": {
Type: models.UniqueConstraint,
Columns: []string{"email"},
Name: "ukey_users_email",
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.ConstraintsAdded != 1 {
t.Errorf("Expected 1 constraint added, got %d", result.ConstraintsAdded)
}
if len(target.Schemas[0].Tables[0].Constraints) != 1 {
t.Errorf("Expected 1 constraint in target table, got %d", len(target.Schemas[0].Tables[0].Constraints))
}
}
func TestMergeConstraints_NilConstraintsMap(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Constraints: nil, // Nil map
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Constraints: map[string]*models.Constraint{
"ukey_users_email": {
Type: models.UniqueConstraint,
Columns: []string{"email"},
Name: "ukey_users_email",
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.ConstraintsAdded != 1 {
t.Errorf("Expected 1 constraint added, got %d", result.ConstraintsAdded)
}
if target.Schemas[0].Tables[0].Constraints == nil {
t.Error("Expected constraints map to be initialized")
}
if len(target.Schemas[0].Tables[0].Constraints) != 1 {
t.Errorf("Expected 1 constraint in target table, got %d", len(target.Schemas[0].Tables[0].Constraints))
}
}
func TestMergeIndexes_NewIndex(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Indexes: map[string]*models.Index{},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Indexes: map[string]*models.Index{
"idx_users_email": {
Name: "idx_users_email",
Columns: []string{"email"},
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.IndexesAdded != 1 {
t.Errorf("Expected 1 index added, got %d", result.IndexesAdded)
}
if len(target.Schemas[0].Tables[0].Indexes) != 1 {
t.Errorf("Expected 1 index in target table, got %d", len(target.Schemas[0].Tables[0].Indexes))
}
}
func TestMergeIndexes_NilIndexesMap(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Indexes: nil, // Nil map
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
Indexes: map[string]*models.Index{
"idx_users_email": {
Name: "idx_users_email",
Columns: []string{"email"},
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.IndexesAdded != 1 {
t.Errorf("Expected 1 index added, got %d", result.IndexesAdded)
}
if target.Schemas[0].Tables[0].Indexes == nil {
t.Error("Expected indexes map to be initialized")
}
if len(target.Schemas[0].Tables[0].Indexes) != 1 {
t.Errorf("Expected 1 index in target table, got %d", len(target.Schemas[0].Tables[0].Indexes))
}
}
func TestMergeOptions_SkipTableNames(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{},
},
},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "migrations",
Schema: "public",
Columns: map[string]*models.Column{},
},
},
},
},
}
opts := &MergeOptions{
SkipTableNames: map[string]bool{
"migrations": true,
},
}
result := MergeDatabases(target, source, opts)
if result.TablesAdded != 0 {
t.Errorf("Expected 0 tables added (skipped), got %d", result.TablesAdded)
}
if len(target.Schemas[0].Tables) != 1 {
t.Errorf("Expected 1 table in target schema, got %d", len(target.Schemas[0].Tables))
}
}
func TestMergeViews_NewView(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Views: []*models.View{},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Views: []*models.View{
{
Name: "user_summary",
Schema: "public",
Definition: "SELECT * FROM users",
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.ViewsAdded != 1 {
t.Errorf("Expected 1 view added, got %d", result.ViewsAdded)
}
if len(target.Schemas[0].Views) != 1 {
t.Errorf("Expected 1 view in target schema, got %d", len(target.Schemas[0].Views))
}
}
func TestMergeEnums_NewEnum(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Enums: []*models.Enum{},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Enums: []*models.Enum{
{
Name: "user_role",
Schema: "public",
Values: []string{"admin", "user"},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.EnumsAdded != 1 {
t.Errorf("Expected 1 enum added, got %d", result.EnumsAdded)
}
if len(target.Schemas[0].Enums) != 1 {
t.Errorf("Expected 1 enum in target schema, got %d", len(target.Schemas[0].Enums))
}
}
func TestMergeDomains_NewDomain(t *testing.T) {
target := &models.Database{
Domains: []*models.Domain{},
}
source := &models.Database{
Domains: []*models.Domain{
{
Name: "auth",
Description: "Authentication domain",
},
},
}
result := MergeDatabases(target, source, nil)
if result.DomainsAdded != 1 {
t.Errorf("Expected 1 domain added, got %d", result.DomainsAdded)
}
if len(target.Domains) != 1 {
t.Errorf("Expected 1 domain in target, got %d", len(target.Domains))
}
}
func TestMergeRelations_NewRelation(t *testing.T) {
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Relations: []*models.Relationship{},
},
},
}
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Relations: []*models.Relationship{
{
Name: "fk_posts_user",
Type: models.OneToMany,
FromTable: "posts",
FromColumns: []string{"user_id"},
ToTable: "users",
ToColumns: []string{"id"},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
if result.RelationsAdded != 1 {
t.Errorf("Expected 1 relation added, got %d", result.RelationsAdded)
}
if len(target.Schemas[0].Relations) != 1 {
t.Errorf("Expected 1 relation in target schema, got %d", len(target.Schemas[0].Relations))
}
}
func TestGetMergeSummary(t *testing.T) {
result := &MergeResult{
SchemasAdded: 1,
TablesAdded: 2,
ColumnsAdded: 5,
ConstraintsAdded: 3,
IndexesAdded: 2,
ViewsAdded: 1,
}
summary := GetMergeSummary(result)
if summary == "" {
t.Error("Expected non-empty summary")
}
if len(summary) < 50 {
t.Errorf("Summary seems too short: %s", summary)
}
}
func TestGetMergeSummary_Nil(t *testing.T) {
summary := GetMergeSummary(nil)
if summary == "" {
t.Error("Expected non-empty summary for nil result")
}
}
func TestComplexMerge(t *testing.T) {
// Target with existing structure
target := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"id": {Name: "id", Type: "int"},
},
Constraints: map[string]*models.Constraint{},
Indexes: map[string]*models.Index{},
},
},
},
},
}
// Source with new columns, constraints, and indexes
source := &models.Database{
Schemas: []*models.Schema{
{
Name: "public",
Tables: []*models.Table{
{
Name: "users",
Schema: "public",
Columns: map[string]*models.Column{
"email": {Name: "email", Type: "varchar"},
"guid": {Name: "guid", Type: "uuid"},
},
Constraints: map[string]*models.Constraint{
"ukey_users_email": {
Type: models.UniqueConstraint,
Columns: []string{"email"},
Name: "ukey_users_email",
},
"ukey_users_guid": {
Type: models.UniqueConstraint,
Columns: []string{"guid"},
Name: "ukey_users_guid",
},
},
Indexes: map[string]*models.Index{
"idx_users_email": {
Name: "idx_users_email",
Columns: []string{"email"},
},
},
},
},
},
},
}
result := MergeDatabases(target, source, nil)
// Verify counts
if result.ColumnsAdded != 2 {
t.Errorf("Expected 2 columns added, got %d", result.ColumnsAdded)
}
if result.ConstraintsAdded != 2 {
t.Errorf("Expected 2 constraints added, got %d", result.ConstraintsAdded)
}
if result.IndexesAdded != 1 {
t.Errorf("Expected 1 index added, got %d", result.IndexesAdded)
}
// Verify target has merged data
table := target.Schemas[0].Tables[0]
if len(table.Columns) != 3 {
t.Errorf("Expected 3 columns in merged table, got %d", len(table.Columns))
}
if len(table.Constraints) != 2 {
t.Errorf("Expected 2 constraints in merged table, got %d", len(table.Constraints))
}
if len(table.Indexes) != 1 {
t.Errorf("Expected 1 index in merged table, got %d", len(table.Indexes))
}
// Verify specific constraint
if _, exists := table.Constraints["ukey_users_guid"]; !exists {
t.Error("Expected ukey_users_guid constraint to exist")
}
}
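For readers skimming the diff, here is a minimal usage sketch of the merge API exercised by the tests above. It assumes the pkg/merge import path mirrors the pkg/models path used in the test file; the struct shapes are taken directly from the tests.

package main

import (
    "fmt"

    "git.warky.dev/wdevs/relspecgo/pkg/merge"
    "git.warky.dev/wdevs/relspecgo/pkg/models"
)

func main() {
    // Target already has users(id); source contributes an email column,
    // a unique constraint, and an index on that column.
    target := &models.Database{Schemas: []*models.Schema{{
        Name: "public",
        Tables: []*models.Table{{
            Name:    "users",
            Schema:  "public",
            Columns: map[string]*models.Column{"id": {Name: "id", Type: "int"}},
        }},
    }}}
    source := &models.Database{Schemas: []*models.Schema{{
        Name: "public",
        Tables: []*models.Table{{
            Name:    "users",
            Schema:  "public",
            Columns: map[string]*models.Column{"email": {Name: "email", Type: "varchar"}},
            Constraints: map[string]*models.Constraint{
                "ukey_users_email": {Name: "ukey_users_email", Type: models.UniqueConstraint, Columns: []string{"email"}},
            },
            Indexes: map[string]*models.Index{
                "idx_users_email": {Name: "idx_users_email", Columns: []string{"email"}},
            },
        }},
    }}}

    result := merge.MergeDatabases(target, source, nil)
    fmt.Println(merge.GetMergeSummary(result)) // summary now includes "Constraints added: 1" and "Indexes added: 1"
}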

View File

@@ -603,8 +603,10 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
            column.Default = strings.Trim(defaultVal, "'\"")
        } else if attr == "unique" {
            // Create a unique constraint
            // Clean table name by removing leading underscores to avoid double underscores
            cleanTableName := strings.TrimLeft(tableName, "_")
            uniqueConstraint := models.InitConstraint(
                fmt.Sprintf("ukey_%s_%s", cleanTableName, columnName),
                models.UniqueConstraint,
            )
            uniqueConstraint.Schema = schemaName
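// Illustrative note (not part of this diff): with the TrimLeft above, a DBML table
// named "_users" and one named "users" both produce the same constraint name for a
// unique "email" column, i.e.
//
//	fmt.Sprintf("ukey_%s_%s", strings.TrimLeft("_users", "_"), "email") == "ukey_users_email"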

View File

@@ -809,14 +809,14 @@ func TestConstraintNaming(t *testing.T) {
        t.Fatal("Posts table not found")
    }

    // Test unique constraint naming: ukey_table_column
    if _, exists := usersTable.Constraints["ukey_users_email"]; !exists {
        t.Error("Expected unique constraint 'ukey_users_email' not found")
        t.Logf("Available constraints: %v", getKeys(usersTable.Constraints))
    }

    if _, exists := postsTable.Constraints["ukey_posts_slug"]; !exists {
        t.Error("Expected unique constraint 'ukey_posts_slug' not found")
        t.Logf("Available constraints: %v", getKeys(postsTable.Constraints))
    }

View File

@@ -215,3 +215,70 @@ func TestTemplateExecutor_AuditFunction(t *testing.T) {
        t.Error("SQL missing DELETE handling")
    }
}
func TestWriteMigration_NumericConstraintNames(t *testing.T) {
// Current database (empty)
current := models.InitDatabase("testdb")
currentSchema := models.InitSchema("entity")
current.Schemas = append(current.Schemas, currentSchema)
// Model database (with constraint starting with number)
model := models.InitDatabase("testdb")
modelSchema := models.InitSchema("entity")
// Create individual_actor_relationship table
table := models.InitTable("individual_actor_relationship", "entity")
idCol := models.InitColumn("id", "individual_actor_relationship", "entity")
idCol.Type = "integer"
idCol.IsPrimaryKey = true
table.Columns["id"] = idCol
actorIDCol := models.InitColumn("actor_id", "individual_actor_relationship", "entity")
actorIDCol.Type = "integer"
table.Columns["actor_id"] = actorIDCol
// Add constraint with name starting with number
constraint := &models.Constraint{
Name: "215162_fk_actor",
Type: models.ForeignKeyConstraint,
Columns: []string{"actor_id"},
ReferencedSchema: "entity",
ReferencedTable: "actor",
ReferencedColumns: []string{"id"},
OnDelete: "CASCADE",
OnUpdate: "NO ACTION",
}
table.Constraints["215162_fk_actor"] = constraint
modelSchema.Tables = append(modelSchema.Tables, table)
model.Schemas = append(model.Schemas, modelSchema)
// Generate migration
var buf bytes.Buffer
writer, err := NewMigrationWriter(&writers.WriterOptions{})
if err != nil {
t.Fatalf("Failed to create writer: %v", err)
}
writer.writer = &buf
err = writer.WriteMigration(model, current)
if err != nil {
t.Fatalf("WriteMigration failed: %v", err)
}
output := buf.String()
t.Logf("Generated migration:\n%s", output)
// Verify constraint name is properly quoted
if !strings.Contains(output, `"215162_fk_actor"`) {
t.Error("Constraint name starting with number should be quoted")
}
// Verify the SQL is syntactically correct (contains required keywords)
if !strings.Contains(output, "ADD CONSTRAINT") {
t.Error("Migration missing ADD CONSTRAINT")
}
if !strings.Contains(output, "FOREIGN KEY") {
t.Error("Migration missing FOREIGN KEY")
}
}
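// Illustrative expected fragment (assumed, not asserted verbatim by the test): the
// generated migration should contain something like
//
//	ADD CONSTRAINT "215162_fk_actor"
//	FOREIGN KEY (actor_id)
//	REFERENCES entity.actor (id)
//
// with the numeric constraint name double-quoted by the identifier-quoting logic.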

View File

@@ -21,6 +21,7 @@ func TemplateFunctions() map[string]interface{} {
"quote": quote, "quote": quote,
"escape": escape, "escape": escape,
"safe_identifier": safeIdentifier, "safe_identifier": safeIdentifier,
"quote_ident": quoteIdent,
// Type conversion // Type conversion
"goTypeToSQL": goTypeToSQL, "goTypeToSQL": goTypeToSQL,
@@ -122,6 +123,43 @@ func safeIdentifier(s string) string {
    return strings.ToLower(safe)
}
// quoteIdent quotes a PostgreSQL identifier if necessary
// Identifiers need quoting if they:
// - Start with a digit
// - Contain special characters
// - Are reserved keywords
// - Contain uppercase letters (to preserve case)
func quoteIdent(s string) string {
if s == "" {
return `""`
}
// Check if quoting is needed
    needsQuoting := unicode.IsDigit(rune(s[0])) // Starts with digit

    // Contains uppercase letters or special characters
for _, r := range s {
if unicode.IsUpper(r) {
needsQuoting = true
break
}
if !unicode.IsLetter(r) && !unicode.IsDigit(r) && r != '_' {
needsQuoting = true
break
}
}
if needsQuoting {
// Escape double quotes by doubling them
escaped := strings.ReplaceAll(s, `"`, `""`)
return `"` + escaped + `"`
}
return s
}
// Type conversion functions

// goTypeToSQL converts Go type to PostgreSQL type

View File

@@ -101,6 +101,31 @@ func TestSafeIdentifier(t *testing.T) {
    }
}
func TestQuoteIdent(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"valid_name", "valid_name"},
{"ValidName", `"ValidName"`},
{"123column", `"123column"`},
{"215162_fk_constraint", `"215162_fk_constraint"`},
{"user-id", `"user-id"`},
{"user@domain", `"user@domain"`},
{`"quoted"`, `"""quoted"""`},
{"", `""`},
{"lowercase", "lowercase"},
{"with_underscore", "with_underscore"},
}
for _, tt := range tests {
result := quoteIdent(tt.input)
if result != tt.expected {
t.Errorf("quoteIdent(%q) = %q, want %q", tt.input, result, tt.expected)
}
}
}
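// Illustrative use of the new quote_ident helper (not part of this diff), assuming
// the pgsql package and the TemplateFunctions() map shown above:
//
//	t := template.Must(template.New("drop").
//		Funcs(TemplateFunctions()).
//		Parse(`ALTER TABLE {{quote_ident .Schema}}.{{quote_ident .Table}} DROP CONSTRAINT IF EXISTS {{quote_ident .Name}};`))
//	var buf bytes.Buffer
//	_ = t.Execute(&buf, map[string]string{"Schema": "entity", "Table": "actor", "Name": "215162_fk_actor"})
//	// buf.String() == `ALTER TABLE entity.actor DROP CONSTRAINT IF EXISTS "215162_fk_actor";`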
func TestGoTypeToSQL(t *testing.T) {
    tests := []struct {
        input    string
@@ -243,7 +268,7 @@ func TestTemplateFunctions(t *testing.T) {
    // Check that all expected functions are registered
    expectedFuncs := []string{
        "upper", "lower", "snake_case", "camelCase",
        "indent", "quote", "escape", "safe_identifier", "quote_ident",
        "goTypeToSQL", "sqlTypeToGo", "isNumeric", "isText",
        "first", "last", "filter", "mapFunc", "join_with",
        "join",

View File

@@ -177,6 +177,72 @@ type AuditTriggerData struct {
    Events string
}
// CreateUniqueConstraintData contains data for create unique constraint template
type CreateUniqueConstraintData struct {
SchemaName string
TableName string
ConstraintName string
Columns string
}
// CreateCheckConstraintData contains data for create check constraint template
type CreateCheckConstraintData struct {
SchemaName string
TableName string
ConstraintName string
Expression string
}
// CreateForeignKeyWithCheckData contains data for create foreign key with existence check template
type CreateForeignKeyWithCheckData struct {
SchemaName string
TableName string
ConstraintName string
SourceColumns string
TargetSchema string
TargetTable string
TargetColumns string
OnDelete string
OnUpdate string
Deferrable bool
}
// SetSequenceValueData contains data for set sequence value template
type SetSequenceValueData struct {
SchemaName string
TableName string
SequenceName string
ColumnName string
}
// CreateSequenceData contains data for create sequence template
type CreateSequenceData struct {
SchemaName string
SequenceName string
Increment int
MinValue int64
MaxValue int64
StartValue int64
CacheSize int
}
// AddColumnWithCheckData contains data for add column with existence check template
type AddColumnWithCheckData struct {
SchemaName string
TableName string
ColumnName string
ColumnDefinition string
}
// CreatePrimaryKeyWithAutoGenCheckData contains data for primary key with auto-generated key check template
type CreatePrimaryKeyWithAutoGenCheckData struct {
SchemaName string
TableName string
ConstraintName string
AutoGenNames string // Comma-separated list of names like "'name1', 'name2'"
Columns string
}
// Execute methods for each template

// ExecuteCreateTable executes the create table template
@@ -319,6 +385,76 @@ func (te *TemplateExecutor) ExecuteAuditTrigger(data AuditTriggerData) (string,
    return buf.String(), nil
}
// ExecuteCreateUniqueConstraint executes the create unique constraint template
func (te *TemplateExecutor) ExecuteCreateUniqueConstraint(data CreateUniqueConstraintData) (string, error) {
var buf bytes.Buffer
err := te.templates.ExecuteTemplate(&buf, "create_unique_constraint.tmpl", data)
if err != nil {
return "", fmt.Errorf("failed to execute create_unique_constraint template: %w", err)
}
return buf.String(), nil
}
// ExecuteCreateCheckConstraint executes the create check constraint template
func (te *TemplateExecutor) ExecuteCreateCheckConstraint(data CreateCheckConstraintData) (string, error) {
var buf bytes.Buffer
err := te.templates.ExecuteTemplate(&buf, "create_check_constraint.tmpl", data)
if err != nil {
return "", fmt.Errorf("failed to execute create_check_constraint template: %w", err)
}
return buf.String(), nil
}
// ExecuteCreateForeignKeyWithCheck executes the create foreign key with check template
func (te *TemplateExecutor) ExecuteCreateForeignKeyWithCheck(data CreateForeignKeyWithCheckData) (string, error) {
var buf bytes.Buffer
err := te.templates.ExecuteTemplate(&buf, "create_foreign_key_with_check.tmpl", data)
if err != nil {
return "", fmt.Errorf("failed to execute create_foreign_key_with_check template: %w", err)
}
return buf.String(), nil
}
// ExecuteSetSequenceValue executes the set sequence value template
func (te *TemplateExecutor) ExecuteSetSequenceValue(data SetSequenceValueData) (string, error) {
var buf bytes.Buffer
err := te.templates.ExecuteTemplate(&buf, "set_sequence_value.tmpl", data)
if err != nil {
return "", fmt.Errorf("failed to execute set_sequence_value template: %w", err)
}
return buf.String(), nil
}
// ExecuteCreateSequence executes the create sequence template
func (te *TemplateExecutor) ExecuteCreateSequence(data CreateSequenceData) (string, error) {
var buf bytes.Buffer
err := te.templates.ExecuteTemplate(&buf, "create_sequence.tmpl", data)
if err != nil {
return "", fmt.Errorf("failed to execute create_sequence template: %w", err)
}
return buf.String(), nil
}
// ExecuteAddColumnWithCheck executes the add column with check template
func (te *TemplateExecutor) ExecuteAddColumnWithCheck(data AddColumnWithCheckData) (string, error) {
var buf bytes.Buffer
err := te.templates.ExecuteTemplate(&buf, "add_column_with_check.tmpl", data)
if err != nil {
return "", fmt.Errorf("failed to execute add_column_with_check template: %w", err)
}
return buf.String(), nil
}
// ExecuteCreatePrimaryKeyWithAutoGenCheck executes the create primary key with auto-generated key check template
func (te *TemplateExecutor) ExecuteCreatePrimaryKeyWithAutoGenCheck(data CreatePrimaryKeyWithAutoGenCheckData) (string, error) {
var buf bytes.Buffer
err := te.templates.ExecuteTemplate(&buf, "create_primary_key_with_autogen_check.tmpl", data)
if err != nil {
return "", fmt.Errorf("failed to execute create_primary_key_with_autogen_check template: %w", err)
}
return buf.String(), nil
}
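// Illustrative usage (not part of this diff): the pgsql writer builds one of the
// data structs above and renders it through the embedded template set, e.g.
//
//	executor, err := NewTemplateExecutor()
//	if err != nil {
//		// handle error
//	}
//	sql, err := executor.ExecuteCreateUniqueConstraint(CreateUniqueConstraintData{
//		SchemaName:     "public",
//		TableName:      "users",
//		ConstraintName: "ukey_users_email",
//		Columns:        "email",
//	})
//	// sql is a DO $$ ... $$ block that adds the UNIQUE constraint only if it is missing.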
// Helper functions to build template data from models

// BuildCreateTableData builds CreateTableData from a models.Table

View File

@@ -1,4 +1,4 @@
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
ADD COLUMN IF NOT EXISTS {{quote_ident .ColumnName}} {{.ColumnType}}
{{- if .Default}} DEFAULT {{.Default}}{{end}}
{{- if .NotNull}} NOT NULL{{end}};

View File

@@ -0,0 +1,12 @@
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_schema = '{{.SchemaName}}'
AND table_name = '{{.TableName}}'
AND column_name = '{{.ColumnName}}'
) THEN
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} ADD COLUMN {{.ColumnDefinition}};
END IF;
END;
$$;

View File

@@ -1,7 +1,7 @@
{{- if .SetDefault -}}
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
ALTER COLUMN {{quote_ident .ColumnName}} SET DEFAULT {{.DefaultValue}};
{{- else -}}
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
ALTER COLUMN {{quote_ident .ColumnName}} DROP DEFAULT;
{{- end -}}

View File

@@ -1,2 +1,2 @@
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
ALTER COLUMN {{quote_ident .ColumnName}} TYPE {{.NewType}};

View File

@@ -1 +1 @@
COMMENT ON COLUMN {{quote_ident .SchemaName}}.{{quote_ident .TableName}}.{{quote_ident .ColumnName}} IS '{{.Comment}}';

View File

@@ -1 +1 @@
COMMENT ON TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} IS '{{.Comment}}';

View File

@@ -0,0 +1,12 @@
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM information_schema.table_constraints
WHERE table_schema = '{{.SchemaName}}'
AND table_name = '{{.TableName}}'
AND constraint_name = '{{.ConstraintName}}'
) THEN
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} ADD CONSTRAINT {{quote_ident .ConstraintName}} CHECK ({{.Expression}});
END IF;
END;
$$;

View File

@@ -1,10 +1,10 @@
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
DROP CONSTRAINT IF EXISTS {{quote_ident .ConstraintName}};

ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
ADD CONSTRAINT {{quote_ident .ConstraintName}}
FOREIGN KEY ({{.SourceColumns}})
REFERENCES {{quote_ident .TargetSchema}}.{{quote_ident .TargetTable}} ({{.TargetColumns}})
ON DELETE {{.OnDelete}}
ON UPDATE {{.OnUpdate}}
DEFERRABLE;

View File

@@ -0,0 +1,18 @@
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM information_schema.table_constraints
WHERE table_schema = '{{.SchemaName}}'
AND table_name = '{{.TableName}}'
AND constraint_name = '{{.ConstraintName}}'
) THEN
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
ADD CONSTRAINT {{quote_ident .ConstraintName}}
FOREIGN KEY ({{.SourceColumns}})
REFERENCES {{quote_ident .TargetSchema}}.{{quote_ident .TargetTable}} ({{.TargetColumns}})
ON DELETE {{.OnDelete}}
ON UPDATE {{.OnUpdate}}{{if .Deferrable}}
DEFERRABLE{{end}};
END IF;
END;
$$;

View File

@@ -1,2 +1,2 @@
CREATE {{if .Unique}}UNIQUE {{end}}INDEX IF NOT EXISTS {{quote_ident .IndexName}}
ON {{quote_ident .SchemaName}}.{{quote_ident .TableName}} USING {{.IndexType}} ({{.Columns}});

View File

@@ -6,8 +6,8 @@ BEGIN
        AND table_name = '{{.TableName}}'
        AND constraint_name = '{{.ConstraintName}}'
    ) THEN
        ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
        ADD CONSTRAINT {{quote_ident .ConstraintName}} PRIMARY KEY ({{.Columns}});
    END IF;
END;
$$;

View File

@@ -0,0 +1,27 @@
DO $$
DECLARE
auto_pk_name text;
BEGIN
-- Drop auto-generated primary key if it exists
SELECT constraint_name INTO auto_pk_name
FROM information_schema.table_constraints
WHERE table_schema = '{{.SchemaName}}'
AND table_name = '{{.TableName}}'
AND constraint_type = 'PRIMARY KEY'
AND constraint_name IN ({{.AutoGenNames}});
IF auto_pk_name IS NOT NULL THEN
EXECUTE 'ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} DROP CONSTRAINT ' || quote_ident(auto_pk_name);
END IF;
-- Add named primary key if it doesn't exist
IF NOT EXISTS (
SELECT 1 FROM information_schema.table_constraints
WHERE table_schema = '{{.SchemaName}}'
AND table_name = '{{.TableName}}'
AND constraint_name = '{{.ConstraintName}}'
) THEN
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} ADD CONSTRAINT {{quote_ident .ConstraintName}} PRIMARY KEY ({{.Columns}});
END IF;
END;
$$;

View File

@@ -0,0 +1,6 @@
CREATE SEQUENCE IF NOT EXISTS {{quote_ident .SchemaName}}.{{quote_ident .SequenceName}}
INCREMENT {{.Increment}}
MINVALUE {{.MinValue}}
MAXVALUE {{.MaxValue}}
START {{.StartValue}}
CACHE {{.CacheSize}};

View File

@@ -1,7 +1,7 @@
CREATE TABLE IF NOT EXISTS {{quote_ident .SchemaName}}.{{quote_ident .TableName}} (
{{- range $i, $col := .Columns}}
{{- if $i}},{{end}}
    {{quote_ident $col.Name}} {{$col.Type}}
{{- if $col.Default}} DEFAULT {{$col.Default}}{{end}}
{{- if $col.NotNull}} NOT NULL{{end}}
{{- end}}

View File

@@ -0,0 +1,12 @@
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM information_schema.table_constraints
WHERE table_schema = '{{.SchemaName}}'
AND table_name = '{{.TableName}}'
AND constraint_name = '{{.ConstraintName}}'
) THEN
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} ADD CONSTRAINT {{quote_ident .ConstraintName}} UNIQUE ({{.Columns}});
END IF;
END;
$$;

View File

@@ -1 +1 @@
ALTER TABLE {{quote_ident .SchemaName}}.{{quote_ident .TableName}} DROP CONSTRAINT IF EXISTS {{quote_ident .ConstraintName}};

View File

@@ -1 +1 @@
DROP INDEX IF EXISTS {{quote_ident .SchemaName}}.{{quote_ident .IndexName}} CASCADE;

View File

@@ -0,0 +1,19 @@
DO $$
DECLARE
m_cnt bigint;
BEGIN
IF EXISTS (
SELECT 1 FROM pg_class c
INNER JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE c.relname = '{{.SequenceName}}'
AND n.nspname = '{{.SchemaName}}'
AND c.relkind = 'S'
) THEN
SELECT COALESCE(MAX({{quote_ident .ColumnName}}), 0) + 1
FROM {{quote_ident .SchemaName}}.{{quote_ident .TableName}}
INTO m_cnt;
PERFORM setval('{{quote_ident .SchemaName}}.{{quote_ident .SequenceName}}'::regclass, m_cnt);
END IF;
END;
$$;

View File

@@ -22,6 +22,7 @@ type Writer struct {
    options         *writers.WriterOptions
    writer          io.Writer
    executionReport *ExecutionReport
    executor        *TemplateExecutor
}

// ExecutionReport tracks the execution status of SQL statements
@@ -57,8 +58,10 @@ type ExecutionError struct {
// NewWriter creates a new PostgreSQL SQL writer
func NewWriter(options *writers.WriterOptions) *Writer {
    executor, _ := NewTemplateExecutor()

    return &Writer{
        options:  options,
        executor: executor,
    }
}
@@ -215,36 +218,19 @@ func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, erro
fmt.Sprintf("%s_%s_pkey", schema.Name, table.Name), fmt.Sprintf("%s_%s_pkey", schema.Name, table.Name),
} }
// Wrap in DO block to drop auto-generated PK and add our named PK // Use template to generate primary key statement
stmt := fmt.Sprintf("DO $$\nDECLARE\n"+ data := CreatePrimaryKeyWithAutoGenCheckData{
" auto_pk_name text;\n"+ SchemaName: schema.Name,
"BEGIN\n"+ TableName: table.Name,
" -- Drop auto-generated primary key if it exists\n"+ ConstraintName: pkName,
" SELECT constraint_name INTO auto_pk_name\n"+ AutoGenNames: formatStringList(autoGenPKNames),
" FROM information_schema.table_constraints\n"+ Columns: strings.Join(pkColumns, ", "),
" WHERE table_schema = '%s'\n"+ }
" AND table_name = '%s'\n"+
" AND constraint_type = 'PRIMARY KEY'\n"+ stmt, err := w.executor.ExecuteCreatePrimaryKeyWithAutoGenCheck(data)
" AND constraint_name IN (%s);\n"+ if err != nil {
"\n"+ return nil, fmt.Errorf("failed to generate primary key for %s.%s: %w", schema.Name, table.Name, err)
" IF auto_pk_name IS NOT NULL THEN\n"+ }
" EXECUTE 'ALTER TABLE %s.%s DROP CONSTRAINT ' || quote_ident(auto_pk_name);\n"+
" END IF;\n"+
"\n"+
" -- Add named primary key if it doesn't exist\n"+
" IF NOT EXISTS (\n"+
" SELECT 1 FROM information_schema.table_constraints\n"+
" WHERE table_schema = '%s'\n"+
" AND table_name = '%s'\n"+
" AND constraint_name = '%s'\n"+
" ) THEN\n"+
" ALTER TABLE %s.%s ADD CONSTRAINT %s PRIMARY KEY (%s);\n"+
" END IF;\n"+
"END;\n$$",
schema.Name, table.Name, formatStringList(autoGenPKNames),
schema.SQLName(), table.SQLName(),
schema.Name, table.Name, pkName,
schema.SQLName(), table.SQLName(), pkName, strings.Join(pkColumns, ", "))
statements = append(statements, stmt) statements = append(statements, stmt)
} }
} }
@@ -290,7 +276,7 @@ func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, erro
            }

            stmt := fmt.Sprintf("CREATE %sINDEX IF NOT EXISTS %s ON %s.%s USING %s (%s)%s",
                uniqueStr, quoteIdentifier(index.Name), schema.SQLName(), table.SQLName(), indexType, strings.Join(columnExprs, ", "), whereClause)
            statements = append(statements, stmt)
        }
    }
@@ -302,20 +288,18 @@ func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, erro
                continue
            }

            // Use template to generate unique constraint statement
            data := CreateUniqueConstraintData{
                SchemaName:     schema.Name,
                TableName:      table.Name,
                ConstraintName: constraint.Name,
                Columns:        strings.Join(constraint.Columns, ", "),
            }

            stmt, err := w.executor.ExecuteCreateUniqueConstraint(data)
            if err != nil {
                return nil, fmt.Errorf("failed to generate unique constraint for %s.%s: %w", schema.Name, table.Name, err)
            }

            statements = append(statements, stmt)
        }
    }
@@ -327,20 +311,18 @@ func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, erro
                continue
            }

            // Use template to generate check constraint statement
            data := CreateCheckConstraintData{
                SchemaName:     schema.Name,
                TableName:      table.Name,
                ConstraintName: constraint.Name,
                Expression:     constraint.Expression,
            }

            stmt, err := w.executor.ExecuteCreateCheckConstraint(data)
            if err != nil {
                return nil, fmt.Errorf("failed to generate check constraint for %s.%s: %w", schema.Name, table.Name, err)
            }

            statements = append(statements, stmt)
        }
    }
@@ -367,23 +349,24 @@ func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, erro
                onUpdate = "NO ACTION"
            }

            // Use template to generate foreign key statement
            data := CreateForeignKeyWithCheckData{
                SchemaName:     schema.Name,
                TableName:      table.Name,
                ConstraintName: constraint.Name,
                SourceColumns:  strings.Join(constraint.Columns, ", "),
                TargetSchema:   refSchema,
                TargetTable:    constraint.ReferencedTable,
                TargetColumns:  strings.Join(constraint.ReferencedColumns, ", "),
                OnDelete:       onDelete,
                OnUpdate:       onUpdate,
                Deferrable:     false,
            }

            stmt, err := w.executor.ExecuteCreateForeignKeyWithCheck(data)
            if err != nil {
                return nil, fmt.Errorf("failed to generate foreign key for %s.%s: %w", schema.Name, table.Name, err)
            }

            statements = append(statements, stmt)
        }
    }
@@ -431,19 +414,18 @@ func (w *Writer) GenerateAddColumnStatements(schema *models.Schema) ([]string, e
        for _, col := range columns {
            colDef := w.generateColumnDefinition(col)

            // Use template to generate add column statement
            data := AddColumnWithCheckData{
                SchemaName:       schema.Name,
                TableName:        table.Name,
                ColumnName:       col.Name,
                ColumnDefinition: colDef,
            }

            stmt, err := w.executor.ExecuteAddColumnWithCheck(data)
            if err != nil {
                return nil, fmt.Errorf("failed to generate add column for %s.%s.%s: %w", schema.Name, table.Name, col.Name, err)
            }

            statements = append(statements, stmt)
        }
    }
@@ -699,13 +681,23 @@ func (w *Writer) writeSequences(schema *models.Schema) error {
        }

        seqName := fmt.Sprintf("identity_%s_%s", table.SQLName(), pk.SQLName())

        data := CreateSequenceData{
            SchemaName:   schema.Name,
            SequenceName: seqName,
            Increment:    1,
            MinValue:     1,
            MaxValue:     9223372036854775807,
            StartValue:   1,
            CacheSize:    1,
        }

        sql, err := w.executor.ExecuteCreateSequence(data)
        if err != nil {
            return fmt.Errorf("failed to generate create sequence for %s.%s: %w", schema.Name, seqName, err)
        }
        fmt.Fprint(w.writer, sql)
        fmt.Fprint(w.writer, "\n")
    }

    return nil
@@ -747,18 +739,19 @@ func (w *Writer) writeAddColumns(schema *models.Schema) error {
        for _, col := range columns {
            colDef := w.generateColumnDefinition(col)

            data := AddColumnWithCheckData{
                SchemaName:       schema.Name,
                TableName:        table.Name,
                ColumnName:       col.Name,
                ColumnDefinition: colDef,
            }

            sql, err := w.executor.ExecuteAddColumnWithCheck(data)
            if err != nil {
                return fmt.Errorf("failed to generate add column for %s.%s.%s: %w", schema.Name, table.Name, col.Name, err)
            }
            fmt.Fprint(w.writer, sql)
            fmt.Fprint(w.writer, "\n")
        }
    }
@@ -812,37 +805,20 @@ func (w *Writer) writePrimaryKeys(schema *models.Schema) error {
fmt.Sprintf("%s_%s_pkey", schema.Name, table.Name), fmt.Sprintf("%s_%s_pkey", schema.Name, table.Name),
} }
fmt.Fprintf(w.writer, "DO $$\nDECLARE\n") data := CreatePrimaryKeyWithAutoGenCheckData{
fmt.Fprintf(w.writer, " auto_pk_name text;\nBEGIN\n") SchemaName: schema.Name,
TableName: table.Name,
ConstraintName: pkName,
AutoGenNames: formatStringList(autoGenPKNames),
Columns: strings.Join(columnNames, ", "),
}
// Check for and drop auto-generated primary keys sql, err := w.executor.ExecuteCreatePrimaryKeyWithAutoGenCheck(data)
fmt.Fprintf(w.writer, " -- Drop auto-generated primary key if it exists\n") if err != nil {
fmt.Fprintf(w.writer, " SELECT constraint_name INTO auto_pk_name\n") return fmt.Errorf("failed to generate primary key for %s.%s: %w", schema.Name, table.Name, err)
fmt.Fprintf(w.writer, " FROM information_schema.table_constraints\n") }
fmt.Fprintf(w.writer, " WHERE table_schema = '%s'\n", schema.Name) fmt.Fprint(w.writer, sql)
fmt.Fprintf(w.writer, " AND table_name = '%s'\n", table.Name) fmt.Fprint(w.writer, "\n")
fmt.Fprintf(w.writer, " AND constraint_type = 'PRIMARY KEY'\n")
fmt.Fprintf(w.writer, " AND constraint_name IN (%s);\n", formatStringList(autoGenPKNames))
fmt.Fprintf(w.writer, "\n")
fmt.Fprintf(w.writer, " IF auto_pk_name IS NOT NULL THEN\n")
fmt.Fprintf(w.writer, " EXECUTE 'ALTER TABLE %s.%s DROP CONSTRAINT ' || quote_ident(auto_pk_name);\n",
schema.SQLName(), table.SQLName())
fmt.Fprintf(w.writer, " END IF;\n")
fmt.Fprintf(w.writer, "\n")
// Add our named primary key if it doesn't exist
fmt.Fprintf(w.writer, " -- Add named primary key if it doesn't exist\n")
fmt.Fprintf(w.writer, " IF NOT EXISTS (\n")
fmt.Fprintf(w.writer, " SELECT 1 FROM information_schema.table_constraints\n")
fmt.Fprintf(w.writer, " WHERE table_schema = '%s'\n", schema.Name)
fmt.Fprintf(w.writer, " AND table_name = '%s'\n", table.Name)
fmt.Fprintf(w.writer, " AND constraint_name = '%s'\n", pkName)
fmt.Fprintf(w.writer, " ) THEN\n")
fmt.Fprintf(w.writer, " ALTER TABLE %s.%s\n", schema.SQLName(), table.SQLName())
fmt.Fprintf(w.writer, " ADD CONSTRAINT %s PRIMARY KEY (%s);\n",
pkName, strings.Join(columnNames, ", "))
fmt.Fprintf(w.writer, " END IF;\n")
fmt.Fprintf(w.writer, "END;\n$$;\n\n")
} }
return nil return nil
@@ -954,20 +930,17 @@ func (w *Writer) writeUniqueConstraints(schema *models.Schema) error {
                continue
            }

            sql, err := w.executor.ExecuteCreateUniqueConstraint(CreateUniqueConstraintData{
                SchemaName:     schema.Name,
                TableName:      table.Name,
                ConstraintName: constraint.Name,
                Columns:        strings.Join(columnExprs, ", "),
            })
            if err != nil {
                return fmt.Errorf("failed to generate unique constraint: %w", err)
            }

            fmt.Fprintf(w.writer, "%s\n\n", sql)
        }
    }
@@ -996,20 +969,17 @@ func (w *Writer) writeCheckConstraints(schema *models.Schema) error {
                continue
            }

            sql, err := w.executor.ExecuteCreateCheckConstraint(CreateCheckConstraintData{
                SchemaName:     schema.Name,
                TableName:      table.Name,
                ConstraintName: constraint.Name,
                Expression:     constraint.Expression,
            })
            if err != nil {
                return fmt.Errorf("failed to generate check constraint: %w", err)
            }

            fmt.Fprintf(w.writer, "%s\n\n", sql)
        }
    }
@@ -1093,24 +1063,24 @@ func (w *Writer) writeForeignKeys(schema *models.Schema) error {
            refTable = rel.ToTable
        }

        // Use template executor to generate foreign key with existence check
        data := CreateForeignKeyWithCheckData{
            SchemaName:     schema.Name,
            TableName:      table.Name,
            ConstraintName: fkName,
            SourceColumns:  strings.Join(sourceColumns, ", "),
            TargetSchema:   refSchema,
            TargetTable:    refTable,
            TargetColumns:  strings.Join(targetColumns, ", "),
            OnDelete:       onDelete,
            OnUpdate:       onUpdate,
            Deferrable:     true,
        }
        sql, err := w.executor.ExecuteCreateForeignKeyWithCheck(data)
        if err != nil {
            return fmt.Errorf("failed to generate foreign key for %s.%s: %w", schema.Name, table.Name, err)
        }
        fmt.Fprint(w.writer, sql)
    }

    // Also process any foreign key constraints that don't have a relationship
@@ -1172,23 +1142,24 @@ func (w *Writer) writeForeignKeys(schema *models.Schema) error {
        }

        refTable := constraint.ReferencedTable

        // Use template executor to generate foreign key with existence check
        data := CreateForeignKeyWithCheckData{
            SchemaName:     schema.Name,
            TableName:      table.Name,
            ConstraintName: constraint.Name,
            SourceColumns:  strings.Join(sourceColumns, ", "),
            TargetSchema:   refSchema,
            TargetTable:    refTable,
            TargetColumns:  strings.Join(targetColumns, ", "),
            OnDelete:       onDelete,
            OnUpdate:       onUpdate,
            Deferrable:     false,
        }
        sql, err := w.executor.ExecuteCreateForeignKeyWithCheck(data)
        if err != nil {
            return fmt.Errorf("failed to generate foreign key for %s.%s: %w", schema.Name, table.Name, err)
        }
        fmt.Fprint(w.writer, sql)
        }
    }
@@ -1207,26 +1178,19 @@ func (w *Writer) writeSetSequenceValues(schema *models.Schema) error {
        seqName := fmt.Sprintf("identity_%s_%s", table.SQLName(), pk.SQLName())

        // Use template executor to generate set sequence value statement
        data := SetSequenceValueData{
            SchemaName:   schema.Name,
            TableName:    table.Name,
            SequenceName: seqName,
            ColumnName:   pk.Name,
        }
        sql, err := w.executor.ExecuteSetSequenceValue(data)
        if err != nil {
            return fmt.Errorf("failed to generate set sequence value for %s.%s: %w", schema.Name, table.Name, err)
        }
        fmt.Fprint(w.writer, sql)
        fmt.Fprint(w.writer, "\n")
    }

    return nil
@@ -1446,7 +1410,8 @@ func (w *Writer) executeDatabaseSQL(db *models.Database, connString string) erro
            continue
        }

        stmtType := detectStatementType(stmtTrimmed)
        fmt.Fprintf(os.Stderr, "Executing statement %d/%d [%s]...\n", i+1, len(statements), stmtType)

        _, execErr := conn.Exec(ctx, stmt)
        if execErr != nil {
@@ -1580,3 +1545,94 @@ func truncateStatement(stmt string) string {
func getCurrentTimestamp() string {
    return time.Now().Format("2006-01-02 15:04:05")
}
// detectStatementType detects the type of SQL statement for logging
func detectStatementType(stmt string) string {
upperStmt := strings.ToUpper(stmt)
// Check for DO blocks (used for conditional DDL)
if strings.HasPrefix(upperStmt, "DO $$") || strings.HasPrefix(upperStmt, "DO $") {
// Look inside the DO block for the actual operation
if strings.Contains(upperStmt, "ALTER TABLE") && strings.Contains(upperStmt, "ADD CONSTRAINT") {
if strings.Contains(upperStmt, "UNIQUE") {
return "ADD UNIQUE CONSTRAINT"
} else if strings.Contains(upperStmt, "FOREIGN KEY") {
return "ADD FOREIGN KEY"
} else if strings.Contains(upperStmt, "PRIMARY KEY") {
return "ADD PRIMARY KEY"
} else if strings.Contains(upperStmt, "CHECK") {
return "ADD CHECK CONSTRAINT"
}
return "ADD CONSTRAINT"
}
if strings.Contains(upperStmt, "ALTER TABLE") && strings.Contains(upperStmt, "ADD COLUMN") {
return "ADD COLUMN"
}
if strings.Contains(upperStmt, "DROP CONSTRAINT") {
return "DROP CONSTRAINT"
}
return "DO BLOCK"
}
// Direct DDL statements
if strings.HasPrefix(upperStmt, "CREATE SCHEMA") {
return "CREATE SCHEMA"
}
if strings.HasPrefix(upperStmt, "CREATE SEQUENCE") {
return "CREATE SEQUENCE"
}
if strings.HasPrefix(upperStmt, "CREATE TABLE") {
return "CREATE TABLE"
}
if strings.HasPrefix(upperStmt, "CREATE INDEX") {
return "CREATE INDEX"
}
if strings.HasPrefix(upperStmt, "CREATE UNIQUE INDEX") {
return "CREATE UNIQUE INDEX"
}
if strings.HasPrefix(upperStmt, "ALTER TABLE") {
if strings.Contains(upperStmt, "ADD CONSTRAINT") {
if strings.Contains(upperStmt, "FOREIGN KEY") {
return "ADD FOREIGN KEY"
} else if strings.Contains(upperStmt, "PRIMARY KEY") {
return "ADD PRIMARY KEY"
} else if strings.Contains(upperStmt, "UNIQUE") {
return "ADD UNIQUE CONSTRAINT"
} else if strings.Contains(upperStmt, "CHECK") {
return "ADD CHECK CONSTRAINT"
}
return "ADD CONSTRAINT"
}
if strings.Contains(upperStmt, "ADD COLUMN") {
return "ADD COLUMN"
}
if strings.Contains(upperStmt, "DROP CONSTRAINT") {
return "DROP CONSTRAINT"
}
if strings.Contains(upperStmt, "ALTER COLUMN") {
return "ALTER COLUMN"
}
return "ALTER TABLE"
}
if strings.HasPrefix(upperStmt, "COMMENT ON TABLE") {
return "COMMENT ON TABLE"
}
if strings.HasPrefix(upperStmt, "COMMENT ON COLUMN") {
return "COMMENT ON COLUMN"
}
if strings.HasPrefix(upperStmt, "DROP TABLE") {
return "DROP TABLE"
}
if strings.HasPrefix(upperStmt, "DROP INDEX") {
return "DROP INDEX"
}
// Default
return "SQL"
}
// quoteIdentifier wraps an identifier in double quotes if necessary
// This is needed for identifiers that start with numbers or contain special characters
func quoteIdentifier(s string) string {
return quoteIdent(s)
}
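// Illustrative classification examples (not part of this diff), following the
// rules in detectStatementType above:
//
//	detectStatementType("CREATE TABLE IF NOT EXISTS public.users (id int)")              // "CREATE TABLE"
//	detectStatementType("ALTER TABLE public.users ADD COLUMN email varchar")             // "ADD COLUMN"
//	detectStatementType("DO $$ BEGIN ALTER TABLE x ADD CONSTRAINT fk FOREIGN KEY (a) REFERENCES y(b); END; $$") // "ADD FOREIGN KEY"
//	detectStatementType("SELECT 1")                                                      // "SQL"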