Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 3c20c3c5d9 | |
| | a54594e49b | |
| | cafe6a461f | |
@@ -18,6 +18,7 @@ var (
    scriptsConn         string
    scriptsSchemaName   string
    scriptsDBName       string
    scriptsIgnoreErrors bool
)

var scriptsCmd = &cobra.Command{

@@ -62,7 +63,7 @@ var scriptsExecuteCmd = &cobra.Command{
    Long: `Execute SQL scripts from a directory against a PostgreSQL database.

Scripts are executed in order: Priority (ascending), Sequence (ascending), Name (alphabetical).
Execution stops immediately on the first error.
By default, execution stops immediately on the first error. Use --ignore-errors to continue execution.

The directory is scanned recursively for all subdirectories and files matching the patterns:
  {priority}_{sequence}_{name}.sql or .pgsql (underscore format)

@@ -86,7 +87,12 @@ Examples:

  # Execute with SSL disabled
  relspec scripts execute --dir ./sql \
    --conn "postgres://user:pass@localhost/db?sslmode=disable"`,
    --conn "postgres://user:pass@localhost/db?sslmode=disable"

  # Continue executing even if errors occur
  relspec scripts execute --dir ./migrations \
    --conn "postgres://localhost/mydb" \
    --ignore-errors`,
    RunE: runScriptsExecute,
}

@@ -105,6 +111,7 @@ func init() {
    scriptsExecuteCmd.Flags().StringVar(&scriptsConn, "conn", "", "PostgreSQL connection string (required)")
    scriptsExecuteCmd.Flags().StringVar(&scriptsSchemaName, "schema", "public", "Schema name (optional, default: public)")
    scriptsExecuteCmd.Flags().StringVar(&scriptsDBName, "database", "database", "Database name (optional, default: database)")
    scriptsExecuteCmd.Flags().BoolVar(&scriptsIgnoreErrors, "ignore-errors", false, "Continue executing scripts even if errors occur")

    err = scriptsExecuteCmd.MarkFlagRequired("dir")
    if err != nil {

@@ -250,17 +257,39 @@ func runScriptsExecute(cmd *cobra.Command, args []string) error {
    writer := sqlexec.NewWriter(&writers.WriterOptions{
        Metadata: map[string]any{
            "connection_string": scriptsConn,
            "ignore_errors":     scriptsIgnoreErrors,
        },
    })

    if err := writer.WriteSchema(schema); err != nil {
        fmt.Fprintf(os.Stderr, "\n")
        return fmt.Errorf("execution failed: %w", err)
        return fmt.Errorf("script execution failed: %w", err)
    }

    // Get execution results from writer metadata
    totalCount := len(schema.Scripts)
    successCount := totalCount
    failedCount := 0

    opts := writer.Options()
    if total, exists := opts.Metadata["execution_total"].(int); exists {
        totalCount = total
    }
    if success, exists := opts.Metadata["execution_success"].(int); exists {
        successCount = success
    }
    if failed, exists := opts.Metadata["execution_failed"].(int); exists {
        failedCount = failed
    }

    fmt.Fprintf(os.Stderr, "\n=== Execution Complete ===\n")
    fmt.Fprintf(os.Stderr, "Completed at: %s\n", getCurrentTimestamp())
    fmt.Fprintf(os.Stderr, "Successfully executed %d script(s)\n\n", len(schema.Scripts))
    fmt.Fprintf(os.Stderr, "Total scripts: %d\n", totalCount)
    fmt.Fprintf(os.Stderr, "Successful: %d\n", successCount)
    if failedCount > 0 {
        fmt.Fprintf(os.Stderr, "Failed: %d\n", failedCount)
    }
    fmt.Fprintf(os.Stderr, "\n")

    return nil
}
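The command and the sqlexec writer communicate through `WriterOptions.Metadata`: the command stores `ignore_errors` before execution and reads `execution_total`, `execution_success`, and `execution_failed` back afterwards, falling back to defaults when a key is missing. A minimal sketch of that type-assertion-with-default pattern; `metaInt` is a hypothetical helper for illustration, not part of the project:

```go
package main

import "fmt"

// metaInt illustrates the lookup pattern used above: read an int from an
// untyped metadata map, falling back to a default when the key is absent
// or holds a different type.
func metaInt(meta map[string]any, key string, fallback int) int {
	if v, ok := meta[key].(int); ok {
		return v
	}
	return fallback
}

func main() {
	// Simulated writer metadata after a run with one failed script.
	meta := map[string]any{
		"execution_total":   3,
		"execution_success": 2,
		"execution_failed":  1,
	}

	total := metaInt(meta, "execution_total", 0)
	success := metaInt(meta, "execution_success", total)
	failed := metaInt(meta, "execution_failed", 0)

	fmt.Printf("Total scripts: %d, Successful: %d, Failed: %d\n", total, success, failed)
}
```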
@@ -604,7 +604,7 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
    } else if attr == "unique" {
        // Create a unique constraint
        uniqueConstraint := models.InitConstraint(
            fmt.Sprintf("uq_%s", columnName),
            fmt.Sprintf("uq_%s_%s", tableName, columnName),
            models.UniqueConstraint,
        )
        uniqueConstraint.Schema = schemaName

@@ -652,8 +652,8 @@ func (r *Reader) parseColumn(line, tableName, schemaName string) (*models.Column
                constraint.Table = tableName
                constraint.Columns = []string{columnName}
            }
            // Generate short constraint name based on the column
            constraint.Name = fmt.Sprintf("fk_%s", constraint.Columns[0])
            // Generate constraint name based on table and columns
            constraint.Name = fmt.Sprintf("fk_%s_%s", constraint.Table, strings.Join(constraint.Columns, "_"))
        }
    }
}

@@ -737,7 +737,11 @@ func (r *Reader) parseIndex(line, tableName, schemaName string) *models.Index {

    // Generate name if not provided
    if index.Name == "" {
        index.Name = fmt.Sprintf("idx_%s_%s", tableName, strings.Join(columns, "_"))
        prefix := "idx"
        if index.Unique {
            prefix = "uidx"
        }
        index.Name = fmt.Sprintf("%s_%s_%s", prefix, tableName, strings.Join(columns, "_"))
    }

    return index

@@ -797,10 +801,10 @@ func (r *Reader) parseRef(refStr string) *models.Constraint {
        return nil
    }

    // Generate short constraint name based on the source column
    constraintName := fmt.Sprintf("fk_%s_%s", fromTable, toTable)
    if len(fromColumns) > 0 {
        constraintName = fmt.Sprintf("fk_%s", fromColumns[0])
    // Generate constraint name based on table and columns
    constraintName := fmt.Sprintf("fk_%s_%s", fromTable, strings.Join(fromColumns, "_"))
    if len(fromColumns) == 0 {
        constraintName = fmt.Sprintf("fk_%s_%s", fromTable, toTable)
    }

    constraint := models.InitConstraint(
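Taken together, these reader changes switch generated names from column-only to table-qualified identifiers: unique constraints become `uq_{table}_{column}`, foreign keys become `fk_{table}_{columns}`, and index names gain a `uidx_` prefix when unique. A standalone sketch of the resulting convention, distilled into free-standing helpers for illustration (not the project's API):

```go
package main

import (
	"fmt"
	"strings"
)

// Naming rules implied by the reader changes above.
func uniqueName(table, column string) string {
	return fmt.Sprintf("uq_%s_%s", table, column)
}

func foreignKeyName(table string, columns []string) string {
	return fmt.Sprintf("fk_%s_%s", table, strings.Join(columns, "_"))
}

func indexName(table string, columns []string, unique bool) string {
	prefix := "idx"
	if unique {
		prefix = "uidx"
	}
	return fmt.Sprintf("%s_%s_%s", prefix, table, strings.Join(columns, "_"))
}

func main() {
	fmt.Println(uniqueName("users", "email"))                                // uq_users_email
	fmt.Println(foreignKeyName("posts", []string{"user_id"}))                // fk_posts_user_id
	fmt.Println(indexName("posts", []string{"user_id", "published"}, false)) // idx_posts_user_id_published
	fmt.Println(indexName("posts", []string{"slug"}, true))                  // uidx_posts_slug
}
```

The TestConstraintNaming test in the next hunk checks exactly these names against the complex.dbml fixture.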
@@ -777,6 +777,76 @@ func TestParseFilePrefix(t *testing.T) {
    }
}

func TestConstraintNaming(t *testing.T) {
    // Test that constraints are named with proper prefixes
    opts := &readers.ReaderOptions{
        FilePath: filepath.Join("..", "..", "..", "tests", "assets", "dbml", "complex.dbml"),
    }

    reader := NewReader(opts)
    db, err := reader.ReadDatabase()
    if err != nil {
        t.Fatalf("ReadDatabase() error = %v", err)
    }

    // Find users table
    var usersTable *models.Table
    var postsTable *models.Table
    for _, schema := range db.Schemas {
        for _, table := range schema.Tables {
            if table.Name == "users" {
                usersTable = table
            } else if table.Name == "posts" {
                postsTable = table
            }
        }
    }

    if usersTable == nil {
        t.Fatal("Users table not found")
    }
    if postsTable == nil {
        t.Fatal("Posts table not found")
    }

    // Test unique constraint naming: uq_table_column
    if _, exists := usersTable.Constraints["uq_users_email"]; !exists {
        t.Error("Expected unique constraint 'uq_users_email' not found")
        t.Logf("Available constraints: %v", getKeys(usersTable.Constraints))
    }

    if _, exists := postsTable.Constraints["uq_posts_slug"]; !exists {
        t.Error("Expected unique constraint 'uq_posts_slug' not found")
        t.Logf("Available constraints: %v", getKeys(postsTable.Constraints))
    }

    // Test foreign key naming: fk_table_column
    if _, exists := postsTable.Constraints["fk_posts_user_id"]; !exists {
        t.Error("Expected foreign key 'fk_posts_user_id' not found")
        t.Logf("Available constraints: %v", getKeys(postsTable.Constraints))
    }

    // Test unique index naming: uidx_table_columns
    if _, exists := postsTable.Indexes["uidx_posts_slug"]; !exists {
        t.Error("Expected unique index 'uidx_posts_slug' not found")
        t.Logf("Available indexes: %v", getKeys(postsTable.Indexes))
    }

    // Test regular index naming: idx_table_columns
    if _, exists := postsTable.Indexes["idx_posts_user_id_published"]; !exists {
        t.Error("Expected index 'idx_posts_user_id_published' not found")
        t.Logf("Available indexes: %v", getKeys(postsTable.Indexes))
    }
}

func getKeys[V any](m map[string]V) []string {
    keys := make([]string, 0, len(m))
    for k := range m {
        keys = append(keys, k)
    }
    return keys
}

func TestHasCommentedRefs(t *testing.T) {
    // Test with the actual multifile test fixtures
    tests := []struct {
@@ -295,6 +295,56 @@ func (w *Writer) GenerateSchemaStatements(schema *models.Schema) ([]string, erro
        }
    }

    // Phase 5.5: Unique constraints
    for _, table := range schema.Tables {
        for _, constraint := range table.Constraints {
            if constraint.Type != models.UniqueConstraint {
                continue
            }

            // Wrap in DO block to check for existing constraint
            stmt := fmt.Sprintf("DO $$\nBEGIN\n"+
                " IF NOT EXISTS (\n"+
                " SELECT 1 FROM information_schema.table_constraints\n"+
                " WHERE table_schema = '%s'\n"+
                " AND table_name = '%s'\n"+
                " AND constraint_name = '%s'\n"+
                " ) THEN\n"+
                " ALTER TABLE %s.%s ADD CONSTRAINT %s UNIQUE (%s);\n"+
                " END IF;\n"+
                "END;\n$$",
                schema.Name, table.Name, constraint.Name,
                schema.SQLName(), table.SQLName(), constraint.Name,
                strings.Join(constraint.Columns, ", "))
            statements = append(statements, stmt)
        }
    }

    // Phase 5.7: Check constraints
    for _, table := range schema.Tables {
        for _, constraint := range table.Constraints {
            if constraint.Type != models.CheckConstraint {
                continue
            }

            // Wrap in DO block to check for existing constraint
            stmt := fmt.Sprintf("DO $$\nBEGIN\n"+
                " IF NOT EXISTS (\n"+
                " SELECT 1 FROM information_schema.table_constraints\n"+
                " WHERE table_schema = '%s'\n"+
                " AND table_name = '%s'\n"+
                " AND constraint_name = '%s'\n"+
                " ) THEN\n"+
                " ALTER TABLE %s.%s ADD CONSTRAINT %s CHECK (%s);\n"+
                " END IF;\n"+
                "END;\n$$",
                schema.Name, table.Name, constraint.Name,
                schema.SQLName(), table.SQLName(), constraint.Name,
                constraint.Expression)
            statements = append(statements, stmt)
        }
    }

    // Phase 6: Foreign keys
    for _, table := range schema.Tables {
        for _, constraint := range table.Constraints {

@@ -542,6 +592,16 @@ func (w *Writer) WriteSchema(schema *models.Schema) error {
        return err
    }

    // Phase 5.5: Create unique constraints (priority 185)
    if err := w.writeUniqueConstraints(schema); err != nil {
        return err
    }

    // Phase 5.7: Create check constraints (priority 190)
    if err := w.writeCheckConstraints(schema); err != nil {
        return err
    }

    // Phase 6: Create foreign key constraints (priority 195)
    if err := w.writeForeignKeys(schema); err != nil {
        return err

@@ -865,6 +925,97 @@ func (w *Writer) writeIndexes(schema *models.Schema) error {
    return nil
}

// writeUniqueConstraints generates ALTER TABLE statements for unique constraints
func (w *Writer) writeUniqueConstraints(schema *models.Schema) error {
    fmt.Fprintf(w.writer, "-- Unique constraints for schema: %s\n", schema.Name)

    for _, table := range schema.Tables {
        // Sort constraints by name for consistent output
        constraintNames := make([]string, 0, len(table.Constraints))
        for name, constraint := range table.Constraints {
            if constraint.Type == models.UniqueConstraint {
                constraintNames = append(constraintNames, name)
            }
        }
        sort.Strings(constraintNames)

        for _, name := range constraintNames {
            constraint := table.Constraints[name]

            // Build column list
            columnExprs := make([]string, 0, len(constraint.Columns))
            for _, colName := range constraint.Columns {
                if col, ok := table.Columns[colName]; ok {
                    columnExprs = append(columnExprs, col.SQLName())
                }
            }

            if len(columnExprs) == 0 {
                continue
            }

            // Wrap in DO block to check for existing constraint
            fmt.Fprintf(w.writer, "DO $$\n")
            fmt.Fprintf(w.writer, "BEGIN\n")
            fmt.Fprintf(w.writer, " IF NOT EXISTS (\n")
            fmt.Fprintf(w.writer, " SELECT 1 FROM information_schema.table_constraints\n")
            fmt.Fprintf(w.writer, " WHERE table_schema = '%s'\n", schema.Name)
            fmt.Fprintf(w.writer, " AND table_name = '%s'\n", table.Name)
            fmt.Fprintf(w.writer, " AND constraint_name = '%s'\n", constraint.Name)
            fmt.Fprintf(w.writer, " ) THEN\n")
            fmt.Fprintf(w.writer, " ALTER TABLE %s.%s ADD CONSTRAINT %s UNIQUE (%s);\n",
                schema.SQLName(), table.SQLName(), constraint.Name, strings.Join(columnExprs, ", "))
            fmt.Fprintf(w.writer, " END IF;\n")
            fmt.Fprintf(w.writer, "END;\n")
            fmt.Fprintf(w.writer, "$$;\n\n")
        }
    }

    return nil
}

// writeCheckConstraints generates ALTER TABLE statements for check constraints
func (w *Writer) writeCheckConstraints(schema *models.Schema) error {
    fmt.Fprintf(w.writer, "-- Check constraints for schema: %s\n", schema.Name)

    for _, table := range schema.Tables {
        // Sort constraints by name for consistent output
        constraintNames := make([]string, 0, len(table.Constraints))
        for name, constraint := range table.Constraints {
            if constraint.Type == models.CheckConstraint {
                constraintNames = append(constraintNames, name)
            }
        }
        sort.Strings(constraintNames)

        for _, name := range constraintNames {
            constraint := table.Constraints[name]

            // Skip if expression is empty
            if constraint.Expression == "" {
                continue
            }

            // Wrap in DO block to check for existing constraint
            fmt.Fprintf(w.writer, "DO $$\n")
            fmt.Fprintf(w.writer, "BEGIN\n")
            fmt.Fprintf(w.writer, " IF NOT EXISTS (\n")
            fmt.Fprintf(w.writer, " SELECT 1 FROM information_schema.table_constraints\n")
            fmt.Fprintf(w.writer, " WHERE table_schema = '%s'\n", schema.Name)
            fmt.Fprintf(w.writer, " AND table_name = '%s'\n", table.Name)
            fmt.Fprintf(w.writer, " AND constraint_name = '%s'\n", constraint.Name)
            fmt.Fprintf(w.writer, " ) THEN\n")
            fmt.Fprintf(w.writer, " ALTER TABLE %s.%s ADD CONSTRAINT %s CHECK (%s);\n",
                schema.SQLName(), table.SQLName(), constraint.Name, constraint.Expression)
            fmt.Fprintf(w.writer, " END IF;\n")
            fmt.Fprintf(w.writer, "END;\n")
            fmt.Fprintf(w.writer, "$$;\n\n")
        }
    }

    return nil
}

// writeForeignKeys generates ALTER TABLE statements for foreign keys
func (w *Writer) writeForeignKeys(schema *models.Schema) error {
    fmt.Fprintf(w.writer, "-- Foreign keys for schema: %s\n", schema.Name)

@@ -961,6 +1112,84 @@ func (w *Writer) writeForeignKeys(schema *models.Schema) error {
            fmt.Fprintf(w.writer, " END IF;\n")
            fmt.Fprintf(w.writer, "END;\n$$;\n\n")
        }

        // Also process any foreign key constraints that don't have a relationship
        processedConstraints := make(map[string]bool)
        for _, rel := range table.Relationships {
            fkName := rel.ForeignKey
            if fkName == "" {
                fkName = rel.Name
            }
            if fkName != "" {
                processedConstraints[fkName] = true
            }
        }

        // Find unprocessed foreign key constraints
        constraintNames := make([]string, 0)
        for name, constraint := range table.Constraints {
            if constraint.Type == models.ForeignKeyConstraint && !processedConstraints[name] {
                constraintNames = append(constraintNames, name)
            }
        }
        sort.Strings(constraintNames)

        for _, name := range constraintNames {
            constraint := table.Constraints[name]

            // Build column lists
            sourceColumns := make([]string, 0, len(constraint.Columns))
            for _, colName := range constraint.Columns {
                if col, ok := table.Columns[colName]; ok {
                    sourceColumns = append(sourceColumns, col.SQLName())
                } else {
                    sourceColumns = append(sourceColumns, colName)
                }
            }

            targetColumns := make([]string, 0, len(constraint.ReferencedColumns))
            for _, colName := range constraint.ReferencedColumns {
                targetColumns = append(targetColumns, strings.ToLower(colName))
            }

            if len(sourceColumns) == 0 || len(targetColumns) == 0 {
                continue
            }

            onDelete := "NO ACTION"
            if constraint.OnDelete != "" {
                onDelete = strings.ToUpper(constraint.OnDelete)
            }

            onUpdate := "NO ACTION"
            if constraint.OnUpdate != "" {
                onUpdate = strings.ToUpper(constraint.OnUpdate)
            }

            refSchema := constraint.ReferencedSchema
            if refSchema == "" {
                refSchema = schema.Name
            }
            refTable := constraint.ReferencedTable

            // Use DO block to check if constraint exists before adding
            fmt.Fprintf(w.writer, "DO $$\nBEGIN\n")
            fmt.Fprintf(w.writer, " IF NOT EXISTS (\n")
            fmt.Fprintf(w.writer, " SELECT 1 FROM information_schema.table_constraints\n")
            fmt.Fprintf(w.writer, " WHERE table_schema = '%s'\n", schema.Name)
            fmt.Fprintf(w.writer, " AND table_name = '%s'\n", table.Name)
            fmt.Fprintf(w.writer, " AND constraint_name = '%s'\n", constraint.Name)
            fmt.Fprintf(w.writer, " ) THEN\n")
            fmt.Fprintf(w.writer, " ALTER TABLE %s.%s\n", schema.SQLName(), table.SQLName())
            fmt.Fprintf(w.writer, " ADD CONSTRAINT %s\n", constraint.Name)
            fmt.Fprintf(w.writer, " FOREIGN KEY (%s)\n", strings.Join(sourceColumns, ", "))
            fmt.Fprintf(w.writer, " REFERENCES %s.%s (%s)\n",
                refSchema, refTable, strings.Join(targetColumns, ", "))
            fmt.Fprintf(w.writer, " ON DELETE %s\n", onDelete)
            fmt.Fprintf(w.writer, " ON UPDATE %s;\n", onUpdate)
            fmt.Fprintf(w.writer, " END IF;\n")
            fmt.Fprintf(w.writer, "END;\n$$;\n\n")
        }
    }

    return nil
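Each constraint is emitted inside a `DO $$ ... END; $$` block that consults `information_schema.table_constraints` first, so re-running the generated script against a database that already has the constraint is a no-op instead of an error. A minimal, self-contained sketch of that idempotent-DDL shape (illustrative only; the writer above builds the same statement with `fmt.Fprintf`):

```go
package main

import (
	"fmt"
	"strings"
)

// uniqueConstraintDDL renders the idempotent ALTER TABLE pattern used by the
// writer: add the UNIQUE constraint only if no constraint with that name
// already exists on the table.
func uniqueConstraintDDL(schema, table, name string, columns []string) string {
	var b strings.Builder
	b.WriteString("DO $$\nBEGIN\n")
	b.WriteString("  IF NOT EXISTS (\n")
	b.WriteString("    SELECT 1 FROM information_schema.table_constraints\n")
	fmt.Fprintf(&b, "    WHERE table_schema = '%s'\n", schema)
	fmt.Fprintf(&b, "      AND table_name = '%s'\n", table)
	fmt.Fprintf(&b, "      AND constraint_name = '%s'\n", name)
	b.WriteString("  ) THEN\n")
	fmt.Fprintf(&b, "    ALTER TABLE %s.%s ADD CONSTRAINT %s UNIQUE (%s);\n",
		schema, table, name, strings.Join(columns, ", "))
	b.WriteString("  END IF;\nEND;\n$$;")
	return b.String()
}

func main() {
	fmt.Println(uniqueConstraintDDL("public", "users", "uq_users_email", []string{"email"}))
}
```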
@@ -164,6 +164,296 @@ func TestWriteForeignKeys(t *testing.T) {
    }
}

func TestWriteUniqueConstraints(t *testing.T) {
    // Create a test database with unique constraints
    db := models.InitDatabase("testdb")
    schema := models.InitSchema("public")

    // Create table with unique constraints
    table := models.InitTable("users", "public")

    // Add columns
    emailCol := models.InitColumn("email", "users", "public")
    emailCol.Type = "varchar(255)"
    emailCol.NotNull = true
    table.Columns["email"] = emailCol

    guidCol := models.InitColumn("guid", "users", "public")
    guidCol.Type = "uuid"
    guidCol.NotNull = true
    table.Columns["guid"] = guidCol

    // Add unique constraints
    emailConstraint := &models.Constraint{
        Name:    "uq_email",
        Type:    models.UniqueConstraint,
        Schema:  "public",
        Table:   "users",
        Columns: []string{"email"},
    }
    table.Constraints["uq_email"] = emailConstraint

    guidConstraint := &models.Constraint{
        Name:    "uq_guid",
        Type:    models.UniqueConstraint,
        Schema:  "public",
        Table:   "users",
        Columns: []string{"guid"},
    }
    table.Constraints["uq_guid"] = guidConstraint

    schema.Tables = append(schema.Tables, table)
    db.Schemas = append(db.Schemas, schema)

    // Create writer with output to buffer
    var buf bytes.Buffer
    options := &writers.WriterOptions{}
    writer := NewWriter(options)
    writer.writer = &buf

    // Write the database
    err := writer.WriteDatabase(db)
    if err != nil {
        t.Fatalf("WriteDatabase failed: %v", err)
    }

    output := buf.String()

    // Print output for debugging
    t.Logf("Generated SQL:\n%s", output)

    // Verify unique constraints are present
    if !strings.Contains(output, "-- Unique constraints for schema: public") {
        t.Errorf("Output missing unique constraints header")
    }
    if !strings.Contains(output, "ADD CONSTRAINT uq_email UNIQUE (email)") {
        t.Errorf("Output missing uq_email unique constraint\nFull output:\n%s", output)
    }
    if !strings.Contains(output, "ADD CONSTRAINT uq_guid UNIQUE (guid)") {
        t.Errorf("Output missing uq_guid unique constraint\nFull output:\n%s", output)
    }
}

func TestWriteCheckConstraints(t *testing.T) {
    // Create a test database with check constraints
    db := models.InitDatabase("testdb")
    schema := models.InitSchema("public")

    // Create table with check constraints
    table := models.InitTable("products", "public")

    // Add columns
    priceCol := models.InitColumn("price", "products", "public")
    priceCol.Type = "numeric(10,2)"
    table.Columns["price"] = priceCol

    statusCol := models.InitColumn("status", "products", "public")
    statusCol.Type = "varchar(20)"
    table.Columns["status"] = statusCol

    quantityCol := models.InitColumn("quantity", "products", "public")
    quantityCol.Type = "integer"
    table.Columns["quantity"] = quantityCol

    // Add check constraints
    priceConstraint := &models.Constraint{
        Name:       "ck_price_positive",
        Type:       models.CheckConstraint,
        Schema:     "public",
        Table:      "products",
        Expression: "price >= 0",
    }
    table.Constraints["ck_price_positive"] = priceConstraint

    statusConstraint := &models.Constraint{
        Name:       "ck_status_valid",
        Type:       models.CheckConstraint,
        Schema:     "public",
        Table:      "products",
        Expression: "status IN ('active', 'inactive', 'discontinued')",
    }
    table.Constraints["ck_status_valid"] = statusConstraint

    quantityConstraint := &models.Constraint{
        Name:       "ck_quantity_nonnegative",
        Type:       models.CheckConstraint,
        Schema:     "public",
        Table:      "products",
        Expression: "quantity >= 0",
    }
    table.Constraints["ck_quantity_nonnegative"] = quantityConstraint

    schema.Tables = append(schema.Tables, table)
    db.Schemas = append(db.Schemas, schema)

    // Create writer with output to buffer
    var buf bytes.Buffer
    options := &writers.WriterOptions{}
    writer := NewWriter(options)
    writer.writer = &buf

    // Write the database
    err := writer.WriteDatabase(db)
    if err != nil {
        t.Fatalf("WriteDatabase failed: %v", err)
    }

    output := buf.String()

    // Print output for debugging
    t.Logf("Generated SQL:\n%s", output)

    // Verify check constraints are present
    if !strings.Contains(output, "-- Check constraints for schema: public") {
        t.Errorf("Output missing check constraints header")
    }
    if !strings.Contains(output, "ADD CONSTRAINT ck_price_positive CHECK (price >= 0)") {
        t.Errorf("Output missing ck_price_positive check constraint\nFull output:\n%s", output)
    }
    if !strings.Contains(output, "ADD CONSTRAINT ck_status_valid CHECK (status IN ('active', 'inactive', 'discontinued'))") {
        t.Errorf("Output missing ck_status_valid check constraint\nFull output:\n%s", output)
    }
    if !strings.Contains(output, "ADD CONSTRAINT ck_quantity_nonnegative CHECK (quantity >= 0)") {
        t.Errorf("Output missing ck_quantity_nonnegative check constraint\nFull output:\n%s", output)
    }
}

func TestWriteAllConstraintTypes(t *testing.T) {
    // Create a comprehensive test with all constraint types
    db := models.InitDatabase("testdb")
    schema := models.InitSchema("public")

    // Create orders table
    ordersTable := models.InitTable("orders", "public")

    // Add columns
    idCol := models.InitColumn("id", "orders", "public")
    idCol.Type = "integer"
    idCol.IsPrimaryKey = true
    ordersTable.Columns["id"] = idCol

    userIDCol := models.InitColumn("user_id", "orders", "public")
    userIDCol.Type = "integer"
    userIDCol.NotNull = true
    ordersTable.Columns["user_id"] = userIDCol

    orderNumberCol := models.InitColumn("order_number", "orders", "public")
    orderNumberCol.Type = "varchar(50)"
    orderNumberCol.NotNull = true
    ordersTable.Columns["order_number"] = orderNumberCol

    totalCol := models.InitColumn("total", "orders", "public")
    totalCol.Type = "numeric(10,2)"
    ordersTable.Columns["total"] = totalCol

    statusCol := models.InitColumn("status", "orders", "public")
    statusCol.Type = "varchar(20)"
    ordersTable.Columns["status"] = statusCol

    // Add primary key constraint
    pkConstraint := &models.Constraint{
        Name:    "pk_orders",
        Type:    models.PrimaryKeyConstraint,
        Schema:  "public",
        Table:   "orders",
        Columns: []string{"id"},
    }
    ordersTable.Constraints["pk_orders"] = pkConstraint

    // Add unique constraint
    uniqueConstraint := &models.Constraint{
        Name:    "uq_order_number",
        Type:    models.UniqueConstraint,
        Schema:  "public",
        Table:   "orders",
        Columns: []string{"order_number"},
    }
    ordersTable.Constraints["uq_order_number"] = uniqueConstraint

    // Add check constraint
    checkConstraint := &models.Constraint{
        Name:       "ck_total_positive",
        Type:       models.CheckConstraint,
        Schema:     "public",
        Table:      "orders",
        Expression: "total > 0",
    }
    ordersTable.Constraints["ck_total_positive"] = checkConstraint

    statusCheckConstraint := &models.Constraint{
        Name:       "ck_status_valid",
        Type:       models.CheckConstraint,
        Schema:     "public",
        Table:      "orders",
        Expression: "status IN ('pending', 'completed', 'cancelled')",
    }
    ordersTable.Constraints["ck_status_valid"] = statusCheckConstraint

    // Add foreign key constraint (referencing a users table)
    fkConstraint := &models.Constraint{
        Name:              "fk_orders_user",
        Type:              models.ForeignKeyConstraint,
        Schema:            "public",
        Table:             "orders",
        Columns:           []string{"user_id"},
        ReferencedSchema:  "public",
        ReferencedTable:   "users",
        ReferencedColumns: []string{"id"},
        OnDelete:          "CASCADE",
        OnUpdate:          "CASCADE",
    }
    ordersTable.Constraints["fk_orders_user"] = fkConstraint

    schema.Tables = append(schema.Tables, ordersTable)
    db.Schemas = append(db.Schemas, schema)

    // Create writer with output to buffer
    var buf bytes.Buffer
    options := &writers.WriterOptions{}
    writer := NewWriter(options)
    writer.writer = &buf

    // Write the database
    err := writer.WriteDatabase(db)
    if err != nil {
        t.Fatalf("WriteDatabase failed: %v", err)
    }

    output := buf.String()

    // Print output for debugging
    t.Logf("Generated SQL:\n%s", output)

    // Verify all constraint types are present
    expectedConstraints := map[string]string{
        "Primary Key":    "PRIMARY KEY",
        "Unique":         "ADD CONSTRAINT uq_order_number UNIQUE (order_number)",
        "Check (total)":  "ADD CONSTRAINT ck_total_positive CHECK (total > 0)",
        "Check (status)": "ADD CONSTRAINT ck_status_valid CHECK (status IN ('pending', 'completed', 'cancelled'))",
        "Foreign Key":    "FOREIGN KEY",
    }

    for name, expected := range expectedConstraints {
        if !strings.Contains(output, expected) {
            t.Errorf("Output missing %s constraint: %s\nFull output:\n%s", name, expected, output)
        }
    }

    // Verify section headers
    sections := []string{
        "-- Primary keys for schema: public",
        "-- Unique constraints for schema: public",
        "-- Check constraints for schema: public",
        "-- Foreign keys for schema: public",
    }

    for _, section := range sections {
        if !strings.Contains(output, section) {
            t.Errorf("Output missing section header: %s", section)
        }
    }
}

func TestWriteTable(t *testing.T) {
    // Create a single table
    table := models.InitTable("products", "public")
@@ -23,6 +23,11 @@ func NewWriter(options *writers.WriterOptions) *Writer {
    }
}

// Options returns the writer options (useful for reading execution results)
func (w *Writer) Options() *writers.WriterOptions {
    return w.options
}

// WriteDatabase executes all scripts from all schemas in the database
func (w *Writer) WriteDatabase(db *models.Database) error {
    if db == nil {

@@ -92,6 +97,22 @@ func (w *Writer) executeScripts(ctx context.Context, conn *pgx.Conn, scripts []*
        return nil
    }

    // Check if we should ignore errors
    ignoreErrors := false
    if val, ok := w.options.Metadata["ignore_errors"].(bool); ok {
        ignoreErrors = val
    }

    // Track failed scripts and execution counts
    var failedScripts []struct {
        name     string
        priority int
        sequence uint
        err      error
    }
    successCount := 0
    totalCount := 0

    // Sort scripts by Priority (ascending), Sequence (ascending), then Name (ascending)
    sortedScripts := make([]*models.Script, len(scripts))
    copy(sortedScripts, scripts)

@@ -111,18 +132,49 @@ func (w *Writer) executeScripts(ctx context.Context, conn *pgx.Conn, scripts []*
            continue
        }

        totalCount++
        fmt.Printf("Executing script: %s (Priority=%d, Sequence=%d)\n",
            script.Name, script.Priority, script.Sequence)

        // Execute the SQL script
        _, err := conn.Exec(ctx, script.SQL)
        if err != nil {
            return fmt.Errorf("failed to execute script %s (Priority=%d, Sequence=%d): %w",
            if ignoreErrors {
                fmt.Printf("⚠ Error executing %s: %v (continuing due to --ignore-errors)\n", script.Name, err)
                failedScripts = append(failedScripts, struct {
                    name     string
                    priority int
                    sequence uint
                    err      error
                }{
                    name:     script.Name,
                    priority: script.Priority,
                    sequence: script.Sequence,
                    err:      err,
                })
                continue
            }
            return fmt.Errorf("script %s (Priority=%d, Sequence=%d): %w",
                script.Name, script.Priority, script.Sequence, err)
        }

        successCount++
        fmt.Printf("✓ Successfully executed: %s\n", script.Name)
    }

    // Store execution results in metadata for caller
    w.options.Metadata["execution_total"] = totalCount
    w.options.Metadata["execution_success"] = successCount
    w.options.Metadata["execution_failed"] = len(failedScripts)

    // Print summary of failed scripts if any
    if len(failedScripts) > 0 {
        fmt.Printf("\n⚠ Failed Scripts Summary (%d failed):\n", len(failedScripts))
        for i, failed := range failedScripts {
            fmt.Printf(" %d. %s (Priority=%d, Sequence=%d)\n Error: %v\n",
                i+1, failed.name, failed.priority, failed.sequence, failed.err)
        }
    }

    return nil
}
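The hunk above sorts a copy of the script slice before execution, but the comparator itself falls outside the diff context. Per the command help, the intended order is Priority ascending, then Sequence ascending, then Name alphabetical. A hedged sketch of such a comparator over a simplified script type (the field names Name, Priority, and Sequence are taken from the diff; everything else is assumed and is not the project's models.Script):

```go
package main

import (
	"fmt"
	"sort"
)

// script is a stand-in for models.Script, reduced to the fields the diff
// references when logging execution (Name, Priority, Sequence).
type script struct {
	Name     string
	Priority int
	Sequence uint
}

func main() {
	scripts := []script{
		{Name: "b_seed", Priority: 2, Sequence: 1},
		{Name: "a_tables", Priority: 1, Sequence: 2},
		{Name: "a_schema", Priority: 1, Sequence: 1},
	}

	// Priority ascending, then Sequence ascending, then Name alphabetical.
	sort.Slice(scripts, func(i, j int) bool {
		if scripts[i].Priority != scripts[j].Priority {
			return scripts[i].Priority < scripts[j].Priority
		}
		if scripts[i].Sequence != scripts[j].Sequence {
			return scripts[i].Sequence < scripts[j].Sequence
		}
		return scripts[i].Name < scripts[j].Name
	})

	for _, s := range scripts {
		fmt.Printf("%d_%d_%s\n", s.Priority, s.Sequence, s.Name)
	}
}
```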